repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
angelsanz/pysellus | pysellus/integration_config.py | 3 | 8180 | import os
import inspect
import yaml
from pysellus import loader
from pysellus.integrations import loaded_integrations, integration_classes
CONFIGURATION_FILE_NAME = '.ps_integrations.yml'
def load_integrations(path):
    """
    load_integrations :: String -> IO

    Locate the configuration file associated with `path` and register
    every integration it declares: custom integration classes first,
    then the instances listed under the 'notify' section.
    """
    config = _load_config_file(path)
    _load_custom_integrations(config)
    _load_defined_integrations(config)
def _load_config_file(path):
    """
    _load_config_file :: String -> {}

    Load and parse the configuration file next to `path` (or inside it,
    when `path` is a directory).

    Exits the program with a message when the file exists but is empty;
    propagates FileNotFoundError when there is no configuration file.
    """
    config_path = _get_path_to_configuration_file(path)
    with open(config_path, 'r') as config_file:
        try:
            # returning from inside the `with` still closes the file
            return _load_configuration_from_config_file(config_file)
        except EmptyConfigurationFileError as error:
            exit(
                "Error while reading {path}: {reason}"
                .format(path=config_path, reason=error.message)
            )
def _get_path_to_configuration_file(path):
    """Return the full path of the configuration file expected next to `path`.

    Raises FileNotFoundError when no configuration file exists there.
    """
    containing_directory = _get_parent_directory_of_path(path)
    candidate = os.path.join(containing_directory, CONFIGURATION_FILE_NAME)
    if os.path.exists(candidate):
        return candidate
    raise FileNotFoundError
def _get_parent_directory_of_path(path):
if os.path.isdir(path):
return path
return os.path.dirname(path)
def _load_configuration_from_config_file(config_file):
    """Parse the YAML configuration in the open file object and return it.

    Raises EmptyConfigurationFileError when the file parses to nothing.
    """
    # safe_load only builds plain YAML types; the previous bare
    # yaml.load could execute arbitrary Python via !!python tags in a
    # user-supplied config file, and is deprecated without a Loader.
    loaded_configuration = yaml.safe_load(config_file)
    if loaded_configuration is None:
        raise EmptyConfigurationFileError()
    return loaded_configuration
def _load_custom_integrations(configuration):
    """
    _load_custom_integrations :: {} -> IO

    Register every integration class declared under the optional
    'custom_integrations' key of the configuration. When the key is
    absent, this is a no-op.
    """
    if 'custom_integrations' in configuration:
        _load_custom_integrations_classes(configuration['custom_integrations'])
def _load_custom_integrations_classes(custom_configuration):
    """
    _load_custom_integrations_classes :: {} -> IO

    Given a map of configuration definitions, find the integration module, import it,
    and then load the appropiate class object into the integration_classes dict.

    Fails (exits the process) if it can't find the class object inside the module,
    or if the integration name is a duplicate.
    """
    for alias, configuration in custom_configuration.items():
        # aliases are global across built-in and custom integrations,
        # so refuse to shadow an already-registered one
        if alias in integration_classes.keys():
            exit(
                "Conflicting integration name '{}'. Integration names must be unique\nAborting..."
                .format(alias)
            )
        try:
            integration_class_name = configuration['name']
            path_to_integration_module = configuration['path']
        except KeyError:
            # exit() raises SystemExit, so the names below are never
            # used uninitialized after this branch
            exit("Malformed integration '{}': missing class name and/or module path".format(alias))
        classobject = _get_matching_classobject_from_path(
            integration_class_name,
            path_to_integration_module
        )
        if classobject is None:
            # NOTE(review): message has a misplaced quote after {alias};
            # left as-is because it is runtime output
            exit(
                "Malformed custom integration '{alias}:\n\t'{klass}' class not found in {path_to_integration_module}"
                .format(
                    alias=alias,
                    klass=integration_class_name,
                    path_to_integration_module=path_to_integration_module
                )
            )
        integration_classes[alias] = classobject
def _get_matching_classobject_from_path(class_name, path):
    """
    _get_matching_classobject_from_path :: String -> String -> python.ClassObject | None

    Import the module at `path` and return the first class it defines
    whose name equals `class_name`, or None when there is no match.
    """
    integration_module = loader.load_modules(path)[0]
    for found_name, found_class in _get_classes_in_module(integration_module):
        if found_name == class_name:
            return found_class
    return None
def _get_classes_in_module(module):
return inspect.getmembers(module, inspect.isclass)
def _load_defined_integrations(configuration):
"""
_load_defined_integrations :: {} -> IO
Given an integration configuration dict, get their constructors and import them,
then add them to the integrations#loaded_integrations dict.
Fails if the constructor section is missing.
"""
try:
integration_configuration = configuration['notify']
_load_integrations_from_configuration(integration_configuration)
except KeyError:
exit("Malformed configuration file: missing 'notify' section")
def _load_integrations_from_configuration(integrations_configuration):
    """
    _load_integrations_from_configuration :: {} -> IO

    Build an instance of every integration described by the
    configuration and register each one in loaded_integrations under
    its alias.
    """
    unpacked = _unpack_integration_configuration_data(integrations_configuration)
    for alias, integration_name, constructor_kwargs in unpacked:
        instance = _get_integration_instance(integration_name, constructor_kwargs)
        loaded_integrations[alias] = instance
def _unpack_integration_configuration_data(integrations_configuration):
    """
    _unpack_integration_configuration_data :: {} -> (String, String, {} | None)

    Yield one (alias, integration_name, constructor_kwargs) tuple per
    configured integration, normalizing the several YAML shapes the
    configuration file allows.
    """
    for alias, child in integrations_configuration.items():
        if child is None:
            # bare alias: the alias is the integration name, no kwargs
            yield alias, alias, None
        elif _has_only_one_key_and_a_dict_as_value(child):
            # aliased integration carrying constructor kwargs
            name = _get_the_only_key_in(child)
            yield alias, name, child[name]
        elif _has_only_one_key_and_None_as_value(child):
            # aliased integration without kwargs
            yield alias, _get_the_only_key_in(child), None
        else:
            # the alias names the integration; child holds the kwargs
            yield alias, alias, child
def _has_only_one_key_and_a_dict_as_value(a_dict):
return len(a_dict.keys()) == 1 and type(a_dict[_get_the_only_key_in(a_dict)]) is dict
def _get_the_only_key_in(a_dict):
return list(a_dict.keys())[0]
def _has_only_one_key_and_None_as_value(a_dict):
return list(a_dict.values()) == [None]
def _get_integration_instance(name, kwargs_for_integration_constructor):
    """
    _get_integration_instance :: String -> {} | None -> AbstractIntegration

    Look up the class registered for `name` and return an instance of
    it, forwarding the constructor kwargs when there are any.

    Exits with an error message when no class is registered for `name`.
    """
    try:
        integration_class = integration_classes[name]
    except KeyError:
        # only a missing registry entry means "definition missing"; the
        # previous try also covered the constructor call below, so a
        # KeyError raised inside an integration's __init__ was
        # misreported as a missing definition
        exit("On integration '{}': definition missing\nAborting...".format(name))
    if kwargs_for_integration_constructor is None:
        return integration_class()
    return integration_class(**kwargs_for_integration_constructor)
class EmptyConfigurationFileError(Exception):
    """Raised when the configuration file parses to an empty document."""

    def __init__(self, message="configuration file is empty"):
        super(EmptyConfigurationFileError, self).__init__(message)
        # kept as an attribute because callers read `error.message`
        self.message = message
| mit |
k1643/StratagusAI | projects/player-1/gamelogs.py | 1 | 87828 | import os.path
import csv
import datetime
import glob
import math
import matplotlib.pyplot as plt
import numpy as np
import os
import pickle
import random
import scipy
import scipy.stats # confidence intervals
import sqlite3
import sys
import yaml
# statistics CSV column names and indexes.
cols = {
    "event":0,           # plan or end (of game)
    "player ID":1,       # the number of player being evaluated
    "player":2,          # type of player being evaluated
    "strategy":3,        # strategy of player being evaluated
    "simreplan":4,       # is matrix switching player using simulation re-planning?
    "opponent":5,        # type of opponent
    "predicted":6,       # predicted winner
    "predicted diff":7,  #
    "actual":8,          # actual winner
    "diff":9,
    "cycle":10,          # cycle of this event
    "map":11
}
# map names used when games are run in the simulator
sim_maps = ['2bases-game',
            '2bases_switched',
            'the-right-strategy-game',
            'the-right-strategy-game_switched'
            ]
# for games against built-in script
# each entry pairs a map with its sides-switched variant
script_maps = [
    ['../../maps/2bases_PvC.smp','../../maps/2bases_switched_PvC.smp'],
    ['../../maps/the-right-strategy_PvC.smp','../../maps/the-right-strategy_switched_PvC.smp']
]
# planner vs. planner maps
# same order as sim_maps.
planner_maps = [
    '../../maps/2bases.smp',
    '../../maps/2bases_switched.smp',
    '../../maps/the-right-strategy.smp',
    '../../maps/the-right-strategy_switched.smp',
]
# short map names used as dictionary keys and in output filenames
mapnames = ['2bases','the-right-strategy']
# planner vs. planner maps
engine_maps = [
    ['../../maps/2bases.smp','../../maps/2bases_switched.smp',],
    ['../../maps/the-right-strategy.smp','../../maps/the-right-strategy_switched.smp']
]
# names of the switching planners compared against fixed strategies
switching = ['Nash','maximin', 'monotone']
#epochs = [10000,20000,40000,80000] # divide game in 4 epochs
#epochs = [6030,12060,18090,24120,30150,36180,42210,48240,54270,60300,66330,72360,78390
# game-cycle boundaries used to bucket events into epochs
epochs = [6030,12060,18090,24120,80000]
def write_table(data,fmt,rowhdr,colhdr,label,caption,filepath,hline=None,bolddiag=False,colspec=None):
    """Write the matrix `data` to `filepath` as a LaTeX table.

    data     -- 2D matrix of cell values; falsy non-zero cells are left
                empty, 0 is printed literally as "0"
    fmt      -- callable formatting one cell value to a string
    rowhdr   -- row header labels, one per row of data
    colhdr   -- column header labels, one per column of data
    label    -- LaTeX \\label{} key
    caption  -- LaTeX \\caption{} text
    filepath -- output .tex path
    hline    -- row index after which an \\hline is emitted (or None)
    bolddiag -- wrap diagonal cells in \\textbf{} when True
    colspec  -- explicit tabular column spec; defaults to "l |" plus one
                right-aligned column per column header
    """
    today = datetime.date.today()
    # context manager guarantees the file is closed even if fmt() raises
    with open(filepath,'w') as tex:
        tex.write("% table written on {0} by {1}\n".format(today.strftime('%Y-%m-%d'),sys.argv[0]))
        tex.write("\\begin{table}[!ht]\n")
        tex.write("\\centering\n")
        tex.write("\\begin{tabular}")
        tex.write("{")
        if colspec:
            tex.write(colspec)
        else:
            tex.write("l |")
            for j in range(len(colhdr)):
                tex.write(" r ")  # assume numbers in cells
        tex.write("}\n")
        # column header
        for c in colhdr:
            tex.write(" & " + c)
        tex.write("\\cr\n")
        tex.write("\\hline\n")
        for i in range(len(rowhdr)):
            tex.write(rowhdr[i])
            for j in range(len(colhdr)):
                x = data[i][j]
                tex.write(" & ")
                if bolddiag and i==j:
                    tex.write("\\textbf{")
                if x:
                    tex.write(fmt(x))
                elif x == 0:
                    tex.write("0")
                if bolddiag and i==j:
                    tex.write("}")
            tex.write("\\cr\n")
            if hline == i:
                tex.write("\\hline\n")
        tex.write("\\end{tabular}\n")
        tex.write("\\caption{" + caption + "}\n")
        tex.write("\\label{" + label + "}\n")
        tex.write("\\end{table}\n")
def print_table(data,fmt,rowhdr,colhdr,caption):
    """print data matrix to console (Python 2 print statements)"""
    # pad every header to the widest row label so columns line up
    colwidth = max([len(c) for c in rowhdr])
    colfmt = "{0:"+str(colwidth)+"}"
    for c in colhdr:
        print colfmt.format(c),
    print
    for i in range(len(rowhdr)):
        print colfmt.format(rowhdr[i]),
        for j in range(len(colhdr)):
            x = data[i][j]
            # falsy non-zero cells print nothing; zero prints "0"
            if x:
                print fmt(x),
            elif x == 0:
                print "0",
        print
    print caption
def max_index(data):
    """Return the indexes of every occurrence of the maximum value in `data`."""
    best = max(data)
    return [idx for idx, value in enumerate(data) if value == best]
def count_wins(v):
    """Count the wins (strictly positive scores) in score sequence `v`."""
    # a generator sum replaces the original reduce(), which is not a
    # builtin in Python 3 and was much harder to read
    return sum(1 for score in v if score > 0)
def max_star(data):
    """Return a list marking each maximum value of `data` with '*' and every other entry with ' '."""
    best = max(data)
    marks = []
    for value in data:
        marks.append('*' if value == best else ' ')
    return marks
def normal_approx_interval(p,n,bound):
    """Return one bound of the 95% normal-approximation confidence
    interval around sample success rate `p` over `n` Bernoulli trials.

    `bound` selects 'upper' or 'lower'; anything else raises.
    """
    # for a 95% confidence level the error (alpha) is 5%,
    # so 1 - alpha/2 = 0.975 and z_{1 - alpha/2} = 1.96
    z = 1.96  # z=1.0 for 85%, z=1.96 for 95%
    margin = z * math.sqrt(p * (1 - p) / float(n))
    if bound == 'upper':
        return p + margin
    if bound == 'lower':
        return p - margin
    raise Exception("unknown bound " + bound)
def wilson_score_interval(p,n,bound):
    """Return one bound of the 95% Wilson score confidence interval
    around sample success rate `p` over `n` Bernoulli trials.

    Wilson score interval:

             z^2             p(1-p)    z^2
        p + ---- (+-) z*sqrt( ------ + ---- )
             2n                 n      4n^2
        -------------------------------------
                          z^2
                     1 + ----
                          n

    `bound` selects 'upper' or 'lower'; anything else raises.
    """
    # for a 95% confidence level the error (alpha) is 5%,
    # so 1 - alpha/2 = 0.975 and z_{1 - alpha/2} = 1.96
    z = 1.96  # z=1.0 for 85%, z=1.96 for 95%
    n = float(n)
    center = p + z * z / (2 * n)
    spread = z * math.sqrt((p * (1 - p) + z * z / (4 * n)) / n)
    denom = 1 + z * z / n
    if bound == 'upper':
        return (center + spread) / denom
    if bound == 'lower':
        return (center - spread) / denom
    raise Exception("unknown bound " + bound)
def bernoulli_confidence(v,formula='normal'):
    """Treat score sequence `v` as Bernoulli trials (win = positive score).

    Returns [win_rate, [lower_bound, upper_bound]] where the bounds come
    from either the normal-approximation or the Wilson score interval
    at the 95% level, selected by `formula` ('normal' or 'wilson').
    """
    nWins = count_wins(v)
    n = len(v)
    rate = nWins/float(len(v))
    if formula == 'normal':
        f = normal_approx_interval
    elif formula == 'wilson':
        f = wilson_score_interval
    else:
        # `raise Exception, "..."` was Python 2-only syntax; the call
        # form below works in both Python 2 and 3
        raise Exception("unknown interval formula"+formula)
    return [rate, [f(rate,n,'lower'),f(rate,n,'upper')]]
def validate_games(curs,scores_dict,strategies):
    """Sanity-check game counts in the event database against the scores dict.

    Prints per-category episode counts for manual inspection (Python 2
    print statements). `curs` is a DB cursor over an `event` table;
    `scores_dict` maps (player, opponent, map) keys to score lists --
    presumably; confirm against the producer of the dict.
    """
    # calculate expected number of games
    # NOTE(review): the IN-lists are built by string concatenation; the
    # values come from internal strategy-name lists, not user input
    stratlist = "(" + reduce(lambda x, y: x+','+y,["'"+s+"'" for s in strategies]) + ")"
    cmd = "select count(*) from event where event='end' and player=? and opponent in " + stratlist
    # fixed vs. fixed strategy
    for player in strategies:
        curs.execute(cmd,(player,))
        c = curs.fetchone()[0]
        print c,"games for",player, "vs. fixed strategy"
    # switching vs. fixed strategy
    for player in switching:
        curs.execute(cmd,(player,))
        c = curs.fetchone()[0]
        print c,"games for",player, "vs. fixed strategy"
    # switching vs. switching
    swlist = "(" + reduce(lambda x, y: x+','+y,["'"+s+"'" for s in switching]) + ")"
    curs.execute("select count(*) from event where event='end' and player in " + swlist + " and opponent in " + swlist)
    print curs.fetchone()[0],"switching vs. switching episodes"
    # switching vs. built-in
    curs.execute("select count(*) from event where event='end' and player in " + swlist + " and opponent = 'built-in'")
    print curs.fetchone()[0],"switching vs. built-in episodes"
    # total
    curs.execute("select count(*) from event where event='end'")
    print curs.fetchone()[0],"total episodes"
    # validate scores dict.
    total = 0
    counts = [0,0,0,0]  # one bucket per matchup category below
    for k,v in scores_dict.iteritems():
        c = len(v)
        if k[0] in strategies and k[1] in strategies:
            counts[0] += c
        elif (k[0] in switching and k[1] in strategies) or (k[1] in switching and k[0] in strategies):
            counts[1] += c
        elif k[0] in switching and k[1] in switching:
            counts[2] += c
        elif k[0] == 'built-in' or k[1] == 'built-in':
            counts[3] += c
        else:
            print "no category for", k
        total += c
    print "scores dictionary"
    print total,"episodes"
    print counts[0], "strategy vs. strategy episodes"
    print counts[1], "switching vs. strategy episodes"
    print counts[2], "switching vs. switching episodes"
    print counts[3], "switching vs. built-in"
def load_strategy_defs(d, strategy_set):
    """Load sw_strategies_<strategy_set>.yaml from directory `d` and
    return the strategy names (first column of the 'matrix' entry)."""
    print "load_strategy_defs()"
    filepath = os.path.join(d, 'sw_strategies_'+ strategy_set + '.yaml')
    f = open(filepath,'rb')
    strat_data = yaml.load(f)
    f.close()
    # the YAML document is a list whose first element carries the table;
    # each matrix row starts with the strategy's name
    strs = strat_data[0]['matrix']
    names = []
    for s in strs:
        names.append(s[0])
    return names
def write_strategy_defs(d, strategy_set):
    """Read sw_strategies_<strategy_set>.yaml and emit the strategy
    definition table as LaTeX (sw_strategies_<strategy_set>.tex)."""
    filepath = os.path.join(d, 'sw_strategies_'+ strategy_set + '.yaml')
    f = open(filepath,'rb')
    strat_data = yaml.load(f)
    f.close()
    strs = strat_data[0]['matrix']
    # drop the leading name column; it becomes the row header instead
    for i in range(len(strs)):
        strs[i] = strs[i][1:]
    # write TEX strategy definitions
    fmt = lambda x: str(x)
    # NOTE(review): `strategies` is not defined in this function and no
    # module-level definition is visible here -- this looks like it
    # should be the names list built as in load_strategy_defs(); as
    # written it would raise NameError unless a global exists elsewhere.
    rowhdr = [str(j) + ". " + strategies[j].replace('_',' ') for j in range(len(strategies))]
    colhdr = strat_data[0]['colhdr'][1:]
    caption = strat_data[0]['caption']
    label = strat_data[0]['label']
    outfile = os.path.join(d, 'sw_strategies_'+strategy_set+'.tex')
    write_table(strs,fmt,rowhdr,colhdr,label,caption,outfile)
    return strategies # return strategy template names
class Medians:
    """calculate medians and confidence intervals"""
    def __init__(self,scores,strategies,threshold=.95):
        # strategy vs. strategy table of ConfidenceIntervals indexed by mapname
        self.s_v_s_intervals = {}
        # confidence interval for maximin of strategy vs. strategy table
        # indexed by mappath
        self.s_v_s_maximin_interval = {}
        #
        self.sw_v_s_intervals = {}
        # switching planner vs. fixed strategy tables.
        self.sw_v_s_min_intervals = {} # compare min of switching vs. strategy to maximin
        # build tables.
        #
        self.strategies = strategies
        # load median data
        #
        # load fixed strategy vs. fixed strategy games
        # NOTE(review): get_scores() and get_confidence_interval() are
        # defined elsewhere in this module (not visible here)
        for mapname in mapnames: # ['2bases','the-right-strategy']
            table = [[None for player in strategies] for opponent in strategies]
            interval_table = [[None for player in strategies] for opponent in strategies]
            for i in range(len(strategies)):
                opponent = strategies[i]
                for j in range(len(strategies)):
                    player = strategies[j]
                    v = get_scores(player,opponent,mapname,scores)
                    if len(v) > 0:
                        interval = get_confidence_interval(v,threshold)
                        # tag the interval so report code can name the matchup
                        interval.player = player
                        interval.opponent = opponent
                        table[i][j] = interval.median
                        interval_table[i][j] = interval
            self.s_v_s_intervals[mapname] = interval_table
            # get confidence interval around maximin
            mins = np.min(table,axis=0) # column mins
            mins_indexes = np.argmin(table,axis=0) # row indexes
            maximin_col = np.argmax(mins)
            for i in mins_indexes:
                # pick the interval of the cell that realizes the maximin
                if table[i][maximin_col] == mins[maximin_col]:
                    self.s_v_s_maximin_interval[mapname] = interval_table[i][maximin_col]
            assert self.s_v_s_maximin_interval[mapname] and self.s_v_s_maximin_interval[mapname].median == mins[maximin_col]
        # load switching planner vs. fixed strategy games
        for mapname in mapnames:
            self.sw_v_s_min_intervals[mapname] = {}
            interval_table = [[None for player in switching] for opponent in strategies]
            for j in range(len(switching)):
                player = switching[j]
                min_interval = None
                for i in range(len(strategies)):
                    opponent = strategies[i]
                    v = get_scores(player,opponent,mapname,scores)
                    if len(v) > 0:
                        interval = get_confidence_interval(v,threshold)
                        interval.player = player
                        interval.opponent = opponent
                        interval_table[i][j] = interval
                        # track the worst (lowest-median) opponent for this planner
                        if (not min_interval) or min_interval.median > interval.median:
                            min_interval = interval
                # get confidence interval around min
                assert min_interval, "no minimum found for " + player
                self.sw_v_s_min_intervals[mapname][player] = min_interval
            self.sw_v_s_intervals[mapname] = interval_table
class Means:
    """calculate means"""
    def __init__(self,scores,strategies):
        # strategy vs. strategy table of ConfidenceIntervals indexed by mappath
        self.s_v_s_means = {}
        # maximin value and strategy pair for maximin of strategy vs. strategy table
        # indexed by mappath
        self.s_v_s_maximin_pair = {} #
        # switching planner vs. fixed strategy tables.
        self.sw_v_s_min = {} # compare min of switching vs. strategy to maximin
        # build tables.
        #
        self.strategies = strategies
        # load mean data
        #
        # load fixed strategy vs. fixed strategy games
        # NOTE(review): get_mean() is defined elsewhere in this module
        for mapname in mapnames:
            table = [[None for player in strategies] for opponent in strategies]
            for i in range(len(strategies)):
                opponent = strategies[i]
                for j in range(len(strategies)):
                    player = strategies[j]
                    table[i][j] = get_mean(player,opponent,mapname,scores)
            self.s_v_s_means[mapname] = table
            # get maximin
            mins = np.min(table,axis=0) # column mins
            mins_indexes = np.argmin(table,axis=0) # row indexes
            maximin_col = np.argmax(mins)
            for i in mins_indexes:
                # if row i has maximin value in column maximin_col
                if table[i][maximin_col] == mins[maximin_col]:
                    maximin_row = i
                    # pair layout: (value, column strategy, row strategy)
                    self.s_v_s_maximin_pair[mapname] = (table[i][maximin_col],strategies[maximin_col],strategies[maximin_row])
            assert self.s_v_s_maximin_pair[mapname] and self.s_v_s_maximin_pair[mapname][0] == mins[maximin_col]
        # load switching planner vs. fixed strategy games
        for mapname in mapnames:
            self.sw_v_s_min[mapname] = {}
            for j in range(len(switching)):
                player = switching[j]
                min_pair = None
                for i in range(len(strategies)):
                    opponent = strategies[i]
                    v = get_mean(player,opponent,mapname,scores)
                    # track the lowest mean score across opponents
                    if (not min_pair) or min_pair[0] > v:
                        min_pair = (v,player,opponent)
                # get confidence interval around min
                assert min_pair, "no minimum found for " + player
                self.sw_v_s_min[mapname][player] = min_pair
def show_maximin_compare_errorbars(d,medians):
    """Plot, per map, the fixed-strategy maximin median next to each
    switching planner's worst-case median, with confidence error bars.
    Saves maxmin_compare_<mapname>.png into directory `d`."""
    print "show_maximin_compare_errorbars()"
    # x position 1 is the fixed-strategy maximin; positions 2.. are the planners
    x = [j+1 for j in range(len(switching)+1)]
    xticklabels = ["fixed"]
    xticklabels.extend(switching)
    for mapname in mapnames:
        mins = [] # minimum of medians
        upper_conf = [] # difference between upper_confidence and median
        lower_conf = []
        # get fix strategy maximin of medians
        conf = medians.s_v_s_maximin_interval[mapname]
        upper_conf.append(conf.interval[1] - conf.median) # upper range
        mins.append(conf.median)
        lower_conf.append(conf.median - conf.interval[0]) # lower range
        # get switching planner mins of medians
        for j in range(len(switching)):
            player = switching[j]
            conf = medians.sw_v_s_min_intervals[mapname][player]
            upper_conf.append(conf.interval[1] - conf.median) # upper range
            mins.append(conf.median)
            lower_conf.append(conf.median - conf.interval[0]) # lower range
        y = mins
        plt.figure()
        plt.xticks(x, xticklabels)
        plt.xlabel('Switching Planners')
        plt.ylabel('Score')
        # dashed divider between the fixed-strategy point and the planners
        plt.axvline(x=1.5, color='gray',linestyle='--') #axvline(x=0, ymin=0, ymax=1, **kwargs)
        plt.xlim( (.5, len(x)+.5) ) # show results at 1,...,len(switching)
        plt.errorbar(x, y, yerr=[lower_conf,upper_conf], fmt='bs')
        #plt.show()
        fname = os.path.join(d,"maxmin_compare_"+mapname+".png")
        plt.savefig(fname, format='png') # png, pdf, ps, eps and svg.
def write_maximin_compare(d,medians):
    """Write maximin_compare.tex: upper/median/lower rows per map,
    comparing the fixed-strategy maximin against each switching
    planner's worst-case interval."""
    print "write_maximin_compare()"
    rowhdr = []
    for mapname in mapnames:
        rowhdr.append(mapname)
    colhdr = ["Fixed"]
    for sw in switching:
        colhdr.append("\\texttt{"+sw+"}")
    colsubhdr = ["Upper","Median","Lower"]
    #colsubhdr = ["NA","Mean","NA"]
    # three data rows (upper/median/lower) per map
    data = [[None for j in range(len(switching)+1)] for i in range(len(mapnames)*3)]
    #stratgy vs. strategy maximin value
    for i in range(len(mapnames)):
        mapname = mapnames[i]
        row = i*3
        conf = medians.s_v_s_maximin_interval[mapname]
        #print " maximin at",conf.player,"vs.",conf.opponent,conf
        data[row][0] = conf.interval[1]
        data[row+1][0] = conf.median
        #data[row+1][0] = means.s_v_s_maximin_pair[mapname][0]
        data[row+2][0] = conf.interval[0]
        # switching player vs. strategy minimum value
        for j in range(len(switching)):
            player = switching[j]
            conf = medians.sw_v_s_min_intervals[mapname][player]
            #print " min median at",conf.player,"vs.",conf.opponent,conf
            data[row][j+1] = conf.interval[1] # upper range
            data[row+1][j+1] = conf.median
            #data[row+1][j+1] = means.sw_v_s_min[mapname][player][0]
            data[row+2][j+1] = conf.interval[0] # lower range
    today = datetime.date.today()
    filepath = os.path.join(d,"maximin_compare.tex")
    tex = open(filepath,'w')
    tex.write("% table written on {0} by {1}\n".format(today.strftime('%Y-%m-%d'),sys.argv[0]))
    tex.write("\\begin{table}[!ht]\n")
    tex.write("\\centering\n")
    tex.write("\\begin{tabular}{l l | ")
    for j in range(len(colhdr)):
        tex.write(" r ") # assume numbers in cells
    tex.write("}\n")
    # column header
    tex.write(" & ")
    for c in colhdr:
        tex.write(" & " + c)
    tex.write("\\cr\n")
    tex.write("\\hline\n")
    # write Upper,Median,Lower on first map
    for i in range(len(colsubhdr)):
        if i == 0:
            tex.write("\\texttt{{{0}}} & {1}".format(mapnames[0],colsubhdr[0]))
        else:
            tex.write(" & {0}".format(colsubhdr[i]))
        for j in range(len(colhdr)):
            x = data[i][j]
            tex.write(" & {0:.0f}".format(x))
            #tex.write(" & " + str(x))
        tex.write("\\cr\n")
    tex.write("\\hline\n")
    # second map block: rows 3..5 of data
    for i in range(len(colsubhdr)):
        if i == 0:
            tex.write("\\texttt{{{0}}} & {1}".format(mapnames[1],colsubhdr[0]))
        else:
            tex.write(" & {0}".format(colsubhdr[i]))
        for j in range(len(colhdr)):
            x = data[3+i][j]
            tex.write(" & {0:.0f}".format(x))
            #tex.write(" & " + str(x))
        tex.write("\\cr\n")
    tex.write("\\hline\n")
    # NOTE(review): single backslash is inconsistent with the "\\end"
    # used elsewhere; "\e" is not an escape so the output is the same
    tex.write("\end{tabular}\n")
    tex.write("\\caption{Fixed Strategy Maximin and Switching Planner Minimum Intervals}\n")
    tex.write("\\label{table:maximin_and_minimums}\n")
    tex.write("\\end{table}\n")
    tex.close()
def plot_rate_v_mean(d,scores):
    """show relationship between win rate and mean score"""
    # scores = get_strat_v_strat_scores(curs,strategies)
    means = {}
    rates = {}
    # keys of `scores` are (player, opponent, mapname) tuples -- see the
    # t[2] == mapname filter below
    for k,v in scores.iteritems():
        if len(v) == 0:
            continue
        means[k] = np.mean(v)
        nWins = 0
        nGames = len(v)
        for score in v:
            if score > 0:
                nWins += 1
        rates[k] = nWins/float(nGames)
    for mapname in mapnames:
        keys = rates.keys() # get [player,opponent,mappath]
        x = [rates[k] for k in filter(lambda t: t[2] == mapname, keys)]
        y = [means[k] for k in filter(lambda t: t[2] == mapname, keys)]
        plt.figure() # new graph
        plt.xlim( (0, 1) ) # 0-100%
        plt.xlabel('win rate')
        plt.ylabel('mean score')
        plt.scatter(x,y)
        # NOTE(review): shows interactively; the savefig call below is
        # commented out, so `fname` is computed but unused
        plt.show()
        fname = os.path.join(d,"rate_v_mean_"+mapname+".png")
        #plt.savefig(fname, format='png') # png, pdf, ps, eps and svg.
def strat_vs_strat_rate(d,scores_dict,strategies):
    """write strategy vs. strategy win rate table."""
    print "strat_vs_strat_rate()"
    # setup Latex table
    fmt = lambda x: x if x.__class__ == str else "{0:.0f}\%".format(x*100) # formatter.
    rowhdr = [str(j) + "." for j in range(len(strategies))]
    hline = None
    colhdr = [str(i) + "." for i in range(len(strategies))]
    for mapname in mapnames:
        # rows are opponents, columns are the evaluated player
        table = [[None for p in strategies] for o in strategies]
        for i in range(len(strategies)):
            o = strategies[i] # opponent
            for j in range(len(strategies)):
                p = strategies[j] # player
                v = get_scores(p,o,mapname,scores_dict)
                nWins = count_wins(v)
                table[i][j] = nWins/float(len(v))
        caption = 'Strategy Win Rate on \\texttt{' + mapname.replace('_',' ') + '}'
        label = 'engine_rate_' + mapname
        outfile = os.path.join(d, 'engine_rate_'+mapname+'.tex')
        write_table(table,fmt,rowhdr,colhdr,label,caption,outfile,hline,bolddiag=True)
def strat_vs_strat_score_db(d,curs,strategies,summary='median'):
    """Debugging. write strategy vs. strategy score table from database"""
    print "strat_vs_strat_score_db()"
    def fmt(x):
        # blank for empty cells; fixed width so columns align
        if not x:
            return " "
        elif x.__class__ == str:
            return "{0:6}".format(x)
        else:
            return "{0:6.0f}".format(x)
    rowhdr = [str(i) + "." for i in range(len(strategies))]
    rowhdr.append("min")
    rowhdr.append("mxmn")
    cmd = "select diff from event where event='end' and player=? and opponent=? and map=?"
    for mappaths in engine_maps:
        path,mapname = os.path.split(mappaths[0])
        mapname = mapname.replace('.smp','')
        table = [[0 for p in strategies] for o in strategies]
        for i in range(len(strategies)):
            p = strategies[i]
            # lower triangle only; the mirror cell gets the negated score
            for j in range(i+1):
                o = strategies[j]
                curs.execute(cmd,(p,o,mappaths[0],)) # from north position
                n_scores = [row[0] for row in curs.fetchall()]
                scores = n_scores
                curs.execute(cmd,(p,o,mappaths[1],)) # from south position
                s_scores = [row[0] for row in curs.fetchall()]
                scores.extend(s_scores)
                if summary == 'median':
                    stats = np.median
                elif summary == 'mean':
                    stats = np.mean
                else:
                    # NOTE(review): Python 2 three-expression raise; the
                    # third item is treated as a traceback, so this
                    # would itself error if ever reached -- confirm
                    raise Exception, "unknown summary function", summary
                if i == j:
                    table[j][i] = stats(scores) # transpose for point of view of column player
                else:
                    table[j][i] = stats(scores)
                    table[i][j] = -stats(scores)
        mins = np.min(table,axis=0)
        table.append(mins)
        table.append(max_star(mins)) # mark the maximin columns)
        print mapname
        for i in range(len(table)):
            print "{0:4}".format(rowhdr[i]),
            row = table[i]
            for cell in row:
                print fmt(cell),
            print
def strat_vs_strat_score(d,scores_dict,strategies):
    """write strategy vs. strategy mean score table."""
    print "strat_vs_strat_score()"
    # setup Latex table
    fmt = lambda x: x if x.__class__ == str else "{0:.0f}".format(x) # formatter.
    rowhdr = [str(j) + "." for j in range(len(strategies))]
    rowhdr.append("min.")
    rowhdr.append("maxmin")
    hline = len(strategies) - 1 # add horizontal line to table
    colhdr = [str(i) + "." for i in range(len(strategies))]
    for mapname in mapnames:
        table = [[None for p in strategies] for o in strategies]
        for i in range(len(strategies)):
            o = strategies[i] # opponent
            for j in range(len(strategies)):
                p = strategies[j] # player
                #v = get_scores(p,o,mapname,scores_dict)
                table[i][j] = get_mean(p,o,mapname,scores_dict)
        # append a column-minimum row and a row of '*' marking maximin columns
        mins = np.min(table,axis=0)
        table.append(mins)
        table.append(max_star(mins)) # mark the maximin columns)
        caption = 'Strategy Mean Scores on \\texttt{' + mapname.replace('_',' ') + '}'
        label = 'engine_scores_' + mapname
        outfile = os.path.join(d, 'engine_scores_'+mapname+'.tex')
        write_table(table,fmt,rowhdr,colhdr,label,caption,outfile,hline,bolddiag=True)
def strat_vs_strat_median_score(d,medians,strategies):
    """write strategy vs. strategy median score table."""
    print "strat_vs_strat_median_score()"
    # setup Latex table
    fmt = lambda x: x if x.__class__ == str else "{0:.0f}".format(x) # formatter.
    rowhdr = [str(j) + "." for j in range(len(strategies))]
    rowhdr.append("minimum")
    rowhdr.append("maximin")
    hline = len(strategies) - 1 # add horizontal line to table
    colhdr = [str(i) + "." for i in range(len(strategies))]
    for mapname in mapnames:
        table = [[None for p in strategies] for o in strategies]
        # intervals precomputed by the Medians class
        confidence_table = medians.s_v_s_intervals[mapname]
        for i in range(len(strategies)): # opponent i
            for j in range(len(strategies)): # player j
                confidence = confidence_table[i][j]
                table[i][j] = confidence.median
        mins = np.min(table,axis=0)
        table.append(mins)
        table.append(max_star(mins)) # mark the maximin columns)
        caption = 'Strategy Median Scores on \\texttt{' + mapname.replace('_',' ') + '}'
        label = 'engine_median_scores_' + mapname
        outfile = os.path.join(d, 'engine_median_scores_'+mapname+'.tex')
        write_table(table,fmt,rowhdr,colhdr,label,caption,outfile,hline,bolddiag=True)
def sw_vs_strat_scores(d,scores_dict,strategies):
    """write switcher vs. strategy score table, one per map."""
    print "sw_vs_strat_scores()"
    for mapname in mapnames:
        sw_vs_strat_map_scores(d,scores_dict,strategies,mapname)
def sw_vs_strat_map_scores(d,scores_dict,strategies,mapname):
    """write switcher vs. strategy score table."""
    print "sw_vs_strat_map_scores(" + mapname + ")"
    # rows are fixed-strategy opponents, columns are switching planners
    means_table = [[None for p in switching] for o in strategies]
    rates_table = [[None for p in switching] for o in strategies]
    for i in range(len(strategies)):
        o = strategies[i] # opponent
        for j in range(len(switching)):
            p = switching[j] # player
            # get averge for games from both positions on map
            means_table[i][j] = get_mean(p,o,mapname,scores_dict)
            rates_table[i][j] = get_rate(p,o,mapname,scores_dict)
    # add row for mean results
    means = np.mean(means_table,axis=0)
    mins = np.min(means_table,axis=0)
    means_table.append(means)
    means_table.append(mins)
    # rate table gets blank rows so both tables stay the same height
    rates_table.append([None for p in switching])
    rates_table.append([None for p in switching])
    write_sw_vs_strat_map_table(d,means_table,rates_table,strategies,mapname)
def write_sw_vs_strat_map_table(d,data,rates,strategies,mapname):
    """Write sw_scores_<mapname>.tex: a two-part LaTeX table with mean
    scores on the left and win rates on the right, one column per
    switching planner."""
    fmt = lambda x: "{0:.0f}".format(x) # formatter.
    rowhdr = [str(i)+'. \\texttt{'+strategies[i].replace('_',' ')+'}' for i in range(len(strategies))]
    rowhdr.append("mean")
    rowhdr.append("minimum")
    hline = len(strategies) - 1
    colhdr = ['\\texttt{'+s+'}' for s in switching]
    label = 'sw_scores_' + mapname
    caption = 'Switching Planner Mean Scores on \\texttt{' + mapname + '}'
    fn = 'sw_scores_'+mapname+'.tex'
    filepath = os.path.join(d, fn)
    #write_table(table,fmt,rowhdr,colhdr,label,caption,outfile,hline)
    # NOTE(review): the string below is a no-op expression left over
    # from copying write_table's body, not a docstring
    """write data matrix as LaTeX table"""
    today = datetime.date.today()
    tex = open(filepath,'w')
    tex.write("% table written on {0} by {1}\n".format(today.strftime('%Y-%m-%d'),sys.argv[0]))
    tex.write("\\begin{table}[!ht]\n")
    tex.write("\\centering\n")
    tex.write("\\begin{tabular}")
    tex.write("{")
    tex.write("l |")
    for j in range(len(colhdr)):
        tex.write(" r ") # assume numbers in cells
    tex.write("|")
    for j in range(len(colhdr)):
        tex.write(" r ") # assume numbers in cells
    tex.write("}\n")
    # column header
    tex.write(" & \multicolumn{3}{l}{Mean Scores} & \multicolumn{3}{l}{Win Rates}\\cr\n")
    for c in colhdr:
        tex.write(" & " + c)
    for c in colhdr:
        tex.write(" & " + c)
    tex.write("\\cr\n")
    tex.write("\\hline\n")
    for i in range(len(rowhdr)):
        tex.write(rowhdr[i])
        # score table
        for j in range(len(colhdr)):
            x = data[i][j]
            tex.write(" & ")
            if x:
                tex.write(fmt(x))
            elif x == 0:
                tex.write("0")
        # rate table
        for j in range(len(colhdr)):
            x = rates[i][j]
            tex.write(" & ")
            if x:
                tex.write(fmt(x*100) + "\%")
            elif x == 0:
                tex.write("0")
        tex.write("\\cr\n")
        if hline == i:
            tex.write("\\hline\n")
    tex.write("\\end{tabular}\n")
    tex.write("\\caption{" + caption + "}\n")
    tex.write("\\label{" + label + "}\n")
    tex.write("\\end{table}\n")
    tex.close()
def sw_vs_strat_median_scores(d,medians):
    """write switcher vs. strategy median score table, one per map."""
    print "sw_vs_strat_median_scores()"
    for mapname in mapnames:
        sw_vs_strat_median_map_scores(d,medians,mapname)
def sw_vs_strat_median_map_scores(d,medians,mapname):
"""write switcher vs. strategy score table."""
print "sw_vs_strat_map_scores(" + mapname + ")"
table = [[None for p in switching] for o in medians.strategies]
interval_table = medians.sw_v_s_intervals[mapname]
for i in range(len(medians.strategies)):
for j in range(len(switching)):
table[i][j] = interval_table[i][j].median
# add row for min results
mins = np.min(table,axis=0)
table.append(mins)
fmt = lambda x: "{0:.0f}".format(x)
rowhdr = ['\\texttt{'+s.replace('_',' ')+'}' for s in medians.strategies]
rowhdr.append("minimum")
hline = len(medians.strategies) - 1
colhdr = ['\\texttt{'+s+'}' for s in switching]
label = 'sw_median_scores_' + mapname
caption = 'Switching Planner Median Scores on \\texttt{' + mapname + '}'
fn = 'sw_median_scores_'+mapname+".tex"
outfile = os.path.join(d, fn)
write_table(table,fmt,rowhdr,colhdr,label,caption,outfile,hline)
def sw_vs_strat_rates(d,scores_dict,strategies):
"""write switcher vs. strategy win rate table."""
print "sw_vs_strat_rates()"
for mapname in mapnames:
sw_vs_strat_map_rates(d,scores_dict,strategies,mapname)
def sw_vs_strat_map_rates(d,scores_dict,strategies,mapname):
"""write switcher vs. strategy win rate table."""
print "sw_vs_strat_map_rates(" + mapname + ")"
table = [[None for p in switching] for o in strategies]
for i in range(len(strategies)):
o = strategies[i] # opponent
for j in range(len(switching)):
p = switching[j] # player
v = get_scores(p,o,mapname,scores_dict)
nWins = count_wins(v)
table[i][j] = 100*nWins/float(len(v))
fmt = lambda x: "{0:.0f}\%".format(x) # formatter.
rowhdr = [str(i)+'. \\texttt{'+strategies[i].replace('_',' ')+'}' for i in range(len(strategies))]
hline = None
colhdr = ['\\texttt{'+s+'}' for s in switching]
label = 'sw_rates_' + mapname
caption = 'Switching Planner Win Rates on \\texttt{' + mapname + '}'
fn = 'sw_rates_'+mapname+'.tex'
outfile = os.path.join(d, fn)
write_table(table,fmt,rowhdr,colhdr,label,caption,outfile,hline)
def game_duration(d,curs):
"""how many games last thru each period"""
outfile = os.path.join(d, 'game_duration_barchart.tex')
tex = open(outfile,'w')
tex.write("\\begin{figure}[!ht]\n")
tex.write("\\begin{tikzpicture}\n")
tex.write("""\\begin{axis}[ybar stacked,
area legend,
cycle list={
% see pgfplots.pdf barcharts and
% see pgfmanual.pdf 41 Pattern Library
% patterns: crosshatch, north east lines, north west lines,...
{fill=blue},{fill=red},{fill=teal},{fill=gray},{fill=white},{fill=orange},{fill=black},{fill=violet},{pattern color=red,pattern=north east lines},{pattern color=blue,pattern=north west lines},{fill=brown}
},
legend style={at={(2,.95)}}
]
""")
replans = range(0,80000,6000)
tex.write(" \\addplot coordinates\n")
tex.write(" {")
for t in replans:
nGames = curs.execute("select count(*) from event where event='end' and cycle > ?",(t,)).fetchone()[0]
tex.write(" ({0},{1})".format(t,nGames))
tex.write("};\n")
tex.write(" \\legend{")
for i in range(len(replans)):
if i > 0:
tex.write(", ")
tex.write(str(replans[i]))
tex.write("}\n")
tex.write("\\end{axis}\n")
tex.write("\\end{tikzpicture}\n")
tex.write("\\caption{Game Durations}\n")
tex.write("\\label{game_duration}\n")
tex.write("\\end{figure}\n")
def sw_vs_sw(d,scores_dict):
"""write switcher vs. switcher win rate table."""
print "switcher_vs_switcher()"
for mapname in mapnames:
sw_vs_sw_by_map(d,scores_dict,mapname)
def sw_vs_sw_by_map(d,scores_dict,mapname):
players = switching
opponents = switching[:] # copy
opponents.append('built-in')
counts = [[None for p in players] for o in opponents]
for i in range(len(opponents)):
o = opponents[i] # opponent
for j in range(len(players)):
p = players[j] # player
if p != o:
scores = get_scores(p,o,mapname,scores_dict) # combine scores from N. and S. maps
nWins = 0
for score in scores:
if score > 0:
nWins += 1
counts[i][j] = nWins/float(len(scores))
fmt = lambda x: "{0:.0f}\\%".format(100 * x) # formatter. show as percent.
rowhdr = [s for s in opponents]
colhdr = [s for s in players]
outfile = os.path.join(d, 'sw_vs_sw_win_rate_' + mapname + '.tex')
label = 'sw_vs_sw_win_rate_' + mapname
caption = 'Switching vs.~Switching Win Rates on \\texttt{' + mapname + '}'
write_table(counts,fmt,rowhdr,colhdr,label,caption,outfile)
def switcher_choices(d,curs,strategies):
print "switcher_choices()"
counts = [[0 for p in switching] for s in strategies]
nEvents = [0 for p in switching] # number of planning events for switcher
inclause = "("
for i in range(len(strategies)):
if i > 0:
inclause += ","
inclause += "'" + strategies[i] + "'"
inclause += ")"
#print inclause
for j in range(len(switching)):
p = switching[j]
cmd = "select count(*) from event where event='plan' and simreplan=1 and player=? and opponent in " + inclause
c = curs.execute(cmd,(p,)).fetchone()[0]
nEvents[j] = c
# for each fixed strategy, for each switching planner
for i in range(len(strategies)):
s = strategies[i]
for j in range(len(switching)):
if nEvents[j]:
p = switching[j]
cmd = "select count(*) from event where event='plan' and simreplan=1 and player=? and strategy=? and opponent in " + inclause
nUse = curs.execute(cmd,(p,s,)).fetchone()[0]
counts[i][j] = nUse / float(nEvents[j])
fmt = lambda x: "{0:.0f}\%".format(100 * x) # formatter. show as percent.
colhdr = ['\\texttt{'+s.replace('_',' ')+'}' for s in switching]
rowhdr = [str(i)+'. \\texttt{'+strategies[i].replace('_',' ')+'}' for i in range(len(strategies))]
outfile = os.path.join(d, 'switcher_choices.tex')
write_table(counts,fmt,rowhdr,colhdr,'switcher_choices','Strategy Choices of Switching Planners',outfile)
def switcher_choices_by_epoch(d,curs,strategies):
print "switcher_choices_by_epoch()"
table = [[0 for epoch in epochs] for s in strategies]
inclause = "("
for i in range(len(strategies)):
if i > 0:
inclause += ","
inclause += "'" + strategies[i] + "'"
inclause += ")"
#print inclause
player = 'maximin'
for epoch in range(len(epochs)):
if epoch == 0:
start = 0
else:
start = epochs[epoch-1]
end = epochs[epoch]
# total planning events of epoch
cmd = "select count(*) from event where player=? and simreplan=1 and cycle > ? and cycle <= ? " + \
" and opponent in " + inclause
nEvents = curs.execute(cmd,(player,start,end,)).fetchone()[0]
# for each fixed strategy
for i in range(len(strategies)):
s = strategies[i]
if nEvents:
cmd = "select count(*) from event where player=? and simreplan=1 and cycle >= ? and cycle < ? " + \
" and strategy=? and opponent in " + inclause
nUsed = curs.execute(cmd,(player,start,end,s,)).fetchone()[0]
table[i][epoch] = nUsed / float(nEvents)
fmt = lambda x: "{0:.0f}\%".format(100 * x) # formatter. show as percent.
rowhdr = [str(i)+'. \\texttt{'+strategies[i].replace('_',' ')+'}' for i in range(len(strategies))]
colhdr = ['{:,}'.format(e) for e in epochs]
caption = '\\texttt{maximin} Choices by Epoch'
outfile = os.path.join(d, 'maximin_choices_by_epoch.tex')
write_table(table,fmt,rowhdr,colhdr,'maximin_choices_by_epoch',caption,outfile)
def switcher_choices_by_opponent_map_epoch(d,curs,strategies,player,opponent,mapname):
print "switcher_choices_by_opponent_map_epoch()"
i = mapnames.index(mapname)
mappaths = engine_maps[i]
table = [[0 for epoch in epochs] for s in strategies]
for epoch in range(len(epochs)):
if epoch == 0:
start = 0
else:
start = epochs[epoch-1]
end = epochs[epoch]
# for each fixed strategy
nEvents = 0
for i in range(len(strategies)):
s = strategies[i]
cmd = "select count(*) from event where player=? and simreplan=1 " + \
" and opponent=? " + \
" and cycle >= ? and cycle < ? " + \
" and strategy=? " + \
" and (map=? or map=?)"
nUsed = curs.execute(cmd,(player,opponent,start,end,s,mappaths[0],mappaths[1],)).fetchone()[0]
table[i][epoch] = nUsed
nEvents += nUsed
if nEvents:
for i in range(len(strategies)):
table[i][epoch] = table[i][epoch] / float(nEvents)
fmt = lambda x: "{0:.0f}\%".format(100 * x) # formatter. show as percent.
rowhdr = [str(i)+'. \\texttt{'+strategies[i].replace('_',' ')+'}' for i in range(len(strategies))]
colhdr = ['{:,}'.format(e) for e in epochs]
caption = '\\texttt{{{0}}} vs.~\\texttt{{{1}}} Choices'.format(player,opponent.replace('_',' '))
outfile = os.path.join(d, '{0}_v_{1}_{2}_choices_by_epoch.tex'.format(player,opponent,mapname))
label = '{0}_v_{1}_{2}_choices_by_epoch'.format(player,opponent,mapname)
write_table(table,fmt,rowhdr,colhdr,label,caption,outfile)
def switcher_choice_sequence(d,curs,sw,opponent):
"""print sequence of strategy choices"""
cmd = "select event,strategy,predicted_diff,diff,cycle,map from event where player=? and simreplan=1 " + \
" and opponent=? order by map,game,cycle"
curs.execute(cmd,(sw,opponent,))
m = None
nGames = 0
for row in curs.fetchall():
if m != row[5]:
m = row[5]
print m
if row[0] == 'plan':
print "{0} prediction: {1} at: {2}".format(row[1],row[2],row[4])
else:
print row[0],"score",row[3]
nGames += 1
print nGames,"games"
def switcher_choices_sim(d,strategies,mapname,filename):
print "switcher_choices_sim()"
table = [[0 for epoch in epochs] for s in strategies]
sim_epochs = [6000,12000,18000,24000,80000]
player = None
opponent = None
file = open(os.path.join(d,filename), 'rb')
rd = csv.reader(file)
for row in rd:
event = row[0]
if not player:
player = row[cols["player"]]
opponent = row[cols["opponent"]]
else:
assert player == row[cols["player"]]
assert opponent == row[cols["opponent"]]
assert mapname in row[cols["map"]]
if event == "plan":
# count number of strategy choices in epoch
i = strategies.index(row[cols["strategy"]])
cycle = int(row[cols["cycle"]])
for epoch in range(len(sim_epochs)):
if epoch == 0:
start = 0
else:
start = sim_epochs[epoch-1]
end = sim_epochs[epoch]
if cycle >= start and cycle < end:
break
#print player, "choose strategy",strategies[i],"at cycle",row[cols["cycle"]],"epoch", epoch
table[i][epoch] += 1
# normalize
sums = np.sum(table,axis=0)
for j in range(len(sums)):
if sums[j] != 0:
for i in range(len(table)):
table[i][j] = table[i][j]/float(sums[j])
#for i in range(len(table)):
# for j in range(len(table[i])):
# print "{0:2.0f}\% ".format(100*table[i][j]),
# print
fmt = lambda x: "{0:.0f}\%".format(100 * x) # formatter. show as percent.
rowhdr = [str(i)+'. \\texttt{'+strategies[i].replace('_',' ')+'}' for i in range(len(strategies))]
colhdr = ['{:,}'.format(e) for e in epochs]
caption = '\\texttt{{{0}}} vs.~\\texttt{{{1}}} Choices on \\texttt{{{2}}} in Simulation'.format(player,opponent.replace('_',' '),mapname)
outfile = os.path.join(d, '{0}_v_{1}_{2}_sim_choices_by_epoch.tex'.format(player,opponent,mapname))
label = '{0}_v_{1}_{2}_sim_choices_by_epoch'.format(player,opponent,mapname)
write_table(table,fmt,rowhdr,colhdr,label,caption,outfile)
def switcher_win_loss_choices(d,curs):
players = switching
data = [[0 for p in players]*2 for s in strategies] # two columns (win,lose) for each player
for j in range(len(players)):
p = players[j]
nEvents = curs.execute("select count(*) from event where player=? and event='plan'",(p,)).fetchone()[0]
if nEvents == 0:
print "No planning events for player", p
continue
# get game IDs of won games.
for i in range(len(strategies)):
s = strategies[i]
n = curs.execute("select count(*) from event where strategy=? and game in (select game from event where player=? and simreplan=1 and actual=0)",(s,p)).fetchone()[0]
data[i][j*2] = n/float(nEvents)
# get game IDs of lost games
n = curs.execute("select count(*) from event where strategy=? and game in (select game from event where player=? and simreplan=1 and actual=1)",(s,p)).fetchone()[0]
data[i][j*2+1] = n/float(nEvents)
fmt = lambda x: "{0:.1f}".format(100 * x) # formatter. show as percent.
colhdr = players
colhdr2 = ['Win','Lose','Win','Lose','Win','Lose']
rowhdr = [s.replace('_',' ') for s in strategies]
filepath = os.path.join(d, 'switcher_win_choices.tex')
caption = 'Strategy Choices of Switching Planners in Winning Games (with Re-Planning)'
label = 'table:switcher_win_choices'
# copied from write_table. We need a different version of table header.
today = datetime.date.today()
tex = open(filepath,'w')
tex.write("% table written on {0} by {1}\n".format(today.strftime('%Y-%m-%d'),sys.argv[0]))
tex.write("\\begin{table}[!ht]\n")
tex.write("\\begin{tabular}{l | ")
for j in range(len(colhdr2)):
tex.write(" r ") # assume numbers in cells
tex.write("}\n")
# column header
for c in colhdr:
tex.write(" & \multicolumn{2}{c}{" + c + "}")
tex.write("\\cr\n")
for c in colhdr2:
tex.write(" & " + c)
tex.write("\\cr\n")
tex.write("\\hline\n")
for i in range(len(rowhdr)):
tex.write(rowhdr[i])
for j in range(len(colhdr2)):
x = data[i][j]
if x:
tex.write(" & " + fmt(x))
elif x == 0:
tex.write(" & 0 ")
else: # None
tex.write(" & ")
tex.write("\\cr\n")
tex.write("\end{tabular}\n")
tex.write("\\caption{" + caption + "}\n")
tex.write("\\label{" + label + "}\n")
tex.write("\\end{table}\n")
tex.close()
def switcher_choices_barchart(d,curs,strategies):
print "switcher_choices_barchart()"
players = switching
for p in players:
for s in ['balanced_7_mass','balanced_9','balanced_9_mass','rush_9']: # strongest strategies.
tex_choices_barchart(d,curs,p,s,strategies)
tex_choices_barchart(d,curs,p,'built-in',strategies)
def tex_choices_barchart(d,curs, player, opponent,strategies):
"""show choices at each planning event"""
print "tex_choices_barchart(" + player + "," + opponent + ")"
label = '{0}_choices_vs_{1}_barchart'.format(player,opponent)
filepath = os.path.join(d, label+'.tex')
tex = open(filepath,'w')
tex.write("\\begin{figure}[!ht]\n")
tex.write("\\begin{tikzpicture}\n")
# need at least 11 bar styles for 11 strategies
tex.write("""\\begin{axis}[ybar stacked,
area legend,
cycle list={
% see pgfplots.pdf barcharts and
% see pgfmanual.pdf 41 Pattern Library
% patterns: crosshatch, north east lines, north west lines,...
{fill=blue},{fill=red},{fill=teal},{fill=gray},{fill=white},{fill=orange},{fill=black},{fill=violet},{pattern color=red,pattern=north east lines},{pattern color=blue,pattern=north west lines},{fill=brown}
},
legend style={at={(2,.95)}}
]
""")
for s in strategies:
tex.write(" \\addplot coordinates\n")
tex.write(" {")
for epoch in range(len(epochs)):
if epoch == 0:
start = 0
else:
start = epochs[epoch-1]
end = epochs[epoch]
c = curs.execute("select count(*) from event where player=? and opponent=? and strategy=? and simreplan=1 and cycle >= ? and cycle < ?",
(player,opponent,s,start,end,)).fetchone()[0]
tex.write(" ({0},{1})".format(start,c))
tex.write("};\n")
tex.write(" \\legend{")
for i in range(len(strategies)):
if i > 0:
tex.write(", ")
tex.write(strategies[i].replace('_',' '))
tex.write("}\n")
tex.write("\\end{axis}\n")
tex.write("\\end{tikzpicture}\n")
caption = player + " Choices vs. " + opponent.replace("_",' ')
tex.write("\\caption{" + caption + "}\n")
tex.write("\\label{"+ label + "}\n")
tex.write("\\end{figure}\n")
tex.close()
def get_bias(d, curs, strategies):
"""get avg. scores for fixed strategy vs. self games."""
print "get_bias()"
colhdr = []
rowhdr = [str(i)+". "+strategies[i].replace('_',' ') for i in range(len(strategies))]
table = []
for mappaths in engine_maps:
for mappath in mappaths:
path,mapname = os.path.split(mappath)
mapname = mapname.replace('_',' ')
mapname = mapname.replace('.smp','')
if 'switched' in mapname:
mapname = mapname.replace('switched','S.')
else:
mapname = mapname + " N."
colhdr.append(mapname)
bias = get_bias_by_map(curs, strategies, mappath)
table.append(bias)
#print "avg. ", np.mean(bias)
table = np.transpose(table)
fmt = lambda x: "{0:.0f}".format(x) # formatter.
hline = None
label = 'map_bias'
caption = 'Bias by Map and Position'
filename = os.path.join(d, 'map_bias.tex')
write_table(table,fmt,rowhdr,colhdr,label,caption,filename,hline)
def get_bias_by_map(curs,strategies,map):
"""get avg. scores for fixed strategy vs. self games."""
cmd = "select diff from event where event='end' and player=? and opponent=player and map=?"
bias = [None for s in strategies]
for i in range(len(strategies)):
curs.execute(cmd,(strategies[i],map,))
scores = [row[0] for row in curs.fetchall()]
bias[i] = np.median(scores)
return bias
class ConfidenceInterval:
def __init__(self,median,confidence,interval):
self.player = None
self.opponent = None
self.median = median
self.confidence = confidence
self.interval = interval
def __str__(self):
return "{0} {1:.4f} [{2},{3}]".format(self.median,self.confidence,self.interval[0],self.interval[1])
def get_confidence_interval(x,threshold=.95):
"""get tightest interval arount median that exceeds .95 confidence."""
x = x[:] # get a copy and sort it.
x.sort()
n = len(x)
median = np.median(x)
cs = []
for k in range(int(math.floor(n/2.0))):
c = 1 - (2 * scipy.stats.binom.cdf(k,n,0.5)) # binomial CDF of k successes in n samples
if c < .999 and c > threshold:
cs.append(ConfidenceInterval(median,c,[x[k],x[-k-1]]))
if len(cs) > 0:
return cs[-1]
else:
raise Exception("no confidence interval meets requirements")
def get_bernoulli_confidence_intervals(scores,episodes):
intervals = []
for n in episodes:
player_scores = random.sample(scores, n)
intervals.append(bernoulli_confidence(player_scores))
return intervals
def compare_sim_engine(d, scores_dict, strategy_set,strategies):
"""compare strategy performace in simulation to performance in engine"""
print "compare_sim_engine()"
for mapname in mapnames:
compare_sim_engine_by_map(d,scores_dict,strategy_set,strategies,mapname)
def compare_sim_engine_by_map(d, scores_dict, strategy_set,strategies,mapname):
# get simulation scores
fn = "sim_scores_{0}_{1}-game.yaml".format(strategy_set, mapname)
filepath = os.path.join(d, fn)
f = open(filepath,'rb')
simdata = yaml.load(f)
f.close()
sv = simdata[0]['matrix']
s = np.mean(sv,axis=0) # mean of columns
sim_coords = ""
for j in range(len(s)):
sim_coords += "({0},{1}) ".format(j, s[j])
# get mean engine scores
coords = ""
for j in range(len(strategies)):
player = strategies[j]
v = []
for opponent in strategies:
v.extend(get_scores(player,opponent,mapname,scores_dict))
assert len(v) > 0, "no scores for " + strategies[j] + " on " + mapname
coords += " ({0},{1:.2f})\n".format(j, np.mean(v))
# write LaTeX graph
label = "compare_sim_engine_" + mapname
caption = "Scores in Simulation and Engine on \\texttt{" + mapname.replace("_"," ") + "}"
filepath = os.path.join(d, 'compare_sim_engine_'+mapname+'.tex')
tex = open(filepath,'w')
# "sharp plot" or "const plot"
# xticklabel={<command>} or xticklabels={<label list>}
#
# error bars/.cd,y explicit. Need "explicit" to put +- range in coordinates.
#
xtick = "{" + reduce(lambda x, y: str(x)+','+str(y), range(len(strategies))) + "}"
xticklabels= "{" + reduce(lambda x, y: str(x)+'.,'+str(y), range(len(strategies))) + ".}"
txt = """
\\begin{figure}[!ht]
\\centering
\\begin{tikzpicture}
\\begin{axis}[
scaled ticks=false, % disallow scaling tick labels in powers of 10
legend entries={Simulation Mean,Engine Mean},
legend style={at={(1.5,.95)}},
ymajorgrids=true,
xlabel=Strategy,
ylabel=Score,
xtick=""" + xtick + "," + """
xticklabels=""" +xticklabels + """
]
\\addplot+[const plot mark mid] coordinates
{""" + sim_coords + """};
\\addplot+[const plot mark mid] coordinates
{""" + coords + """};
\\end{axis}
\\end{tikzpicture}
\\caption{"""+caption+"""}
\\label{"""+label+"""}
\\end{figure}
"""
tex.write(txt)
tex.close()
# \\addplot+[const plot mark mid,mark=none,style=dashed,draw=brown] coordinates
# {""" + coords_plus + """};
# \\addplot+[const plot mark mid,mark=none,style=dashdotted,draw=black] coordinates
# {""" + coords_minus + """};
# \\addplot+[const plot mark mid,mark=none,style=loosely dotted,draw=green] coordinates
# {""" + median + """};
class MatrixTK:
"""game matrix parser states"""
START=0
CYCLE=1
VALUES_KEYWORD=2
ROWS_KEYWORD=3
ROWS=4
COLS_KEYWORD=5
COLS=6
VALUES=7
SOLN_KEYWORD=8
LENGTH_KEYWORD=9
LENGTH=10
SOLN=11
class MatrixHistory:
def __init__(self,maxCycle):
self.maxCycle = maxCycle
self.nEvents = 0
self.values = None
def games_matrices():
"""show average game matrix values over time."""
epoch_matrices = [MatrixHistory(epoch) for epoch in epochs]
gmfiles = glob.glob("*game_matrix0.txt")
for fn in gmfiles:
f = open(fn,'rb')
for line in f.readlines():
update_game_matrices(line,epoch_matrices)
f.close()
# write game matrix TEX files
fmt = lambda x: "{0:.0f}".format(x) # formatter.
rowhdr = [str(i) + "." for i in range(len(strategies))]
colhdr = [str(i) + "." for i in range(len(strategies))]
for i in range(len(epoch_matrices)):
mh = epoch_matrices[i]
if mh.nEvents > 0:
caption = 'Avg. Game Matrix to Cycle ' + str(epochs[i])
filepath = os.path.join(d, 'matrix_history_' + str(i) + '.tex')
write_table(mh.values,fmt,rowhdr,colhdr,'label',caption,filepath)
def update_game_matrices(line,epoch_matrices):
# parse
# cycle 0 values: rows 11 columns 11 -1.00... solution: length 12 0.00 ...
fields = line.split()
state = MatrixTK.START
row = 0
col = 0
solni = 0
matrixHistory = None
for tk in fields:
if state == MatrixTK.START:
assert tk == "cycle"
state = MatrixTK.CYCLE
elif state == MatrixTK.CYCLE:
cycle = int(tk)
state = MatrixTK.VALUES_KEYWORD
elif state == MatrixTK.VALUES_KEYWORD:
assert tk == "values:"
state = MatrixTK.ROWS_KEYWORD
elif state == MatrixTK.ROWS_KEYWORD:
assert tk == "rows"
state = MatrixTK.ROWS
elif state == MatrixTK.ROWS:
rows = int(tk)
state = MatrixTK.COLS_KEYWORD
elif state == MatrixTK.COLS_KEYWORD:
assert tk == 'columns'
state = MatrixTK.COLS
elif state == MatrixTK.COLS:
cols = int(tk)
for i in range(len(epochs)):
if cycle < epochs[i]:
matrixHistory = epoch_matrices[0]
break
if matrixHistory.values:
assert len(matrixHistory.values) == rows
assert len(matrixHistory.values[0]) == cols
else:
matrixHistory.values = [[0 for j in range(cols)] for i in range(rows)]
state = MatrixTK.VALUES
elif state == MatrixTK.VALUES:
matrixHistory.values[row][col] = float(tk)
col += 1
if col >= cols:
col = 0
row += 1
if row >= rows:
state = MatrixTK.SOLN_KEYWORD
elif state == MatrixTK.SOLN_KEYWORD:
assert tk == "solution:"
state = MatrixTK.LENGTH_KEYWORD
elif state == MatrixTK.LENGTH_KEYWORD:
assert tk == "length"
state = MatrixTK.LENGTH
elif state == MatrixTK.LENGTH:
soln_len = int(tk)
soln = [0 for i in range(soln_len)]
state = MatrixTK.SOLN
elif state == MatrixTK.SOLN:
soln[solni] = float(tk)
solni += 1
matrixHistory.nEvents += 1
print "values", matrixHistory.values
def strat_vs_strat_sim_scores(d, strategy_set, strategies):
"""simulated strategy final scores"""
print "strat_vs_strat_sim_scores()"
for mapname in mapnames:
fn = 'sim_scores_'+strategy_set + '_' + mapname+'-game.yaml'
filepath = os.path.join(d, fn)
f = open(filepath,'rb')
simdata = yaml.load(f)
f.close()
strat_vs_strat_sim_scores_map(d,simdata, strategy_set, strategies, mapname)
def strat_vs_strat_sim_scores_map(d,simdata,strategy_set,strategies,mapname):
"""simulated strategy final scores"""
# get YAML source files by running sim-matrix.bat. The BAT file runs
# stratsim WriteGameMatrix.java.
#
# get simulated strategy value matrix
#
sv = simdata[0]['matrix']
mins = np.min(sv,axis=0)
sv.append(mins)
sv.append(max_star(mins)) # mark the maximin columns
fmt = lambda x: "{0:.0f}".format(x) if x.__class__ == float else str(x) # formatter.
rowhdr = [str(j) + "." for j in range(len(strategies))]
#rowhdr.append("average")
rowhdr.append("min.")
rowhdr.append("maxmin")
hline = len(strategies) - 1
colhdr = [str(j) + "." for j in range(len(strategies))]
label = simdata[0]['label']
#caption = simdata[0]['caption']
caption = "Strategy Simulation Scores on \\texttt{" + mapname + "}"
filename = os.path.join(d, 'sim_scores_' + strategy_set + '_' + mapname+'.tex')
write_table(sv,fmt,rowhdr,colhdr,label,caption,filename,hline,bolddiag=True)
def get_sw_vs_strat_sim_scores(d,mapname,position='both'):
"""get sw_vs_strat scores averaged for map and switched position map"""
# the file names aren't systematic, so just map them here.
sim_maps = {
'2bases' :
['sw_vs_strat_sim_2bases-game.yaml',
'sw_vs_strat_sim_2bases_switched.yaml'],
'the-right-strategy' :
['sw_vs_strat_sim_the-right-strategy-game.yaml',
'sw_vs_strat_sim_the-right-strategy-game_switched.yaml']
}
# get map
if position == 'both' or position == 'top':
fn = sim_maps[mapname][0]
else:
fn = sim_maps[mapname][1]
filepath = os.path.join(d, fn)
f = open(filepath,'rb')
simdata = yaml.load(f)
f.close()
sv = simdata[0]['matrix'] # sv: sim values
if position == 'both':
# get switched position map and take average
fn = sim_maps[mapname][1]
filepath = os.path.join(d, fn)
f = open(filepath,'rb')
simdata_switched = yaml.load(f)
f.close()
sv_switched = simdata_switched[0]['matrix']
assert simdata[0]['colhdr'] == simdata_switched[0]['colhdr']
assert len(sv) == len(sv_switched)
for i in range(len(sv)):
for j in range(len(sv[i])):
sv[i][j] = (sv[i][j] + sv_switched[i][j])/2.0
return simdata[0]
def sim_maximin(d, strategy_set):
"""get maximin values for simulated fixed strategies and switching planners"""
print "sim_maximin()"
# table of strategy maximin and switching planner minimums for each map
table = [[None for j in range(len(switching)+1)] for i in range(len(mapnames))]
for i in range(len(mapnames)):
mapname = mapnames[i]
# get strat vs. strat maximin
filepath = os.path.join(d, 'sim_scores_' + strategy_set + '_' + mapname + '-game.yaml')
f = open(filepath,'rb')
simdata = yaml.load(f)
f.close()
sv = simdata[0]['matrix']
table[i][0] = get_maximin(sv)
# get switcher vs. strat mins
simdata = get_sw_vs_strat_sim_scores(d,mapname)
mins = np.min(simdata['matrix'],axis=0)
for j in range(len(switching)):
table[i][j+1] = mins[j]
fmt = lambda x: "{0:.0f}".format(x) # formatter.
rowhdr = ['\\texttt{'+m+'}' for m in mapnames]
hline = None
colhdr = ['Fixed']
colhdr.extend(['\\texttt{'+sw+'}' for sw in switching])
label = 'sim_maximin'
caption = 'Fixed Strategy Maximin and Switching Planner Minimums in Simulation'
filename = os.path.join(d, 'sim_maximin_' + strategy_set + '.tex')
write_table(table,fmt,rowhdr,colhdr,label,caption,filename,hline)
def get_maximin(table):
"""get column-wise maximin value"""
mins = np.min(table,axis=0)
return max(mins)
def engine_maximin(d,means):
"""get maximin values for fixed strategies and switching planners on games played in engine"""
print "engine_maximin()"
# fixed Nash maximin monotone
# 2bases x x x x
# the-right-strategy x x x x
table = [[None for j in range(len(switching)+1)] for i in range(len(mapnames))]
for i in range(len(mapnames)):
mapname = mapnames[i]
table[i][0] = means.s_v_s_maximin_pair[mapname][0]
for j in range(len(switching)):
player = switching[j]
table[i][j+1] = means.sw_v_s_min[mapname][player][0]
fmt = lambda x: "{0:.0f}".format(x) if x else "" # formatter.
rowhdr = ['\\texttt{'+m+'}' for m in mapnames]
hline = None
colhdr = ['Fixed']
colhdr.extend(['\\texttt{'+sw+'}' for sw in switching])
label = 'engine_maximin_means'
caption = 'Switching Planner Minimum Means in Engine'
filename = os.path.join(d, 'engine_maximin_means.tex')
if True:
write_table(table,fmt,rowhdr,colhdr,label,caption,filename,hline)
else:
print_table(table,fmt,rowhdr,colhdr,caption)
def engine_maximin_medians(d,medians):
"""get maximin values for fixed strategies and min values for switching planners on games played in engine"""
print "engine_maximin_medians()"
# Fixed Nash maximin monotone
# 2bases x x x x
# the-right-strategy x x x x
table = [[None for j in range(len(switching)+1)] for i in range(len(mapnames))]
for i in range(len(mapnames)):
mapname = mapnames[i]
interval = medians.s_v_s_maximin_interval[mapname]
table[i][0] = interval.median
for j in range(len(switching)):
player = switching[j]
interval = medians.sw_v_s_min_intervals[mapname][player]
table[i][j+1] = interval.median
fmt = lambda x: "{0:.0f}".format(x) if x else "" # formatter.
rowhdr = ['\\texttt{'+m+'}' for m in mapnames]
hline = None
colhdr = ['Fixed']
colhdr.extend(['\\texttt{'+sw+'}' for sw in switching])
label = 'engine_maximin_medians'
caption = 'Fixed Strategy Maximin and Switching Planner Minimum Medians in Engine'
filename = os.path.join(d, 'engine_maximin_medians.tex')
write_table(table,fmt,rowhdr,colhdr,label,caption,filename,hline)
def engine_maximin_pairs(d,means,score_dict):
"""get maximin values for fixed strategies and min values for switching planners on games played in engine"""
print "engine_maximin_pairs()"
#
#
# player opponent value confidence
# ------------------------------------------
# maximin x x x
# ------------------------------------------
# minimums Nash x x
# maximin x x
# monotone x x
#
fmt = lambda x: "{0}".format(x) if x.__class__ == str else "{0:.0f}".format(x) # formatter.
rowhdr = ['maximin','minimums','','']
hline = 0
colspec = " l | l l r r"
colhdr = ['Player','Opponent','Score','Rate Confidence']
for i in range(len(mapnames)):
table = [[""]*4 for j in range(len(switching)+1)]
mapname = mapnames[i]
c = means.s_v_s_maximin_pair[mapname]
table[0][0] = c[1].replace('_',' ') # player
table[0][1] = c[2].replace('_',' ') # opponent
table[0][2] = c[0] # mean
# calculate confidence interval
v = get_scores(c[1],c[2],mapname,score_dict)
nWins = count_wins(v)
print "mean of scores",np.mean(v)
print nWins,"wins in",len(v)
interval = bernoulli_confidence(v,'wilson')
table[0][3] = "{0:.0f}\% ({1:.0f}\%,{2:.0f}\%)".format(interval[0]*100,
interval[1][0]*100
,interval[1][1]*100)
for j in range(len(switching)):
player = switching[j]
c = means.sw_v_s_min[mapname][player]
table[j+1][0] = c[1] #player
table[j+1][1] = c[2].replace('_',' ') #opponent
table[j+1][2] = c[0] # mean
# calculate confidence interval
v = get_scores(c[1],c[2],mapname,score_dict)
interval = bernoulli_confidence(v,'wilson')
table[j+1][3] = "{0:.0f}\% ({1:.0f}\%,{2:.0f}\%)".format(interval[0]*100,
interval[1][0]*100
,interval[1][1]*100)
filepath = os.path.join(d, 'engine_maximin_pairs_'+mapname+'.tex')
label = 'engine_maximin_pairs_'+mapname
caption = 'Strategy Pairs on \\texttt{'+mapname+'}'
write_table(table,fmt,rowhdr,colhdr,label,caption,filepath,hline,colspec=colspec)
def sw_vs_strat_sim_scores(d):
"""translate game points YAML tables into LaTeX tables."""
print "sw_vs_strat_sim_score()"
# get YAML source files by running orst.stratagusai.stratsim.analysis.SwitchingPlannerSimulation
#
for m in range(len(mapnames)):
mapname = mapnames[m]
# get score averaged for playing from top and bottom of map
simdata = get_sw_vs_strat_sim_scores(d,mapname,position='both')
sw_vs_strat_sim_scores_by_map(d,simdata,mapname,position='both')
# get score for playing from top of map
simdata = get_sw_vs_strat_sim_scores(d,mapname,position='top')
sw_vs_strat_sim_scores_by_map(d,simdata,mapname,position='top')
# get scores for playing from bottom of map
simdata = get_sw_vs_strat_sim_scores(d,mapname,position='bottom')
sw_vs_strat_sim_scores_by_map(d,simdata,mapname,position='bottom')
def sw_vs_strat_sim_scores_by_map(d, simdata, mapname, position):
rowhdr = [str(i)+'. \\texttt{'+simdata['rowhdr'][i]+'}' for i in range(len(simdata['rowhdr']))]
colhdr = ['\\texttt{'+s+'}' for s in simdata['colhdr']]
sv = simdata['matrix']
means = np.mean(sv,axis=0)
mins = np.min(sv,axis=0)
sv.append(means)
sv.append(mins)
fmt = lambda x: "{0:.0f}".format(x) # formatter. show as percent.
hline = len(rowhdr) - 1
rowhdr.append("mean")
rowhdr.append("minimum")
caption = 'Switching Planner Scores in Simulation on \\texttt{' + mapname + "}"
if position == 'top':
caption += ' from North'
elif position == 'bottom':
caption += ' from South'
label = 'sw_vs_strat_sim_score_' + mapname
fn = 'sw_vs_strat_sim_score_' + mapname
if position == 'top' or position == 'bottom':
label += '_' + position
fn += '_' + position
fn += '.tex'
filepath = os.path.join(d, fn)
write_table(sv,fmt,rowhdr,colhdr,label,caption,filepath,hline)
def sw_vs_strat_scores_by_epoch(d,curs,player,opponent,mapname):
i = mapnames.index(mapname)
mappaths = engine_maps[i]
table = [[0 for epoch in epochs] for i in range(len(mappaths))]
rowhdr = []
for i in range(len(mappaths)):
mappath = mappaths[i]
p,m = os.path.split(mappath)
m = m.replace('_',' ')
rowhdr.append("\\texttt{"+player+"} on " + m)
for epoch in range(len(epochs)):
if epoch == 0:
start = 0
else:
start = epochs[epoch-1]
end = epochs[epoch]
cmd = "select avg(diff) from event where player=? and simreplan=1 " + \
" and opponent=? " + \
" and cycle >= ? and cycle < ? " + \
" and map=? "
mean = curs.execute(cmd,(player,opponent,start,end,mappath,)).fetchone()[0]
table[i][epoch] = mean
fmt = lambda x: "{0:.0f}".format(x) # formatter.
colhdr = ["{0:,}".format(s) for s in epochs]
caption = '\\texttt{{{0}}} vs.~\\texttt{{{1}}} Score by Epoch on \\texttt{{{2}}}'.format(player,opponent.replace('_',' '),mapname)
label = '{0}_v_{1}_score_by_epoch_on_{2}'.format(player,opponent,mapname)
filepath = os.path.join(d, label + '.tex')
write_table(table,fmt,rowhdr,colhdr,label,caption,filepath,None)
def sim_minus_engine_scores(d, curs, strategy_set, strategies):
    """sim score matrix - engine score matrix: aggregate table plus one per map."""
    # Aggregate (all maps) table first.
    sim_minus_engine_scores_map(d, curs, strategy_set, strategies, None, None)
    # Then one table per (simulation map, engine map path) pair.
    for idx in range(len(planner_maps)):
        sim_minus_engine_scores_map(d, curs, strategy_set, strategies,
                                    sim_maps[idx], planner_maps[idx])
def sim_minus_engine_scores_map(d, curs, strategy_set, strategies, simmap, mappath):
    """Write a LaTeX table of (simulation score - engine score) per strategy pair.

    When simmap/mappath are None, the aggregate (all-maps) simulation YAML and
    engine data are used; otherwise the per-map variants.

    d            -- directory holding the sim_scores_*.yaml input and output .tex
    curs         -- sqlite cursor for the engine score query
    strategy_set -- name fragment selecting the simulation YAML file
    strategies   -- strategy names (row order of both matrices)
    """
    # --- simulation data ---
    if simmap:
        fn = 'sim_scores_' + strategy_set + '_' + simmap + '.yaml'
    else:
        fn = 'sim_scores_' + strategy_set + '.yaml'
    filepath = os.path.join(d, fn)
    # safe_load suffices (plain lists/dicts); 'with' closes the handle even
    # if parsing fails -- the original leaked it on error.
    with open(filepath, 'rb') as f:
        simdata = yaml.safe_load(f)
    sv = simdata[0]['matrix']
    # --- engine data ---
    hp = strat_vs_strat_avg_score_data(curs, strategies, mappath)
    data = [row[:] for row in sv]  # copy sim matrix
    for i in range(len(hp)):
        for j in range(len(hp[i])):
            data[i][j] = data[i][j] - hp[i][j]  # subtract engine data
    fmt = lambda x: "{0:.0f}".format(x)  # render cells as integers
    rowhdr = [s.replace('_', ' ') for s in strategies]
    hline = None
    colhdr = [str(i) + '.' for i in range(len(strategies))]
    if mappath:
        path, mapname = os.path.split(mappath)
        mapname = mapname.replace('.smp', '')
        caption = 'Simulation Minus Engine Scores on ' + mapname.replace('_', ' ')
        label = 'sim_minus_engine_' + mapname
        outpath = os.path.join(d, 'sim_minus_engine_scores_' + mapname + '.tex')
    else:
        caption = 'Simulation Minus Engine Scores'
        label = 'sim_minus_engine'
        outpath = os.path.join(d, 'sim_minus_engine_scores.tex')
    write_table(data, fmt, rowhdr, colhdr, label, caption, outpath, hline)
def write_game_matrices(d, filename):
    """Load a list of game-matrix dicts from a YAML file and write one
    LaTeX table per matrix into directory d.

    Uses yaml.safe_load (the data is plain lists/dicts) and a with-block so
    the file handle is closed even if parsing raises -- the original used
    the unsafe yaml.load and leaked the handle on error.
    """
    with open(filename, 'rb') as f:
        matrices = yaml.safe_load(f)
    for m in matrices:
        write_game_matrix(d, m, filename)
def write_game_matrix(d, data, filename):
    """Write one game matrix, plus column-minimum and maximin rows, as LaTeX.

    data is a dict with keys 'cycle', 'caption', 'label', 'matrix',
    'rowhdr', and 'colhdr' (one entry of the matrices YAML file).
    The output path is <yaml-basename>_<cycle>.tex under d.
    """
    cycle = data['cycle']
    caption = data['caption'].replace("_", " ")
    label = data['label']
    matrix = data['matrix']
    mins = np.min(matrix, axis=0)
    matrix.append(mins)
    matrix.append(max_star(mins))  # mark the maximin columns
    fmt = lambda x: str(x)  # formatter.
    rowhdr = data['rowhdr']
    colhdr = data['colhdr']
    # Rule under the original rows, above the two appended summary rows.
    hline = len(rowhdr)
    rowhdr.append('mins')
    rowhdr.append('maximin')
    filepath = os.path.join(d, filename.replace(".yaml", '') + "_" + str(cycle) + ".tex")
    print filepath
    write_table(matrix, fmt, rowhdr, colhdr, label, caption, filepath, hline)
def write_game_choices(d, curs, player, opponent, map):
    """Write a two-column LaTeX table of (cycle, strategy) plan choices
    made by `player` against `opponent` on `map`.

    Queries 'plan' events from the database and writes
    <player>_<opponent>_choices_<map>.tex into directory d.
    """
    print "write_game_choices({0},{1},{2})".format(player,opponent,map)
    cmd = """select cycle,strategy from event
where player=? and opponent=? and map=? and event='plan' order by cycle"""
    # Map names are stored with a .txt suffix in the event table.
    curs.execute(cmd,(player,opponent,map+".txt",))
    label = "{0}_{1}_choices_{2}".format(player,opponent,map)
    filepath = os.path.join(d,label + ".tex")
    tex = open(filepath,'w')
    today = datetime.date.today()
    # Provenance header: generation date and generating script.
    tex.write("% table written on {0} by {1}\n".format(today.strftime('%Y-%m-%d'),sys.argv[0]))
    tex.write("""\\begin{table}[!ht]
\\centering
\\begin{tabular}{l | l}
cycle & strategy\\cr
\\hline
""")
    for row in curs.fetchall():
        tex.write("{0} & {1}\\cr\n".format(row[0],row[1].replace('_',' ')))
    tex.write("""
\\end{tabular}
\\caption{""" + "{0} Choices against {1} on {2}".format(player,opponent.replace('_',' '),map.replace('_',' ')) + """}
\\label{""" + label + """}
\\end{table}
""")
    tex.close()
def write_confidence_tables(d, medians):
    """Write, for every map, the fixed-vs-fixed and switching-vs-fixed
    confidence-interval LaTeX tables into directory d."""
    print "write_confidence_tables()"
    for mapname in mapnames:
        write_confidence_table(d, medians, mapname)
        write_sw_confidence_table(d, medians, mapname)
def write_confidence_table(d, medians, mapname):
    """for each fixed strategy vs. fixed strategy write confidence around mean

    Each cell is rendered as a three-line \\multirow: interval high,
    median, interval low.  Column minimums of the medians and a maximin
    marker row are appended at the bottom.

    medians is expected to expose .strategies (list of names) and
    .s_v_s_intervals[mapname][i][j] objects with .interval (lo, hi)
    and .median attributes.
    """
    # using multirows, so can't use write_table()
    rowhdr = [str(j) + "." for j in range(len(medians.strategies))]
    colhdr = rowhdr
    filepath = os.path.join(d, 's_v_s_confidence_' + mapname + '.tex')
    today = datetime.date.today()
    tex = open(filepath,'w')
    tex.write("% table written on {0} by {1}\n".format(today.strftime('%Y-%m-%d'),sys.argv[0]))
    tex.write("\\begin{table}[!ht]\n")
    tex.write("\\centering\n")
    tex.write("\\begin{tabular}{l | ")
    for j in range(len(colhdr)):
        tex.write(" r ") # assume numbers in cells
    tex.write("}\n")
    # column header
    for c in colhdr:
        tex.write(" & " + c)
    tex.write("\\cr\n")
    tex.write("\\hline\n")
    interval_table = medians.s_v_s_intervals[mapname]
    # Collect the medians so column minimums can be computed afterwards.
    median_table = [[None for o in medians.strategies] for p in medians.strategies]
    for i in range(len(medians.strategies)):
        tex.write("\\multirow{3}{*}{"+ rowhdr[i] + "}")
        # write high of confidence interval
        for j in range(len(medians.strategies)):
            confidence = interval_table[i][j]
            tex.write("& {0:.0f}".format(confidence.interval[1]))
        tex.write("\\\\")
        # write median of confidence interval
        for j in range(len(medians.strategies)):
            confidence = interval_table[i][j]
            median_table[i][j] = confidence.median
            tex.write(" & {0:.0f}".format(confidence.median))
        tex.write("\\\\")
        # write low of confidence interval
        for j in range(len(medians.strategies)):
            confidence = interval_table[i][j]
            tex.write(" & {0:.0f}".format(confidence.interval[0]))
        tex.write("\\\\")
        tex.write("\n")
        tex.write("\\hline\n")
    # add minimum
    mins = np.min(median_table,axis=0) # column mins
    tex.write("\\hline\n")
    tex.write("minimums")
    for m in mins:
        tex.write(" & {0:.0f}".format(m))
    tex.write("\\cr\n")
    tex.write("maximin")
    for m in max_star(mins):
        tex.write(" & {0}".format(m))
    tex.write("\\cr\n")
    label = 's_v_s_confidence_' + mapname
    caption = 'Fixed Strategy Confidence on ' + mapname
    tex.write("\end{tabular}\n")
    tex.write("\\caption{" + caption + "}\n")
    tex.write("\\label{" + label + "}\n")
    tex.write("\\end{table}\n")
    tex.close()
    # Echo the \input line for pasting into the main document.
    print '\\input{' + filepath.replace('.tex','') + '}'
def write_sw_confidence_table(d, medians, mapname):
    """for each switching vs. fixed strategy write confidence around mean

    Rows are fixed strategies, columns are switching planners; each cell
    is a three-line \\multirow (interval high / median / low).  A column
    minimums row is appended.

    NOTE(review): unlike write_confidence_table, no maximin row is
    written here -- presumably intentional; confirm.
    """
    # using multirows, so can't use write_table()
    rowhdr = [str(j) + ". " + medians.strategies[j].replace('_',' ') for j in range(len(medians.strategies))]
    colhdr = ["\\texttt{"+sw+"}" for sw in switching]
    filepath = os.path.join(d, 'sw_v_s_confidence_' + mapname + '.tex')
    today = datetime.date.today()
    tex = open(filepath,'w')
    tex.write("% table written on {0} by {1}\n".format(today.strftime('%Y-%m-%d'),sys.argv[0]))
    tex.write("\\begin{table}[!ht]\n")
    tex.write("\\centering\n")
    tex.write("\\begin{tabular}{l | ")
    for j in range(len(colhdr)):
        tex.write(" r ") # assume numbers in cells
    tex.write("}\n")
    # column header
    for c in colhdr:
        tex.write(" & " + c)
    tex.write("\\cr\n")
    tex.write("\\hline\n")
    interval_table = medians.sw_v_s_intervals[mapname]
    # Collect the medians so column minimums can be computed afterwards.
    median_table = [[None for sw in switching] for s in medians.strategies]
    for i in range(len(medians.strategies)):
        tex.write("\\multirow{3}{*}{"+ rowhdr[i] + "}")
        # write high of confidence interval
        for j in range(len(switching)):
            confidence = interval_table[i][j]
            tex.write("& {0:.0f}".format(confidence.interval[1]))
        tex.write("\\\\")
        # write median of confidence interval
        for j in range(len(switching)):
            confidence = interval_table[i][j]
            median_table[i][j] = confidence.median
            tex.write(" & {0:.0f}".format(confidence.median))
        tex.write("\\\\")
        # write low of confidence interval
        for j in range(len(switching)):
            confidence = interval_table[i][j]
            tex.write(" & {0:.0f}".format(confidence.interval[0]))
        tex.write("\\\\")
        tex.write("\n")
        tex.write("\\hline\n")
    # add minimum
    mins = np.min(median_table,axis=0) # column mins
    tex.write("\\hline\n")
    tex.write("minimums")
    for m in mins:
        tex.write(" & {0:.0f}".format(m))
    tex.write("\\cr\n")
    label = 'sw_v_s_confidence_' + mapname
    caption = 'Switching Planner Confidence on ' + mapname
    tex.write("\end{tabular}\n")
    tex.write("\\caption{" + caption + "}\n")
    tex.write("\\label{" + label + "}\n")
    tex.write("\\end{table}\n")
    tex.close()
    # Echo the \input line for pasting into the main document.
    print '\\input{' + filepath.replace('.tex','') + '}'
def get_classification_rate(scores_dict, strategies):
    """what percentage of confidence intervals fall fully positive or fully negative?"""
    n = 0  # number of confidence intervals fall fully positive or fully negative
    nIntervals = 0
    for player in strategies:
        for opponent in strategies:
            for mapname in mapnames:
                scores = scores_dict[(player, opponent, mapname)]  # get_strat_v_strat_scores2(curs,player,opponent,mappath)
                # Each matchup is expected to have exactly 50 recorded games.
                assert len(scores) == 50, str(len(scores)) + " scores for " + player + " vs. " + opponent + " on " + mapname
                #intervals = get_confidence_intervals(player,scores,[50])
                intervals = []  # fix
                # NOTE(review): with `intervals` hard-coded to [], the assert
                # below always fails, making this function unusable as-is.
                # The commented-out get_confidence_intervals call above looks
                # like the intended source -- confirm and restore.
                assert len(intervals) == 1
                i = intervals[0]
                nIntervals += 1
                # Interval endpoints with the same sign exclude zero, i.e.
                # the interval is fully positive or fully negative.
                if np.sign(i.interval[0]) == np.sign(i.interval[1]):
                    n += 1
    print "percent of confidence intervals fall fully positive or fully negative is {0:.2f}.".format(n / float(nIntervals))
def get_scores(player, opponent, mapname, scores_dict, combine=True):
    """Return a copy of the recorded scores for (player, opponent, mapname).

    When `combine` is true and the matchup is not a mirror match, the scores
    of the reversed matchup are folded in with their signs flipped, so the
    result is always from `player`'s point of view.
    """
    scores = list(scores_dict[(player, opponent, mapname)])
    assert scores, "No games for {0} vs. {1} on {2}".format(player, opponent, mapname)
    if combine and player != opponent:
        reversed_scores = scores_dict[(opponent, player, mapname)]
        assert reversed_scores
        scores += [-s for s in reversed_scores]
    return scores
def get_mean(player, opponent, mapname, scores_dict, combine=True):
    """Mean score for the matchup (both map orientations combined by default)."""
    return np.mean(get_scores(player, opponent, mapname, scores_dict, combine))
def get_median(player, opponent, mapname, scores_dict, combine=True):
    """Median score for the matchup (both map orientations combined by default)."""
    return np.median(get_scores(player, opponent, mapname, scores_dict, combine))
def get_rate(player, opponent, mapname, scores_dict, combine=True):
    """Win rate for the matchup: wins divided by games played."""
    scores = get_scores(player, opponent, mapname, scores_dict, combine)
    return count_wins(scores) / float(len(scores))
def get_score_dict(curs, strategies):
    """get dictionary of scores for player vs. opponent on map

    Returns {(player, opponent, mapname): [diff, ...]} built from 'end'
    events.  Each map has two path variants (forward/switched orientation);
    switched-map scores are negated so every list is from `player`'s point
    of view, except mirror matches which accumulate both orientations
    unnegated under the same key.
    """
    scores = {}
    cmd = "select diff from event where event='end' and player=? and opponent=? and map=?"
    for mappaths in engine_maps:
        path, mapname = os.path.split(mappaths[0])
        mapname = mapname.replace('.smp', '')
        # fixed strat vs. fixed strat
        # match pairs defined in configs.py
        for i in range(len(strategies)):
            player = strategies[i]
            # Lower-triangle (j <= i): only one ordering of each pair is
            # queried; the reverse key is filled from the switched map.
            for j in range(i + 1):
                opponent = strategies[j]
                # get player vs. opponent on map scores
                curs.execute(cmd, (player, opponent, mappaths[0],))
                pair_scores = [row[0] for row in curs.fetchall()]
                scores[(player, opponent, mapname)] = pair_scores
                # get player vs. opponent on switched map scores
                curs.execute(cmd, (player, opponent, mappaths[1],))
                if player == opponent:
                    # Mirror match: same key; extend with unnegated scores.
                    pair_scores = [row[0] for row in curs.fetchall()]
                    scores[(opponent, player, mapname)].extend(pair_scores)
                else:
                    # Reverse key holds negated switched-map scores.
                    pair_scores = [-row[0] for row in curs.fetchall()]
                    scores[(opponent, player, mapname)] = pair_scores
        # switching vs. fixed strat games
        for player in switching:
            for opponent in strategies:
                # get player vs. opponent on map scores
                curs.execute(cmd, (player, opponent, mappaths[0],))
                pair_scores = [row[0] for row in curs.fetchall()]
                scores[(player, opponent, mapname)] = pair_scores
                # get player vs. opponent on switched map scores
                curs.execute(cmd, (player, opponent, mappaths[1],))
                pair_scores = [-row[0] for row in curs.fetchall()]
                scores[(opponent, player, mapname)] = pair_scores
        # switching vs. switching
        for i in range(len(switching)):
            player = switching[i]
            for j in range(i):  # [0,...,i-1]
                opponent = switching[j]
                # get player vs. opponent on map scores
                curs.execute(cmd, (player, opponent, mappaths[0],))
                pair_scores = [row[0] for row in curs.fetchall()]
                key = (player, opponent, mapname)
                scores[key] = pair_scores
                # get player vs. opponent on switched map scores
                curs.execute(cmd, (player, opponent, mappaths[1],))
                pair_scores = [-row[0] for row in curs.fetchall()]
                key = (opponent, player, mapname)
                scores[key] = pair_scores
    # switching vs. builtin
    for mappaths in script_maps:
        path, mapname = os.path.split(mappaths[0])
        mapname = mapname.replace('_PvC.smp', '')
        for player in switching:
            opponent = 'built-in'
            # get player vs. opponent on map scores
            curs.execute(cmd, (player, opponent, mappaths[0],))
            pair_scores = [row[0] for row in curs.fetchall()]
            scores[(player, opponent, mapname)] = pair_scores
            # get player vs. opponent on switched map scores
            curs.execute(cmd, (player, opponent, mappaths[1],))
            pair_scores = [-row[0] for row in curs.fetchall()]
            scores[(opponent, player, mapname)] = pair_scores
    return scores
def build_db(d):
    """Build the event database from the non-simulation CSV logs in d.

    Creates (or recreates) d/events.db, inserts every row of every
    '*_0.csv' file with a running game id prepended, and returns the open
    sqlite3 connection.  Raises Exception if no input files are found.
    """
    dbpath = os.path.join(d, 'events.db')
    # Start from a clean database.
    if os.path.exists(dbpath):
        os.remove(dbpath)
    conn = sqlite3.connect(dbpath)
    curs = conn.cursor()
    curs.execute('''create table event
                 (game int,
                  event text,
                  playerId text,
                  player text,
                  strategy text,
                  simreplan int,
                  opponent text,
                  predicted text,
                  predicted_diff int,
                  actual text,
                  diff int,
                  cycle int,
                  map text)''')
    # Non-simulation files only; simulation files end in *_sim.csv.
    csvfiles = glob.glob(os.path.join(d, '*_0.csv'))
    if not csvfiles:
        raise Exception("No input files found.")
    game = 0
    for filename in csvfiles:
        # 'rb' is what the Python 2 csv module requires; 'with' closes the
        # handle even if an insert raises (the original leaked it, and also
        # shadowed the builtin name 'file').
        with open(filename, 'rb') as csvfile:
            for row in csv.reader(csvfile):
                event = row[0]
                row.insert(0, game)  # prepend game ID -> 13 columns total
                curs.execute("""insert into event
                             values (?,?,?,?,?,?,?,?,?,?,?,?,?)""", row)
                # An 'end' event closes the current game's rows.
                if event == 'end':
                    game += 1
    conn.commit()
    return conn
def open_db(d):
    """Open the event database in directory d and return the connection.

    Raises IOError with a descriptive message if d/events.db does not
    exist.  (The original built a tuple instead of a message string and
    raised an undefined name 'Error', which would itself crash with a
    NameError.)
    """
    dbpath = os.path.join(d, 'events.db')
    if not os.path.exists(dbpath):
        raise IOError("Error: database file {0} does not exist.".format(dbpath))
    return sqlite3.connect(dbpath)
def build_score_dictionary(d, curs, strategies):
    """Build the (player, opponent, mapname) -> scores dict and cache it.

    The dictionary is pickled to d/score_dict.pkl for reuse by
    open_score_dictionary.  The with-block closes the file even if
    pickling fails (the original leaked the handle on error).
    """
    # get dictionary of score arrays indexed by (player,opponent,mapname) tuples
    scores = get_score_dict(curs, strategies)
    with open(os.path.join(d, 'score_dict.pkl'), 'wb') as mfile:
        pickle.dump(scores, mfile)
    return scores
def open_score_dictionary(d, curs, strategies):
    """Load the cached score dictionary from d, building it first if absent.

    The with-block closes the pickle file even if unpickling fails
    (the original leaked the handle on error).
    """
    fn = os.path.join(d, 'score_dict.pkl')
    if not os.path.exists(fn):
        return build_score_dictionary(d, curs, strategies)
    with open(fn, 'rb') as mfile:
        return pickle.load(mfile)
| apache-2.0 |
unseenlaser/python-for-android | python-modules/zope/zope/interface/tests/test_element.py | 50 | 1380 | ##############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Test Element meta-class.
$Id: test_element.py 110536 2010-04-06 02:59:44Z tseaver $
"""
import unittest
from zope.interface.interface import Element
class TestElement(unittest.TestCase):
    """Tests for the Element meta-class."""

    def test_taggedValues(self):
        """Tagged values set on one Element must not leak onto another."""
        first = Element("foo")
        second = Element("bar")
        first.setTaggedValue("x", 1)
        second.setTaggedValue("x", 2)
        self.assertEqual(first.getTaggedValue("x"), 1)
        self.assertEqual(second.getTaggedValue("x"), 2)
def test_suite():
    """Return the suite of Element tests for the zope test runner."""
    return unittest.TestSuite([unittest.makeSuite(TestElement)])

if __name__ == '__main__':
    unittest.main(defaultTest='test_suite')
| apache-2.0 |
sanjeevtripurari/hue | desktop/libs/libopenid/src/libopenid/conf.py | 35 | 2032 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os
from django.utils.translation import ugettext_lazy as _t, ugettext as _
from desktop.lib.conf import Config, coerce_bool, coerce_csv
BASEDIR = os.path.dirname(os.path.abspath(__file__))
def dict_list_map(value):
    """Coerce a config value into a dict mapping keys to tuples of values.

    - JSON string: parsed; each value is wrapped in a 1-tuple.
    - dict: returned unchanged.
    - anything else: None.
    """
    if isinstance(value, str):
        # .items() (not the Python-2-only .iteritems()) keeps this helper
        # working on both Python 2 and 3.
        # NOTE(review): Python 2 unicode strings do not satisfy
        # isinstance(value, str) -- confirm that is intended.
        return dict((k, (v,)) for k, v in json.loads(value).items())
    elif isinstance(value, dict):
        return value
    return None
# Configuration properties for the OpenID login backend; the help strings
# double as documentation for each key.
SERVER_ENDPOINT_URL = Config(
    key="server_endpoint_url",
    default="https://www.google.com/accounts/o8/id",
    type=str,
    help=_t("OpenId SSO endpoint url"))

IDENTITY_URL_PREFIX = Config(
    key="identity_url_prefix",
    default="https://app.onelogin.com/openid/your_company.com/",
    type=str,
    help=_t("Openid identity url prefix"))

CREATE_USERS_ON_LOGIN = Config(
    key="create_users_on_login",
    default=True,
    type=coerce_bool,
    help=_t("Create users from IdP on login."))

USE_EMAIL_FOR_USERNAME = Config(
    key="use_email_for_username",
    default=True,
    type=coerce_bool,
    help=_t("Use email for username."))
def config_validator(user):
    """Validate the OpenID configuration; return a list of (key, message) errors."""
    errors = []
    if not SERVER_ENDPOINT_URL.get():
        errors.append(("libopenid.server_endpoint_url",
                       _("Required OPENID SSO endpoint URL is not provided.")))
    return errors
| apache-2.0 |
40223136/20150616test1 | static/Brython3.1.3-20150514-095342/Lib/_abcoll.py | 688 | 5155 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
DON'T USE THIS MODULE DIRECTLY! The classes here should be imported
via collections; they are defined here only to alleviate certain
bootstrapping issues. Unit tests are in test_collections.
"""
from abc import ABCMeta, abstractmethod
import sys
__all__ = ["Hashable", "Iterable", "Iterator",
"Sized", "Container", "Callable",
"Set", "MutableSet",
"Mapping", "MutableMapping",
"MappingView", "KeysView", "ItemsView", "ValuesView",
"Sequence", "MutableSequence",
"ByteString",
]
"""
### collection related types which are not exposed through builtin ###
## iterators ##
#fixme brython
#bytes_iterator = type(iter(b''))
bytes_iterator = type(iter(''))
#fixme brython
#bytearray_iterator = type(iter(bytearray()))
#callable_iterator = ???
dict_keyiterator = type(iter({}.keys()))
dict_valueiterator = type(iter({}.values()))
dict_itemiterator = type(iter({}.items()))
list_iterator = type(iter([]))
list_reverseiterator = type(iter(reversed([])))
range_iterator = type(iter(range(0)))
set_iterator = type(iter(set()))
str_iterator = type(iter(""))
tuple_iterator = type(iter(()))
zip_iterator = type(iter(zip()))
## views ##
dict_keys = type({}.keys())
dict_values = type({}.values())
dict_items = type({}.items())
## misc ##
dict_proxy = type(type.__dict__)
"""
def abstractmethod(self):
    """Identity stand-in for abc.abstractmethod used while bootstrapping:
    returns the decorated function unchanged."""
    return self
### ONE-TRICK PONIES ###
#class Iterable(metaclass=ABCMeta):
class Iterable:
    """ABC-style marker for objects supporting iteration (define __iter__)."""

    @abstractmethod
    def __iter__(self):
        # Degenerate generator: yields nothing.
        while False:
            yield None

    @classmethod
    def __subclasshook__(cls, C):
        # Duck-typed check: any class whose MRO defines __iter__ qualifies.
        if cls is Iterable:
            for base in C.__mro__:
                if "__iter__" in base.__dict__:
                    return True
        return NotImplemented
#class Sized(metaclass=ABCMeta):
class Sized:
    """ABC-style marker for objects with a defined length (define __len__)."""

    @abstractmethod
    def __len__(self):
        return 0

    @classmethod
    def __subclasshook__(cls, C):
        # Duck-typed check: any class whose MRO defines __len__ qualifies.
        if cls is Sized:
            for base in C.__mro__:
                if "__len__" in base.__dict__:
                    return True
        return NotImplemented
#class Container(metaclass=ABCMeta):
class Container:
    """ABC-style marker for objects supporting membership tests (define __contains__)."""

    @abstractmethod
    def __contains__(self, x):
        return False

    @classmethod
    def __subclasshook__(cls, C):
        # Duck-typed check: any class whose MRO defines __contains__ qualifies.
        if cls is Container:
            for base in C.__mro__:
                if "__contains__" in base.__dict__:
                    return True
        return NotImplemented
### MAPPINGS ###
class Mapping(Sized, Iterable, Container):
    """Read-only mapping ABC: concrete subclasses supply __getitem__
    (plus __len__ and __iter__ from the bases); everything else here is
    derived from those."""

    @abstractmethod
    def __getitem__(self, key):
        raise KeyError

    def get(self, key, default=None):
        # EAFP: one lookup, falling back to default on a miss.
        try:
            return self[key]
        except KeyError:
            return default

    def __contains__(self, key):
        try:
            self[key]
        except KeyError:
            return False
        else:
            return True

    def keys(self):
        return KeysView(self)

    def items(self):
        return ItemsView(self)

    def values(self):
        return ValuesView(self)

    def __eq__(self, other):
        # Two mappings are equal iff their key/value pairs match.
        if not isinstance(other, Mapping):
            return NotImplemented
        return dict(self.items()) == dict(other.items())

    def __ne__(self, other):
        return not (self == other)
class MutableMapping(Mapping):
    """Mutable mapping ABC: concrete subclasses additionally supply
    __setitem__ and __delitem__; pop, popitem, clear, update, and
    setdefault are derived from those."""

    @abstractmethod
    def __setitem__(self, key, value):
        raise KeyError

    @abstractmethod
    def __delitem__(self, key):
        raise KeyError

    # Sentinel distinguishing "no default supplied" from default=None.
    __marker = object()

    def pop(self, key, default=__marker):
        try:
            value = self[key]
        except KeyError:
            if default is self.__marker:
                raise
            return default
        else:
            del self[key]
            return value

    def popitem(self):
        try:
            key = next(iter(self))
        except StopIteration:
            raise KeyError
        value = self[key]
        del self[key]
        return key, value

    def clear(self):
        # Pop items until the mapping is empty (popitem raises KeyError).
        try:
            while True:
                self.popitem()
        except KeyError:
            pass

    def update(*args, **kwds):
        # Written with *args so that a literal 'self' key may appear in kwds.
        if len(args) > 2:
            raise TypeError("update() takes at most 2 positional "
                            "arguments ({} given)".format(len(args)))
        elif not args:
            raise TypeError("update() takes at least 1 argument (0 given)")
        self = args[0]
        other = args[1] if len(args) >= 2 else ()
        if isinstance(other, Mapping):
            for key in other:
                self[key] = other[key]
        elif hasattr(other, "keys"):
            # Dict-like object that is not a registered Mapping.
            for key in other.keys():
                self[key] = other[key]
        else:
            # Iterable of (key, value) pairs.
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    def setdefault(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            self[key] = default
            return default
| gpl-3.0 |
brchiu/tensorflow | tensorflow/python/estimator/__init__.py | 18 | 1259 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""estimator python module.
Importing from tensorflow.python.estimator is unsupported
and will soon break!
"""
# pylint: disable=unused-import,g-bad-import-order,g-import-not-at-top,wildcard-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow_estimator.python import estimator
# Include attrs that start with single underscore.
_HAS_DYNAMIC_ATTRIBUTES = True
estimator.__all__ = [s for s in dir(estimator) if not s.startswith('__')]
from tensorflow_estimator.python.estimator import *
| apache-2.0 |
EmreAtes/spack | var/spack/repos/builtin/packages/minixyce/package.py | 5 | 2926 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Minixyce(MakefilePackage):
    """Proxy Application. A portable proxy of some of the key
    capabilities in the electrical modeling Xyce.
    """

    homepage = "https://mantevo.org"
    url = "http://mantevo.org/downloads/releaseTarballs/miniapps/MiniXyce/miniXyce_1.0.tar.gz"

    tags = ['proxy-app']

    version('1.0', '6fc0e5a561af0b8ff581d9f704194133')

    variant('mpi', default=True, description='Build with MPI Support')

    depends_on('mpi', when='+mpi')

    @property
    def build_targets(self):
        # Make variables: compiler wrapper and MPI defines follow the variant.
        if '+mpi' in self.spec:
            compiler = self.spec['mpi'].mpicxx
            mpi_flags = 'USE_MPI=-DHAVE_MPI -DMPICH_IGNORE_CXX_SEEK'
        else:
            compiler = 'c++'
            mpi_flags = 'USE_MPI='
        args = [
            'CXX={0}'.format(compiler),
            'LINKER={0}'.format(compiler),
            mpi_flags,
        ]
        # Remove Compiler Specific Optimization Flags
        if '%gcc' not in self.spec:
            args.append('CPP_OPT_FLAGS=')
        return args

    def build(self, spec, prefix):
        with working_dir('miniXyce_ref'):
            # The script targets must run first to generate required files.
            make('generate_info')
            make('common_files')
            make(*self.build_targets)

    def install(self, spec, prefix):
        # The reference makefile has no install target, so copy by hand.
        mkdirp(prefix.bin)
        mkdirp(prefix.doc)
        install('miniXyce_ref/miniXyce.x', prefix.bin)
        install('miniXyce_ref/default_params.txt', prefix.bin)
        install('README', prefix.doc)
        install_tree('miniXyce_ref/tests/', prefix.doc.tests)
| lgpl-2.1 |
zymtech/Scrapiders | jd_58/jd_58/pipelines.py | 1 | 1282 | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymongo
from scrapy.conf import settings
class Jd58Pipeline(object):
def __init__(self):
host = settings['MONGODB_HOST']
port = settings['MONGODB_PORT']
dbname = settings['MONGODB_NAME']
clint = pymongo.MongoClient(host, port)
tdb = clint[dbname]
self.post = tdb[settings['MONGODB_TABLE']]
def process_item(self, item, spider):
bookinfo = dict(item)
self.post.update(
{"joblink": bookinfo['joblink']},
{"$set": {
"title": bookinfo['title'],
"company": bookinfo['company'],
"salary": bookinfo['salary'],
"jobdetail": bookinfo['jobdetail'],
"updatetime": bookinfo['updatetime'],
"education": bookinfo['education'],
"experience" : bookinfo['experience'],
"companyintro":bookinfo['companyintro'],
"crawltime": bookinfo['crawltime']}
},
upsert=True
)
print "write to db successfully"
return item
| mit |
piffey/ansible | test/units/modules/network/f5/test_bigip_gtm_wide_ip.py | 27 | 12370 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
from nose.plugins.skip import SkipTest
if sys.version_info < (2, 7):
raise SkipTest("F5 Ansible modules require Python >= 2.7")
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import Mock
from ansible.compat.tests.mock import patch
from ansible.module_utils.basic import AnsibleModule
try:
from library.bigip_gtm_wide_ip import ApiParameters
from library.bigip_gtm_wide_ip import ModuleParameters
from library.bigip_gtm_wide_ip import ModuleManager
from library.bigip_gtm_wide_ip import ArgumentSpec
from library.bigip_gtm_wide_ip import UntypedManager
from library.bigip_gtm_wide_ip import TypedManager
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
from test.unit.modules.utils import set_module_args
except ImportError:
try:
from ansible.modules.network.f5.bigip_gtm_wide_ip import ApiParameters
from ansible.modules.network.f5.bigip_gtm_wide_ip import ModuleParameters
from ansible.modules.network.f5.bigip_gtm_wide_ip import ModuleManager
from ansible.modules.network.f5.bigip_gtm_wide_ip import ArgumentSpec
from ansible.modules.network.f5.bigip_gtm_wide_ip import UntypedManager
from ansible.modules.network.f5.bigip_gtm_wide_ip import TypedManager
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
from units.modules.utils import set_module_args
except ImportError:
raise SkipTest("F5 Ansible modules require the f5-sdk Python library")
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
    """Load (and memoize) a fixture file by name.

    The raw text is JSON-decoded when possible; otherwise the text itself
    is cached and returned.  The with-block closes the file handle, which
    the original leaked on every cache miss.
    """
    path = os.path.join(fixture_path, name)
    if path in fixture_data:
        return fixture_data[path]
    with open(path) as f:
        data = f.read()
    try:
        data = json.loads(data)
    except Exception:
        # Not JSON -- keep the raw text.
        pass
    fixture_data[path] = data
    return data
class TestParameters(unittest.TestCase):
    """Unit tests for the wide-IP module/API parameter adapter classes."""

    def test_module_parameters(self):
        args = dict(
            name='foo.baz.bar',
            lb_method='round-robin',
        )
        p = ModuleParameters(params=args)
        assert p.name == 'foo.baz.bar'
        # Module-style 'lb_method' input is exposed as pool_lb_method.
        assert p.pool_lb_method == 'round-robin'

    def test_module_pools(self):
        args = dict(
            pools=[
                dict(
                    name='foo',
                    ratio='100'
                )
            ]
        )
        p = ModuleParameters(params=args)
        assert len(p.pools) == 1

    def test_api_parameters(self):
        # API-style 'poolLbMode' input maps to the same pool_lb_method.
        args = dict(
            name='foo.baz.bar',
            poolLbMode='round-robin'
        )
        p = ApiParameters(params=args)
        assert p.name == 'foo.baz.bar'
        assert p.pool_lb_method == 'round-robin'

    def test_api_pools(self):
        # Fixture holds a wide IP with a single attached pool.
        args = load_fixture('load_gtm_wide_ip_with_pools.json')
        p = ApiParameters(params=args)
        assert len(p.pools) == 1
        assert 'name' in p.pools[0]
        assert 'ratio' in p.pools[0]
        assert p.pools[0]['name'] == '/Common/baz'
        assert p.pools[0]['ratio'] == 10

    def test_module_not_fqdn_name(self):
        args = dict(
            name='foo.baz',
            lb_method='round-robin'
        )
        # Non-FQDN names must be rejected with F5ModuleError.
        with pytest.raises(F5ModuleError) as excinfo:
            p = ModuleParameters(params=args)
            assert p.name == 'foo.baz'
        assert 'The provided name must be a valid FQDN' in str(excinfo)
class TestUntypedManager(unittest.TestCase):
    """Tests for the pre-12.x (untyped wide-IP API) manager path."""

    def setUp(self):
        self.spec = ArgumentSpec()

    def test_create_wideip(self, *args):
        set_module_args(dict(
            name='foo.baz.bar',
            lb_method='round-robin',
            password='passsword',
            server='localhost',
            user='admin'
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        tm = UntypedManager(module=module, params=module.params)
        tm.exists = Mock(return_value=False)
        tm.create_on_device = Mock(return_value=True)

        # Override methods to force specific logic in the module to happen
        mm = ModuleManager(module=module)
        # Pre-12.x version forces selection of the untyped manager.
        mm.version_is_less_than_12 = Mock(return_value=True)
        mm.get_manager = Mock(return_value=tm)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['name'] == 'foo.baz.bar'
        assert results['state'] == 'present'
        assert results['lb_method'] == 'round-robin'
class TestTypedManager(unittest.TestCase):
    """Tests for the typed (BIG-IP v12+) GTM wide IP manager.

    Every test mocks out device I/O (``exists``, ``create_on_device``,
    ``read_current_from_device``, ``update_on_device``) and forces
    ModuleManager to dispatch to a TypedManager by stubbing
    ``version_is_less_than_12`` to return False.

    NOTE(review): the ``*args`` in each test signature suggests these
    methods are wrapped by patch decorators defined elsewhere in the
    file -- confirm against the full module.
    """

    def setUp(self):
        self.spec = ArgumentSpec()

    def test_create_wideip(self, *args):
        """Creating a new wide IP reports a change and echoes its settings."""
        set_module_args(dict(
            name='foo.baz.bar',
            lb_method='round-robin',
            type='a',
            password='passsword',
            server='localhost',
            user='admin'
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        tm = TypedManager(module=module, params=module.params)
        tm.exists = Mock(return_value=False)
        tm.create_on_device = Mock(return_value=True)

        # Override methods to force specific logic in the module to happen
        mm = ModuleManager(module=module)
        mm.version_is_less_than_12 = Mock(return_value=False)
        mm.get_manager = Mock(return_value=tm)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['name'] == 'foo.baz.bar'
        assert results['state'] == 'present'
        assert results['lb_method'] == 'round-robin'

    def test_create_wideip_deprecated_lb_method1(self, *args):
        """The deprecated 'round_robin' spelling is normalized to 'round-robin'."""
        set_module_args(dict(
            name='foo.baz.bar',
            lb_method='round_robin',
            type='a',
            password='passsword',
            server='localhost',
            user='admin'
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        tm = TypedManager(module=module, params=module.params)
        tm.exists = Mock(return_value=False)
        tm.create_on_device = Mock(return_value=True)

        # Override methods to force specific logic in the module to happen
        mm = ModuleManager(module=module)
        mm.version_is_less_than_12 = Mock(return_value=False)
        mm.get_manager = Mock(return_value=tm)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['name'] == 'foo.baz.bar'
        assert results['state'] == 'present'
        # The underscore spelling must come back in the dashed form.
        assert results['lb_method'] == 'round-robin'

    def test_create_wideip_deprecated_lb_method2(self, *args):
        """The deprecated 'global_availability' spelling maps to 'global-availability'."""
        set_module_args(dict(
            name='foo.baz.bar',
            lb_method='global_availability',
            type='a',
            password='passsword',
            server='localhost',
            user='admin'
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        tm = TypedManager(module=module, params=module.params)
        tm.exists = Mock(return_value=False)
        tm.create_on_device = Mock(return_value=True)

        # Override methods to force specific logic in the module to happen
        mm = ModuleManager(module=module)
        mm.version_is_less_than_12 = Mock(return_value=False)
        mm.get_manager = Mock(return_value=tm)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['name'] == 'foo.baz.bar'
        assert results['state'] == 'present'
        assert results['lb_method'] == 'global-availability'

    def test_create_wideip_with_pool(self, *args):
        """A wide IP can be created together with an attached pool definition."""
        set_module_args(dict(
            name='foo.baz.bar',
            lb_method='round-robin',
            type='a',
            pools=[
                dict(
                    name='foo',
                    ratio=10
                )
            ],
            password='passsword',
            server='localhost',
            user='admin'
        ))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        tm = TypedManager(module=module, params=module.params)
        tm.exists = Mock(return_value=False)
        tm.create_on_device = Mock(return_value=True)

        # Override methods to force specific logic in the module to happen
        mm = ModuleManager(module=module)
        mm.version_is_less_than_12 = Mock(return_value=False)
        mm.get_manager = Mock(return_value=tm)

        results = mm.exec_module()

        assert results['changed'] is True
        assert results['name'] == 'foo.baz.bar'
        assert results['state'] == 'present'
        assert results['lb_method'] == 'round-robin'

    def test_create_wideip_with_pool_idempotent(self, *args):
        """Re-applying a configuration matching the device state makes no change."""
        set_module_args(dict(
            name='foo.bar.com',
            lb_method='round-robin',
            type='a',
            pools=[
                dict(
                    name='baz',
                    ratio=10
                )
            ],
            password='passsword',
            server='localhost',
            user='admin'
        ))

        # Fixture holds the same pool configuration as the module args above.
        current = ApiParameters(params=load_fixture('load_gtm_wide_ip_with_pools.json'))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        tm = TypedManager(module=module, params=module.params)
        tm.exists = Mock(return_value=True)
        tm.read_current_from_device = Mock(return_value=current)

        # Override methods to force specific logic in the module to happen
        mm = ModuleManager(module=module)
        mm.version_is_less_than_12 = Mock(return_value=False)
        mm.get_manager = Mock(return_value=tm)

        results = mm.exec_module()

        assert results['changed'] is False

    def test_update_wideip_with_pool(self, *args):
        """Adding a new pool to an existing wide IP triggers an update."""
        set_module_args(dict(
            name='foo.bar.com',
            lb_method='round-robin',
            type='a',
            pools=[
                dict(
                    name='baz',
                    ratio=10
                ),
                dict(
                    name='alec',
                    ratio=100
                )
            ],
            password='passsword',
            server='localhost',
            user='admin'
        ))

        # Device currently only knows about the 'baz' pool.
        current = ApiParameters(params=load_fixture('load_gtm_wide_ip_with_pools.json'))

        module = AnsibleModule(
            argument_spec=self.spec.argument_spec,
            supports_check_mode=self.spec.supports_check_mode
        )

        # Override methods in the specific type of manager
        tm = TypedManager(module=module, params=module.params)
        tm.exists = Mock(return_value=True)
        tm.read_current_from_device = Mock(return_value=current)
        tm.update_on_device = Mock(return_value=True)

        # Override methods to force specific logic in the module to happen
        mm = ModuleManager(module=module)
        mm.version_is_less_than_12 = Mock(return_value=False)
        mm.get_manager = Mock(return_value=tm)

        results = mm.exec_module()

        assert results['changed'] is True
        assert 'pools' in results
| gpl-3.0 |
StormTrooper/osmc | package/mediacenter-skin-next-osmc/files/usr/share/kodi/addons/script.module.unidecode/lib/unidecode/x07d.py | 253 | 4678 | data = (
'Ji ', # 0x00
'Cha ', # 0x01
'Zhou ', # 0x02
'Xun ', # 0x03
'Yue ', # 0x04
'Hong ', # 0x05
'Yu ', # 0x06
'He ', # 0x07
'Wan ', # 0x08
'Ren ', # 0x09
'Wen ', # 0x0a
'Wen ', # 0x0b
'Qiu ', # 0x0c
'Na ', # 0x0d
'Zi ', # 0x0e
'Tou ', # 0x0f
'Niu ', # 0x10
'Fou ', # 0x11
'Jie ', # 0x12
'Shu ', # 0x13
'Chun ', # 0x14
'Pi ', # 0x15
'Yin ', # 0x16
'Sha ', # 0x17
'Hong ', # 0x18
'Zhi ', # 0x19
'Ji ', # 0x1a
'Fen ', # 0x1b
'Yun ', # 0x1c
'Ren ', # 0x1d
'Dan ', # 0x1e
'Jin ', # 0x1f
'Su ', # 0x20
'Fang ', # 0x21
'Suo ', # 0x22
'Cui ', # 0x23
'Jiu ', # 0x24
'Zha ', # 0x25
'Kinu ', # 0x26
'Jin ', # 0x27
'Fu ', # 0x28
'Zhi ', # 0x29
'Ci ', # 0x2a
'Zi ', # 0x2b
'Chou ', # 0x2c
'Hong ', # 0x2d
'Zha ', # 0x2e
'Lei ', # 0x2f
'Xi ', # 0x30
'Fu ', # 0x31
'Xie ', # 0x32
'Shen ', # 0x33
'Bei ', # 0x34
'Zhu ', # 0x35
'Qu ', # 0x36
'Ling ', # 0x37
'Zhu ', # 0x38
'Shao ', # 0x39
'Gan ', # 0x3a
'Yang ', # 0x3b
'Fu ', # 0x3c
'Tuo ', # 0x3d
'Zhen ', # 0x3e
'Dai ', # 0x3f
'Zhuo ', # 0x40
'Shi ', # 0x41
'Zhong ', # 0x42
'Xian ', # 0x43
'Zu ', # 0x44
'Jiong ', # 0x45
'Ban ', # 0x46
'Ju ', # 0x47
'Mo ', # 0x48
'Shu ', # 0x49
'Zui ', # 0x4a
'Wata ', # 0x4b
'Jing ', # 0x4c
'Ren ', # 0x4d
'Heng ', # 0x4e
'Xie ', # 0x4f
'Jie ', # 0x50
'Zhu ', # 0x51
'Chou ', # 0x52
'Gua ', # 0x53
'Bai ', # 0x54
'Jue ', # 0x55
'Kuang ', # 0x56
'Hu ', # 0x57
'Ci ', # 0x58
'Geng ', # 0x59
'Geng ', # 0x5a
'Tao ', # 0x5b
'Xie ', # 0x5c
'Ku ', # 0x5d
'Jiao ', # 0x5e
'Quan ', # 0x5f
'Gai ', # 0x60
'Luo ', # 0x61
'Xuan ', # 0x62
'Bing ', # 0x63
'Xian ', # 0x64
'Fu ', # 0x65
'Gei ', # 0x66
'Tong ', # 0x67
'Rong ', # 0x68
'Tiao ', # 0x69
'Yin ', # 0x6a
'Lei ', # 0x6b
'Xie ', # 0x6c
'Quan ', # 0x6d
'Xu ', # 0x6e
'Lun ', # 0x6f
'Die ', # 0x70
'Tong ', # 0x71
'Si ', # 0x72
'Jiang ', # 0x73
'Xiang ', # 0x74
'Hui ', # 0x75
'Jue ', # 0x76
'Zhi ', # 0x77
'Jian ', # 0x78
'Juan ', # 0x79
'Chi ', # 0x7a
'Mian ', # 0x7b
'Zhen ', # 0x7c
'Lu ', # 0x7d
'Cheng ', # 0x7e
'Qiu ', # 0x7f
'Shu ', # 0x80
'Bang ', # 0x81
'Tong ', # 0x82
'Xiao ', # 0x83
'Wan ', # 0x84
'Qin ', # 0x85
'Geng ', # 0x86
'Xiu ', # 0x87
'Ti ', # 0x88
'Xiu ', # 0x89
'Xie ', # 0x8a
'Hong ', # 0x8b
'Xi ', # 0x8c
'Fu ', # 0x8d
'Ting ', # 0x8e
'Sui ', # 0x8f
'Dui ', # 0x90
'Kun ', # 0x91
'Fu ', # 0x92
'Jing ', # 0x93
'Hu ', # 0x94
'Zhi ', # 0x95
'Yan ', # 0x96
'Jiong ', # 0x97
'Feng ', # 0x98
'Ji ', # 0x99
'Sok ', # 0x9a
'Kase ', # 0x9b
'Zong ', # 0x9c
'Lin ', # 0x9d
'Duo ', # 0x9e
'Li ', # 0x9f
'Lu ', # 0xa0
'Liang ', # 0xa1
'Chou ', # 0xa2
'Quan ', # 0xa3
'Shao ', # 0xa4
'Qi ', # 0xa5
'Qi ', # 0xa6
'Zhun ', # 0xa7
'Qi ', # 0xa8
'Wan ', # 0xa9
'Qian ', # 0xaa
'Xian ', # 0xab
'Shou ', # 0xac
'Wei ', # 0xad
'Qi ', # 0xae
'Tao ', # 0xaf
'Wan ', # 0xb0
'Gang ', # 0xb1
'Wang ', # 0xb2
'Beng ', # 0xb3
'Zhui ', # 0xb4
'Cai ', # 0xb5
'Guo ', # 0xb6
'Cui ', # 0xb7
'Lun ', # 0xb8
'Liu ', # 0xb9
'Qi ', # 0xba
'Zhan ', # 0xbb
'Bei ', # 0xbc
'Chuo ', # 0xbd
'Ling ', # 0xbe
'Mian ', # 0xbf
'Qi ', # 0xc0
'Qie ', # 0xc1
'Tan ', # 0xc2
'Zong ', # 0xc3
'Gun ', # 0xc4
'Zou ', # 0xc5
'Yi ', # 0xc6
'Zi ', # 0xc7
'Xing ', # 0xc8
'Liang ', # 0xc9
'Jin ', # 0xca
'Fei ', # 0xcb
'Rui ', # 0xcc
'Min ', # 0xcd
'Yu ', # 0xce
'Zong ', # 0xcf
'Fan ', # 0xd0
'Lu ', # 0xd1
'Xu ', # 0xd2
'Yingl ', # 0xd3
'Zhang ', # 0xd4
'Kasuri ', # 0xd5
'Xu ', # 0xd6
'Xiang ', # 0xd7
'Jian ', # 0xd8
'Ke ', # 0xd9
'Xian ', # 0xda
'Ruan ', # 0xdb
'Mian ', # 0xdc
'Qi ', # 0xdd
'Duan ', # 0xde
'Zhong ', # 0xdf
'Di ', # 0xe0
'Min ', # 0xe1
'Miao ', # 0xe2
'Yuan ', # 0xe3
'Xie ', # 0xe4
'Bao ', # 0xe5
'Si ', # 0xe6
'Qiu ', # 0xe7
'Bian ', # 0xe8
'Huan ', # 0xe9
'Geng ', # 0xea
'Cong ', # 0xeb
'Mian ', # 0xec
'Wei ', # 0xed
'Fu ', # 0xee
'Wei ', # 0xef
'Yu ', # 0xf0
'Gou ', # 0xf1
'Miao ', # 0xf2
'Xie ', # 0xf3
'Lian ', # 0xf4
'Zong ', # 0xf5
'Bian ', # 0xf6
'Yun ', # 0xf7
'Yin ', # 0xf8
'Ti ', # 0xf9
'Gua ', # 0xfa
'Zhi ', # 0xfb
'Yun ', # 0xfc
'Cheng ', # 0xfd
'Chan ', # 0xfe
'Dai ', # 0xff
)
| gpl-2.0 |
jj918160/cocos2d-x-samples | samples/SwiftTetris/cocos2d/plugin/tools/toolsForGame/main.py | 265 | 3576 | import sys, string, os
from Tkinter import *
import steps
Plugins = sys.argv[1]
print Plugins
pluginList = Plugins.split(':')
maxStep = 2
curStep = 1
stepList = []
# functions
# show step on the num index
def showStep(num):
    """Show the step frame at index ``num`` and hide all the others.

    ``num`` is a 0-based index into the global ``stepList``; out-of-range
    indices are ignored.
    """
    global stepList
    stepNum = len(stepList)
    # Bug fix: the original guard used ``num <= 0`` (which would wrongly
    # reject the valid first step, index 0) and ``pass`` (which did not
    # actually abort), so out-of-range indices fell through to the loop.
    if num >= stepNum or num < 0:
        return
    for i, step in enumerate(stepList):
        if i == num:
            step.stepFrame.pack(fill=BOTH, anchor='nw')
        else:
            step.stepFrame.pack_forget()
# update the pre & next buttons status
def updateBtnState():
    """Sync the Back/Next buttons with the current wizard position.

    Back is disabled on the first step; the Next button reads 'Finish'
    on the last step and 'Next' everywhere else.
    """
    global curStep
    global btnNextStep
    global btnPreStep
    if curStep == 1:
        btnPreStep['state'] = DISABLED
        btnNextStep['text'] = 'Next'
    else:
        btnPreStep['state'] = NORMAL
        btnNextStep['text'] = 'Finish' if curStep == maxStep else 'Next'
    # Next is re-enabled in every state.
    btnNextStep['state'] = NORMAL
# next button clicked
def nextStep():
    """Handler for the Next/Finish/close button.

    Validates the current step, advances to the next one, and on the
    last step runs the external shell script that patches the game
    project with the selected plugins.
    """
    # After a successful run the button is relabelled 'close'.
    if btnNextStep['text'] == 'close':
        root.quit()
        return
    global curStep
    nowStepObj = stepList[curStep - 1]
    # checkStep() returns None on success, or an error message string.
    bRet = nowStepObj.checkStep()
    if bRet != None:
        stepError['text'] = bRet
        return
    else:
        stepError['text'] = ''
    if curStep < maxStep:
        curStep += 1
        showStep(curStep - 1)
        updateBtnState()
    elif curStep == maxStep:
        # disable buttons when process
        btnPreStep['state'] = DISABLED
        btnNextStep['state'] = DISABLED
        # get user input arguments
        projPath = stepList[0].getPath()
        plugins = stepList[1].getSelectedPlugins()
        # Build a colon-separated "plugins/<name>" list for the script.
        strPlugins = ''
        i = 0
        while i < len(plugins):
            strPlugins += "plugins/"
            strPlugins += plugins[i]
            if i != (len(plugins) - 1):
                strPlugins += ':'
            i += 1
        # process shell script to modify the game project
        # NOTE(review): projPath is interpolated into a shell command
        # unquoted -- paths containing spaces will break this call.
        ret = os.system('bash ./toolsForGame/addPluginForGame.sh ' + projPath + ' ' + strPlugins)
        if ret != 0:
            # enable buttons after process
            btnPreStep['state'] = NORMAL
            btnNextStep['state'] = NORMAL
            stepError['text'] = 'Error during process'
        else:
            # enable next button & change text to close
            btnNextStep['state'] = NORMAL
            btnNextStep['text'] = 'close'
            stepError['text'] = 'Process Successful!'
# pre button clicked
def preStep():
    """Handler for the Back button: move the wizard one step backwards."""
    global curStep
    global stepError
    # Any stale validation message belongs to the step we are leaving.
    stepError['text'] = ''
    if curStep <= 1:
        return
    curStep -= 1
    showStep(curStep - 1)
    updateBtnState()
# init root view
root = Tk()
root.title('Plugin-x Integration Guide')
root.geometry("600x400")
rootFrame = Frame(root)
rootFrame.pack(fill=BOTH)
# steps view
MyStep1 = steps.step1()
MyStep1.initStep(rootFrame)
MyStep2 = steps.step2()
MyStep2.initStep(rootFrame, pluginList)
stepList.append(MyStep1)
stepList.append(MyStep2)
MyStep1.stepFrame.pack(fill=BOTH, anchor='nw')
# add step error message
controlFrame = Frame(root)
controlFrame.pack(side=BOTTOM, fill=X, anchor='s')
stepError = Label(controlFrame)
stepError.pack(side=LEFT, padx=30)
# add step button
btnNextStep = Button(controlFrame, text='Next', command=nextStep)
btnPreStep = Button(controlFrame, text='Back', command=preStep, state=DISABLED)
btnNextStep.pack(side=RIGHT, padx=30)
btnPreStep.pack(side=RIGHT)
root.mainloop()
| mit |
gtfx/dkc | dkc/cloudwatch.py | 1 | 4684 | import boto
import boto.exception
import boto.ec2.cloudwatch
import datetime
from config import get_global_option, get_logging_option
from dkc.logger import get_logger
import pytz
class Cloudwatch(object):
    """Thin wrapper around the CloudWatch API for one Kinesis stream.

    All ``get_*`` helpers query metrics (in the ``AWS/Kinesis`` namespace
    by default) scoped to the stream named at construction time.
    Python 2 only (uses ``except Exc, e`` syntax).
    """

    # Cached boto CloudWatch connection, set by connect().
    __conn = None

    def __init__(self, stream, namespace='AWS/Kinesis'):
        """Bind to ``stream`` and immediately open a CloudWatch connection."""
        self.dimensions = {'StreamName': stream}
        self.namespace = namespace
        self.logger = get_logger(self, get_logging_option('level'))
        self.connect()

    def connect(self):
        """Connect to CloudWatch in the globally configured region.

        Raises ConnectionException when boto returns no connection.
        """
        self.__conn = boto.ec2.cloudwatch.connect_to_region(get_global_option('region'))
        if not self.__conn:
            raise ConnectionException

    def valid_period(self, period):
        """Coerce ``period`` (seconds) towards a CloudWatch-legal value.

        Non-positive periods become 60; other values are rounded DOWN to
        a multiple of 60.
        NOTE(review): a period of 1..59 rounds down to 0, which is not a
        valid CloudWatch period -- confirm callers never pass one.
        """
        if period <= 0:
            self.logger.debug('Period is less or equal to zero. Setting it to one minute')
            period = 60
        if period % 60 != 0:
            wrong_period = period
            self.logger.debug('Period must be multiply of 60. Fixing')
            period = wrong_period - (wrong_period % 60)
            self.logger.debug('Fixed period to [%s] from [%s]' % (period, wrong_period))
        return period

    def calculate_time(self, period):
        """Return the (start, end) UTC window ending now, ``period`` seconds long."""
        # Period must be a multiply of 60
        end_time = datetime.datetime.now(pytz.utc)
        start_time = end_time - datetime.timedelta(seconds=period)
        return start_time, end_time

    def get_stream_metrics(self):
        """List all metrics recorded for this stream, or None on API error."""
        try:
            return self.__conn.list_metrics(dimensions=self.dimensions, namespace=self.namespace)
        except boto.exception.BotoServerError, e:
            self.logger.error('%s Could not get metrics %s' % (e.reason, e.message))
            return

    def get_metric(self, period, start_time, end_time, metric_name, statistics):
        """Fetch the datapoints of one metric, or None on API error."""
        try:
            return self.__conn.get_metric_statistics(period,
                                                     start_time,
                                                     end_time,
                                                     metric_name,
                                                     namespace=self.namespace,
                                                     statistics=statistics,
                                                     dimensions=self.dimensions,
                                                     unit=None)
        except boto.exception.BotoServerError, e:
            self.logger.error('%s Could not get metric %s' % (e.reason, e.message))
            return

    def get_output_latency(self, period, statistics='Average'):
        """Datapoints for PutRecord.Latency over the last ``period`` seconds.

        NOTE(review): unlike the byte/transaction helpers below, the two
        latency helpers return the raw datapoint list (not a scalar), and
        their input/output naming is inverted relative to the byte helpers
        (here 'output' maps to PutRecord while get_input_bytes also maps
        to PutRecord) -- confirm the intended direction.
        """
        metric_name = 'PutRecord.Latency'
        period = self.valid_period(period)
        start_time, end_time = self.calculate_time(period)
        return self.get_metric(period, start_time, end_time, metric_name, statistics)

    def get_input_latency(self, period, statistics='Average'):
        """Datapoints for GetRecords.Latency over the last ``period`` seconds."""
        metric_name = 'GetRecords.Latency'
        period = self.valid_period(period)
        start_time, end_time = self.calculate_time(period)
        return self.get_metric(period, start_time, end_time, metric_name, statistics)

    def get_input_bytes(self, period, statistics='Average'):
        """Scalar PutRecord.Bytes statistic for the window, or -1 if unavailable."""
        metric_name = 'PutRecord.Bytes'
        period = self.valid_period(period)
        start_time, end_time = self.calculate_time(period)
        metric = self.get_metric(period, start_time, end_time, metric_name, statistics)
        if not metric:
            return -1
        return metric[0].get(statistics)

    def get_output_bytes(self, period, statistics='Average'):
        """Scalar GetRecords.Bytes statistic for the window, or -1 if unavailable."""
        metric_name = 'GetRecords.Bytes'
        period = self.valid_period(period)
        start_time, end_time = self.calculate_time(period)
        metric = self.get_metric(period, start_time, end_time, metric_name, statistics)
        if not metric:
            return -1
        return metric[0].get(statistics)

    def get_input_transactions(self, period, statistics='SampleCount'):
        """Scalar GetRecords.Success statistic for the window, or -1 if unavailable."""
        metric_name = 'GetRecords.Success'
        period = self.valid_period(period)
        start_time, end_time = self.calculate_time(period)
        metric = self.get_metric(period, start_time, end_time, metric_name, statistics)
        if not metric:
            return -1
        return metric[0].get(statistics)

    def get_output_transactions(self, period, statistics='SampleCount'):
        """Scalar PutRecord.Success statistic for the window, or -1 if unavailable."""
        metric_name = 'PutRecord.Success'
        period = self.valid_period(period)
        start_time, end_time = self.calculate_time(period)
        metric = self.get_metric(period, start_time, end_time, metric_name, statistics)
        if not metric:
            return -1
        return metric[0].get(statistics)
class ConnectionException(Exception):
    """Raised when a connection to CloudWatch cannot be established."""
| apache-2.0 |
Rupan/winscp | libs/apr/build/gen-build.py | 65 | 6784 | #!/usr/bin/env python
#
# USAGE: gen-build.py TYPE
#
# where TYPE is one of: make, dsp, vcproj
#
# It reads build.conf from the current directory, and produces its output
# into the current directory.
#
import os
import ConfigParser
import getopt
import string
import glob
import re
#import ezt
#
# legal platforms: aix, beos, netware, os2, os390, unix, win32
# 'make' users: aix, beos, os2, os390, unix, win32 (mingw)
#
PLATFORMS = [ 'aix', 'beos', 'netware', 'os2', 'os390', 'unix', 'win32' ]
MAKE_PLATFORMS = [
('unix', None),
('aix', 'unix'),
('beos', 'unix'),
('os2', 'unix'),
('os390', 'unix'),
('win32', 'unix'),
]
# note: MAKE_PLATFORMS is an ordered set. we want to generate unix symbols
# first, so that the later platforms can reference them.
def main():
    """Read build.conf and emit build-outputs.mk (Python 2 script).

    The generated makefile fragment contains per-object dependency
    rules, per-platform OBJECTS_* symbols, optional module link rules,
    and the BUILD_DIRS/.make.dirs bootstrap target.
    """
    parser = ConfigParser.ConfigParser()
    parser.read('build.conf')

    # Optional .dsp project file: win32 inherits some unix sources from it.
    if parser.has_option('options', 'dsp'):
        dsp_file = parser.get('options', 'dsp')
    else:
        dsp_file = None

    headers = get_files(parser.get('options', 'headers'))

    # compute the relevant headers, along with the implied includes
    legal_deps = { }
    for fname in headers:
        legal_deps[os.path.basename(fname)] = fname

    h_deps = { }
    for fname in headers:
        h_deps[os.path.basename(fname)] = extract_deps(fname, legal_deps)
    resolve_deps(h_deps)

    f = open('build-outputs.mk', 'w')
    f.write('# DO NOT EDIT. AUTOMATICALLY GENERATED.\n\n')

    # write out the platform-independent files
    files = get_files(parser.get('options', 'paths'))
    objects, dirs = write_objects(f, legal_deps, h_deps, files)
    f.write('\nOBJECTS_all = %s\n\n' % string.join(objects))

    # for each platform and each subdirectory holding platform-specific files,
    # write out their compilation rules, and an OBJECT_<subdir>_<plat> symbol.
    for platform, parent in MAKE_PLATFORMS:
        # record the object symbols to build for each platform
        group = [ '$(OBJECTS_all)' ]

        # If we're doing win32, we're going to look in the libapr.dsp file
        # for those files that we have to manually add to our list.
        inherit_parent = { }
        if platform == 'win32' and dsp_file:
            for line in open(dsp_file).readlines():
                if line[:7] != 'SOURCE=':
                    continue
                if line[7:].find('unix') != -1:
                    # skip the leading .\ and split it out
                    inherit_files = line[9:].strip().split('\\')
                    # change the .c to .lo
                    assert inherit_files[-1][-2:] == '.c'
                    inherit_files[-1] = inherit_files[-1][:-2] + '.lo'
                    # replace the \\'s with /'s
                    inherit_line = '/'.join(inherit_files)
                    if not inherit_parent.has_key(inherit_files[0]):
                        inherit_parent[inherit_files[0]] = []
                    inherit_parent[inherit_files[0]].append(inherit_line)

        for subdir in string.split(parser.get('options', 'platform_dirs')):
            path = '%s/%s' % (subdir, platform)
            if not os.path.exists(path):
                # this subdir doesn't have a subdir for this platform, so we'll
                # use the parent-platform's set of symbols
                if parent:
                    group.append('$(OBJECTS_%s_%s)' % (subdir, parent))
                continue

            # remember that this directory has files/objects
            dirs[path] = None

            # write out the compilation lines for this subdir
            files = get_files(path + '/*.c')
            objects, _unused = write_objects(f, legal_deps, h_deps, files)

            if inherit_parent.has_key(subdir):
                objects = objects + inherit_parent[subdir]

            symname = 'OBJECTS_%s_%s' % (subdir, platform)
            objects.sort()

            # and write the symbol for the whole group
            f.write('\n%s = %s\n\n' % (symname, string.join(objects)))

            # and include that symbol in the group
            group.append('$(%s)' % symname)

        group.sort()
        # write out a symbol which contains the necessary files
        f.write('OBJECTS_%s = %s\n\n' % (platform, string.join(group)))

    f.write('HEADERS = $(top_srcdir)/%s\n\n' % string.join(headers, ' $(top_srcdir)/'))
    f.write('SOURCE_DIRS = %s $(EXTRA_SOURCE_DIRS)\n\n' % string.join(dirs.keys()))

    if parser.has_option('options', 'modules'):
        modules = parser.get('options', 'modules')

        # Each module gets its own object list and (optionally) a link rule.
        for mod in string.split(modules):
            files = get_files(parser.get(mod, 'paths'))
            objects, _unused = write_objects(f, legal_deps, h_deps, files)
            flat_objects = string.join(objects)
            f.write('OBJECTS_%s = %s\n' % (mod, flat_objects))

            if parser.has_option(mod, 'target'):
                target = parser.get(mod, 'target')
                f.write('MODULE_%s = %s\n' % (mod, target))
                f.write('%s: %s\n' % (target, flat_objects))
                f.write('\t$(LINK_MODULE) -o $@ $(OBJECTS_%s) $(LDADD_%s)\n' % (mod, mod))

    f.write('\n')

    # Build a list of all necessary directories in build tree
    alldirs = { }
    for dir in dirs.keys():
        d = dir
        while d:
            alldirs[d] = None
            d = os.path.dirname(d)

    # Sort so 'foo' is before 'foo/bar'
    keys = alldirs.keys()
    keys.sort()
    f.write('BUILD_DIRS = %s\n\n' % string.join(keys))

    f.write('.make.dirs: $(srcdir)/build-outputs.mk\n' \
            '\t@for d in $(BUILD_DIRS); do test -d $$d || mkdir $$d; done\n' \
            '\t@echo timestamp > $@\n')
def write_objects(f, legal_deps, h_deps, files):
    """Write one make dependency rule per .c file in ``files`` to ``f``.

    Returns (sorted .lo object names, dict of directories containing them).
    """
    dirs = { }
    objects = [ ]

    for file in files:
        # apr_app.c is deliberately excluded from the normal object list.
        if file[-10:] == '/apr_app.c':
            continue
        assert file[-2:] == '.c'
        obj = file[:-2] + '.lo'
        objects.append(obj)

        dirs[os.path.dirname(file)] = None

        # what headers does this file include, along with the implied headers
        deps = extract_deps(file, legal_deps)
        # NOTE(review): updating ``deps`` while iterating deps.keys() is
        # only safe on Python 2, where keys() returns a list snapshot.
        for hdr in deps.keys():
            deps.update(h_deps.get(hdr, {}))

        vals = deps.values()
        vals.sort()
        f.write('%s: %s .make.dirs %s\n' % (obj, file, string.join(vals)))

    objects.sort()
    return objects, dirs
def extract_deps(fname, legal_deps):
    """Return the subset of ``legal_deps`` headers that ``fname`` #includes.

    Maps each included header's basename to its project-relative path.
    """
    deps = {}
    for line in open(fname).readlines():
        if not line.startswith('#include'):
            continue
        header = _re_include.match(line).group(1)
        if header in legal_deps:
            deps[header] = legal_deps[header]
    return deps
_re_include = re.compile('#include *["<](.*)[">]')
def resolve_deps(header_deps):
    """Alter the provided dictionary to flatten includes-of-includes.

    ``header_deps`` maps a header name to a dict of the headers it
    directly includes; on return each entry also contains every
    transitively included header.  Iterates to a fixed point.
    """
    altered = 1
    while altered:
        altered = 0
        for hdr, deps in header_deps.items():
            start = len(deps)
            # Bug fix: iterate over a snapshot of the keys. Calling
            # deps.update() while iterating the live keys() view raises
            # RuntimeError on Python 3 (on Python 2 it only worked
            # because keys() returned a list copy).
            for dep in list(deps.keys()):
                deps.update(header_deps.get(dep, {}))
            if len(deps) != start:
                altered = 1
def clean_path(path):
    """Normalize Windows-style backslashes in ``path`` to forward slashes."""
    return "/".join(path.split("\\"))
def get_files(patterns):
    """Expand a whitespace-separated string of glob patterns.

    Returns a sorted list of matching file names with slashes normalized.
    """
    files = []
    for pattern in patterns.split():
        files.extend(clean_path(match) for match in glob.glob(pattern))
    files.sort()
    return files
if __name__ == '__main__':
main()
| gpl-3.0 |
techdragon/django | tests/forms_tests/widget_tests/test_textarea.py | 331 | 1313 | from django.forms import Textarea
from django.utils.safestring import mark_safe
from .base import WidgetTest
class TextareaTest(WidgetTest):
    """Rendering behaviour of the Textarea widget."""
    widget = Textarea()

    def test_render(self):
        expected = '<textarea rows="10" cols="40" name="msg">value</textarea>'
        self.check_html(self.widget, 'msg', 'value', html=expected)

    def test_render_required(self):
        # A required Textarea renders exactly like an optional one.
        required_widget = Textarea()
        required_widget.is_required = True
        expected = '<textarea rows="10" cols="40" name="msg">value</textarea>'
        self.check_html(required_widget, 'msg', 'value', html=expected)

    def test_render_empty(self):
        expected = '<textarea rows="10" cols="40" name="msg"></textarea>'
        self.check_html(self.widget, 'msg', '', html=expected)

    def test_render_none(self):
        # None renders the same as the empty string.
        expected = '<textarea rows="10" cols="40" name="msg"></textarea>'
        self.check_html(self.widget, 'msg', None, html=expected)

    def test_escaping(self):
        expected = (
            '<textarea rows="10" cols="40" name="msg">'
            'some "quoted" & ampersanded value</textarea>'
        )
        self.check_html(self.widget, 'msg', 'some "quoted" & ampersanded value', html=expected)

    def test_mark_safe(self):
        expected = (
            '<textarea rows="10" cols="40" name="msg">'
            'pre "quoted" value</textarea>'
        )
        self.check_html(self.widget, 'msg', mark_safe('pre "quoted" value'), html=expected)
| bsd-3-clause |
FrancescAlted/datashape | datashape/coercion.py | 1 | 8292 | """Implements type coercion rules for data shapes.
Note that transitive coercions could be supported, but we decide not to since
it may involve calling a whole bunch of functions with a whole bunch of types
to figure out whether this is possible in the face of polymorphic overloads.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import defaultdict
from itertools import chain, product
from .error import CoercionError, UnificationError
from .coretypes import CType, TypeVar, Mono
from .typesets import boolean, complexes, floating, integral, signed, unsigned
from .coretypes import Implements, Fixed, Var, Ellipsis, DataShape
from .util import verify
from . import coretypes
inf = float('inf')
class CoercionTable(object):
    """Holds the raw coercion-cost rules between concrete types."""

    def __init__(self):
        # (src, dst) -> cost of coercing src to dst
        self.table = {}
        # dst -> set of types coercible to it
        self.srcs = defaultdict(set)
        # src -> set of types it can coerce to
        self.dsts = defaultdict(set)

    def _reflexivity(self, a):
        # Coercing a type to itself is always free.
        self.table.setdefault((a, a), 0)

    def add_coercion(self, src, dst, cost, transitive=True):
        """Record that ``src`` coerces to ``dst`` at the given cost."""
        assert cost >= 0, 'Raw coercion costs must be nonnegative'
        if (src, dst) in self.table:
            # Rule already known: keep the cheapest known path.
            self.table[src, dst] = min(self.table[src, dst], cost)
            return
        self.srcs[dst].add(src)
        self.dsts[src].add(dst)
        self._reflexivity(src)
        self._reflexivity(dst)
        if src != dst:
            self.table[src, dst] = cost
            if transitive:
                transitivity(src, dst, self)

    def coercion_cost(self, src, dst):
        """Look up the recorded cost of coercing type ``src`` to ``dst``."""
        return self.table[src, dst]
_table = CoercionTable()
add_coercion = _table.add_coercion
coercion_cost_table = _table.coercion_cost
#------------------------------------------------------------------------
# Coercion invariants
#------------------------------------------------------------------------
def transitivity(a, b, table=_table):
    """Enforce coercion rule transitivity.

    Called after a new edge (a, b) is recorded: every known path through
    that edge becomes an explicit rule in ``table``.  Note that the
    (a, b) cost is re-read on each iteration, since add_coercion() may
    lower it while the closure is being extended.
    """
    # (src, a) in R and (a, b) in R => (src, b) in R
    for src in table.srcs[a]:
        table.add_coercion(src, b, table.coercion_cost(src, a) +
                           table.coercion_cost(a, b))
    # (a, b) in R and (b, dst) in R => (a, dst) in R
    for dst in table.dsts[b]:
        table.add_coercion(a, dst, table.coercion_cost(a, b) +
                           table.coercion_cost(b, dst))
#------------------------------------------------------------------------
# Coercion function
#------------------------------------------------------------------------
def dimlist_coercion_cost(src, dst):
    """Cost of broadcasting the dimension list ``src`` to ``dst``.

    ``src`` may be shorter than ``dst`` (missing leading dimensions are
    added at a small cost), never longer.
    """
    # TODO: This is not handling ellipsis
    pad = len(dst) - len(src)
    if pad < 0:
        return inf
    total = 0
    # Prepending a size-one dimension is cheaper (0.1) than any other (0.2).
    for dim in dst[:pad]:
        total += 0.1 if dim == Fixed(1) else 0.2
    # Remaining dimensions are matched pairwise.
    for lhs, rhs in zip(src, dst[pad:]):
        total += dim_coercion_cost(lhs, rhs)
    return total
def dim_coercion_cost(src, dst):
    """
    Cost of coercing one dimension type to another.

    Exact matches are free; Fixed<->Var conversions and size-1
    broadcasts carry a 0.1 penalty; a TypeVar destination accepts
    anything for free; everything else is impossible (inf).
    """
    if isinstance(dst, Fixed):
        if isinstance(src, Var):
            return 0.1  # broadcasting penalty
        elif not isinstance(src, Fixed):
            return inf
        if src.val != dst.val:
            # broadcasting penalty: only a size-1 source can stretch
            return 0.1 if src.val == 1 else inf
        return 0
    elif isinstance(dst, Var):
        assert type(src) in [Var, Fixed]
        if isinstance(src, Fixed):
            return 0.1  # broadcasting penalty
        return 0
    elif isinstance(dst, TypeVar):
        return 0
    else:
        return inf
def dtype_coercion_cost(src, dst):
    """Cost of coercing the data type ``src`` to ``dst``.

    Identical types are free; distinct CTypes are looked up in the
    global coercion table; everything else is impossible (inf).
    """
    if src == dst:
        return 0
    if not (isinstance(src, CType) and isinstance(dst, CType)):
        return inf
    try:
        return coercion_cost_table(src, dst)
    except KeyError:
        return inf
def _strip_datashape(a):
    """Unwrap a zero-dimensional DataShape down to its single measure."""
    if isinstance(a, DataShape) and len(a) == 1:
        return a[0]
    return a
def coercion_cost(a, b, seen=None):
    """Cost of coercing type ``a`` to type ``b``.

    Both types must be unifiable and normalized; outer zero-dimensional
    DataShape wrappers are stripped before the recursive cost walk.
    """
    lhs = _strip_datashape(a)
    rhs = _strip_datashape(b)
    return _coercion_cost(lhs, rhs, seen)
def _coercion_cost(a, b, seen=None):
    """Recursive cost walk behind coercion_cost().

    ``seen`` tracks TypeVars already matched, so repeated bindings of
    the same variable are not charged twice.  Raises CoercionError when
    no coercion exists for a CType pair or typeset membership.
    """
    # TODO: Cost functions for conversion between type constructors in the
    # lattice (implement a "type join")
    if seen is None:
        seen = set()

    if a == b or isinstance(a, TypeVar):
        return 0
    elif isinstance(a, CType) and isinstance(b, CType):
        try:
            return coercion_cost_table(a, b)
        except KeyError:
            raise CoercionError(a, b)
    elif isinstance(b, TypeVar):
        # Only the first binding of a given TypeVar costs anything.
        visited = b not in seen
        seen.add(b)
        return 0.1 * visited
    elif isinstance(b, Implements):
        if a in b.typeset:
            # Smaller typesets are a tighter (cheaper) match.
            return 0.1 - (0.1 / len(b.typeset.types))
        else:
            raise CoercionError(a, b)
    elif isinstance(b, Fixed):
        if isinstance(a, Var):
            return 0.1  # broadcasting penalty
        assert isinstance(a, Fixed)
        if a.val != b.val:
            assert a.val == 1 or b.val == 1
            return 0.1  # broadcasting penalty
        return 0
    elif isinstance(b, Var):
        assert type(a) in [Var, Fixed]
        if isinstance(a, Fixed):
            return 0.1  # broadcasting penalty
        return 0
    elif isinstance(a, DataShape) and isinstance(b, DataShape):
        # Dimensions and measure are costed separately.
        return (dimlist_coercion_cost(a[:-1], b[:-1]) +
                dtype_coercion_cost(a[-1], b[-1]))
    else:
        # Same constructor: verify shape compatibility, then the cost is
        # the worst cost among the paired parameters.
        verify(a, b)
        return max([_coercion_cost(x, y, seen) for x, y in zip(a.parameters,
                                                               b.parameters)])
def termsize(term):
    """Size of a type term: one per Mono node, counted recursively."""
    if not isinstance(term, Mono):
        return 0
    return 1 + sum(map(termsize, term.parameters))
#------------------------------------------------------------------------
# Default coercion rules
#------------------------------------------------------------------------
def add_numeric_rule(types, cost=1):
    """Chain coercions along ``types``: each element coerces to the next."""
    seq = list(types)
    for i in range(len(seq) - 1):
        add_coercion(seq[i], seq[i + 1], cost)
# NOTE(review): these two lists are defined but not referenced in this
# module's visible code -- confirm they are used elsewhere before removal.
promotable_unsigned = [coretypes.uint8, coretypes.uint16, coretypes.uint32]
promoted_signed = [coretypes.int16, coretypes.int32, coretypes.int64]

# Lossless widening within each numeric family (cost 1 per step).
add_numeric_rule(signed)
add_numeric_rule(unsigned)
add_numeric_rule(floating)
add_numeric_rule(complexes)
# Cross-family lossless promotions: bool -> int8, uintN -> int(2N).
add_numeric_rule([coretypes.bool_, coretypes.int8])
add_numeric_rule([coretypes.uint8, coretypes.int16])
add_numeric_rule([coretypes.uint16, coretypes.int32])
add_numeric_rule([coretypes.uint32, coretypes.int64])
# Exact int -> float and float -> complex promotions, slightly dearer (1.2).
add_numeric_rule([coretypes.int16, coretypes.float32], 1.2)
add_numeric_rule([coretypes.int32, coretypes.float64], 1.2)
add_numeric_rule([coretypes.float32, coretypes.complex_float32], 1.2)
add_numeric_rule([coretypes.float64, coretypes.complex_float64], 1.2)

# Potentially lossy conversions
# unsigned -> signed
add_numeric_rule([coretypes.uint8, coretypes.int8], 1.5)
add_numeric_rule([coretypes.uint16, coretypes.int16], 1.5)
add_numeric_rule([coretypes.uint32, coretypes.int32], 1.5)
add_numeric_rule([coretypes.uint64, coretypes.int64], 1.5)
# signed -> unsigned
add_numeric_rule([coretypes.int8, coretypes.uint8], 1.5)
add_numeric_rule([coretypes.int16, coretypes.uint16], 1.5)
add_numeric_rule([coretypes.int32, coretypes.uint32], 1.5)
add_numeric_rule([coretypes.int64, coretypes.uint64], 1.5)
# int -> float
add_numeric_rule([coretypes.int32, coretypes.float32], 1.5)
add_numeric_rule([coretypes.int64, coretypes.float64], 1.5)
# float -> complex
add_numeric_rule([coretypes.float64, coretypes.complex_float32], 1.5)
| bsd-2-clause |
goyalankit/ride-agg | scripts/olacabs_scraper.py | 2 | 3088 | import config
import re
import urllib
from bs4 import BeautifulSoup
from base_scraper import BaseScraper
import httplib
from itertools import imap,ifilter
import os
numparse = lambda x: map(float,re.findall(r'\d+\.?\d*',x))
class OlacabsScraper(BaseScraper):
    """Scrapes taxi fare tables from www.olacabs.com (Python 2 only).

    Parses the per-city fare pages with BeautifulSoup and yields fare
    dicts normalized to the common template schema.
    """

    # CSS class suffixes of the two fare tables on each city page.
    service_types = ("standard-rate","luxury-rate")
    _table_types = map(lambda s: s+" fare-table", service_types)

    @classmethod
    def map_to_template(cls, service):
        """Normalize one scraped fare dict (or an iterable of them).

        Numeric fields are pulled out of free-text cells via numparse().
        """
        if not isinstance(service,dict) and hasattr(service,'__iter__'):
            return imap(cls.map_to_template, service)
        std_svc = {}
        std_svc['currency_code'] = u'IND'
        std_svc['city'] = unicode(service['city'])
        # NOTE(review): rstrip('-rate') strips a trailing CHARACTER SET,
        # not the suffix string; it happens to work for the two current
        # service types but would corrupt e.g. 'water-rate' -> 'w'.
        std_svc['service_type'] = unicode(service['service_type'].rstrip('-rate').capitalize())
        std_svc['vehicle_type'] = unicode(service['category'])
        min_bill = numparse(service['minimum_bill'])
        std_svc['fixed_fare'] = min_bill[0]
        # A second number, when present, is the distance the fixed fare covers.
        if len(min_bill)>1:
            std_svc['fixed_fare_dist_km'] = min_bill[1]
        std_svc['fare_per_km'] = numparse(service['extra_km_charges'])[0]
        if 'wait_time_charges' in service:
            std_svc['wait_charge_per_min'] = numparse(service['wait_time_charges'])[0]
        elif 'ride_time_charges' in service:
            std_svc['fare_per_min'] = numparse(service['ride_time_charges'])[0]
        return std_svc

    @staticmethod
    def get_cities(conn=None):
        """Return an iterator over the city names listed on the /fares page."""
        if conn is None:
            conn = httplib.HTTPConnection("www.olacabs.com")
        conn.request("GET", "/fares")
        soup = BeautifulSoup(conn.getresponse().read())
        cities = soup.find('div',attrs={'id':"faresCityList"})
        return cities.stripped_strings

    # TODO
    # add extra charges information
    @classmethod
    def scrape_fares(cls, cities=None):
        """Yield one normalized fare dict per table row, per city.

        ``cities`` may be None (scrape all cities), a single city name,
        or an iterable of names.
        """
        conn = httplib.HTTPConnection("www.olacabs.com")
        if cities is None:
            cities = cls.get_cities(conn)
        elif not hasattr(cities,'__iter__'):
            cities = [cities]
        # Header cells become snake_case dict keys ('Extra Km (Rs)' -> 'extra_km_').
        fmt = lambda x: re.sub('\s','_', x.get_text().lower().split('(')[0])
        for city in cities:
            conn.request("GET", ("/fares" + '/' + city.lower()).strip())
            soup = BeautifulSoup(conn.getresponse().read())
            tables = [soup.find('div', class_=tbltype) for tbltype in cls._table_types]
            # NOTE(review): enumerate(filter(None, ...)) renumbers the
            # surviving tables, so if only the luxury table is present it
            # gets index 0 and is labelled 'standard-rate' -- verify.
            for i,tablesoup in enumerate(filter(None,tables)):
                soup_ptr = tablesoup.find('tr')
                if not soup_ptr: continue
                # First row: a th whose colspan gives the column count.
                ncols = int(soup_ptr.th['colspan'])
                soup_ptr = soup_ptr.find_next('tr')
                headers = [fmt(th) for th in soup_ptr.find_all('th')]
                content = tablesoup.find_all('td')
                nrows = len(content)/ncols
                # Chunk the flat td list into rows of ncols cells each.
                for cols in (content[i*ncols:(i+1)*ncols] for i in range(nrows)):
                    svc = dict(zip(headers, [c.get_text() for c in cols]))
                    svc['city'] = city
                    svc['service_type'] = cls.service_types[i]
                    yield cls.map_to_template(svc)
| apache-2.0 |
fiber-space/pip | pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py | 23 | 5719 | """The match_hostname() function from Python 3.3.3, essential when using SSL."""
# Note: This file is under the PSF license as the code comes from the python
# stdlib. http://docs.python.org/3/license.html
import re
import sys
# ipaddress has been backported to 2.6+ in pypi. If it is installed on the
# system, use it to handle IPAddress ServerAltnames (this was added in
# python-3.5) otherwise only do DNS matching. This allows
# backports.ssl_match_hostname to continue to be used all the way back to
# python-2.4.
try:
from pip._vendor import ipaddress
except ImportError:
ipaddress = None
__version__ = '3.5.0.1'
class CertificateError(ValueError):
    """Raised when a certificate's names fail to match the requested
    hostname; subclasses ValueError so generic callers still catch it."""
    pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
def _to_unicode(obj):
if isinstance(obj, str) and sys.version_info < (3,):
obj = unicode(obj, encoding='ascii', errors='strict')
return obj
def _ipaddress_match(ipname, host_ip):
    """Exact matching of IP addresses.

    RFC 6125 explicitly doesn't define an algorithm for this
    (section 1.7.2 - "Out of Scope").
    """
    # OpenSSL may append a trailing newline to a subjectAltName IP, and
    # ipaddress refuses byte strings -- hence the strip + unicode coercion.
    parsed = ipaddress.ip_address(_to_unicode(ipname).rstrip())
    return parsed == host_ip
def match_hostname(cert, hostname):
    """Verify that *cert* (in decoded format as returned by
    SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
    rules are followed, but IP addresses are not accepted for *hostname*.

    CertificateError is raised on failure. On success, the function
    returns nothing.
    """
    if not cert:
        raise ValueError("empty or no certificate, match_hostname needs a "
                         "SSL socket or SSL context with either "
                         "CERT_OPTIONAL or CERT_REQUIRED")
    # Decide up front whether *hostname* is an IP literal; that choice
    # selects which subjectAltName entries are consulted below.
    try:
        # Divergence from upstream: ipaddress can't handle byte str
        host_ip = ipaddress.ip_address(_to_unicode(hostname))
    except ValueError:
        # Not an IP address (common case)
        host_ip = None
    except UnicodeError:
        # Divergence from upstream: Have to deal with ipaddress not taking
        # byte strings. addresses should be all ascii, so we consider it not
        # an ipaddress in this case
        host_ip = None
    except AttributeError:
        # Divergence from upstream: Make ipaddress library optional
        if ipaddress is None:
            host_ip = None
        else:
            raise
    # Collect every candidate name so a failure can report all of them.
    dnsnames = []
    san = cert.get('subjectAltName', ())
    for key, value in san:
        if key == 'DNS':
            if host_ip is None and _dnsname_match(value, hostname):
                return
            dnsnames.append(value)
        elif key == 'IP Address':
            if host_ip is not None and _ipaddress_match(value, host_ip):
                return
            dnsnames.append(value)
    if not dnsnames:
        # The subject is only checked when there is no dNSName entry
        # in subjectAltName
        for sub in cert.get('subject', ()):
            for key, value in sub:
                # XXX according to RFC 2818, the most specific Common Name
                # must be used.
                if key == 'commonName':
                    if _dnsname_match(value, hostname):
                        return
                    dnsnames.append(value)
    if len(dnsnames) > 1:
        raise CertificateError("hostname %r "
                               "doesn't match either of %s"
                               % (hostname, ', '.join(map(repr, dnsnames))))
    elif len(dnsnames) == 1:
        raise CertificateError("hostname %r "
                               "doesn't match %r"
                               % (hostname, dnsnames[0]))
    else:
        raise CertificateError("no appropriate commonName or "
                               "subjectAltName fields were found")
| mit |
eugenejen/AutobahnPython | examples/twisted/wamp1/authentication/client.py | 17 | 2416 | ###############################################################################
##
## Copyright (C) 2012-2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import sys
from pprint import pprint
from twisted.python import log
from twisted.internet import reactor
from autobahn.twisted.websocket import connectWS
from autobahn.wamp1.protocol import WampClientFactory, \
WampCraClientProtocol
class MyClientProtocol(WampCraClientProtocol):
    """
    Authenticated WAMP client using WAMP-Challenge-Response-Authentication ("WAMP-CRA").
    """

    def onSessionOpen(self):
        # Session is up: immediately start the CRA handshake.

        ## "authenticate" as anonymous
        ##
        #d = self.authenticate()

        ## authenticate as "foobar" with password "secret"
        ##
        d = self.authenticate(authKey = "foobar",
                              authExtra = None,
                              authSecret = "secret")

        d.addCallbacks(self.onAuthSuccess, self.onAuthError)

    def onClose(self, wasClean, code, reason):
        # Stop the reactor when the WebSocket closes so the script exits.
        reactor.stop()

    def onAuthSuccess(self, permissions):
        # After successful auth: publish once, call one RPC, print its
        # result (or failure) via pprint, then close the session.
        print "Authentication Success!", permissions
        self.publish("http://example.com/topics/mytopic1", "Hello, world!")
        d = self.call("http://example.com/procedures/hello", "Foobar")
        d.addBoth(pprint)
        d.addBoth(self.sendClose)

    def onAuthError(self, e):
        # e.value.args carries (error URI, description, details).
        uri, desc, details = e.value.args
        print "Authentication Error!", uri, desc, details
if __name__ == '__main__':

    # Pass 'debug' as the first CLI argument to enable WAMP-level debugging.
    if len(sys.argv) > 1 and sys.argv[1] == 'debug':
        log.startLogging(sys.stdout)
        debug = True
    else:
        debug = False

    # NOTE(review): in debug mode startLogging has already been called
    # above, so this second call is redundant (kept as-is).
    log.startLogging(sys.stdout)
    factory = WampClientFactory("ws://localhost:9000", debugWamp = debug)
    factory.protocol = MyClientProtocol
    connectWS(factory)
    reactor.run()
| apache-2.0 |
moylop260/odoo-dev | addons/crm/base_partner_merge.py | 42 | 30021 | #!/usr/bin/env python
from __future__ import absolute_import
from email.utils import parseaddr
import functools
import htmlentitydefs
import itertools
import logging
import operator
import re
from ast import literal_eval
from openerp.tools import mute_logger
# Validation Library https://pypi.python.org/pypi/validate_email/1.1
from .validate_email import validate_email
import openerp
from openerp.osv import osv, orm
from openerp.osv import fields
from openerp.osv.orm import browse_record
from openerp.tools.translate import _
pattern = re.compile("&(\w+?);")
_logger = logging.getLogger('base.partner.merge')
# http://www.php2python.com/wiki/function.html-entity-decode/
def html_entity_decode_char(m, defs=htmlentitydefs.entitydefs):
    """Return the replacement text for one '&name;' entity regex match;
    unknown entity names are left untouched (the match is returned verbatim)."""
    name = m.group(1)
    if name in defs:
        return defs[name]
    return m.group(0)
def html_entity_decode(string):
    # Replace every '&name;' HTML entity in *string*, using the module-level
    # 'pattern' regex and the entity table bound into html_entity_decode_char.
    return pattern.sub(html_entity_decode_char, string)
def sanitize_email(email):
    # Split a free-form (possibly HTML-escaped) recipient string into a list
    # of lowercased addresses that pass the bundled validate_email check.
    assert isinstance(email, basestring) and email
    # ';', '/' and ':' act as address separators; decode entities first,
    # then split the normalized string on ','.
    result = re.subn(r';|/|:', ',',
                     html_entity_decode(email or ''))[0].split(',')
    # parseaddr strips display names ("John <j@x>" -> "j@x"); whitespace
    # inside one item also separates multiple addresses.
    emails = [parseaddr(email)[1]
              for item in result
              for email in item.split()]
    return [email.lower()
            for email in emails
            if validate_email(email)]
def is_integer_list(ids):
    # True when every element is an integer (Python 2 'int' or 'long').
    return all(isinstance(i, (int, long)) for i in ids)
class ResPartner(osv.Model):
    # Expose 'id' and 'create_date' as readonly columns so the merge wizard
    # can browse and order duplicate partners by creation date.
    _inherit = 'res.partner'

    _columns = {
        'id': fields.integer('Id', readonly=True),
        'create_date': fields.datetime('Create Date', readonly=True),
    }
class MergePartnerLine(osv.TransientModel):
    # One candidate group of duplicate partners produced by the wizard.
    _name = 'base.partner.merge.line'

    _columns = {
        'wizard_id': fields.many2one('base.partner.merge.automatic.wizard',
                                     'Wizard'),
        # smallest partner id of the group, used for stable ordering
        'min_id': fields.integer('MinID'),
        # textual list of the group's partner ids (literal_eval'ed by callers)
        'aggr_ids': fields.char('Ids', required=True),
    }
    _order = 'min_id asc'
class MergePartnerAutomatic(osv.TransientModel):
"""
The idea behind this wizard is to create a list of potential partners to
merge. We use two objects, the first one is the wizard for the end-user.
And the second will contain the partner list to merge.
"""
_name = 'base.partner.merge.automatic.wizard'
_columns = {
# Group by
'group_by_email': fields.boolean('Email'),
'group_by_name': fields.boolean('Name'),
'group_by_is_company': fields.boolean('Is Company'),
'group_by_vat': fields.boolean('VAT'),
'group_by_parent_id': fields.boolean('Parent Company'),
'state': fields.selection([('option', 'Option'),
('selection', 'Selection'),
('finished', 'Finished')],
'State',
readonly=True,
required=True),
'number_group': fields.integer("Group of Contacts", readonly=True),
'current_line_id': fields.many2one('base.partner.merge.line', 'Current Line'),
'line_ids': fields.one2many('base.partner.merge.line', 'wizard_id', 'Lines'),
'partner_ids': fields.many2many('res.partner', string='Contacts'),
'dst_partner_id': fields.many2one('res.partner', string='Destination Contact'),
'exclude_contact': fields.boolean('A user associated to the contact'),
'exclude_journal_item': fields.boolean('Journal Items associated to the contact'),
'maximum_group': fields.integer("Maximum of Group of Contacts"),
}
def default_get(self, cr, uid, fields, context=None):
    # When launched from a res.partner selection, pre-fill the wizard in
    # 'selection' state with those partners and a suggested destination.
    if context is None:
        context = {}
    res = super(MergePartnerAutomatic, self).default_get(cr, uid, fields, context)
    if context.get('active_model') == 'res.partner' and context.get('active_ids'):
        partner_ids = context['active_ids']
        res['state'] = 'selection'
        res['partner_ids'] = partner_ids
        # destination = last element of the ordered candidate list
        res['dst_partner_id'] = self._get_ordered_partner(cr, uid, partner_ids, context=context)[-1].id
    return res
_defaults = {
'state': 'option'
}
def get_fk_on(self, cr, table):
    # Load into the cursor every (table, column) pair holding a
    # single-column foreign key that references <table>.id; callers read
    # the rows with cr.fetchall().
    q = """ SELECT cl1.relname as table,
                   att1.attname as column
            FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
                 pg_attribute as att1, pg_attribute as att2
            WHERE con.conrelid = cl1.oid
                AND con.confrelid = cl2.oid
                AND array_lower(con.conkey, 1) = 1
                AND con.conkey[1] = att1.attnum
                AND att1.attrelid = cl1.oid
                AND cl2.relname = %s
                AND att2.attname = 'id'
                AND array_lower(con.confkey, 1) = 1
                AND con.confkey[1] = att2.attnum
                AND att2.attrelid = cl2.oid
                AND con.contype = 'f'
        """
    return cr.execute(q, (table,))
def _update_foreign_keys(self, cr, uid, src_partners, dst_partner, context=None):
    # Repoint, directly in SQL, every foreign key referencing a source
    # partner onto the destination partner.
    _logger.debug('_update_foreign_keys for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))

    # find the many2one relation to a partner
    proxy = self.pool.get('res.partner')
    self.get_fk_on(cr, 'res_partner')

    # ignore two tables
    for table, column in cr.fetchall():
        if 'base_partner_merge_' in table:
            continue
        partner_ids = tuple(map(int, src_partners))

        # list the table's other columns, used below to detect duplicates
        query = "SELECT column_name FROM information_schema.columns WHERE table_name LIKE '%s'" % (table)
        cr.execute(query, ())
        columns = []
        for data in cr.fetchall():
            if data[0] != column:
                columns.append(data[0])

        query_dic = {
            'table': table,
            'column': column,
            'value': columns[0],
        }
        if len(columns) <= 1:
            # unique key treated: only repoint rows that would not collide
            # with an existing row already pointing at the destination
            query = """
                UPDATE "%(table)s" as ___tu
                SET %(column)s = %%s
                WHERE
                    %(column)s = %%s AND
                    NOT EXISTS (
                        SELECT 1
                        FROM "%(table)s" as ___tw
                        WHERE
                            %(column)s = %%s AND
                            ___tu.%(value)s = ___tw.%(value)s
                    )""" % query_dic
            for partner_id in partner_ids:
                cr.execute(query, (dst_partner.id, partner_id, dst_partner.id))
        else:
            cr.execute("SAVEPOINT recursive_partner_savepoint")
            try:
                query = 'UPDATE "%(table)s" SET %(column)s = %%s WHERE %(column)s IN %%s' % query_dic
                cr.execute(query, (dst_partner.id, partner_ids,))

                # special case: repointing res_partner.parent_id may create
                # a cycle -- detect it and roll back to the savepoint
                if column == proxy._parent_name and table == 'res_partner':
                    query = """
                        WITH RECURSIVE cycle(id, parent_id) AS (
                                SELECT id, parent_id FROM res_partner
                            UNION
                                SELECT  cycle.id, res_partner.parent_id
                                FROM    res_partner, cycle
                                WHERE   res_partner.id = cycle.parent_id AND
                                        cycle.id != cycle.parent_id
                        )
                        SELECT id FROM cycle WHERE id = parent_id AND id = %s
                    """
                    cr.execute(query, (dst_partner.id,))
                    if cr.fetchall():
                        cr.execute("ROLLBACK TO SAVEPOINT recursive_partner_savepoint")
            finally:
                cr.execute("RELEASE SAVEPOINT recursive_partner_savepoint")
def _update_reference_fields(self, cr, uid, src_partners, dst_partner, context=None):
    # Repoint generic (model, res_id)-style references and 'reference'
    # typed fields from the source partners to the destination.
    _logger.debug('_update_reference_fields for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))

    def update_records(model, src, field_model='model', field_id='res_id', context=None):
        # rewrite rows of <model> whose (field_model, field_id) pair
        # points at the source partner; no-op when the model is absent
        proxy = self.pool.get(model)
        if proxy is None:
            return
        domain = [(field_model, '=', 'res.partner'), (field_id, '=', src.id)]
        ids = proxy.search(cr, openerp.SUPERUSER_ID, domain, context=context)
        return proxy.write(cr, openerp.SUPERUSER_ID, ids, {field_id: dst_partner.id}, context=context)

    update_records = functools.partial(update_records, context=context)

    for partner in src_partners:
        update_records('calendar', src=partner, field_model='model_id.model')
        update_records('ir.attachment', src=partner, field_model='res_model')
        update_records('mail.followers', src=partner, field_model='res_model')
        update_records('mail.message', src=partner)
        update_records('marketing.campaign.workitem', src=partner, field_model='object_id.model')
        update_records('ir.model.data', src=partner)

    proxy = self.pool['ir.model.fields']
    domain = [('ttype', '=', 'reference')]
    record_ids = proxy.search(cr, openerp.SUPERUSER_ID, domain, context=context)

    # rewrite every 'reference' field storing 'res.partner,<src id>'
    for record in proxy.browse(cr, openerp.SUPERUSER_ID, record_ids, context=context):
        try:
            proxy_model = self.pool[record.model]
            field_type = proxy_model._columns[record.name].__class__._type
        except KeyError:
            # unknown model or field => skip
            continue

        if field_type == 'function':
            continue

        for partner in src_partners:
            domain = [
                (record.name, '=', 'res.partner,%d' % partner.id)
            ]
            model_ids = proxy_model.search(cr, openerp.SUPERUSER_ID, domain, context=context)
            values = {
                record.name: 'res.partner,%d' % dst_partner.id,
            }
            proxy_model.write(cr, openerp.SUPERUSER_ID, model_ids, values, context=context)
def _update_values(self, cr, uid, src_partners, dst_partner, context=None):
    # Fill the destination partner's scalar fields by iterating
    # (src_partners..., dst_partner): later non-empty values win, so the
    # destination's own values take precedence over the sources'.
    _logger.debug('_update_values for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))

    columns = dst_partner._columns

    def write_serializer(column, item):
        # many2one browse records are written as their id
        if isinstance(item, browse_record):
            return item.id
        else:
            return item

    values = dict()
    for column, field in columns.iteritems():
        # x2many relations and function fields are not copied here
        if field._type not in ('many2many', 'one2many') and not isinstance(field, fields.function):
            for item in itertools.chain(src_partners, [dst_partner]):
                if item[column]:
                    values[column] = write_serializer(column, item[column])

    values.pop('id', None)
    # write parent_id separately so a recursion error can be swallowed
    parent_id = values.pop('parent_id', None)
    dst_partner.write(values)
    if parent_id and parent_id != dst_partner.id:
        try:
            dst_partner.write({'parent_id': parent_id})
        except (osv.except_osv, orm.except_orm):
            _logger.info('Skip recursive partner hierarchies for parent_id %s of partner: %s', parent_id, dst_partner.id)
@mute_logger('openerp.osv.expression', 'openerp.osv.orm')
def _merge(self, cr, uid, partner_ids, dst_partner=None, context=None):
    # Merge the given partners into dst_partner (or, when not supplied /
    # not in the set, the last of the ordered candidates), then delete
    # the source partners.
    proxy = self.pool.get('res.partner')

    partner_ids = proxy.exists(cr, uid, list(partner_ids), context=context)
    if len(partner_ids) < 2:
        return

    # hard safety limit on group size
    if len(partner_ids) > 3:
        raise osv.except_osv(_('Error'), _("For safety reasons, you cannot merge more than 3 contacts together. You can re-open the wizard several times if needed."))

    # non-admin users may only merge partners sharing a single email
    if openerp.SUPERUSER_ID != uid and len(set(partner.email for partner in proxy.browse(cr, uid, partner_ids, context=context))) > 1:
        raise osv.except_osv(_('Error'), _("All contacts must have the same email. Only the Administrator can merge contacts with different emails."))

    if dst_partner and dst_partner.id in partner_ids:
        src_partners = proxy.browse(cr, uid, [id for id in partner_ids if id != dst_partner.id], context=context)
    else:
        ordered_partners = self._get_ordered_partner(cr, uid, partner_ids, context)
        dst_partner = ordered_partners[-1]
        src_partners = ordered_partners[:-1]
    _logger.info("dst_partner: %s", dst_partner.id)

    # non-admin users may not merge away partners carrying journal items
    if openerp.SUPERUSER_ID != uid and self._model_is_installed(cr, uid, 'account.move.line', context=context) and \
            self.pool.get('account.move.line').search(cr, openerp.SUPERUSER_ID, [('partner_id', 'in', [partner.id for partner in src_partners])], context=context):
        raise osv.except_osv(_('Error'), _("Only the destination contact may be linked to existing Journal Items. Please ask the Administrator if you need to merge several contacts linked to existing Journal Items."))

    call_it = lambda function: function(cr, uid, src_partners, dst_partner,
                                        context=context)

    call_it(self._update_foreign_keys)
    call_it(self._update_reference_fields)
    call_it(self._update_values)

    _logger.info('(uid = %s) merged the partners %r with %s', uid, list(map(operator.attrgetter('id'), src_partners)), dst_partner.id)
    dst_partner.message_post(body='%s %s'%(_("Merged with the following partners:"), ", ".join('%s<%s>(ID %s)' % (p.name, p.email or 'n/a', p.id) for p in src_partners)))

    for partner in src_partners:
        partner.unlink()
def clean_emails(self, cr, uid, context=None):
    """
    Clean the email address of the partner, if there is an email field with
    a minimum of two addresses, the system will create a new partner, with the
    information of the previous one and will copy the new cleaned email into
    the email field.
    """
    if context is None:
        context = {}

    proxy_model = self.pool['ir.model.fields']
    field_ids = proxy_model.search(cr, uid, [('model', '=', 'res.partner'),
                                             ('ttype', 'like', '%2many')],
                                   context=context)
    fields = proxy_model.read(cr, uid, field_ids, context=context)
    # x2many relations must not be duplicated onto the partners created
    # for the extra addresses, so reset them all on copy()
    reset_fields = dict((field['name'], []) for field in fields)

    proxy_partner = self.pool['res.partner']
    context['active_test'] = False
    ids = proxy_partner.search(cr, uid, [], context=context)

    # BUG FIX: the original list was missing commas --
    # ['name', 'var' 'partner_id' 'is_company', 'email'] -- so implicit
    # string concatenation produced the bogus field name
    # 'varpartner_idis_company'. Only real res.partner columns are kept
    # ('var'/'partner_id' look like typo debris; only 'email' and the
    # implicit 'id' are actually used below).
    fields = ['name', 'is_company', 'email']
    partners = proxy_partner.read(cr, uid, ids, fields, context=context)

    partners.sort(key=operator.itemgetter('id'))
    partners_len = len(partners)

    _logger.info('partner_len: %r', partners_len)

    for idx, partner in enumerate(partners):
        if not partner['email']:
            continue

        percent = (idx / float(partners_len)) * 100.0
        _logger.info('idx: %r', idx)
        _logger.info('percent: %r', percent)
        try:
            emails = sanitize_email(partner['email'])
            head, tail = emails[:1], emails[1:]
            email = head[0] if head else False

            # first address stays on the existing partner
            proxy_partner.write(cr, uid, [partner['id']],
                                {'email': email}, context=context)

            # every extra address gets its own copy of the partner
            for email in tail:
                values = dict(reset_fields, email=email)
                proxy_partner.copy(cr, uid, partner['id'], values,
                                   context=context)

        except Exception:
            _logger.exception("There is a problem with this partner: %r", partner)
            raise
    return True
def close_cb(self, cr, uid, ids, context=None):
    # Simply close the wizard window.
    return {'type': 'ir.actions.act_window_close'}
def _generate_query(self, fields, maximum_group=100):
    """Build the SQL that groups potential duplicate partners.

    fields -- column names to GROUP BY; 'email' and 'name' additionally
    receive an IS NOT NULL filter so empty values never match each other.
    maximum_group -- cap on the number of returned groups (falsy = no cap).
    """
    sql_parts = [
        "SELECT min(id), array_agg(id)",
        "FROM res_partner",
    ]

    predicates = ['%s IS NOT NULL' % field
                  for field in fields
                  if field in ('email', 'name')]
    if predicates:
        sql_parts.append('WHERE %s' % ' AND '.join(predicates))

    sql_parts.append("GROUP BY %s" % ', '.join(fields))
    sql_parts.append("HAVING COUNT(*) >= 2")
    sql_parts.append("ORDER BY min(id)")

    if maximum_group:
        sql_parts.append("LIMIT %s" % maximum_group)

    return ' '.join(sql_parts)
def _compute_selected_groupby(self, this):
    # Collect the field names whose 'group_by_<field>' checkbox is set on
    # the wizard record; at least one must be selected.
    group_by_str = 'group_by_'
    group_by_len = len(group_by_str)

    fields = [
        key[group_by_len:]
        for key in self._columns.keys()
        if key.startswith(group_by_str)
    ]

    groups = [
        field
        for field in fields
        if getattr(this, '%s%s' % (group_by_str, field), False)
    ]

    if not groups:
        raise osv.except_osv(_('Error'),
                             _("You have to specify a filter for your selection"))

    return groups
def next_cb(self, cr, uid, ids, context=None):
    """
    Don't compute any thing
    """
    context = dict(context or {}, active_test=False)
    this = self.browse(cr, uid, ids[0], context=context)
    # drop the group currently displayed, then show the next one
    if this.current_line_id:
        this.current_line_id.unlink()
    return self._next_screen(cr, uid, this, context)
def _get_ordered_partner(self, cr, uid, partner_ids, context=None):
    # Sort newest-first by create_date, then (stably) active-first.
    # Callers pick the LAST element as the merge destination
    # (see default_get / _merge / _next_screen).
    partners = self.pool.get('res.partner').browse(cr, uid, list(partner_ids), context=context)
    ordered_partners = sorted(sorted(partners,
                                     key=operator.attrgetter('create_date'), reverse=True),
                              key=operator.attrgetter('active'), reverse=True)
    return ordered_partners
def _next_screen(self, cr, uid, this, context=None):
    # Advance the wizard: load the next duplicate group into the form,
    # or switch to 'finished' when no group is left.
    this.refresh()
    values = {}
    if this.line_ids:
        # in this case, we try to find the next record.
        current_line = this.line_ids[0]
        current_partner_ids = literal_eval(current_line.aggr_ids)
        values.update({
            'current_line_id': current_line.id,
            'partner_ids': [(6, 0, current_partner_ids)],
            'dst_partner_id': self._get_ordered_partner(cr, uid, current_partner_ids, context)[-1].id,
            'state': 'selection',
        })
    else:
        values.update({
            'current_line_id': False,
            'partner_ids': [],
            'state': 'finished',
        })

    this.write(values)

    # re-open the same wizard form in a dialog
    return {
        'type': 'ir.actions.act_window',
        'res_model': this._name,
        'res_id': this.id,
        'view_mode': 'form',
        'target': 'new',
    }
def _model_is_installed(self, cr, uid, model, context=None):
    # A model counts as installed when it is registered in ir.model.
    proxy = self.pool.get('ir.model')
    domain = [('model', '=', model)]
    return proxy.search_count(cr, uid, domain, context=context) > 0
def _partner_use_in(self, cr, uid, aggr_ids, models, context=None):
    """
    Check if there is no occurence of this group of partner in the selected
    model
    """
    # models maps model name -> partner field; any hit excludes the group
    for model, field in models.iteritems():
        proxy = self.pool.get(model)
        domain = [(field, 'in', aggr_ids)]
        if proxy.search_count(cr, uid, domain, context=context):
            return True
    return False
def compute_models(self, cr, uid, ids, context=None):
    """
    Compute the different models needed by the system if you want to exclude
    some partners.
    """
    assert is_integer_list(ids)

    this = self.browse(cr, uid, ids[0], context=context)

    models = {}
    if this.exclude_contact:
        # exclude partners that back a res.users login
        models['res.users'] = 'partner_id'
    if self._model_is_installed(cr, uid, 'account.move.line', context=context) and this.exclude_journal_item:
        # exclude partners referenced by journal items (accounting only)
        models['account.move.line'] = 'partner_id'
    return models
def _process_query(self, cr, uid, ids, query, context=None):
    """
    Execute the select request and write the result in this wizard
    """
    proxy = self.pool.get('base.partner.merge.line')
    this = self.browse(cr, uid, ids[0], context=context)
    models = self.compute_models(cr, uid, ids, context=context)
    cr.execute(query)

    counter = 0
    for min_id, aggr_ids in cr.fetchall():
        # drop groups that touch excluded models (users / journal items)
        if models and self._partner_use_in(cr, uid, aggr_ids, models, context=context):
            continue
        values = {
            'wizard_id': this.id,
            'min_id': min_id,
            'aggr_ids': aggr_ids,
        }
        proxy.create(cr, uid, values, context=context)
        counter += 1

    values = {
        'state': 'selection',
        'number_group': counter,
    }
    this.write(values)

    _logger.info("counter: %s", counter)
def start_process_cb(self, cr, uid, ids, context=None):
    """
    Start the process.
    * Compute the selected groups (with duplication)
    * If the user has selected the 'exclude_XXX' fields, avoid the partners.
    """
    assert is_integer_list(ids)

    # active_test=False so archived partners are de-duplicated too
    context = dict(context or {}, active_test=False)
    this = self.browse(cr, uid, ids[0], context=context)
    groups = self._compute_selected_groupby(this)
    query = self._generate_query(groups, this.maximum_group)
    self._process_query(cr, uid, ids, query, context=context)

    return self._next_screen(cr, uid, this, context)
def automatic_process_cb(self, cr, uid, ids, context=None):
    # Build the duplicate groups, then merge every group without user
    # interaction, committing after each merged group.
    assert is_integer_list(ids)
    this = self.browse(cr, uid, ids[0], context=context)
    this.start_process_cb()
    this.refresh()

    for line in this.line_ids:
        partner_ids = literal_eval(line.aggr_ids)
        self._merge(cr, uid, partner_ids, context=context)
        line.unlink()
        cr.commit()

    this.write({'state': 'finished'})
    return {
        'type': 'ir.actions.act_window',
        'res_model': this._name,
        'res_id': this.id,
        'view_mode': 'form',
        'target': 'new',
    }
def parent_migration_process_cb(self, cr, uid, ids, context=None):
    # Merge duplicate parent/child partner pairs (same name and email,
    # one being the other's parent), then clear the self-parenting rows
    # left behind by the merge.
    assert is_integer_list(ids)

    context = dict(context or {}, active_test=False)
    this = self.browse(cr, uid, ids[0], context=context)

    # group partners pairwise by (email, name) where one is the parent
    # of the other, keyed on the parent's id
    query = """
        SELECT
            min(p1.id),
            array_agg(DISTINCT p1.id)
        FROM
            res_partner as p1
        INNER join
            res_partner as p2
        ON
            p1.email = p2.email AND
            p1.name = p2.name AND
            (p1.parent_id = p2.id OR p1.id = p2.parent_id)
        WHERE
            p2.id IS NOT NULL
        GROUP BY
            p1.email,
            p1.name,
            CASE WHEN p1.parent_id = p2.id THEN p2.id
                ELSE p1.id
            END
        HAVING COUNT(*) >= 2
        ORDER BY
            min(p1.id)
    """

    self._process_query(cr, uid, ids, query, context=context)

    for line in this.line_ids:
        partner_ids = literal_eval(line.aggr_ids)
        self._merge(cr, uid, partner_ids, context=context)
        line.unlink()
        cr.commit()

    this.write({'state': 'finished'})

    # a partner can never be its own parent
    cr.execute("""
        UPDATE
            res_partner
        SET
            is_company = NULL,
            parent_id = NULL
        WHERE
            parent_id = id
    """)

    return {
        'type': 'ir.actions.act_window',
        'res_model': this._name,
        'res_id': this.id,
        'view_mode': 'form',
        'target': 'new',
    }
def update_all_process_cb(self, cr, uid, ids, context=None):
    """Run the full automatic de-duplication pipeline: parent/child
    migration first, then one automatic merge pass per enabled group-by
    preset, then clear is_company on partners that kept a parent.
    """
    assert is_integer_list(ids)

    # WITH RECURSIVE cycle(id, parent_id) AS (
    #     SELECT id, parent_id FROM res_partner
    #   UNION
    #     SELECT cycle.id, res_partner.parent_id
    #     FROM res_partner, cycle
    #     WHERE res_partner.id = cycle.parent_id AND
    #           cycle.id != cycle.parent_id
    # )
    # UPDATE res_partner
    # SET parent_id = NULL
    # WHERE id in (SELECT id FROM cycle WHERE id = parent_id);

    this = self.browse(cr, uid, ids[0], context=context)

    # BUG FIX: the original passed context=None here, silently dropping
    # the caller's context (e.g. an active_test flag) for the sub-process.
    self.parent_migration_process_cb(cr, uid, ids, context=context)

    list_merge = [
        {'group_by_vat': True, 'group_by_email': True, 'group_by_name': True},
        # {'group_by_name': True, 'group_by_is_company': True, 'group_by_parent_id': True},
        # {'group_by_email': True, 'group_by_is_company': True, 'group_by_parent_id': True},
        # {'group_by_name': True, 'group_by_vat': True, 'group_by_is_company': True, 'exclude_journal_item': True},
        # {'group_by_email': True, 'group_by_vat': True, 'group_by_is_company': True, 'exclude_journal_item': True},
        # {'group_by_email': True, 'group_by_is_company': True, 'exclude_contact': True, 'exclude_journal_item': True},
        # {'group_by_name': True, 'group_by_is_company': True, 'exclude_contact': True, 'exclude_journal_item': True}
    ]

    for merge_value in list_merge:
        id = self.create(cr, uid, merge_value, context=context)
        self.automatic_process_cb(cr, uid, [id], context=context)

    # a partner attached to a parent must not stay flagged as a company
    cr.execute("""
        UPDATE
            res_partner
        SET
            is_company = NULL
        WHERE
            parent_id IS NOT NULL AND
            is_company IS NOT NULL
    """)

    # cr.execute("""
    #     UPDATE
    #         res_partner as p1
    #     SET
    #         is_company = NULL,
    #         parent_id = (
    #             SELECT  p2.id
    #             FROM    res_partner as p2
    #             WHERE   p2.email = p1.email AND
    #                     p2.parent_id != p2.id
    #             LIMIT 1
    #         )
    #     WHERE
    #         p1.parent_id = p1.id
    # """)

    return self._next_screen(cr, uid, this, context)
def merge_cb(self, cr, uid, ids, context=None):
    # Merge the partners currently selected on the wizard form, then show
    # the next group (or finish immediately when nothing is selected).
    assert is_integer_list(ids)

    context = dict(context or {}, active_test=False)
    this = self.browse(cr, uid, ids[0], context=context)

    partner_ids = set(map(int, this.partner_ids))
    if not partner_ids:
        this.write({'state': 'finished'})
        return {
            'type': 'ir.actions.act_window',
            'res_model': this._name,
            'res_id': this.id,
            'view_mode': 'form',
            'target': 'new',
        }

    self._merge(cr, uid, partner_ids, this.dst_partner_id, context=context)

    if this.current_line_id:
        this.current_line_id.unlink()

    return self._next_screen(cr, uid, this, context)
def auto_set_parent_id(self, cr, uid, ids, context=None):
    """For each invoiced partner, attach the other partners sharing its
    e-mail domain suffix as children -- unless several invoiced partners
    share that domain, or the domain was already processed.
    """
    assert is_integer_list(ids)

    # select partner who have one least invoice
    partner_treated = ['@gmail.com']
    cr.execute("""  SELECT p.id, p.email
                    FROM res_partner as p
                    LEFT JOIN account_invoice as a
                    ON p.id = a.partner_id AND a.state in ('open','paid')
                    WHERE p.grade_id is NOT NULL
                    GROUP BY p.id
                    ORDER BY COUNT(a.id) DESC
                """)

    re_email = re.compile(r".*@")
    for id, email in cr.fetchall():
        # reduce the address to its '@domain' suffix
        email = re_email.sub("@", email or "")
        if not email or email in partner_treated:
            continue
        partner_treated.append(email)

        # don't update the partners if they are more of one who have invoice
        # SECURITY FIX: the original interpolated id/email straight into
        # the SQL with %; e-mail addresses are user-supplied data, so pass
        # them as query parameters instead ('%' + email keeps the same
        # LIKE '%<suffix>' semantics).
        cr.execute("""  SELECT *
                        FROM res_partner as p
                        WHERE p.id != %s AND p.email LIKE %s AND
                            EXISTS (SELECT * FROM account_invoice as a WHERE p.id = a.partner_id AND a.state in ('open','paid'))
                """, (id, '%' + email))

        if len(cr.fetchall()) > 1:
            _logger.info("%s MORE OF ONE COMPANY", email)
            continue

        # to display changed values
        cr.execute("""  SELECT id,email
                        FROM res_partner
                        WHERE parent_id != %s AND id != %s AND email LIKE %s
                """, (id, id, '%' + email))
        _logger.info("%r", cr.fetchall())

        # upgrade
        cr.execute("""  UPDATE res_partner
                        SET parent_id = %s
                        WHERE id != %s AND email LIKE %s
                """, (id, id, '%' + email))
    return False
| agpl-3.0 |
jack51706/viper | modules/verifysigs/asn1/spc.py | 22 | 3061 | #!/usr/bin/env python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: caronni@google.com (Germano Caronni)
"""Authenticode-specific ASN.1 data structures."""
from pkcs7 import DigestInfo
from pyasn1.type import char
from pyasn1.type import namedtype
from pyasn1.type import tag
from pyasn1.type import univ
import x509
class SpcAttributeTypeAndOptionalValue(univ.Sequence):
    # ASN.1 SEQUENCE: an attribute type OID plus an optional opaque value.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('type', x509.AttributeType()),
        namedtype.OptionalNamedType('value', x509.AttributeValue()))
class SpcIndirectDataContent(univ.Sequence):
    # ASN.1 SEQUENCE: the typed data being signed plus its message digest.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('data', SpcAttributeTypeAndOptionalValue()),
        namedtype.NamedType('messageDigest', DigestInfo()))
class SpcUuid(univ.OctetString):
    """A UUID carried as a raw OCTET STRING."""
    pass
class SpcSerializedObject(univ.Sequence):
    # ASN.1 SEQUENCE: a class UUID plus an opaque serialized data blob.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('classId', SpcUuid()),
        namedtype.NamedType('serializedData', univ.OctetString()))
class SpcString(univ.Choice):
    # ASN.1 CHOICE: a BMP (UTF-16) string tagged [0] or an ASCII
    # (IA5) string tagged [1], both implicitly tagged.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('unicode', char.BMPString().subtype(
            implicitTag=tag.Tag(tag.tagClassContext,
                                tag.tagFormatConstructed, 0))),
        namedtype.NamedType('ascii', char.IA5String().subtype(
            implicitTag=tag.Tag(tag.tagClassContext,
                                tag.tagFormatConstructed, 1))))
class SpcLink(univ.Choice):
    """According to Authenticode specification."""
    # ASN.1 CHOICE: a URL [0], a serialized moniker object [1], or a
    # file name [2] (the file member is explicitly tagged).
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('url', char.IA5String().subtype(
            implicitTag=tag.Tag(tag.tagClassContext,
                                tag.tagFormatConstructed, 0))),
        namedtype.NamedType('moniker', SpcSerializedObject().subtype(
            implicitTag=tag.Tag(tag.tagClassContext,
                                tag.tagFormatConstructed, 1))),
        namedtype.NamedType('file', SpcString().subtype(
            explicitTag=tag.Tag(tag.tagClassContext,
                                tag.tagFormatConstructed, 2))))
class SpcSpOpusInfo(univ.Sequence):
  """SpcSpOpusInfo ::= SEQUENCE { programName [0] OPTIONAL, moreInfo [1] OPTIONAL }.

  Both components are optional and explicitly tagged (programName is an
  SpcString -- a CHOICE -- and moreInfo an SpcLink, also a CHOICE, so
  implicit tagging is not possible).
  """
  componentType = namedtype.NamedTypes(
      namedtype.OptionalNamedType('programName', SpcString().subtype(
          explicitTag=tag.Tag(tag.tagClassContext,
                              tag.tagFormatConstructed, 0))),
      namedtype.OptionalNamedType('moreInfo', SpcLink().subtype(
          explicitTag=tag.Tag(tag.tagClassContext,
                              tag.tagFormatConstructed, 1))))
| bsd-3-clause |
qwertyjune/BethSaidaBible | venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/jpcntx.py | 1777 | 19348 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import wrap_ord
NUM_OF_CATEGORY = 6  # number of frequency categories in jp2CharContext
DONT_KNOW = -1  # sentinel returned by get_confidence() when data is too sparse
ENOUGH_REL_THRESHOLD = 100  # sequences needed before got_enough_data() is True
MAX_REL_THRESHOLD = 1000  # stop feeding/analysing after this many sequences
MINIMUM_DATA_THRESHOLD = 4  # minimum sequences for a real confidence value
# This is hiragana 2-char sequence table, the number in each cell represents its frequency category
jp2CharContext = (
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
class JapaneseContextAnalysis:
    """Accumulate 2-character hiragana sequence statistics for Japanese
    encoding detection.

    Subclasses implement get_order() for one concrete encoding, mapping the
    leading character of a byte buffer to its hiragana order (an index into
    jp2CharContext) and its byte length.
    """
    def __init__(self):
        self.reset()
    def reset(self):
        """Reset all accumulated state so the analyser can be reused."""
        self._mTotalRel = 0  # total sequences received
        # category counters, each integer counts sequences in its category
        self._mRelSample = [0] * NUM_OF_CATEGORY
        # if last byte in current buffer is not the last byte of a character,
        # we need to know how many bytes to skip in next buffer
        self._mNeedToSkipCharNum = 0
        self._mLastCharOrder = -1  # The order of previous char
        # If this flag is set to True, detection is done and conclusion has
        # been made
        self._mDone = False
    def feed(self, aBuf, aLen):
        """Consume aLen bytes of aBuf, updating the sequence statistics."""
        if self._mDone:
            return
        # The buffer we got is byte oriented, and a character may span more
        # than one buffer. In case the last one or two bytes of the previous
        # buffer were not a complete character, we recorded how many bytes
        # are needed to complete it and skip those bytes here. We could
        # record those bytes and analyse the character once complete, but
        # since one character makes little difference, simply skipping it
        # simplifies our logic and improves performance.
        i = self._mNeedToSkipCharNum
        # Bug fix: the skip count is now consumed -- clear it so a later
        # buffer that starts on a character boundary is not mis-aligned
        # (the original left the stale value in place).
        self._mNeedToSkipCharNum = 0
        while i < aLen:
            order, charLen = self.get_order(aBuf[i:i + 2])
            i += charLen
            if i > aLen:
                # Character straddles the buffer boundary: remember how much
                # of it to skip next time, and forget the previous character.
                self._mNeedToSkipCharNum = i - aLen
                self._mLastCharOrder = -1
            else:
                if (order != -1) and (self._mLastCharOrder != -1):
                    self._mTotalRel += 1
                    if self._mTotalRel > MAX_REL_THRESHOLD:
                        self._mDone = True
                        break
                    self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
                self._mLastCharOrder = order
    def got_enough_data(self):
        """Return True once enough sequences were seen to trust the score."""
        return self._mTotalRel > ENOUGH_REL_THRESHOLD
    def get_confidence(self):
        # This is just one way to calculate confidence. It works well for me.
        if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
            # Bug fix: force true division. Without the float() the original
            # expression used integer division on Python 2 (this module has
            # no "from __future__ import division"), truncating the
            # confidence to 0 or 1. On Python 3 this is a no-op.
            return (self._mTotalRel - self._mRelSample[0]) / float(self._mTotalRel)
        else:
            return DONT_KNOW
    def get_order(self, aBuf):
        """Default implementation: no character recognised, advance 1 byte."""
        return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
    """Context analysis for Shift_JIS (refined to CP932 when its
    Microsoft-extension lead bytes are observed)."""
    def __init__(self):
        # Bug fix: initialise the base-class counters/flags. The original
        # omitted this call and relied on callers invoking reset() before
        # the first feed(); constructing and feeding directly raised
        # AttributeError on _mDone.
        JapaneseContextAnalysis.__init__(self)
        self.charset_name = "SHIFT_JIS"
    def get_charset_name(self):
        """Return the charset name, possibly upgraded to CP932 by get_order()."""
        return self.charset_name
    def get_order(self, aBuf):
        """Return (hiragana order, byte length) for the char at the start of aBuf."""
        if not aBuf:
            return -1, 1
        # find out current char's byte length
        first_char = wrap_ord(aBuf[0])
        if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
            charLen = 2
            if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
                # These lead bytes are only valid in Microsoft's CP932.
                self.charset_name = "CP932"
        else:
            charLen = 1
        # return its order if it is hiragana
        if len(aBuf) > 1:
            second_char = wrap_ord(aBuf[1])
            # NOTE(review): 202 (0xCA) looks suspicious -- the hiragana lead
            # byte in Shift_JIS is 0x82. Left unchanged because the
            # jp2CharContext statistics were presumably tuned against this
            # code; confirm against upstream chardet before altering.
            if (first_char == 202) and (0x9F <= second_char <= 0xF1):
                return second_char - 0x9F, charLen
        return -1, charLen
class EUCJPContextAnalysis(JapaneseContextAnalysis):
    """Context analysis for EUC-JP byte sequences."""
    def get_order(self, aBuf):
        """Return (hiragana order, byte length) for the char at the start of aBuf."""
        if not aBuf:
            return -1, 1
        lead = wrap_ord(aBuf[0])
        # Determine how many bytes the current character occupies.
        if lead == 0x8F:
            # JIS X 0212 (three-byte) prefix.
            charLen = 3
        elif lead == 0x8E or 0xA1 <= lead <= 0xFE:
            charLen = 2
        else:
            charLen = 1
        # Hiragana occupies row 0xA4; map the trail byte to its table order.
        if len(aBuf) > 1 and lead == 0xA4:
            trail = wrap_ord(aBuf[1])
            if 0xA1 <= trail <= 0xF3:
                return trail - 0xA1, charLen
        return -1, charLen
# flake8: noqa
| gpl-3.0 |
cuboxi/android_external_chromium_org | tools/perf/measurements/endure.py | 23 | 6579 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import optparse
import re
import time
from metrics import v8_object_stats
from telemetry.page import page_measurement
# Names of the V8 counters summed for each memory metric sampled by Endure
# (see Endure._SampleStats / V8StatsSum below).
_V8_BYTES_COMMITTED = [
    'V8.MemoryNewSpaceBytesCommitted',
    'V8.MemoryOldPointerSpaceBytesCommitted',
    'V8.MemoryOldDataSpaceBytesCommitted',
    'V8.MemoryCodeSpaceBytesCommitted',
    'V8.MemoryMapSpaceBytesCommitted',
    'V8.MemoryCellSpaceBytesCommitted',
    'V8.MemoryPropertyCellSpaceBytesCommitted',
    'V8.MemoryLoSpaceBytesCommitted'
]
# Bytes actually used within the committed V8 spaces.
_V8_BYTES_USED = [
    'V8.MemoryNewSpaceBytesUsed',
    'V8.MemoryOldPointerSpaceBytesUsed',
    'V8.MemoryOldDataSpaceBytesUsed',
    'V8.MemoryCodeSpaceBytesUsed',
    'V8.MemoryMapSpaceBytesUsed',
    'V8.MemoryCellSpaceBytesUsed',
    'V8.MemoryPropertyCellSpaceBytesUsed',
    'V8.MemoryLoSpaceBytesUsed'
]
# OS-level memory allocated by V8.
_V8_MEMORY_ALLOCATED = [
    'V8.OsMemoryAllocated'
]
class Endure(page_measurement.PageMeasurement):
  """Endurance measurement: repeatedly runs a page and periodically samples
  DOM, browser-memory and V8 statistics so growth (leaks) shows up over time.

  The sampling interval comes from --perf-stats-interval, given either in
  seconds (e.g. '20s') or in page iterations (e.g. '10').
  """
  def __init__(self):
    super(Endure, self).__init__('endure')
    # Browser object, saved so that memory stats can be gotten later.
    self._browser = None
    # Timestamp for the time when the test starts.
    self._start_time = None
    # Timestamp of the last statistics sample.
    self._last_sample_time = 0
    # Number of page repetitions that have currently been done.
    self._iterations = 0
    # Number of page repetitions at the point of the last statistics sample.
    self._last_sample_iterations = 0
    # One of these variables will be set when the perf stats interval option
    # is parsed, and the other shall remain as None.
    self._interval_seconds = None
    self._interval_iterations = None
  def AddCommandLineOptions(self, parser):
    """Declare the --perf-stats-interval option for this measurement."""
    # TODO(tdu): When ProcessCommandLine is added to replace this method,
    # move the logic in _ParseIntervalOption there to ProcessCommandLine.
    group = optparse.OptionGroup(parser, 'Endure options')
    group.add_option('--perf-stats-interval',
                     dest='perf_stats_interval',
                     default='20s',
                     type='string',
                     help='Interval between sampling of statistics, either in '
                          'seconds (specified by appending \'s\') or in number '
                          'of iterations')
    parser.add_option_group(group)
  def DidStartBrowser(self, browser):
    """Remember the Browser so memory_stats can be queried when sampling."""
    # Save the Browser object so that memory_stats can be gotten later.
    self._browser = browser
  def CustomizeBrowserOptions(self, options):
    """Enable V8 object-statistics collection in the browser under test."""
    v8_object_stats.V8ObjectStatsMetric.CustomizeBrowserOptions(options)
  def CanRunForPage(self, page):
    """Only pages that declare an 'endure' attribute are measured."""
    return hasattr(page, 'endure')
  def WillRunPageRepeats(self, page):
    """Set-up before starting a new page."""
    # Reset the starting time for each new page.
    self._start_time = time.time()
    # Prefix the page name so it can be picked up by the buildbot script that
    # parses Endure output.
    if page.name and not page.display_name.startswith('endure_'):
      page.name = 'endure_' + page.name
  def MeasurePage(self, page, tab, results):
    """Sample perf information if enough seconds or iterations have passed."""
    # Parse the interval option, setting either seconds or iterations.
    # This is done here because self.options is not set when any of the above
    # methods are run.
    self._ParseIntervalOption()
    # Check whether the sample interval is specified in seconds or iterations,
    # and take a sample if it's time.
    self._iterations += 1
    if self._interval_seconds:
      now = time.time()
      seconds_elapsed = int(round(now - self._last_sample_time))
      # Note: the time since last sample must be at least as many seconds
      # as specified; it will usually be more, it will never be less.
      if seconds_elapsed >= self._interval_seconds:
        total_seconds = int(round(now - self._start_time))
        self._SampleStats(tab, results, seconds=total_seconds)
        self._last_sample_time = now
    else:
      iterations_elapsed = self._iterations - self._last_sample_iterations
      if iterations_elapsed >= self._interval_iterations:
        self._SampleStats(tab, results, iterations=self._iterations)
        self._last_sample_iterations = self._iterations
  def _ParseIntervalOption(self):
    """Parse the perf stats interval option that was passed in."""
    # Idempotent: once one of the two interval variables is set, do nothing.
    if self._interval_seconds or self._interval_iterations:
      return
    interval = self.options.perf_stats_interval
    # A trailing 's'/'S' means seconds; a bare number means iterations.
    match = re.match('([0-9]+)([sS]?)$', interval)
    assert match, ('Invalid value for --perf-stats-interval: %s' % interval)
    if match.group(2):
      self._interval_seconds = int(match.group(1))
    else:
      self._interval_iterations = int(match.group(1))
    assert self._interval_seconds or self._interval_iterations
  def _SampleStats(self, tab, results, seconds=None, iterations=None):
    """Record memory information and add it to the results."""
    def AddPoint(trace_name, units_y, value_y):
      """Add one data point to the results object."""
      # Each metric is emitted as an (X, Y) pair of traces so the buildbot
      # can plot value against elapsed seconds or iterations.
      if seconds:
        results.Add(trace_name + '_X', 'seconds', seconds)
      else:
        assert iterations, 'Neither seconds nor iterations given.'
        results.Add(trace_name + '_X', 'iterations', iterations)
      results.Add(trace_name + '_Y', units_y, value_y)
    # DOM nodes and event listeners
    dom_stats = tab.dom_stats
    dom_node_count = dom_stats['node_count']
    event_listener_count = dom_stats['event_listener_count']
    AddPoint('dom_nodes', 'count', dom_node_count)
    AddPoint('event_listeners', 'count', event_listener_count)
    # Browser and renderer virtual memory stats
    memory_stats = self._browser.memory_stats
    def BrowserVMStats(statistic_name):
      """Get VM stats from the Browser object in KB."""
      return memory_stats[statistic_name].get('VM', 0) / 1024.0
    AddPoint('browser_vm', 'KB', BrowserVMStats('Browser'))
    AddPoint('renderer_vm', 'KB', BrowserVMStats('Renderer'))
    AddPoint('gpu_vm', 'KB', BrowserVMStats('Gpu'))
    # V8 stats
    def V8StatsSum(counters):
      """Given a list of V8 counter names, get the sum of the values in KB."""
      stats = v8_object_stats.V8ObjectStatsMetric.GetV8StatsTable(tab, counters)
      return sum(stats.values()) / 1024.0
    AddPoint('v8_memory_committed', 'KB', V8StatsSum(_V8_BYTES_COMMITTED))
    AddPoint('v8_memory_used', 'KB', V8StatsSum(_V8_BYTES_USED))
    AddPoint('v8_memory_allocated', 'KB', V8StatsSum(_V8_MEMORY_ALLOCATED))
| bsd-3-clause |
varunagrawal/azure-services | varunagrawal/VarunWeb/env/Lib/site-packages/django/contrib/auth/tests/urls.py | 105 | 4961 | from django.conf.urls import patterns, url
from django.contrib.auth import context_processors
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.auth.urls import urlpatterns
from django.contrib.auth.views import password_reset, login
from django.contrib.auth.decorators import login_required
from django.contrib.messages.api import info
from django.http import HttpResponse, HttpRequest
from django.shortcuts import render_to_response
from django.template import Template, RequestContext
from django.views.decorators.cache import never_cache
class CustomRequestAuthenticationForm(AuthenticationForm):
    # Test form: asserts that the login view forwards the HttpRequest into
    # the authentication form's constructor.
    def __init__(self, request, *args, **kwargs):
        assert isinstance(request, HttpRequest)
        super(CustomRequestAuthenticationForm, self).__init__(request, *args, **kwargs)
@never_cache
def remote_user_auth_view(request):
    "Dummy view for remote user tests"
    # Renders the current user's name so tests can assert on who is
    # authenticated via the RemoteUser backend.
    t = Template("Username is {{ user }}.")
    c = RequestContext(request, {})
    return HttpResponse(t.render(c))
def auth_processor_no_attr_access(request):
    # r1 is deliberately unused: rendering a template that does NOT touch the
    # user is the side effect under test (it must not touch the session).
    r1 = render_to_response('context_processors/auth_attrs_no_access.html',
        RequestContext(request, {}, processors=[context_processors.auth]))
    # *After* rendering, we check whether the session was accessed
    return render_to_response('context_processors/auth_attrs_test_access.html',
        {'session_accessed':request.session.accessed})
def auth_processor_attr_access(request):
    # Counterpart to auth_processor_no_attr_access: this template DOES access
    # user attributes, so the session should be marked as accessed.
    r1 = render_to_response('context_processors/auth_attrs_access.html',
        RequestContext(request, {}, processors=[context_processors.auth]))
    return render_to_response('context_processors/auth_attrs_test_access.html',
        {'session_accessed':request.session.accessed})
def auth_processor_user(request):
    # Renders {{ user }} supplied by the auth context processor.
    return render_to_response('context_processors/auth_attrs_user.html',
        RequestContext(request, {}, processors=[context_processors.auth]))
def auth_processor_perms(request):
    # Renders {{ perms }} supplied by the auth context processor.
    return render_to_response('context_processors/auth_attrs_perms.html',
        RequestContext(request, {}, processors=[context_processors.auth]))
def auth_processor_perm_in_perms(request):
    # Exercises "perm in perms" template lookups from the auth processor.
    return render_to_response('context_processors/auth_attrs_perm_in_perms.html',
        RequestContext(request, {}, processors=[context_processors.auth]))
def auth_processor_messages(request):
    # Queues one message, then renders a template that displays messages.
    info(request, "Message 1")
    return render_to_response('context_processors/auth_attrs_messages.html',
        RequestContext(request, {}, processors=[context_processors.auth]))
def userpage(request):
    # Intentionally empty stub: exists only as the named URL target
    # "userpage" (presumably for reverse() in tests -- never rendered).
    pass
def custom_request_auth_login(request):
    # Login view wired to the request-asserting CustomRequestAuthenticationForm.
    return login(request, authentication_form=CustomRequestAuthenticationForm)
# special urls for auth test cases
urlpatterns = urlpatterns + patterns('',
    # logout variants: custom redirect-field name, literal and named next_page
    (r'^logout/custom_query/$', 'django.contrib.auth.views.logout', dict(redirect_field_name='follow')),
    (r'^logout/next_page/$', 'django.contrib.auth.views.logout', dict(next_page='/somewhere/')),
    (r'^logout/next_page/named/$', 'django.contrib.auth.views.logout', dict(next_page='password_reset')),
    (r'^remote_user/$', remote_user_auth_view),
    # password-reset variants: custom sender address and redirects
    (r'^password_reset_from_email/$', 'django.contrib.auth.views.password_reset', dict(from_email='staffmember@example.com')),
    (r'^password_reset/custom_redirect/$', 'django.contrib.auth.views.password_reset', dict(post_reset_redirect='/custom/')),
    (r'^password_reset/custom_redirect/named/$', 'django.contrib.auth.views.password_reset', dict(post_reset_redirect='password_reset')),
    # reset-confirm with literal and named post-reset redirects
    (r'^reset/custom/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
        'django.contrib.auth.views.password_reset_confirm',
        dict(post_reset_redirect='/custom/')),
    (r'^reset/custom/named/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
        'django.contrib.auth.views.password_reset_confirm',
        dict(post_reset_redirect='password_reset')),
    # password-change with literal and named redirects
    (r'^password_change/custom/$', 'django.contrib.auth.views.password_change', dict(post_change_redirect='/custom/')),
    (r'^password_change/custom/named/$', 'django.contrib.auth.views.password_change', dict(post_change_redirect='password_reset')),
    (r'^admin_password_reset/$', 'django.contrib.auth.views.password_reset', dict(is_admin_site=True)),
    # login_required decorator behaviour (default and custom login_url)
    (r'^login_required/$', login_required(password_reset)),
    (r'^login_required_login_url/$', login_required(password_reset, login_url='/somewhere/')),
    # context-processor test views defined above
    (r'^auth_processor_no_attr_access/$', auth_processor_no_attr_access),
    (r'^auth_processor_attr_access/$', auth_processor_attr_access),
    (r'^auth_processor_user/$', auth_processor_user),
    (r'^auth_processor_perms/$', auth_processor_perms),
    (r'^auth_processor_perm_in_perms/$', auth_processor_perm_in_perms),
    (r'^auth_processor_messages/$', auth_processor_messages),
    (r'^custom_request_auth_login/$', custom_request_auth_login),
    url(r'^userpage/(.+)/$', userpage, name="userpage"),
)
| gpl-2.0 |
KDB2/veusz | veusz/plugins/importplugin.py | 3 | 25778 | # Copyright (C) 2010 Jeremy S. Sanders
# Email: Jeremy Sanders <jeremy@jeremysanders.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
##############################################################################
"""Import plugin base class and helpers."""
from __future__ import division
import os.path
import numpy as N
from ..compat import crange, cstr, cstrerror
from .. import utils
from .. import qtall as qt4
from . import field
from . import datasetplugin
def _(text, disambiguation=None, context='ImportPlugin'):
    """Return *text* translated by Qt within the given translation *context*."""
    translate = qt4.QCoreApplication.translate
    return translate(context, text, disambiguation)
# Global registry of import plugins: add an *instance* of your plugin class
# to this list to get it registered with the import dialog.
importpluginregistry = []
class ImportPluginParams(object):
    """Bundle of arguments handed to an import plugin invocation.

    Attributes:
        filename: name of the file to import from
        encoding: text encoding to use when reading it
        field_results: values the user entered for the plugin's fields
    """
    def __init__(self, filename, encoding, field_results):
        """Record the source filename, text encoding and field values."""
        (self.filename,
         self.encoding,
         self.field_results) = filename, encoding, field_results
    def openFileWithEncoding(self):
        """Open the input file, honouring the requested encoding."""
        return utils.openEncoding(self.filename, self.encoding)
# NOTE(review): presumably raised by plugins and caught by the import dialog
# to display the message to the user -- verify against callers.
class ImportPluginException(RuntimeError):
    """An exception to return errors about importing or previewing data."""
class ImportPlugin(object):
    """Base class for plugins reading data in a particular format.

    Subclasses override doImport, and optionally getPreview, then are
    registered by adding them to the importpluginregistry list.  If
    promote_tab is set to some text, the plugin appears on its own tab
    in the import dialog, using that text as the tab name.
    """

    # descriptive metadata shown in the import dialog
    name = 'Import plugin'
    author = ''
    description = ''

    # if set to some text, show this plugin on its own tab
    promote_tab = None

    # extensions (including the dot) which give this plugin focus when
    # a matching file is selected
    file_extensions = set()

    def __init__(self):
        """Subclasses declare their input Field objects here."""
        # list of Field objects to display in the dialog
        self.fields = []

    def getPreview(self, params):
        """Return (text, okaytoimport) to preview in the dialog.

        params is an ImportPluginParams object.
        """
        fileobj = params.openFileWithEncoding()
        return fileobj.read(4096), True

    def doImport(self, params):
        """Import the data; subclasses override this.

        params is an ImportPluginParams object.  Returns a list of
        datasetplugin.Dataset1D / datasetplugin.Dataset2D objects.
        """
        return []
#################################################################
class ImportPluginExample(ImportPlugin):
    """An example plugin for reading a set of unformatted numbers
    from a file."""

    name = "Example plugin"
    author = "Jeremy Sanders"
    description = _("Reads a list of numbers in a text file")

    def __init__(self):
        # input fields presented to the user in the import dialog
        self.fields = [
            field.FieldText("name", descr=_("Dataset name"), default="name"),
            field.FieldBool("invert", descr=_("invert values")),
            field.FieldFloat("mult", descr=_("Multiplication factor"), default=1),
            field.FieldInt("skip", descr=_("Skip N lines"),
                default=0, minval=0),
            field.FieldCombo("subtract", items=("0", "1", "2"),
                editable=False, default="0")
        ]

    def doImport(self, params):
        """Actually import data
        params is a ImportPluginParams object.
        Return a list of datasetplugin.Dataset1D, datasetplugin.Dataset2D objects
        """
        try:
            f = params.openFileWithEncoding()
            data = []
            mult = params.field_results["mult"]
            sub = float(params.field_results["subtract"])
            # inversion is implemented by negating the multiplier
            if params.field_results["invert"]:
                mult *= -1
            # skip the requested number of leading lines
            for i in crange(params.field_results["skip"]):
                f.readline()
            # every whitespace-separated token becomes one data value
            for line in f:
                data += [float(x)*mult-sub for x in line.split()]
            return [datasetplugin.Dataset1D(params.field_results["name"], data),
                    datasetplugin.Constant("testconst", "42"),
                    datasetplugin.Function("testfunc(x)", "testconst*x**2")]
        except Exception as e:
            # surface any failure to the import dialog as an import error
            raise ImportPluginException(cstr(e))
class ImportPluginDateTime(ImportPlugin):
    """An example plugin for reading a set of iso date-times from a
    file."""

    name = "Example plugin for date/times"
    author = "Jeremy Sanders"
    description = _("Reads a list of ISO date times in a text file")

    def __init__(self):
        # single field: the name for the resulting dataset
        self.fields = [
            field.FieldText("name", descr=_("Dataset name"), default="name"),
        ]

    def doImport(self, params):
        """Actually import data
        params is a ImportPluginParams object.
        Return a list of datasetplugin.Dataset1D, datasetplugin.Dataset2D objects
        """
        f = params.openFileWithEncoding()
        data = []
        # one ISO date-time per line, converted to veusz's float form
        for line in f:
            data.append( datasetplugin.DatasetDateTime.
                dateStringToFloat(line.strip()) )
        return [ datasetplugin.DatasetDateTime(params.field_results["name"],
            data) ]
#importpluginregistry.append( ImportPluginDateTime )
class QdpFile(object):
    """Handle reading of a Qdp file.

    The file is parsed command-by-command via importFile; the results
    accumulate in self.retndata after pushData2D/pushData are called.
    """

    def __init__(self, colnames):
        # error mode ('serr'/'terr') for each 1-based column, set by
        # "read" commands
        self.colmodes = {}
        # how "no" rows split datasets: 'none', 'off', 'single', 'double'
        self.skipmode = 'none'
        # finished dataset objects to return to the plugin
        self.retndata = []
        # store read in data here
        self.data = []
        # index of max vector
        self.dataindex = 1
        # user-supplied names for output vectors
        self.colnames = colnames
        # list of data groups for 2d objects
        self.datagroup2d = []
        # axis ranges for 2d objects
        self.axis2d = [None, None]

    def handleRead(self, p):
        """Handle read command."""
        # first letter selects terr (two-sided) or serr (symmetric) errors
        try:
            mode = {'t': 'terr', 's': 'serr'}[p[1][:1]]
        except (IndexError, KeyError):
            raise ImportPluginException(_("read command takes terr/serr"))
        try:
            cols = [int(x) for x in p[2:]]
        except ValueError:
            raise ImportPluginException(_("read command takes list of columns separated by spaces"))
        for c in cols:
            self.colmodes[c] = mode

    def handleSkip(self, p):
        """Handle skip command."""
        # first letter selects off/single/double skip behaviour
        try:
            self.skipmode = {'o': 'off', 's': 'single', 'd': 'double'}[p[1][:1]]
        except (IndexError, KeyError):
            raise ImportPluginException(_("skip command takes single/double/off"))

    def handleNO(self, p, lastp):
        """Handle no command, meaning no data.

        lastp is the previous parsed line, used to detect two
        consecutive "no" lines in 'double' skip mode.
        """
        if self.skipmode == 'none':
            # keep datasets aligned by inserting NaN placeholders
            self.addNans( len(p) )
        elif self.skipmode == 'single':
            # a "no" line ends the current dataset and starts a new one
            self.pushData()
            del self.data[:]
            self.dataindex += 1
        elif self.skipmode == 'double':
            if lastp[0] == 'no':
                # second consecutive "no" line: split the dataset
                self.pushData()
                del self.data[:]
                self.dataindex += 1
            else:
                self.addNans( len(p) )

    def addNans(self, num):
        """Add a blank set of data to output.

        num is the number of raw columns on the input line; columns
        with serr/terr modes consume two or three raw columns each.
        """
        col = 0
        ds = 0
        while col < num or ds < len(self.data):
            if ds >= len(self.data):
                self.data.append([])
            m = self.colmodes.get(ds+1)
            if m == 'serr':
                self.data[ds].append( (N.nan, N.nan) )
                col += 2
            elif m == 'terr':
                self.data[ds].append( (N.nan, N.nan, N.nan) )
                col += 3
            else:
                self.data[ds].append( N.nan )
                col += 1
            ds += 1

    def pushData2D(self):
        """Handle 2D data groups.

        Each datagroup2d entry is (num, r1, c1, r2, c2): dataset number
        plus 1-based row/column bounds of the 2d region.
        """
        for num, r1, c1, r2, c2 in self.datagroup2d:
            arr = []
            for c in crange(c1-1,c2-1+1):
                arr.append( self.data[c][r1-1:r2-1+1] )
                # make data as "used"
                self.data[c] = None
            arr = N.array(arr)
            # pick a user-supplied name if available, else synthesize one
            if num-1 < len(self.colnames):
                name = self.colnames[num-1]
            else:
                name = 'vec2d%i' % num
            rangex = rangey = None
            # NOTE(review): axis2d entries are created by handleAxis as
            # (minval, maxval) but unpacked here as (minval, pixsize) --
            # confirm which interpretation is intended.
            if self.axis2d[0] is not None:
                minval, pixsize = self.axis2d[0]
                rangex = (minval - pixsize*0.5,
                    minval+(arr.shape[1]-0.5)*pixsize )
            if self.axis2d[1] is not None:
                minval, pixsize = self.axis2d[1]
                rangey = (minval - pixsize*0.5,
                    minval+(arr.shape[0]-0.5)*pixsize )
            ds = datasetplugin.Dataset2D(name, data=arr,
                rangex=rangex, rangey=rangey)
            self.retndata.append(ds)

    def pushData(self):
        """Add data to output array.
        """
        for i in crange(len(self.data)):
            # columns consumed by pushData2D are set to None
            if self.data[i] is None:
                continue
            # get dataset name
            if i < len(self.colnames):
                name = self.colnames[i]
            else:
                name = 'vec%i' % (i+1)
            # in skip modes, append the dataset index to keep names unique
            if self.skipmode == 'single' or self.skipmode == 'double':
                name = name + '_' + str(self.dataindex)
            # convert data
            a = N.array(self.data[i])
            if len(a.shape) == 1:
                # no error bars
                ds = datasetplugin.Dataset1D(name, data=a)
            elif a.shape[1] == 2:
                # serr
                ds = datasetplugin.Dataset1D(name, data=a[:,0], serr=a[:,1])
            elif a.shape[1] == 3:
                # perr/nerr: larger of the two errors is positive,
                # smaller is negative
                p = N.where(a[:,1] < a[:,2], a[:,2], a[:,1])
                n = N.where(a[:,1] < a[:,2], a[:,1], a[:,2])
                ds = datasetplugin.Dataset1D(name, data=a[:,0], perr=p, nerr=n)
            else:
                raise RuntimeError
            self.retndata.append(ds)

    def handleDataGroup(self, p):
        """Handle data groups."""
        if len(p) == 3:
            # we don't support the renaming thing
            pass
        elif len(p) == 6:
            # 2d data
            try:
                pint = [int(x) for x in p[1:]]
            except ValueError:
                raise ImportPluginException(_("invalid 2d datagroup command"))
            self.datagroup2d.append(pint)

    def handleAxis(self, p):
        """Axis command gives range of axes (used for 2d)."""
        try:
            minval, maxval = float(p[2]), float(p[3])
        except ValueError:
            raise ImportPluginException(_("invalid axis range"))
        # index 1 for a y-axis command, 0 otherwise; see NOTE in
        # pushData2D about how these values are consumed
        self.axis2d[ p[0][0] == 'y' ] = (minval, maxval)

    def handleNum(self, p):
        """Handle set of numbers.

        Raw columns are grouped into datasets according to colmodes:
        serr columns take (value, err) pairs, terr columns take
        (value, perr, nerr) triples.
        """
        nums = []
        try:
            for n in p:
                # "no" embedded in a numeric row means missing value
                if n.lower() == 'no':
                    nums.append(N.nan)
                else:
                    nums.append(float(n))
        except ValueError:
            raise ImportPluginException(_("Cannot convert '%s' to numbers") %
                (' '.join(p)))
        col = 0
        ds = 0
        while col < len(nums):
            if ds >= len(self.data):
                self.data.append([])
            m = self.colmodes.get(ds+1)
            if m == 'serr':
                self.data[ds].append( (nums[col], nums[col+1]) )
                col += 2
            elif m == 'terr':
                self.data[ds].append( (nums[col], nums[col+1], nums[col+2]) )
                col += 3
            else:
                self.data[ds].append( nums[col] )
                col += 1
            ds += 1

    def importFile(self, fileobj, dirname):
        """Read data from file object.
        dirname is the directory in which the file is located
        """
        # contline holds a partially-read continuation line, if any
        contline = None
        lastp = []
        for line in fileobj:
            # strip comments
            if line.find("!") >= 0:
                line = line[:line.find("!")]
            if line[:1] == '@':
                # read another file
                fname = os.path.join(dirname, line[1:].strip())
                try:
                    newf = open(fname)
                    self.importFile(newf, dirname)
                except EnvironmentError:
                    # silently skip unreadable included files
                    pass
                continue
            p = [x.lower() for x in line.split()]
            if contline:
                # add on previous continuation if existed
                p = contline + p
                contline = None
            if len(p) > 0 and p[-1][-1] == '-':
                # continuation
                p[-1] = p[-1][:-1]
                contline = p
                continue
            if len(p) == 0:
                # nothing
                continue
            v0 = p[0]
            # dispatch on the first token of the command
            if v0[0] in '0123456789-.':
                self.handleNum(p)
            elif v0 == 'no':
                self.handleNO(p, lastp)
            elif v0 == 'read':
                self.handleRead(p)
            elif v0[:2] == 'sk':
                self.handleSkip(p)
            elif v0[:2] == 'dg':
                self.handleDataGroup(p)
            elif v0[:1] == 'x' or v0[:2] == 'ya':
                self.handleAxis(p)
            else:
                # skip everything else (for now)
                pass
            lastp = p
class ImportPluginQdp(ImportPlugin):
    """An example plugin for reading data from QDP files."""

    name = "QDP import"
    author = "Jeremy Sanders"
    description = _("Reads datasets from QDP files")
    file_extensions = set(['.qdp'])

    def __init__(self):
        # user may supply names for the imported vectors
        self.fields = [
            field.FieldTextMulti("names", descr=_("Vector name list "),
                default=['']),
        ]

    def doImport(self, params):
        """Actually import data
        params is a ImportPluginParams object.
        Return a list of datasetplugin.Dataset1D, datasetplugin.Dataset2D objects
        """
        # drop blank entries from the supplied name list
        names = [x.strip() for x in params.field_results["names"]
                 if x.strip()]
        f = params.openFileWithEncoding()
        rqdp = QdpFile(names)
        rqdp.importFile(f, os.path.dirname(params.filename))
        # 2d data groups consume their columns, so push them first
        rqdp.pushData2D()
        rqdp.pushData()
        f.close()
        return rqdp.retndata
def cnvtImportNumpyArray(name, val, errorsin2d=True):
    """Convert a numpy array to plugin returns.

    name: name for the resulting dataset(s).
    val: numpy array loaded from the file.
    errorsin2d: if True, interpret 2- and 3-column 2D arrays as a 1D
        dataset with symmetric or asymmetric error bars.

    Raises ImportPluginException for non-array input, non-numeric
    contents or unsupported dimensionality.
    """
    # anything without a shape attribute cannot be a numpy array
    try:
        val.shape
    except AttributeError:
        raise ImportPluginException(_("Not the correct format file"))

    # verify the contents are numeric, then promote to float64
    try:
        val + 0.
        val = val.astype(N.float64)
    except TypeError:
        raise ImportPluginException(_("Unsupported array type"))

    ndim = val.ndim
    if ndim == 1:
        return datasetplugin.Dataset1D(name, val)

    if ndim == 2:
        ncols = val.shape[1]
        if not (errorsin2d and ncols in (2, 3)):
            # plain 2d dataset
            return datasetplugin.Dataset2D(name, val)
        if ncols == 2:
            # two columns: value plus symmetric error
            return datasetplugin.Dataset1D(name, val[:,0], serr=val[:,1])
        # three columns: value plus asymmetric errors
        # unclear on ordering here...
        return datasetplugin.Dataset1D(name, val[:,0], perr=val[:,1],
            nerr=val[:,2])

    raise ImportPluginException(_("Unsupported dataset shape"))
class ImportPluginNpy(ImportPlugin):
    """For reading single datasets from NPY numpy saved files."""

    name = "Numpy NPY import"
    author = "Jeremy Sanders"
    description = _("Reads a 1D/2D numeric dataset from a Numpy NPY file")
    file_extensions = set(['.npy'])

    def __init__(self):
        self.fields = [
            field.FieldText("name", descr=_("Dataset name"),
                default=''),
            field.FieldBool("errorsin2d",
                descr=_("Treat 2 and 3 column 2D arrays as\n"
                    "data with error bars"),
                default=True),
        ]

    def getPreview(self, params):
        """Get data to show in a text box to show a preview.
        params is a ImportPluginParams object.
        Returns (text, okaytoimport)
        """
        try:
            retn = N.load(params.filename)
        except Exception:
            return _("Cannot read file"), False
        # objects without shape/dtype are not numpy arrays
        try:
            text = _('Array shape: %s\n') % str(retn.shape)
            text += _('Array datatype: %s (%s)\n') % (retn.dtype.str,
                str(retn.dtype))
            text += str(retn)
            return text, True
        except AttributeError:
            return _("Not an NPY file"), False

    def doImport(self, params):
        """Actually import data.
        """
        name = params.field_results["name"].strip()
        if not name:
            raise ImportPluginException(_("Please provide a name for the dataset"))
        try:
            retn = N.load(params.filename)
        except Exception as e:
            raise ImportPluginException(_("Error while reading file: %s") %
                cstr(e))
        # conversion handles 1d/2d and optional error-bar interpretation
        return [ cnvtImportNumpyArray(
            name, retn, errorsin2d=params.field_results["errorsin2d"]) ]
class ImportPluginNpz(ImportPlugin):
    """For reading multiple datasets from NPZ numpy saved files."""

    name = "Numpy NPZ import"
    author = "Jeremy Sanders"
    description = _("Reads datasets from a Numpy NPZ file.")
    file_extensions = set(['.npz'])

    def __init__(self):
        self.fields = [
            field.FieldBool("errorsin2d",
                descr=_("Treat 2 and 3 column 2D arrays as\n"
                    "data with error bars"),
                default=True),
        ]

    def getPreview(self, params):
        """Get data to show in a text box to show a preview.
        params is a ImportPluginParams object.
        Returns (text, okaytoimport)
        """
        try:
            retn = N.load(params.filename)
        except Exception:
            return _("Cannot read file"), False
        # npz files should define this attribute
        try:
            retn.files
        except AttributeError:
            return _("Not an NPZ file"), False
        # summarise each contained array for the preview box
        text = []
        for f in sorted(retn.files):
            a = retn[f]
            text.append(_('Name: %s') % f)
            text.append(_(' Shape: %s') % str(a.shape))
            text.append(_(' Datatype: %s (%s)') % (a.dtype.str, str(a.dtype)))
            text.append('')
        return '\n'.join(text), True

    def doImport(self, params):
        """Actually import data.
        """
        try:
            retn = N.load(params.filename)
        except Exception as e:
            raise ImportPluginException(_("Error while reading file: %s") %
                cstr(e))
        # NPZ archives expose a .files list of member names
        try:
            retn.files
        except AttributeError:
            raise ImportPluginException(_("File is not in NPZ format"))
        # convert each of the imported arrays
        out = []
        for f in sorted(retn.files):
            out.append( cnvtImportNumpyArray(
                f, retn[f], errorsin2d=params.field_results["errorsin2d"]) )
        return out
class ImportPluginBinary(ImportPlugin):
    """For reading a single 1D dataset from a raw binary file."""

    name = "Binary import"
    author = "Jeremy Sanders"
    description = _("Reads numerical binary files.")
    file_extensions = set(['.bin'])

    def __init__(self):
        self.fields = [
            field.FieldText("name", descr=_("Dataset name"),
                default=""),
            field.FieldCombo("datatype", descr=_("Data type"),
                items = ("float32", "float64",
                    "int8", "int16", "int32", "int64",
                    "uint8", "uint16", "uint32", "uint64"),
                default="float64", editable=False),
            field.FieldCombo("endian", descr=_("Endian (byte order)"),
                items = ("little", "big"), editable=False),
            field.FieldInt("offset", descr=_("Offset (bytes)"), default=0, minval=0),
            field.FieldInt("length", descr=_("Length (values)"), default=-1)
        ]

    def getNumpyDataType(self, params):
        """Convert params to numpy datatype."""
        t = N.dtype(str(params.field_results["datatype"]))
        return t.newbyteorder( {"little": "<", "big": ">"} [
            params.field_results["endian"]] )

    def getPreview(self, params):
        """Preview of data files.

        Shows a hex/ascii dump of up to the first 64KiB of the file.
        """
        try:
            f = open(params.filename, "rb")
            data = f.read()
            f.close()
        except EnvironmentError as e:
            return _("Cannot read file (%s)") % cstrerror(e), False
        text = [_('File length: %i bytes') % len(data)]
        def filtchr(c):
            """Filtered character to ascii range."""
            if ord(c) <= 32 or ord(c) > 127:
                return '.'
            else:
                return c
        # do a hex dump (like in CP/M)
        # NOTE(review): ord(x) on bytes elements only works on
        # Python 2 str -- confirm this module targets Python 2 here.
        for i in crange(0, min(65536, len(data)), 16):
            hdr = '%04X ' % i
            subset = data[i:i+16]
            hexdata = ('%02X '*len(subset)) % tuple([ord(x) for x in subset])
            chrdata = ''.join([filtchr(c) for c in subset])
            text.append(hdr+hexdata + '  ' + chrdata)
        return '\n'.join(text), True

    def doImport(self, params):
        """Import the data."""
        name = params.field_results["name"].strip()
        if not name:
            raise ImportPluginException(_("Please provide a name for the dataset"))
        try:
            f = open(params.filename, "rb")
            # skip the user-specified byte offset
            f.seek( params.field_results["offset"] )
            retn = f.read()
            f.close()
        except EnvironmentError as e:
            raise ImportPluginException(_("Error while reading file '%s'\n\n%s") %
                (params.filename, cstrerror(e)))
        try:
            # NOTE(review): N.fromstring is deprecated in favour of
            # N.frombuffer in modern numpy
            data = N.fromstring(retn, dtype=self.getNumpyDataType(params),
                count=params.field_results["length"])
        except ValueError as e:
            raise ImportPluginException(_("Error converting data for file '%s'\n\n%s") %
                (params.filename, cstr(e)))
        # always hand back float64 data to veusz
        data = data.astype(N.float64)
        return [ datasetplugin.Dataset1D(name, data) ]
class ImportPluginGnuplot2D(ImportPlugin):
    """A Veusz plugin for reading data in Gnuplot 2D data format from a file."""

    name = "Gnuplot 2D data import plugin"
    author = "Joerg Meyer, j.meyer@chem.leidenuniv.nl"
    description = "Reads data in Gnuplot 2D format from a text file."
    file_extensions = set(['.data','.elbow'])

    def __init__(self):
        ImportPlugin.__init__(self)
        self.fields = [
            field.FieldText(
                "name", descr="Dataset name", default="name"),
            field.FieldFloat(
                "subtract", descr="Offset to subtract", default=0.0),
            field.FieldFloat(
                "mult", descr="Multiplication factor", default=1),
        ]

    def doImport(self, params):
        """Actually import data
        params is a ImportPluginParams object.
        Return a list of ImportDataset1D, ImportDataset2D objects
        """
        sub = float(params.field_results["subtract"])
        mult = params.field_results["mult"]
        f = params.openFileWithEncoding()
        data_gp = []
        data_gp_block = []
        for line in f:
            fields = line.split()
            if not fields:
                # a blank line terminates a gnuplot data block
                if data_gp_block:
                    data_gp.append( data_gp_block )
                    data_gp_block = []
                else: # ignore initial blank lines
                    continue
            elif '#' in fields[0]: # ignore comment lines
                continue
            else:
                # each data row is x, y, z; extra columns are ignored
                x,y,z = map(float, fields[0:3])
                data_gp_block.append( [x,y,(z-sub)*mult] )
        if data_gp_block: # append last block if necessary
            data_gp.append( data_gp_block )
            data_gp_block = []
        # NOTE(review): assumes every block has the same number of rows;
        # the reshape below would fail otherwise -- confirm inputs.
        data = N.array(data_gp)
        S = data.shape
        # flatten blocks, sort rows by y then x, keep only the z values
        data_for_sorting = data.reshape((S[0]*S[1],S[2]))
        ind = N.lexsort( [data_for_sorting[:,0], data_for_sorting[:,1]] )
        data_veusz = data_for_sorting[ind].reshape(S)[:,:,2]
        rangex = (data[:,:,0].min(),data[:,:,0].max())
        rangey = (data[:,:,1].min(),data[:,:,1].max())
        return [
            datasetplugin.Dataset2D(
                params.field_results["name"],
                data=data_veusz, rangex=rangex, rangey=rangey)
        ]
# Register the plugins shipped with this module.
importpluginregistry += [
    ImportPluginNpy,
    ImportPluginNpz,
    ImportPluginQdp,
    ImportPluginBinary,
    ImportPluginExample,
    ImportPluginGnuplot2D,
    ]
| gpl-2.0 |
barzan/dbseer | middleware_old/dstat_for_server/plugins/dstat_battery.py | 4 | 1607 | ### Author: Dag Wieers <dag$wieers,com>
class dstat_plugin(dstat):
    """
    Percentage of remaining battery power as reported by ACPI.
    """
    # NOTE: this module uses Python 2-only raise syntax and relies on
    # the dstat framework providing the dstat base class and dopen().
    def __init__(self):
        self.name = 'battery'
        self.type = 'p'       # percentage display
        self.width = 4
        self.scale = 34

    def check(self):
        # only usable when the kernel exposes ACPI battery info
        if not os.path.exists('/proc/acpi/battery/'):
            raise Exception, "No ACPI battery information found."

    def vars(self):
        # one variable per battery reported as present
        ret = []
        for battery in os.listdir('/proc/acpi/battery/'):
            for line in dopen('/proc/acpi/battery/'+battery+'/state').readlines():
                l = line.split()
                if len(l) < 2: continue
                if l[0] == 'present:' and l[1] == 'yes':
                    ret.append(battery)
        ret.sort()
        return ret

    def nick(self):
        return [name.lower() for name in self.vars]

    def extract(self):
        for battery in self.vars:
            # 'last full capacity' line gives the reference capacity
            # NOTE(review): if the 'last' or 'remaining' lines are absent,
            # full/current are never assigned and a NameError results --
            # confirm the proc format guarantees them.
            for line in dopen('/proc/acpi/battery/'+battery+'/info').readlines():
                l = line.split()
                if len(l) < 4: continue
                if l[0] == 'last':
                    full = int(l[3])
                    break
            # 'remaining capacity' line gives the current charge
            for line in dopen('/proc/acpi/battery/'+battery+'/state').readlines():
                l = line.split()
                if len(l) < 3: continue
                if l[0] == 'remaining':
                    current = int(l[2])
                    break
            if current:
                self.val[battery] = current * 100.0 / full
            else:
                # -1 signals an unreadable/zero reading
                self.val[battery] = -1
# vim:ts=4:sw=4:et
| apache-2.0 |
marcellodesales/svnedge-console | ext/windows/pkg-toolkit/pkg/vendor-packages/pkg/client/transport/engine.py | 4 | 31312 | #!/usr/bin/python
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
#
# Copyright 2009 Sun Microsystems, Inc. All rights reserved.
# Use is subject to license terms.
#
import errno
import httplib
import os
import pycurl
import urlparse
# Need to ignore SIGPIPE if using pycurl in NOSIGNAL mode.
try:
import signal
if hasattr(signal, "SIGPIPE"):
signal.signal(signal.SIGPIPE, signal.SIG_IGN)
except ImportError:
pass
import pkg.client.api_errors as api_errors
import pkg.client.transport.exception as tx
import pkg.client.transport.fileobj as fileobj
from collections import deque
from pkg.client import global_settings
class TransportEngine(object):
    """Abstract base class for transport engines.

    This class implements nothing itself; concrete transport-specific
    subclasses (such as CurlTransportEngine) supply the behaviour.
    """
class CurlTransportEngine(TransportEngine):
"""Concrete class of TransportEngine for libcurl transport."""
def __init__(self, transport, max_conn=10):
# Backpointer to transport object
self.__xport = transport
# Curl handles
self.__mhandle = pycurl.CurlMulti()
self.__chandles = []
self.__active_handles = 0
self.__max_handles = max_conn
# Request queue
self.__req_q = deque()
# List of failures
self.__failures = []
# Set default file buffer size at 128k, callers override
# this setting after looking at VFS block size.
self.__file_bufsz = 131072
# Header bits and pieces
self.__user_agent = None
self.__common_header = {}
# Set options on multi-handle
self.__mhandle.setopt(pycurl.M_PIPELINING, 1)
# initialize easy handles
for i in range(self.__max_handles):
eh = pycurl.Curl()
eh.url = None
eh.repourl = None
eh.fobj = None
eh.filepath = None
eh.success = False
eh.fileprog = None
eh.filetime = -1
self.__chandles.append(eh)
# copy handles into handle freelist
self.__freehandles = self.__chandles[:]
def __call_perform(self):
"""An internal method that invokes the multi-handle's
perform method."""
while 1:
ret, active_handles = self.__mhandle.perform()
if ret != pycurl.E_CALL_MULTI_PERFORM:
break
self.__active_handles = active_handles
return ret
def add_url(self, url, filepath=None, writefunc=None, header=None,
progtrack=None, sslcert=None, sslkey=None, repourl=None):
"""Add a URL to the transport engine. Caller must supply
either a filepath where the file should be downloaded,
or a callback to a function that will peform the write.
It may also optionally supply header information
in a dictionary. If the caller has a ProgressTracker,
supply the object in the progtrack argument."""
t = TransportRequest(url, filepath=filepath,
writefunc=writefunc, header=header, progtrack=progtrack,
sslcert=sslcert, sslkey=sslkey, repourl=repourl)
self.__req_q.appendleft(t)
def __cleanup_requests(self):
"""Cleanup handles that have finished their request.
Return the handles to the freelist. Generate any
relevant error information."""
count, good, bad = self.__mhandle.info_read()
failures = self.__failures
done_handles = []
ex_to_raise = None
for h, en, em in bad:
# Get statistics for each handle.
repostats = self.__xport.stats[h.repourl]
repostats.record_tx()
bytes = h.getinfo(pycurl.SIZE_DOWNLOAD)
seconds = h.getinfo(pycurl.TOTAL_TIME)
repostats.record_progress(bytes, seconds)
httpcode = h.getinfo(pycurl.RESPONSE_CODE)
url = h.url
urlstem = h.repourl
proto = urlparse.urlsplit(url)[0]
# All of these are errors
repostats.record_error()
# If we were cancelled, raise an API error.
# Otherwise fall through to transport's exception
# generation.
if en == pycurl.E_ABORTED_BY_CALLBACK:
ex = None
ex_to_raise = api_errors.CanceledException
elif en == pycurl.E_HTTP_RETURNED_ERROR:
ex = tx.TransportProtoError(proto, httpcode,
url, repourl=urlstem)
else:
ex = tx.TransportFrameworkError(en, url, em,
repourl=urlstem)
if ex and ex.retryable:
failures.append(ex)
elif ex and not ex_to_raise:
ex_to_raise = ex
done_handles.append(h)
for h in good:
# Get statistics for each handle.
repostats = self.__xport.stats[h.repourl]
repostats.record_tx()
bytes = h.getinfo(pycurl.SIZE_DOWNLOAD)
seconds = h.getinfo(pycurl.TOTAL_TIME)
h.filetime = h.getinfo(pycurl.INFO_FILETIME)
repostats.record_progress(bytes, seconds)
httpcode = h.getinfo(pycurl.RESPONSE_CODE)
url = h.url
urlstem = h.repourl
proto = urlparse.urlsplit(url)[0]
if httpcode == httplib.OK:
h.success = True
else:
ex = tx.TransportProtoError(proto,
httpcode, url, repourl=urlstem)
# If code >= 400, record this as an error.
# Handlers above the engine get to decide
# for 200/300 codes that aren't OK
if httpcode >= 400:
repostats.record_error()
# If code == 0, libcurl failed to read
# any HTTP status. Response is almost
# certainly corrupted.
elif httpcode == 0:
reason = "Invalid HTTP status code " \
"from server"
ex = tx.TransportProtoError(proto,
url=url, reason=reason,
repourl=urlstem)
ex.retryable = True
# Stash retryable failures, arrange
# to raise first fatal error after
# cleanup.
if ex.retryable:
failures.append(ex)
elif not ex_to_raise:
ex_to_raise = ex
done_handles.append(h)
# Call to remove_handle must be separate from info_read()
for h in done_handles:
self.__mhandle.remove_handle(h)
self.__teardown_handle(h)
self.__freehandles.append(h)
self.__failures = failures
if ex_to_raise:
raise ex_to_raise
def check_status(self, urllist=None):
"""Return information about retryable failures that occured
during the request.
This is a list of transport exceptions. Caller
may raise these, or process them for failure information.
Urllist is an optional argument to return only failures
for a specific URLs. Not all callers of check status
want to claim the error state of all pending transactions.
Transient errors are part of standard control flow.
The caller will look at these and decide whether
to throw them or not. Permanent failures are raised
by the transport engine as soon as they occur."""
# if list not specified, return all failures
if not urllist:
rf = self.__failures
self.__failures = []
return rf
# otherwise, look for failures that match just the URLs
# in urllist.
rf = []
for tf in self.__failures:
if hasattr(tf, "url") and tf.url in urllist:
rf.append(tf)
# remove failues in separate pass, or else for loop gets
# confused.
for f in rf:
self.__failures.remove(f)
return rf
def get_url(self, url, header=None, sslcert=None, sslkey=None,
repourl=None, compressible=False):
"""Invoke the engine to retrieve a single URL. Callers
wishing to obtain multiple URLs at once should use
addUrl() and run().
getUrl will return a read-only file object that allows access
to the URL's data."""
fobj = fileobj.StreamingFileObj(url, self)
t = TransportRequest(url, writefunc=fobj.get_write_func(),
hdrfunc=fobj.get_header_func(), header=header,
sslcert=sslcert, sslkey=sslkey, repourl=repourl,
compressible=compressible)
self.__req_q.appendleft(t)
return fobj
def get_url_header(self, url, header=None, sslcert=None, sslkey=None,
repourl=None):
"""Invoke the engine to retrieve a single URL's headers.
getUrlHeader will return a read-only file object that
contains no data."""
fobj = fileobj.StreamingFileObj(url, self)
t = TransportRequest(url, writefunc=fobj.get_write_func(),
hdrfunc=fobj.get_header_func(), header=header,
httpmethod="HEAD", sslcert=sslcert, sslkey=sslkey,
repourl=repourl)
self.__req_q.appendleft(t)
return fobj
@property
def pending(self):
"""Returns true if the engine still has outstanding
work to perform, false otherwise."""
return len(self.__req_q) > 0 or self.__active_handles > 0
def run(self):
"""Run the transport engine. This polls the underlying
framework to complete any asynchronous I/O. Synchronous
operations should have completed when startRequest
was invoked."""
if not self.pending:
return
if self.__active_handles > 0:
# timeout returned in milliseconds
timeout = self.__mhandle.timeout()
if timeout == -1:
# Pick our own timeout.
timeout = 1.0
elif timeout > 0:
# Timeout of 0 means skip call
# to select.
#
# Convert from milliseconds to seconds.
timeout = timeout / 1000.0
if timeout:
self.__mhandle.select(timeout)
while self.__freehandles and self.__req_q:
t = self.__req_q.pop()
eh = self.__freehandles.pop(-1)
self.__setup_handle(eh, t)
self.__mhandle.add_handle(eh)
self.__call_perform()
self.__cleanup_requests()
def remove_request(self, url):
"""In order to remove a request, it may be necessary
to walk all of the items in the request queue, all of the
currently active handles, and the list of any transient
failures. This is expensive, so only remove a request
if absolutely necessary."""
for h in self.__chandles:
if h.url == url and h not in self.__freehandles:
self.__mhandle.remove_handle(h)
self.__teardown_handle(h)
return
for i, t in enumerate(self.__req_q):
if t.url == url:
del self.__req_q[i]
return
for ex in self.__failures:
if ex.url == url:
self.__failures.remove(ex)
return
def reset(self):
"""Reset the state of the transport engine. Do this
before performing another type of request."""
for c in self.__chandles:
if c not in self.__freehandles:
self.__mhandle.remove_handle(c)
self.__teardown_handle(c)
self.__active_handles = 0
self.__freehandles = self.__chandles[:]
self.__req_q = deque()
def send_data(self, url, data, header=None, sslcert=None, sslkey=None,
repourl=None):
"""Invoke the engine to retrieve a single URL.
This routine sends the data in data, and returns the
server's response.
Callers wishing to obtain multiple URLs at once should use
addUrl() and run().
sendData will return a read-only file object that allows access
to the server's response.."""
fobj = fileobj.StreamingFileObj(url, self)
t = TransportRequest(url, writefunc=fobj.get_write_func(),
hdrfunc=fobj.get_header_func(), header=header, data=data,
httpmethod="POST", sslcert=sslcert, sslkey=sslkey,
repourl=repourl)
self.__req_q.appendleft(t)
return fobj
def set_file_bufsz(self, size):
"""If the downloaded files are being written out by
the file() mechanism, and not written using a callback,
the I/O is buffered. Set the buffer size using
this function. If it's not set, a default of 131072 (128k)
is used."""
if size <= 0:
self.__file_bufsz = 8192
return
self.__file_bufsz = size
def set_header(self, hdrdict=None):
"""Supply a dictionary of name/value pairs in hdrdict.
These will be included on all requests issued by the transport
engine. To append a specific header to a certain request,
supply a dictionary to the header argument of addUrl."""
if not hdrdict:
self.__common_header = {}
return
self.__common_header = hdrdict
def set_user_agent(self, ua_str):
"""Supply a string str and the transport engine will
use this string as its User-Agent header. This is
a header that will be common to all transport requests."""
self.__user_agent = ua_str
def __setup_handle(self, hdl, treq):
"""Setup the curl easy handle, hdl, with the parameters
specified in the TransportRequest treq. If global
parameters are set, apply these to the handle as well."""
# Set nosignal, so timeouts don't crash client
hdl.setopt(pycurl.NOSIGNAL, 1)
# Set connect timeout. Its value is defined in global_settings.
hdl.setopt(pycurl.CONNECTTIMEOUT,
global_settings.PKG_CLIENT_CONNECT_TIMEOUT)
# Set lowspeed limit and timeout. Clients that are too
# slow or have hung after specified amount of time will
# abort the connection.
hdl.setopt(pycurl.LOW_SPEED_LIMIT,
global_settings.pkg_client_lowspeed_limit)
hdl.setopt(pycurl.LOW_SPEED_TIME,
global_settings.PKG_CLIENT_LOWSPEED_TIMEOUT)
# Follow redirects
hdl.setopt(pycurl.FOLLOWLOCATION, True)
# Make sure that we don't use a proxy if the destination
# is localhost.
hdl.setopt(pycurl.NOPROXY, "localhost")
# Set user agent, if client has defined it
if self.__user_agent:
hdl.setopt(pycurl.USERAGENT, self.__user_agent)
# Take header dictionaries and convert them into lists
# of header strings.
if len(self.__common_header) > 0 or \
(treq.header and len(treq.header) > 0):
headerlist = []
# Headers common to all requests
for k, v in self.__common_header.iteritems():
headerstr = "%s: %s" % (k, v)
headerlist.append(headerstr)
# Headers specific to this request
if treq.header:
for k, v in treq.header.iteritems():
headerstr = "%s: %s" % (k, v)
headerlist.append(headerstr)
hdl.setopt(pycurl.HTTPHEADER, headerlist)
# Set request url. Also set attribute on handle.
hdl.setopt(pycurl.URL, treq.url)
hdl.url = treq.url
# The repourl is the url stem that identifies the
# repository. This is useful to have around for coalescing
# error output, and statistics reporting.
hdl.repourl = treq.repourl
if treq.filepath:
try:
hdl.fobj = open(treq.filepath, "wb+",
self.__file_bufsz)
except EnvironmentError, e:
if e.errno == errno.EACCES:
raise api_errors.PermissionsException(
e.filename)
# Raise OperationError if it's not EACCES
raise tx.TransportOperationError(
"Unable to open file: %s" % e)
hdl.setopt(pycurl.WRITEDATA, hdl.fobj)
# Request filetime, if endpoint knows it.
hdl.setopt(pycurl.OPT_FILETIME, True)
hdl.filepath = treq.filepath
elif treq.writefunc:
hdl.setopt(pycurl.WRITEFUNCTION, treq.writefunc)
hdl.setopt(pycurl.FAILONERROR, True)
hdl.filepath = None
hdl.fobj = None
else:
raise tx.TransportOperationError("Transport invocation"
" for URL %s did not specify filepath or write"
" function." % treq.url)
if treq.progtrack:
hdl.setopt(pycurl.NOPROGRESS, 0)
hdl.fileprog = FileProgress(treq.progtrack)
hdl.setopt(pycurl.PROGRESSFUNCTION,
hdl.fileprog.progress_callback)
if treq.compressible:
hdl.setopt(pycurl.ENCODING, "")
if treq.hdrfunc:
hdl.setopt(pycurl.HEADERFUNCTION, treq.hdrfunc)
if treq.httpmethod == "HEAD":
hdl.setopt(pycurl.NOBODY, True)
elif treq.httpmethod == "POST":
hdl.setopt(pycurl.POST, True)
hdl.setopt(pycurl.POSTFIELDS, treq.data)
else:
# Default to GET
hdl.setopt(pycurl.HTTPGET, True)
# Set up SSL options
if treq.sslcert:
hdl.setopt(pycurl.SSLCERT, treq.sslcert)
if treq.sslkey:
hdl.setopt(pycurl.SSLKEY, treq.sslkey)
# Options that apply when SSL is enabled
if treq.sslcert or treq.sslkey:
# Verify that peer's CN matches CN on certificate
hdl.setopt(pycurl.SSL_VERIFYHOST, 2)
cadir = self.__xport.get_ca_dir()
if cadir:
hdl.setopt(pycurl.SSL_VERIFYPEER, 1)
hdl.setopt(pycurl.CAPATH, cadir)
hdl.unsetopt(pycurl.CAINFO)
else:
hdl.setopt(pycurl.SSL_VERIFYPEER, 0)
    def __shutdown(self):
        """Shutdown the transport engine, perform cleanup.

        Closes every easy handle in the pool and the curl multi handle,
        then drops all references so the engine cannot be reused without
        being reinitialized."""

        # reset() is defined elsewhere in this class; presumably it
        # detaches any in-flight requests before we close the handles.
        self.reset()

        # Close each pooled curl easy handle.
        for c in self.__chandles:
            c.close()

        self.__chandles = None
        self.__freehandles = None

        # Finally tear down the multi handle that drove the transfers.
        self.__mhandle.close()
        self.__mhandle = None
    @staticmethod
    def __teardown_handle(hdl):
        """Cleanup any state that we've associated with this handle.
        After a handle has been torn down, it should still be valid
        for use, but should have no previous state.  To remove
        handles from use completely, use __shutdown."""

        hdl.reset()
        if hdl.fobj:
            hdl.fobj.close()
            hdl.fobj = None
            if not hdl.success:
                if hdl.fileprog:
                    # Roll the failed file's bytes back out of the
                    # progress tracker.
                    hdl.fileprog.abort()
                try:
                    # Remove the partially-downloaded file.
                    os.remove(hdl.filepath)
                except EnvironmentError, e:
                    # A missing file is fine (nothing was written yet);
                    # any other failure is a real transport error.
                    if e.errno != errno.ENOENT:
                        raise \
                            tx.TransportOperationError(
                                "Unable to remove file: %s"
                                % e)
            else:
                if hdl.fileprog:
                    # Report the final on-disk size to the tracker so
                    # any drift in curl's byte counts is reconciled.
                    filesz = os.stat(hdl.filepath).st_size
                    hdl.fileprog.commit(filesz)
                if hdl.filepath and hdl.filetime > -1:
                    # Set atime/mtime, if we were able to
                    # figure it out.  File action will
                    # override this at install time, if the
                    # action has a timestamp property.
                    ft = hdl.filetime
                    os.utime(hdl.filepath, (ft, ft))

        # Wipe all per-request attributes so the handle can be reused.
        hdl.url = None
        hdl.repourl = None
        hdl.success = False
        hdl.filepath = None
        hdl.fileprog = None
        hdl.filetime = -1
class FileProgress(object):
    """Bridge between pycurl's per-handle progress callback and the
    client's global ProgressTracker.

    Curl invokes the progress callback even when no new bytes have
    arrived, and a handle may fail after having reported progress, so
    this object keeps a per-file byte count.  On failure the bytes are
    subtracted from the tracker again; on success, any drift between
    what curl reported and the final size is reconciled.  Keeping this
    per-file state is necessary because the ProgressTracker only holds
    global byte counts."""

    def __init__(self, progtrack):
        self.progtrack = progtrack
        self.dltotal = 0
        self.dlcurrent = 0
        self.completed = False

    def abort(self):
        """Download failed: take this file's accumulated byte count
        back out of the ProgressTracker."""
        self.progtrack.download_add_progress(0, -self.dlcurrent)
        self.completed = True

    def commit(self, size):
        """Download succeeded.  SIZE is the total number of bytes that
        were actually received; if it differs from what was counted via
        the callback (which can happen when the framework swaps the
        request across connections), adjust the tracker by the
        difference."""
        drift = int(size - self.dlcurrent)
        self.progtrack.download_add_progress(1, drift)
        self.completed = True

    def progress_callback(self, dltot, dlcur, ultot, ulcur):
        """pycurl/libcurl progress hook.  Returns -1 to cancel the
        transfer when the tracker requests it, 0 otherwise."""
        tracker = self.progtrack
        if hasattr(tracker, "check_cancelation") and \
                tracker.check_cancelation():
            return -1
        if self.completed:
            return 0
        if self.dltotal != dltot:
            self.dltotal = dltot
        # Only forward genuinely new bytes; curl repeats totals freely.
        delta = int(dlcur - self.dlcurrent)
        if delta > 0:
            self.dlcurrent += delta
            tracker.download_add_progress(0, delta)
        return 0
class TransportRequest(object):
    """A class that contains per-request information for the underlying
    transport engines.  This is used to set per-request options that
    are used either by the framework, the transport, or both."""

    def __init__(self, url, filepath=None, writefunc=None,
        hdrfunc=None, header=None, data=None, httpmethod="GET",
        progtrack=None, sslcert=None, sslkey=None, repourl=None,
        compressible=False):
        """Create a TransportRequest with the following parameters:

        url - The url that the transport engine should retrieve

        filepath - If defined, the transport engine will download the
        file to this path.  If not defined, the caller should
        supply a write function.

        writefunc - A function, supplied instead of filepath, that
        reads the bytes supplied by the transport engine and writes
        them somewhere for processing.  This is a callback.

        hdrfunc - A callback for examining the contents of header
        data in a response to a transport request.

        header - A dictionary of key/value pairs to be included
        in the request's header.

        compressible - A boolean value that indicates whether
        the content that is requested is a candidate for transport
        level compression.

        data - If the request is sending a data payload, include
        the data in this argument.

        httpmethod - If the request is a HTTP/HTTPS request,
        this can override the default HTTP method of GET.

        progtrack - If the transport wants the engine to update
        the progress of the download, supply a ProgressTracker
        object in this argument.

        repourl - This is the URL stem that identifies the repo.
        It's a subset of url.  It's also used by the stats system.

        sslcert - If the request is using SSL, HTTPS for example,
        provide a path to the SSL certificate here.

        sslkey - If the request is using SSL, like HTTPS for example,
        provide a path to the SSL key here."""

        self.url = url
        self.filepath = filepath
        self.writefunc = writefunc
        self.hdrfunc = hdrfunc
        self.header = header
        self.data = data
        self.httpmethod = httpmethod
        self.progtrack = progtrack
        self.repourl = repourl
        self.sslcert = sslcert
        self.sslkey = sslkey
        self.compressible = compressible
| agpl-3.0 |
amiyapatanaik/tensorflow-serving-docker-image | predict_client/client.py | 1 | 2208 | import os
import tensorflow as tf
import grpc
import logging
from grpc import RpcError
from predict_pb2 import PredictRequest
from prediction_service_pb2 import PredictionServiceStub
logger = logging.getLogger(__name__)
class PredictClient:
    """Thin gRPC client for a TensorFlow Serving ``Predict`` endpoint.

    The serving host is read from the environment variable named by
    ``envhost`` when that variable is set; otherwise ``localhost`` is
    used as a fallback."""

    def __init__(self, localhost, envhost, model_name, model_version, num_scores=0):
        """Create a client.

        localhost     -- fallback "host:port" string for the model server
        envhost       -- name of an environment variable holding the host
        model_name    -- model name as registered with TF Serving
        model_version -- model version to query; values <= 0 let the
                         server choose
        num_scores    -- length of the zero-filled list returned when
                         the RPC fails
        """
        if envhost and envhost in os.environ:
            self.host = os.environ[envhost]
        else:
            logger.warning('Model host not in env variable')
            self.host = localhost
        self.model_name = model_name
        self.model_version = model_version
        self.num_scores = num_scores

    def predict(self, request_data, request_timeout=10):
        """Send REQUEST_DATA (a numpy array) to the model and return the
        'scores' output as a plain list of floats.

        On RPC failure the error is logged and a list of ``num_scores``
        zeros is returned instead of raising."""
        # Lazy %-style logger arguments: the message is only built when
        # the log level is actually enabled.
        logger.info('Sending request to tfserving model')
        logger.info('Model name: %s', self.model_name)
        logger.info('Model version: %s', self.model_version)
        logger.info('Host: %s', self.host)

        tensor_shape = request_data.shape
        # incv4/res152 keep the array's own dtype; every other model is
        # fed float32.
        if self.model_name in ('incv4', 'res152'):
            features_tensor_proto = tf.contrib.util.make_tensor_proto(
                request_data, shape=tensor_shape)
        else:
            features_tensor_proto = tf.contrib.util.make_tensor_proto(
                request_data, dtype=tf.float32, shape=tensor_shape)

        # Create gRPC client and request.
        # NOTE(review): the channel is never closed; if clients are
        # created per-request, consider a `with grpc.insecure_channel`
        # block or reusing one channel per instance.
        channel = grpc.insecure_channel(self.host)
        stub = PredictionServiceStub(channel)
        request = PredictRequest()
        request.model_spec.name = self.model_name
        if self.model_version > 0:
            request.model_spec.version.value = self.model_version
        request.inputs['inputs'].CopyFrom(features_tensor_proto)

        try:
            result = stub.Predict(request, timeout=request_timeout)
            scores = list(result.outputs['scores'].float_val)
            logger.debug('Got scores with len: %d', len(scores))
            return scores
        except RpcError as e:
            logger.error(e)
            # Fixed typo in user-visible message: "Prediciton" -> "Prediction".
            logger.warning('Prediction failed. Returning empty predictions '
                           'of length: %s', self.num_scores)
            return [0] * self.num_scores
jdavidrcamacho/Tests_GP | Gedi-0.2/Gedi/kernel_mcmc.py | 1 | 10414 | # -*- coding: utf-8 -*-
import kernel as kl
import kernel_likelihood as lk
import numpy as np
##### markov chain monte carlo #####
def MCMC(kernel,x,y,yerr,parameters,runs=50000,burns=20000):
    """
    MCMC() performs a markov chain monte carlo search for the optimal
    parameters of a given kernel.
    The algorithm needs improvements as it is very inefficient.

    Parameters
    kernel = kernel in use
    x = range of values of the independent variable (usually time)
    y = range of values of the dependent variable (the measurements)
    yerr = error in the measurements
    parameters = list of (lower, upper) intervals, one per kernel
    parameter (check Tests.py to understand it better)
    runs = total number of mcmc iterations, 50000 by default
    burns = number of initial (burn-in) iterations whose samples are
    discarded, 20000 by default

    Returns [final_kernel, final_log_likelihood, log_likelihood_trace,
    per_parameter_sample_lists].
    """
    #to not lose the original kernel
    original_kernel=kernel

    # Draw the starting point uniformly inside each parameter interval.
    initial_params= [0]*len(parameters)
    for i, e in enumerate(parameters):
        initial_params[i]=np.random.uniform(parameters[i][0],parameters[i][1])

    first_kernel=new_kernel(original_kernel,initial_params)
    first_likelihood=lk.likelihood(first_kernel,x,y,yerr)
    print first_kernel,first_likelihood

    i=0
    #a better way to define the step is needed
    step=5e-3
    #to save the evolution of the log likelihood
    running_logs=[]
    #to save the evolution of the parameters
    params_number=len(parameters)
    params_list = [[] for _ in range(params_number)]

    #lets run the mcmc
    while i<runs:
        u=np.random.uniform(0,1)

        #lets make new parameters (gaussian proposal around the current point)
#        guess_params=[np.abs(n+(step)*np.random.randn()) for n in initial_params]
        guess_params=[n+(step)*np.random.randn() for n in initial_params]

        #limits of the variation of the parameters: clamp to the interval
        for j, e in enumerate(guess_params):
            if guess_params[j]<parameters[j][0]:
                guess_params[j]=parameters[j][0]
            if guess_params[j]>parameters[j][1]:
                guess_params[j]=parameters[j][1]

        #lets see if we keep the new parameters or not
        second_kernel=new_kernel(original_kernel,guess_params)
        second_likelihood=lk.likelihood(second_kernel,x,y,yerr)

        # Per-parameter Metropolis-style accept/reject.
        # NOTE(review): np.exp(log-likelihood) underflows to 0.0 for
        # strongly negative log likelihoods, which makes the
        # prior<1e-300 branch auto-accept; working in log space would
        # be more robust -- confirm intended behavior.
        for j, e in enumerate(guess_params):
            prior=np.exp(first_likelihood)*initial_params[j]
            posterior=np.exp(second_likelihood)*guess_params[j]
            if prior<1e-300:
                ratio=1
                initial_params[j]=guess_params[j]
            else:
                ratio = posterior/prior
            if u<np.minimum(1,ratio):
                initial_params[j]=guess_params[j]
            else:
                initial_params[j]=initial_params[j]
            #separation of the burned data and the final data
            if i<burns:
                pass
            else:
                params_list[j].append(initial_params[j])

        #lets define the new kernel
        first_kernel=new_kernel(original_kernel,initial_params)
        first_likelihood=lk.likelihood(first_kernel,x,y,yerr)
        if i<burns:
            pass
        else:
            running_logs.append(first_likelihood)
        i+=1

    #final kernel and log likelihood
    final_kernel=new_kernel(original_kernel,initial_params)
    final_likelihood=lk.likelihood(final_kernel,x,y,yerr)
    return [final_kernel,final_likelihood,running_logs,params_list]
##### markov chain monte carlo #####
def MCMC2(kernel,x,y,yerr,parameters,runs=50000,burns=20000):
    """
    MCMC2() performs another variant of the markov chain monte carlo to
    find the optimal parameters of a given kernel, with adaptive
    proposal steps.  This one is still being tested!

    Parameters
    kernel = kernel in use
    x = range of values of the independent variable (usually time)
    y = range of values of the dependent variable (the measurements)
    yerr = error in the measurements
    parameters = list of (lower, upper) intervals, one per kernel
    parameter (check Tests.py to understand it better)
    runs = total number of mcmc iterations, 50000 by default
    burns = number of initial (burn-in) iterations whose samples are
    discarded, 20000 by default

    Returns [final_kernel, final_log_likelihood, log_likelihood_trace,
    per_parameter_sample_lists].
    """
    #to not lose the original kernel
    original_kernel=kernel

    # Draw the starting point uniformly inside each parameter interval.
    initial_params= [0]*len(parameters)
    for i, e in enumerate(parameters):
        initial_params[i]=np.random.uniform(parameters[i][0],parameters[i][1])

    first_kernel=new_kernel(original_kernel,initial_params)
    first_likelihood=lk.likelihood(first_kernel,x,y,yerr)
    print first_kernel,first_likelihood

    i=0
    step=5e-3 #a better way to define the step is needed
    factor=0.3
    running_logs=[] #to save the evolution of the log likelihood
    params_number = len(parameters) #to save the evolution of the parameters
    params_list = [[] for _ in range(params_number)]
    # aux_list keeps every sample (including burn-in) so the per-parameter
    # step size can be re-estimated from the chain's spread.
    aux_list = [[] for _ in range(params_number)]
    step_list = [step for n in params_list]
    factor_list= [factor for n in params_list]

    #lets run the mcmc
    # accepted/rejected counts drive the adaptive tuning of factor_list.
    accepted_list=[0 for n in step_list]
    rejected_list=[0 for n in step_list]
    while i<runs:
        u=np.random.uniform(0,1)

        #lets make new parameters (per-parameter scaled gaussian proposal)
        guess_params=[n*(m*np.random.randn()) for \
            n,m in zip(factor_list,step_list)]
        guess_params=[n+m for n,m in zip(initial_params,guess_params)]

        #limits of the variation of the parameters: clamp just inside
        #the interval so the bounds themselves are never proposed
        for j, e in enumerate(guess_params):
            if guess_params[j]<parameters[j][0]:
                guess_params[j]=parameters[j][0]+1e-10
            if guess_params[j]>parameters[j][1]:
                #pass
                guess_params[j]=parameters[j][1]-1e-10

        #lets see if we keep the new parameters or not
        second_kernel=new_kernel(original_kernel,guess_params)
        second_likelihood=lk.likelihood(second_kernel,x,y,yerr)

        # NOTE(review): as in MCMC(), np.exp(log-likelihood) underflows
        # for strongly negative log likelihoods -- confirm the
        # prior<1e-300 branch behaves as intended.
        for j, e in enumerate(guess_params):
            prior=np.exp(first_likelihood)*initial_params[j]
            posterior=np.exp(second_likelihood)*guess_params[j]
            if prior<1e-300:
                ratio=1
                initial_params[j]=initial_params[j]
                rejected_list[j]=rejected_list[j]+1
            else:
                ratio = posterior/prior
                if u<np.minimum(1,ratio):
                    initial_params[j]=guess_params[j]
                    accepted_list[j]=accepted_list[j]+1
                else:
                    initial_params[j]=initial_params[j]
                    rejected_list[j]=rejected_list[j]+1
            aux_list[j].append(initial_params[j])
            #separation of the burned data and the final data
            if i<burns:
                # During burn-in, retune the proposal scale every 500
                # iterations toward a 20-30% acceptance rate.
                if i>0 and i%500==0:
                    accounting=accepted_list[j]/float(i)
                    if accounting<0.20 or accounting>0.30:
                        factor_list[j]=factor_list[j]*accounting*4
                    else:
                        factor_list[j]=factor_list[j]*10
                else:
                    pass
            else:
                params_list[j].append(initial_params[j])
                # Re-estimate the step from the spread of the second
                # half of the chain so far (py2 integer division).
                step_list[j]=np.std(aux_list[j][len(aux_list[j])/2 :])

        #lets define the new kernel
        first_kernel=new_kernel(original_kernel,initial_params)
        first_likelihood=lk.likelihood(first_kernel,x,y,yerr)
        if i<burns:
            #pass
            # NOTE(review): this repeats the tuning above but uses `j`
            # left over from the for loop, so it only retunes the last
            # parameter -- looks unintentional, confirm.
            if i>0 and i%500==0:
                accounting=accepted_list[j]/float(i)
                if accounting<0.20 or accounting>0.3:
                    factor_list[j]=factor_list[j]*accounting*4
                else:
                    factor_list[j]=factor_list[j]*10
            else:
                pass
        else:
            running_logs.append(first_likelihood)
        i+=1

    #final kernel and log likelihood
    final_kernel=new_kernel(original_kernel,initial_params)
    final_likelihood=lk.likelihood(final_kernel,x,y,yerr)
    return [final_kernel,final_likelihood,running_logs,params_list]
##### auxiliary calculations #####
def new_kernel(original_kernel, b):
    """Rebuild ORIGINAL_KERNEL with the new (hyper)parameter values B.

    Used by the MCMC routines to construct a fresh kernel of the same
    type at each step.  For composite (Sum/Product) kernels, B holds the
    parameters of k1 followed by those of k2, and the rebuild recurses.

    Raises ValueError for kernel types this function does not know.
    """
    if isinstance(original_kernel, kl.ExpSquared):
        return kl.ExpSquared(b[0], b[1])
    elif isinstance(original_kernel, kl.ExpSineSquared):
        return kl.ExpSineSquared(b[0], b[1], b[2])
    elif isinstance(original_kernel, kl.RatQuadratic):
        return kl.RatQuadratic(b[0], b[1], b[2])
    elif isinstance(original_kernel, kl.Exponential):
        return kl.Exponential(b[0], b[1])
    elif isinstance(original_kernel, kl.Matern32):
        # Fixed: the original constructed kl.Matern_32, but the
        # isinstance check above proves the class is named Matern32,
        # so kl.Matern_32 would raise AttributeError.
        return kl.Matern32(b[0], b[1])
    elif isinstance(original_kernel, kl.Matern52):
        # Fixed: same Matern_52 -> Matern52 naming mismatch as above.
        return kl.Matern52(b[0], b[1])
    elif isinstance(original_kernel, kl.QuasiPeriodic):
        return kl.QuasiPeriodic(b[0], b[1], b[2], b[3])
    elif isinstance(original_kernel, kl.WhiteNoise):
        return kl.WhiteNoise(b[0])
    elif isinstance(original_kernel, kl.Sum):
        # b = k1's parameters followed by k2's parameters.
        n1 = len(original_kernel.k1.pars)
        n2 = len(original_kernel.k2.pars)
        new_k1 = new_kernel(original_kernel.k1, b[:n1])
        new_k2 = new_kernel(original_kernel.k2, b[n1:n1 + n2])
        return new_k1 + new_k2
    elif isinstance(original_kernel, kl.Product):
        n1 = len(original_kernel.k1.pars)
        n2 = len(original_kernel.k2.pars)
        new_k1 = new_kernel(original_kernel.k1, b[:n1])
        new_k2 = new_kernel(original_kernel.k2, b[n1:n1 + n2])
        return new_k1 * new_k2
    else:
        # Fixed: the original printed 'Something is missing' and fell
        # through, silently returning None; fail loudly instead.
        raise ValueError("new_kernel: unsupported kernel type %s"
                         % type(original_kernel))
##### END | mit |
jphilipsen05/zulip | analytics/management/commands/client_activity.py | 6 | 3069 | from __future__ import absolute_import
from __future__ import print_function
from typing import Any
from argparse import ArgumentParser
from django.core.management.base import BaseCommand
from django.db.models import Count, QuerySet
from django.utils.timezone import now as timezone_now
from zerver.models import UserActivity, UserProfile, Realm, \
get_realm, get_user_profile_by_email
import datetime
class Command(BaseCommand):
    help = """Report rough client activity globally, for a realm, or for a user
Usage examples:
./manage.py client_activity
./manage.py client_activity zulip
./manage.py client_activity hamlet@zulip.com"""

    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        parser.add_argument('arg', metavar='<arg>', type=str, nargs='?', default=None,
                            help="realm or user to estimate client activity for")

    def compute_activity(self, user_activity_objects):
        # type: (QuerySet) -> None
        """Print a per-client activity breakdown for the past week.

        This is only a rough report: activity is registered
        inconsistently across clients, so the numbers indicate
        approximately how many people in a group used each client
        recently (e.g. how popular different desktop client versions
        are), NOT the relative request volume per client.
        """
        week_ago = timezone_now() - datetime.timedelta(days=7)
        client_counts = user_activity_objects.filter(
            last_visit__gt=week_ago).values("client__name").annotate(
            count=Count('client__name'))

        # Sort ascending by count (ties broken by client name) and
        # print one line per client, then the grand total.
        counts = sorted((row["count"], row["client__name"])
                        for row in client_counts)
        total = sum(n for n, _ in counts)
        for n, client_name in counts:
            print("%25s %15d" % (client_name, n))
        print("Total:", total)

    def handle(self, *args, **options):
        # type: (*Any, **str) -> None
        target = options['arg']
        if target is None:
            # No argument: report global activity.
            self.compute_activity(UserActivity.objects.all())
            return
        try:
            # First interpretation: the argument is a user email.
            user_profile = get_user_profile_by_email(target)
            self.compute_activity(UserActivity.objects.filter(
                user_profile=user_profile))
        except UserProfile.DoesNotExist:
            try:
                # Second interpretation: the argument is a realm.
                realm = get_realm(target)
                self.compute_activity(UserActivity.objects.filter(
                    user_profile__realm=realm))
            except Realm.DoesNotExist:
                print("Unknown user or realm %s" % (target,))
                exit(1)
| apache-2.0 |
YueLinHo/Subversion | subversion/bindings/ctypes-python/csvn/repos.py | 5 | 24973 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import csvn.core as svn
from csvn.core import *
import csvn.types as _types
from csvn.ext.callback_receiver import CallbackReceiver
from txn import Txn
from auth import User
import os
class RepositoryURI(object):
    """A URI to an object in a Subversion repository, stored internally
    in encoded, canonical form.

    Instances expose ``_as_parameter_`` so they can be passed directly
    to the underlying ctypes svn_path_* functions."""

    def __init__(self, uri, encoded=True):
        """Wrap URI.  When encoded is False, the string is URI-encoded
        before being canonicalized and stored."""
        scratch = Pool()
        if not encoded:
            uri = svn_path_uri_encode(uri, scratch)
        self._as_parameter_ = str(svn_path_canonicalize(uri, scratch))

    def join(self, uri):
        """Return this URI joined with the relative URI, inserting a
        slash if one is needed."""
        scratch = Pool()
        return RepositoryURI(svn_path_join(self, uri, scratch))

    def dirname(self):
        """Return the parent directory of this URI."""
        scratch = Pool()
        return RepositoryURI(svn_path_dirname(self, scratch))

    def relative_path(self, uri, encoded=True):
        """Return URI as a decoded path relative to this URI.  When
        encoded is False, URI is encoded first.  If URI is not a child
        of this URI, the (decoded) URI itself is returned."""
        scratch = Pool()
        if not encoded:
            uri = svn_path_uri_encode(uri, scratch)
        child = svn_path_is_child(self, uri, scratch) or uri
        return str(svn_path_uri_decode(child, scratch))

    def longest_ancestor(self, uri):
        """Return the longest common ancestor of this URI and URI."""
        scratch = Pool()
        return RepositoryURI(svn_path_get_longest_ancestor(self, uri, scratch))

    def __str__(self):
        """Return the canonical, encoded URI string."""
        return self._as_parameter_
class RemoteRepository(object):
    """This class represents a connection from the client to a remote
    Subversion repository."""

    # The interface corresponds roughly to the svn_ra API, and an object of
    # this type basically represents the C type 'svn_ra_session_t'.
    # `self.pool` holds session-lifetime allocations; `self.iterpool` is a
    # scratch pool cleared after each call.

    def __init__(self, url, user=None):
        """Open a new session to URL with the specified USER.

        USER must be an object that implements the
        'csvn.auth.User' interface."""
        if user is None:
            user = User()

        self.pool = Pool()
        self.iterpool = Pool()
        self.url = RepositoryURI(url)
        self.user = user

        self.client = POINTER(svn_client_ctx_t)()
        svn_client_create_context(byref(self.client), self.pool)
        self.user.setup_auth_baton(pointer(self.client.contents.auth_baton))
        # `_as_parameter_` lets this object be passed straight to the
        # ctypes svn_ra_* functions as the session handle.
        self._as_parameter_ = POINTER(svn_ra_session_t)()
        svn_client_open_ra_session(byref(self._as_parameter_), url,
                                   self.client, self.pool)

        # Install the commit-log callback trampoline; the actual Python
        # callback is registered later via set_log_func().
        self.client[0].log_msg_func2 = \
            svn_client_get_commit_log2_t(self._log_func_wrapper)
        self.client[0].log_msg_baton2 = c_void_p()
        self._log_func = None

    def close(self):
        """Close this RemoteRepository object, releasing any resources."""
        self.pool.clear()

    def txn(self):
        """Create a transaction"""
        return Txn(self)

    def latest_revnum(self):
        """Get the latest revision number in the repository"""
        revnum = svn_revnum_t()
        svn_ra_get_latest_revnum(self, byref(revnum), self.iterpool)
        self.iterpool.clear()
        return revnum.value

    def check_path(self, path, rev = None, encoded=True):
        """Check the status of PATH@REV. If REV is not specified,
        look at the latest revision in the repository.

        If the path is ...
          ... absent, then we return svn_node_none.
          ... a regular file, then we return svn_node_file.
          ... a directory, then we return svn_node_dir
          ... unknown, then we return svn_node_unknown

        If ENCODED is True, the path may be URI-encoded.
        """
        path = self._relative_path(path, encoded)
        if rev is None:
            rev = self.latest_revnum()
        kind = svn_node_kind_t()
        svn_ra_check_path(self, path, svn_revnum_t(rev), byref(kind),
                          self.iterpool)
        self.iterpool.clear()
        return kind.value

    def list(self, path, rev = SVN_INVALID_REVNUM, fields = SVN_DIRENT_ALL):
        """List the contents of the specified directory PATH@REV. This
        function returns a dictionary, which maps entry names to
        directory entries (svn_dirent_t objects).

        If REV is not specified, we look at the latest revision of the
        repository.

        FIELDS controls what portions of the svn_dirent_t object are
        filled in. To have them completely filled in, just pass in
        SVN_DIRENT_ALL (which is the default); otherwise, pass the
        bitwise OR of all the SVN_DIRENT_ fields you would like to
        have returned to you.
        """
        dirents = _types.Hash(POINTER(svn_dirent_t), None)
        svn_ra_get_dir2(self, dirents.byref(), NULL, NULL, path,
                        rev, fields, dirents.pool)
        self.iterpool.clear()

        # Create a Python dict of svn_dirent_t objects from this Hash of
        # pointers to svn_dirent_t.
        result = {}
        for path, dirent_p in dirents.items():
            result[path] = dirent_p[0]
        return result

    def cat(self, buffer, path, rev = SVN_INVALID_REVNUM):
        """Get PATH@REV and save it to BUFFER. BUFFER must be a Python file
        or a StringIO object.

        If REV is not specified, we look at the latest revision of the
        repository."""
        stream = _types.Stream(buffer)
        svn_ra_get_file(self, path, rev, stream, NULL, NULL, stream.pool)
        self.iterpool.clear()

    def info(self, path, rev = None):
        """Get a pointer to a svn_dirent_t object associated with PATH@REV.
        If PATH does not exist, return None.

        If REV is not specified, we look at the latest revision of the
        file."""
        dirent = POINTER(svn_dirent_t)()
        dirent.pool = Pool()
        if rev is None:
            rev = self.latest_revnum()
        svn_ra_stat(self, path, rev, byref(dirent), dirent.pool)
        self.iterpool.clear()
        # NOTE(review): despite the docstring, a nonexistent path leaves
        # `dirent` NULL and `dirent[0]` would fail rather than returning
        # None -- confirm intended behavior.
        return dirent[0]

    def proplist(self, path, rev = SVN_INVALID_REVNUM):
        """Return a dictionary containing the properties on PATH@REV

        If REV is not specified, we look at the latest revision of the
        repository."""
        props = _types.Hash(POINTER(svn_string_t), None,
                            wrapper=_types.SvnStringPtr)
        # Directories and files need different svn_ra calls.
        status = self.check_path(path, rev)
        if status == svn_node_dir:
            svn_ra_get_dir2(self, NULL, NULL, props.byref(), path,
                            rev, 0, props.pool)
        else:
            svn_ra_get_file(self, path, rev, NULL, NULL, props.byref(),
                            props.pool)
        self.iterpool.clear()
        return props

    def propget(self, name, path, rev = SVN_INVALID_REVNUM):
        """Get property NAME from PATH@REV.

        If REV is not specified, we look at the latest revision of the
        repository."""
        # Raises KeyError if the property is not set on PATH@REV.
        return self.proplist(path, rev)[name]

    def log(self, start_rev, end_rev, paths=None, limit=0,
            discover_changed_paths=FALSE, stop_on_copy=FALSE):
        """A generator function which returns information about the revisions
        between START_REV and END_REV. Each return value is a
        csvn.types.LogEntry object which describes a revision.

        For details on what fields are contained in a LogEntry object,
        please see the documentation from csvn.types.LogEntry.

        You can iterate through the log information for several revisions
        using a regular for loop. For example:

          for entry in session.log(start_rev, end_rev):
              print("Revision %d" % entry.revision)
              ...

        ARGUMENTS:

          If PATHS is not None and has one or more elements, then only
          show revisions in which at least one of PATHS was changed (i.e.,
          if file, text or props changed; if dir, props changed or an entry
          was added or deleted). Each PATH should be relative to the current
          session's root.

          If LIMIT is non-zero, only the first LIMIT logs are returned.

          If DISCOVER_CHANGED_PATHS is True, then changed_paths will contain
          a list of paths affected by this revision.

          If STOP_ON_COPY is True, then this function will not cross
          copies while traversing history.

          If START_REV or END_REV is a non-existent revision, we throw
          a SVN_ERR_FS_NO_SUCH_REVISION SubversionException, without
          returning any logs.
        """
        # An empty-string path means "the session root".
        paths = _types.Array(c_char_p, paths is None and [""] or paths)
        return iter(_LogMessageReceiver(self, start_rev, end_rev, paths,
            limit, discover_changed_paths, stop_on_copy))

    # Private. Produces a delta editor for the commit, so that the Txn
    # class can commit its changes over the RA layer.
    def _get_commit_editor(self, message, commit_callback, commit_baton, pool):
        editor = POINTER(svn_delta_editor_t)()
        editor_baton = c_void_p()
        svn_ra_get_commit_editor2(self, byref(editor),
            byref(editor_baton), message, commit_callback,
            commit_baton, NULL, FALSE, pool)
        return (editor, editor_baton)

    # Private. Convert a URI to a repository-relative path
    def _relative_path(self, path, encoded=True):
        return self.url.relative_path(path, encoded)

    # Private. Convert a repository-relative copyfrom path into a proper
    # copyfrom URI
    def _abs_copyfrom_path(self, path):
        return self.url.join(RepositoryURI(path, False))

    def revprop_list(self, revnum=None):
        """Returns a hash of the revision properties of REVNUM. If REVNUM is
        not provided, it defaults to the head revision."""
        rev = svn_opt_revision_t()
        if revnum is not None:
            rev.kind = svn_opt_revision_number
            rev.value.number = revnum
        else:
            rev.kind = svn_opt_revision_head
        props = _types.Hash(POINTER(svn_string_t), None,
                            wrapper=_types.SvnStringPtr)
        set_rev = svn_revnum_t()
        svn_client_revprop_list(props.byref(),
                                self.url,
                                byref(rev),
                                byref(set_rev),
                                self.client,
                                props.pool)
        self.iterpool.clear()
        return props

    def revprop_get(self, propname, revnum=None):
        """Returns the value of PROPNAME at REVNUM. If REVNUM is not
        provided, it defaults to the head revision."""
        # Raises KeyError if the revision property is not set.
        return self.revprop_list(revnum)[propname]

    def revprop_set(self, propname, propval=NULL, revnum=None, force=False):
        """Set PROPNAME to PROPVAL for REVNUM. If REVNUM is not given, it
        defaults to the head revision. Returns the actual revision number
        effected.

        If PROPVAL is not provided, the property will be deleted.

        If FORCE is True (False by default), newlines will be allowed in the
        author property.

        Be careful, this is a lossy operation."""
        rev = svn_opt_revision_t()
        if revnum is not None:
            rev.kind = svn_opt_revision_number
            rev.value.number = revnum
        else:
            rev.kind = svn_opt_revision_head
        set_rev = svn_revnum_t()
        # NOTE(review): the delete case relies on
        # svn_string_create(NULL, ...) yielding the value svn expects
        # for deletion -- confirm against csvn.core's wrapper.
        svn_client_revprop_set(propname,
            svn_string_create(propval, self.iterpool), self.url,
            byref(rev), byref(set_rev), force, self.client,
            self.iterpool)
        try:
            return set_rev.value
        finally:
            self.iterpool.clear()

    def set_log_func(self, log_func):
        """Register a callback to get a log message for commit and
        commit-like operations. LOG_FUNC should take an array as an argument,
        which holds the files to be committed. It should return a list of the
        form [LOG, FILE] where LOG is a log message and FILE is the temporary
        file, if one was created instead of a log message. If LOG is None,
        the operation will be canceled and FILE will be treated as the
        temporary file holding the temporary commit message."""
        self._log_func = log_func

    # Private. ctypes trampoline installed in __init__; forwards to the
    # Python-level callback registered via set_log_func().
    def _log_func_wrapper(self, log_msg, tmp_file, commit_items, baton, pool):
        log_msg[0].raw = NULL
        tmp_file[0] = NULL
        if self._log_func:
            [log, file] = self._log_func(_types.Array(String, commit_items))

            # Strings handed back to C must be duplicated into the
            # supplied pool so they outlive this Python call.
            if log:
                log_msg[0].raw = apr_pstrdup(pool, String(log)).raw
            if file:
                tmp_file[0] = apr_pstrdup(pool, String(file)).raw

    def svnimport(self, path, url=None, nonrecursive=False, no_ignore=True, log_func=None):
        """Import local PATH into URL (defaults to this session's root
        URL) and return the resulting svn_commit_info_t.

        If LOG_FUNC is provided, it is registered via set_log_func() to
        supply the commit log message."""
        if not url:
            url = self.url
        if log_func:
            self.set_log_func(log_func)
        pool = Pool()
        commit_info = POINTER(svn_commit_info_t)()
        svn_client_import2(byref(commit_info), path, url, nonrecursive,
                           no_ignore, self.client, pool)
        # Keep the pool alive as long as the returned struct is used.
        commit_info[0].pool = pool
        return commit_info[0]
class LocalRepository(object):
"""A client which accesses the repository directly. This class
may allow you to perform some administrative actions which
cannot be performed remotely (e.g. create repositories,
dump repositories, etc.)
Unlike RemoteRepository, the functions in this class do not
accept URIs, and instead only accept local filesystem
paths.
By default, this class does not perform any checks to verify
permissions, assuming that the specified user has full
administrative access to the repository. To teach this class
to enforce an authz policy, you must subclass csvn.auth.User
and implement the allow_access function.
"""
    def __init__(self, path, create=False, user=None):
        """Open the repository at PATH. If create is True,
        create a new repository.

        If specified, user must be a csvn.auth.User instance.
        """
        if user is None:
            user = User()

        # `pool` holds repository-lifetime allocations; `iterpool` is a
        # scratch pool cleared after each operation.
        self.pool = Pool()
        self.iterpool = Pool()
        # `_as_parameter_` lets this object be passed directly to the
        # ctypes svn_repos_* functions as the repository handle.
        self._as_parameter_ = POINTER(svn_repos_t)()
        self.user = user
        if create:
            svn_repos_create(byref(self._as_parameter_), path,
                             None, None, None, None, self.pool)
        else:
            svn_repos_open(byref(self._as_parameter_), path, self.pool)
        # Filesystem-level access wrapper (defined elsewhere in this file).
        self.fs = _fs(self)
    def __del__(self):
        # Release pools when the object is garbage collected.
        self.close()
    def close(self):
        """Close this LocalRepository object, releasing any resources. In
        particular, this closes the rep-cache DB."""
        self.pool.clear()
    def latest_revnum(self):
        """Get the latest revision in the repository"""
        # Delegated to the filesystem-level wrapper.
        return self.fs.latest_revnum()
    def check_path(self, path, rev = None, encoded=False):
        """Check whether the given PATH exists in the specified REV. If REV
        is not specified, look at the latest revision.

        If the path is ...
          ... absent, then we return svn_node_none.
          ... a regular file, then we return svn_node_file.
          ... a directory, then we return svn_node_dir
          ... unknown, then we return svn_node_unknown
        """
        # Unlike RemoteRepository.check_path, URI-encoded paths are not
        # supported here; the parameter exists only for interface parity.
        assert(not encoded)
        root = self.fs.root(rev=rev, pool=self.iterpool)
        try:
            return root.check_path(path)
        finally:
            # Always reclaim the scratch pool used for the root object.
            self.iterpool.clear()
    def uuid(self):
        """Return a universally-unique ID for this repository"""
        return self.fs.uuid()
    def set_rev_prop(self, rev, name, value, author=NULL):
        """Set the NAME property to VALUE in the specified
        REV, attribute the change to AUTHOR if provided."""
        rev = svn_revnum_t(rev)
        # An empty svn_repos_authz_func_t() means no authz check is
        # performed (full administrative access is assumed).
        svn_repos_fs_change_rev_prop2(self, rev, author, name, value,
                                      svn_repos_authz_func_t(),
                                      None, self.iterpool)
        self.iterpool.clear()
    def get_rev_prop(self, rev, name):
        """Returns the value of NAME in REV. If NAME does not exist in REV,
        returns None."""
        rev = svn_revnum_t(rev)
        value = POINTER(svn_string_t)()
        svn_repos_fs_revision_prop(byref(value), self, rev, name,
                                   svn_repos_authz_func_t(), None,
                                   self.iterpool)
        try:
            if value:
                # Convert before the finally-clause clears the pool that
                # owns the underlying svn_string_t.
                return _types.SvnStringPtr.from_param(value)
            else:
                return None
        finally:
            self.iterpool.clear()
    def txn(self):
        """Open up a new transaction, so that you can commit a change
        to the repository"""
        # Commits are attributed to self.user, so it must be present.
        assert self.user is not None, (
            "If you would like to commit changes to the repository, "
            "you must supply a user object when you initialize "
            "the repository object")
        return Txn(self)
    # Private. Produces a delta editor for the commit, so that the Txn
    # class can commit its changes over the RA layer.
    def _get_commit_editor(self, message, commit_callback, commit_baton, pool):
        editor = POINTER(svn_delta_editor_t)()
        editor_baton = c_void_p()
        # Empty repos/base paths: the editor is rooted at the repository
        # root. The commit is attributed to self.user.
        svn_repos_get_commit_editor4(byref(editor),
            byref(editor_baton), self, None, "", "",
            self.user.username(), message,
            commit_callback, commit_baton, svn_repos_authz_callback_t(),
            None, pool)
        return (editor, editor_baton)
    def _relative_path(self, path):
        # For a local repository, paths are already repository-relative.
        return path
    # Private. Convert a repository-relative copyfrom path into a proper
    # copyfrom URI
    def _abs_copyfrom_path(self, path):
        # Local access: the repository-relative path is used as-is.
        return path
    def load(self, dumpfile, feedbackfile=None,
             uuid_action=svn_repos_load_uuid_default, parent_dir="",
             use_pre_commit_hook=False, use_post_commit_hook=False,
             cancel_func=None):
        """Read and parse dumpfile-formatted DUMPFILE, reconstructing
        filesystem revisions. Dumpfile should be an open python file object
        or file like object. UUID will be handled according to UUID_ACTION
        which defaults to svn_repos_load_uuid_default.

        If FEEDBACKFILE is provided (in the form of a python file object or
        file like object), feedback will be sent to it.

        If PARENT_DIR is provided, everything loaded from the dump will be
        reparented to PARENT_DIR.

        USE_PRE_COMMIT_HOOK and USE_POST_COMMIT_HOOK are False by default,
        if either is set to True that hook will be used.

        If CANCEL_FUNC is provided, it will be called at various points to
        allow the operation to be cancelled. The cancel baton will be the
        LocalRepository object."""
        if not cancel_func:
            # A NULL svn_cancel_func_t means the load cannot be cancelled.
            cancel_func = svn_cancel_func_t()
        # Wrap the Python file objects as APR files so the svn stream
        # layer can consume them directly.
        apr_dump = _types.APRFile(dumpfile)
        stream_dump = svn_stream_from_aprfile2(apr_dump._as_parameter_,
                                               False, self.iterpool)
        if feedbackfile:
            apr_feedback = _types.APRFile(feedbackfile)
            stream_feedback = svn_stream_from_aprfile2(
                                  apr_feedback._as_parameter_, False,
                                  self.iterpool)
        else:
            stream_feedback = NULL
        svn_repos_load_fs2(self._as_parameter_, stream_dump, stream_feedback,
                           uuid_action, parent_dir, use_pre_commit_hook,
                           use_post_commit_hook, cancel_func,
                           c_void_p(), self.iterpool)
        # Close the APR wrappers before releasing the scratch pool.
        apr_dump.close()
        if feedbackfile:
            apr_feedback.close()
        self.iterpool.clear()
class _fs(object):
    """NOTE: This is a private class. Don't use it outside of
    this module. Use the Repos class instead.

    This class represents an svn_fs_t object"""

    def __init__(self, repos):
        # Scratch pool for short-lived per-call allocations.
        self.iterpool = Pool()
        # Passed to svn_fs_* functions via the ctypes _as_parameter_
        # protocol.
        self._as_parameter_ = svn_repos_fs(repos)

    def latest_revnum(self):
        """See Repos.latest_revnum"""
        rev = svn_revnum_t()
        svn_fs_youngest_rev(byref(rev), self, self.iterpool)
        self.iterpool.clear()
        return rev.value

    def uuid(self):
        """See Repos.uuid"""
        uuid_buffer = String()
        svn_fs_get_uuid(self, byref(uuid_buffer), self.iterpool)
        # Copy the value out before clearing the pool that backs it.
        uuid_str = str(uuid_buffer)
        self.iterpool.clear()
        return uuid_str

    def root(self, rev = None, txn = None, pool = None,
             iterpool = None):
        """Create a new svn_fs_root_t object from txn or rev.

        If neither txn nor rev are set, this root object will
        point to the latest revision root.

        The svn_fs_root object itself will be allocated in pool.

        If iterpool is supplied, iterpool will be used for any
        temporary allocations. Otherwise, pool will be used for
        temporary allocations."""
        return _fs_root(self, rev, txn, pool, iterpool)
class _fs_root(object):
    """NOTE: This is a private class. Don't use it outside of
    this module. Use the Repos.txn() method instead.

    This class represents an svn_fs_root_t object"""

    def __init__(self, fs, rev = None, txn = None, pool = None,
                 iterpool = None):
        """See _fs.root()"""
        assert(pool)
        self.pool = pool
        # Fall back to pool for temporary allocations when no separate
        # iterpool was supplied.
        self.iterpool = iterpool or pool
        self.fs = fs
        self._as_parameter_ = POINTER(svn_fs_root_t)()
        if txn and rev:
            raise Exception("You can't specify both a txn and a rev")
        if txn:
            svn_fs_txn_root(byref(self._as_parameter_), txn, self.pool)
        else:
            # Bug fix: compare against None explicitly. Revision 0 is a
            # valid revision (the empty repository root); the previous
            # "if not rev" silently replaced it with the latest revision.
            if rev is None:
                rev = fs.latest_revnum()
            svn_fs_revision_root(byref(self._as_parameter_), fs, rev, self.pool)

    def check_path(self, path):
        """Check whether the specified path exists in this root.
        See Repos.check_path() for details."""
        kind = svn_node_kind_t()
        svn_fs_check_path(byref(kind), self, path, self.iterpool)
        return kind.value
class LogEntry(object):
    """A single entry from a repository's commit log.

    REVISION, AUTHOR, DATE, and MESSAGE hold what you expect; DATE is a
    csvn.types.SvnDate object. CHANGED_PATHS is None when no information
    about the paths touched by the revision is available; otherwise it is
    a dictionary mapping every path committed in REVISION to an
    svn_log_changed_path_t pointer."""

    # Fixed attribute set; instances carry no per-object __dict__.
    __slots__ = ('revision', 'author', 'date', 'message', 'changed_paths')
class _LogMessageReceiver(CallbackReceiver):
    """Collects svn log entries from svn_ra_get_log and forwards each
    one as a LogEntry via CallbackReceiver.send()."""

    def collect(self, session, start_rev, end_rev, paths, limit,
                discover_changed_paths, stop_on_copy):
        # Remember whether changed-path info was requested; receive()
        # uses this to decide whether to build the changed_paths dict.
        self.discover_changed_paths = discover_changed_paths

        pool = Pool()
        baton = c_void_p()
        # Hold a local reference to the ctypes callback wrapper so it
        # stays alive for the duration of svn_ra_get_log.
        receiver = svn_log_message_receiver_t(self.receive)
        svn_ra_get_log(session, paths, start_rev, end_rev,
            limit, discover_changed_paths, stop_on_copy, receiver,
            baton, pool)

    def receive(self, baton, changed_paths, revision, author, date, message, pool):
        entry = LogEntry()

        # Save information about the log entry
        entry.revision = revision
        entry.author = str(author)
        entry.date = _types.SvnDate(date)
        entry.message = str(message)
        if self.discover_changed_paths:
            # The hash values are duplicated via svn_log_changed_path_dup —
            # presumably because the originals are owned by the callback's
            # pool; verify against csvn.types.Hash semantics.
            entry.changed_paths = _types.Hash(POINTER(svn_log_changed_path_t),
                changed_paths, dup = svn_log_changed_path_dup)
        else:
            entry.changed_paths = None

        self.send(entry)
| apache-2.0 |
mellanoxbmc/ipmi | swig/python/openipmigui/_mc_pefparm.py | 3 | 7497 | # _mc_pefparm.py
#
# openipmi GUI handling for MC PEF parms
#
# Author: MontaVista Software, Inc.
# Corey Minyard <minyard@mvista.com>
# source@mvista.com
#
# Copyright 2005 MontaVista Software Inc.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
#
# THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free
# Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import sys
import OpenIPMI
import _oi_logging
import gui_errstr
import gui_list
import gui_popup
import gui_setdialog
class MCPEFData:
    """One PEF-parameter row in an MCPefParm list.

    Tracks the pefconfig parameter index, its type ("bool", "enum" or a
    free-form value type) and the current value, and implements the
    per-row popup menu used to edit the value in place."""

    def __init__(self, glist, pefc, parm, aidx, pname, ptype, origval):
        self.glist = glist      # owning gui_list.List widget
        self.pefc = pefc        # pefconfig object holding the value
        self.parm = parm        # pefconfig parameter index
        self.aidx = aidx        # array index within the parameter
        self.pname = pname      # human-readable parameter name
        self.ptype = ptype      # value type string
        self.origval = origval
        self.currval = origval
        return

    def SetItem(self, idx):
        # Remember the row index so later edits can refresh the display.
        self.idx = idx
        return

    def HandleMenu(self, event, idx, point):
        """Pop up the context menu appropriate for this parameter type."""
        if (self.ptype == "bool"):
            menul = [ ("Toggle Value", self.togglevalue) ]
        elif (self.ptype == "enum"):
            # Walk all legal enum values: pefconfig_enum_val stores the
            # next valid value in nval[0], or -1 when exhausted.
            menul = [ ]
            nval = [ 0 ]
            sval = [ "" ]
            val = 0
            while (val != -1):
                rv = OpenIPMI.pefconfig_enum_val(self.parm, val, nval, sval)
                if (rv == 0):
                    menul.append( (sval[0] + " (" + str(val) + ")",
                                   self.setenum,
                                   val) )
                    pass
                val = nval[0]
                pass
            pass
        else:
            menul = [ ("Set Value", self.setvalue) ]
            pass
        gui_popup.popup(self.glist, event, menul, point)
        return

    def ok(self, vals):
        # Called back with the entered values (presumably by
        # gui_setdialog.SetDialog, which is handed self — verify).
        rv = self.pefc.set_val(self.parm, self.aidx, self.ptype, str(vals[0]))
        if (rv != 0):
            self.glist.SetError("Invalid data value: "
                                + OpenIPMI.get_error_string(rv))
            return
        self.currval = vals[0]
        self.glist.SetColumn(self.idx, 1, vals[0])
        return

    def setvalue(self, event):
        gui_setdialog.SetDialog("Set value for " + self.pname,
                                [ self.currval ], 1, self)
        return

    def setenum(self, val):
        """Set an enum parameter to VAL and refresh the displayed label."""
        rv = self.pefc.set_val(self.parm, self.aidx, "integer", str(val))
        if (rv != 0):
            self.glist.SetError("Could not set value to " + str(val) + ": "
                                + OpenIPMI.get_error_string(rv))
            return
        self.currval = val
        nval = [ 0 ]
        sval = [ "" ]
        OpenIPMI.pefconfig_enum_val(self.parm, val, nval, sval)
        # Bug fix: this read "self.glib.SetColumn(...)", but no "glib"
        # attribute exists — the list widget is self.glist, as used by
        # every other method of this class.
        self.glist.SetColumn(self.idx, 1, sval[0])
        return

    def togglevalue(self, event):
        """Flip a boolean parameter between "true" and "false"."""
        if (self.currval == "true"):
            newval = "false"
        else:
            newval = "true"
            pass
        rv = self.pefc.set_val(self.parm, self.aidx, self.ptype, newval)
        if (rv != 0):
            self.glist.SetError("Could not toggle value: "
                                + OpenIPMI.get_error_string(rv))
            return
        self.currval = newval
        self.glist.SetColumn(self.idx, 1, newval)
        return
    pass
class MCPefParm(gui_list.List):
    """List window showing every PEF parameter of a management
    controller, built by walking the pefconfig value table."""

    def __init__(self, m, pef, pefc):
        gui_list.List.__init__(self,
                               "PEFPARMS for " + m.name,
                               [ ("Name", 250), ("Value", 250) ])
        self.pef = pef
        self.pefc = pefc

        # i: pefconfig parameter index; v[0]: array index within the
        # parameter (0 = scalar, -1 = end of array); j: rows added.
        i = 0
        j = 0
        rv = True
        v = [ 0 ]
        while (rv):
            lastv = v[0]
            rv = pefc.get_val(i, v)
            if (rv):
                # get_val returns "name type value" for a valid parm.
                vals = rv.split(" ", 2)
                if (len(vals) == 3):
                    # Valid parm
                    if (vals[1] == "integer"):
                        # An integer parm that supports enum lookup is
                        # presented as an enum.
                        w = [ 0 ]
                        x = [ "" ]
                        err = OpenIPMI.pefconfig_enum_val(i, 0, w, x)
                        if (err != OpenIPMI.enosys):
                            vals[1] = "enum"
                            pass
                        pass
                    data = MCPEFData(self, pefc, i, lastv,
                                     vals[0], vals[1], vals[2])
                    if (v[0] == 0):
                        title = vals[0]
                    else:
                        # Array element: label with its index (or the
                        # symbolic index name when available).
                        x = [ "" ]
                        err = OpenIPMI.pefconfig_enum_idx(i, lastv, x)
                        if (err):
                            title = vals[0] + "[" + str(lastv) + "]"
                        else:
                            title = vals[0] + "[" + x[0] + "]"
                            pass
                        pass
                    if (vals[1] == "enum"):
                        # Show the symbolic name instead of the raw number.
                        nval = [ 0 ]
                        sval = [ "" ]
                        OpenIPMI.pefconfig_enum_val(data.parm, int(vals[2]),
                                                    nval, sval)
                        value = sval[0]
                        pass
                    else:
                        value = vals[2]
                        pass
                    self.add_data(title, [ value ], data)
                    j += 1
                    if (v[0] == 0):
                        i += 1
                        pass
                    if (v[0] == -1):
                        # End of this parameter's array; move to the next.
                        i += 1
                        v[0] = 0
                        pass
                    pass
                else:
                    v[0] = 0
                    i += 1
                    pass
                pass
            pass
        self.AfterDone()
        return

    def save(self):
        """Write the (locked) config back to the MC and close."""
        rv = self.pef.set_config(self.pefc)
        if (rv != 0):
            # NOTE(review): other methods report errors via SetError on the
            # list itself; confirm gui_list.List provides "errstr".
            self.errstr.SetError("Error setting config: "
                                 + OpenIPMI.get_error_string(rv))
            return
        # Don't forget to set self.pef to None when done so OnClose
        # doesn't clear it again
        self.pef = None
        self.Close()
        return

    def cancel(self):
        self.Close()
        return

    def do_on_close(self):
        # Do it here, not in cancel, to handle closing the window without
        # clicking on "save" or "cancel"
        if (self.pef):
            self.pef.clear_lock(self.pefc)
            self.pef = None
            pass
        self.pefc = None
        return
    pass
| gpl-2.0 |
hugobranquinho/ines | ines/view.py | 1 | 9890 | # -*- coding: utf-8 -*-
from os import sep as OS_SEP
from os.path import normcase
from os.path import normpath
from os.path import join as join_path
from os.path import isdir
from os.path import exists
from pkg_resources import resource_exists
from pkg_resources import resource_filename
from pkg_resources import resource_isdir
from pyramid.asset import resolve_asset_spec
from pyramid.config.views import DefaultViewMapper
from pyramid.httpexceptions import HTTPNotFound
from pyramid.static import static_view
from pyramid.response import FileResponse
from pyramid.static import _secure_path
from pyramid.traversal import traversal_path_info
from pyramid.view import view_config as pyramid_view_config
from pyramid.view import view_defaults
from ines.convert import maybe_list
from ines.browser import BrowserDecorator
from ines.views.input import InputSchemaView
from ines.views.output import OutputSchemaView
class view_config(pyramid_view_config):
    """Extension of pyramid's ``view_config`` that optionally wraps views
    with a BrowserDecorator (built from ``browser.*`` settings), rewrites
    package-relative renderer paths in non-production environments, and,
    when used on a class method, derives ``attr`` and ``request_method``
    from the method name (``add`` -> POST, ``update`` -> PUT)."""

    def __call__(self, wrapped):
        settings = self.__dict__.copy()
        depth = settings.pop('_depth', 0)

        def callback(context, name, ob):
            config = context.config.with_package(info.module)

            # Route name may come from this decorator or from a
            # @view_defaults on the decorated class.
            route_name = settings.get('route_name') or getattr(ob, '__view_defaults__', {}).get('route_name')
            if route_name:
                # Build the BrowserDecorator once from the "browser.*"
                # settings and cache it in the registry for reuse.
                browser_constructor = config.registry.settings.get('browser_constructor')
                if not browser_constructor:
                    browser_settings = {
                        key[8:]: value
                        for key, value in config.registry.settings.items()
                        if key.startswith('browser.') and value}
                    if browser_settings:
                        browser_constructor = BrowserDecorator(browser_settings)
                        config.registry.settings['browser_constructor'] = browser_constructor

                if browser_constructor:
                    # Append it after any decorators the caller configured.
                    decorator = maybe_list(settings.pop('decorator', None))
                    decorator.append(browser_constructor)
                    settings['decorator'] = tuple(decorator)

            if not config.is_production_environ:
                # In development, redirect "package:path" renderers into
                # the configured development folder.
                renderer = settings.get('renderer')
                renderer_development_folder = config.settings.get('renderer_development_folder')
                if renderer and renderer_development_folder and ':' in renderer:
                    package_name, path = renderer.split(':', 1)
                    breadcrumbs = path.split(OS_SEP)
                    breadcrumbs[0] = renderer_development_folder
                    settings['renderer'] = '%s:%s' % (package_name, join_path(*breadcrumbs))

            config.add_view(view=ob, **settings)

        info = self.venusian.attach(
            wrapped,
            callback,
            category='pyramid',
            depth=depth + 1)

        if info.scope == 'class':
            # Decorating a method: default the view "attr" to the method
            # name and infer the HTTP method from it.
            if settings.get('attr') is None:
                settings['attr'] = wrapped.__name__

            if 'request_method' not in settings:
                request_method = wrapped.__name__.upper()
                if request_method == 'ADD':
                    request_method = 'POST'
                elif request_method == 'UPDATE':
                    request_method = 'PUT'
                settings['request_method'] = request_method

        settings['_info'] = info.codeinfo
        return wrapped
class api_config(pyramid_view_config):
    """``view_config`` variant for API views: registers optional input and
    output schema views around the wrapped view, defaults the renderer to
    ``json``, and mirrors view_config's development-renderer rewriting and
    method-name -> HTTP-method inference."""

    def __call__(self, wrapped):
        settings = self.__dict__.copy()
        depth = settings.pop('_depth', 0)
        # api_config-specific options, removed before add_view sees them.
        use_fields = settings.pop('use_fields', False)
        input_option = settings.pop('input', None)
        output_option = settings.pop('output', None)
        auto_camelcase = settings.pop('auto_camelcase', True)

        def callback(context, name, ob):
            view_defaults_settings = getattr(ob, '__view_defaults__', {})
            route_name = settings.get('route_name') or view_defaults_settings.get('route_name')
            request_method = settings.get('request_method') or view_defaults_settings.get('request_method')
            # Renderer precedence: decorator, class defaults, then "json".
            renderer = settings['renderer'] = (
                settings.get('renderer')
                or view_defaults_settings.get('renderer')
                or 'json')

            if not context.config.is_production_environ:
                # Development-only renderer path rewrite (same behavior as
                # view_config above).
                renderer = settings.get('renderer')
                renderer_development_folder = context.config.settings.get('renderer_development_folder')
                if renderer and renderer_development_folder and ':' in renderer:
                    package_name, path = renderer.split(':', 1)
                    breadcrumbs = path.split(OS_SEP)
                    breadcrumbs[0] = renderer_development_folder
                    settings['renderer'] = '%s:%s' % (package_name, join_path(*breadcrumbs))

            # Register input schema
            if input_option or use_fields:
                if input_option is not None:
                    if not isinstance(input_option, InputSchemaView):
                        input_view = InputSchemaView(
                            route_name,
                            request_method,
                            renderer,
                            schema=input_option,
                            use_fields=use_fields,
                            auto_camelcase=auto_camelcase)
                    else:
                        input_view = input_option
                else:
                    input_view = InputSchemaView(
                        route_name,
                        request_method,
                        renderer,
                        use_fields=use_fields,
                        auto_camelcase=auto_camelcase)

                # The input view runs as a view decorator.
                decorator = maybe_list(settings.pop('decorator', None))
                decorator.append(input_view)
                settings['decorator'] = tuple(decorator)

                context.config.register_input_schema(input_view, route_name, request_method)

            # Register output schema
            if output_option:
                if not isinstance(output_option, OutputSchemaView):
                    output_view = OutputSchemaView(
                        route_name,
                        request_method,
                        renderer,
                        schema=output_option)
                else:
                    output_view = output_option

                # The output view wraps the mapped view callable, so it is
                # applied through a custom view mapper instead of a
                # decorator.
                previous_mapper = settings.get('mapper', DefaultViewMapper)
                class OutputViewMapper(previous_mapper):
                    def __call__(self, view):
                        view = super(OutputViewMapper, self).__call__(view)
                        return output_view(view)
                settings['mapper'] = OutputViewMapper

                context.config.register_output_schema(output_view, route_name, request_method)

            config = context.config.with_package(info.module)
            config.add_view(view=ob, **settings)

        info = self.venusian.attach(
            wrapped,
            callback,
            category='pyramid',
            depth=depth + 1)

        if info.scope == 'class':
            # Same method-name inference as view_config.
            if settings.get('attr') is None:
                settings['attr'] = wrapped.__name__

            if 'request_method' not in settings:
                request_method = wrapped.__name__.upper()
                if request_method == 'ADD':
                    request_method = 'POST'
                elif request_method == 'UPDATE':
                    request_method = 'PUT'
                settings['request_method'] = request_method

        settings['_info'] = info.codeinfo
        return wrapped
class api_defaults(view_defaults):
    """Class decorator providing default arguments for ``api_config``,
    mirroring pyramid's ``view_defaults``."""

    def __init__(self, **settings):
        super(api_defaults, self).__init__(**settings)
class gzip_static_view(static_view):
    """A pyramid ``static_view`` serving pre-compressed content.

    When the client lists gzip in Accept-Encoding, files are served from
    a parallel gzip docroot (the ``gzip_path`` keyword argument) with
    ``Content-Encoding: gzip``; otherwise the plain docroot is used."""

    def __init__(self, *args, **kwargs):
        gzip_path = kwargs.pop('gzip_path')
        super(gzip_static_view, self).__init__(*args, **kwargs)
        # Resolve the gzip asset spec relative to the same package as the
        # plain docroot, and keep a normalized filesystem form of it.
        package_name, self.gzip_docroot = resolve_asset_spec(gzip_path, self.package_name)
        self.norm_gzip_docroot = normcase(normpath(self.gzip_docroot))

    def __call__(self, context, request):
        if self.use_subpath:
            path_tuple = request.subpath
        else:
            path_tuple = traversal_path_info(request.environ['PATH_INFO'])
        if self.cachebust_match:
            path_tuple = self.cachebust_match(path_tuple)
        path = _secure_path(path_tuple)

        if path is None:
            raise HTTPNotFound('Out of bounds: %s' % request.url)

        use_gzip = 'gzip' in request.accept_encoding
        if self.package_name:  # package resource
            docroot = use_gzip and self.gzip_docroot or self.docroot
            resource_path = '%s/%s' % (docroot.rstrip('/'), path)
            if resource_isdir(self.package_name, resource_path):
                if not request.path_url.endswith('/'):
                    self.add_slash_redirect(request)
                resource_path = '%s/%s' % (resource_path.rstrip('/'), self.index)

            if not resource_exists(self.package_name, resource_path):
                raise HTTPNotFound(request.url)
            filepath = resource_filename(self.package_name, resource_path)
        else:
            # Bug fix: __init__ stores the attribute as "norm_gzip_docroot";
            # this read was "self.gzip_norm_docroot", which raised
            # AttributeError for every filesystem-backed gzip request.
            norm_docroot = use_gzip and self.norm_gzip_docroot or self.norm_docroot
            filepath = normcase(normpath(join_path(norm_docroot, path)))
            if isdir(filepath):
                if not request.path_url.endswith('/'):
                    self.add_slash_redirect(request)
                filepath = join_path(filepath, self.index)
            if not exists(filepath):
                raise HTTPNotFound(request.url)

        response = FileResponse(filepath, request, self.cache_max_age)
        if use_gzip:
            # The body on disk is already compressed; mark it so.
            response.content_encoding = 'gzip'
        return response
| mit |
visualputty/Landing-Lights | django/http/multipartparser.py | 87 | 22774 | """
Multi-part parsing for file uploads.
Exposes one class, ``MultiPartParser``, which feeds chunks of uploaded data to
file upload handlers for processing.
"""
import cgi
from django.conf import settings
from django.core.exceptions import SuspiciousOperation
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_unicode
from django.utils.text import unescape_entities
from django.core.files.uploadhandler import StopUpload, SkipFile, StopFutureHandlers
__all__ = ('MultiPartParser', 'MultiPartParserError', 'InputStreamExhausted')
class MultiPartParserError(Exception):
    """Raised when the multipart payload or its headers are malformed."""
class InputStreamExhausted(Exception):
    """No more reads are allowed from this device."""
# Item types yielded by the multipart parser: raw (unparsed) content,
# an uploaded file part, or an ordinary form field.
RAW = "raw"
FILE = "file"
FIELD = "field"
class MultiPartParser(object):
    """
    A rfc2388 multipart/form-data parser.

    ``MultiValueDict.parse()`` reads the input stream in ``chunk_size`` chunks
    and returns a tuple of ``(MultiValueDict(POST), MultiValueDict(FILES))``.
    """
    def __init__(self, META, input_data, upload_handlers, encoding=None):
        """
        Initialize the MultiPartParser object.

        :META:
            The standard ``META`` dictionary in Django request objects.
        :input_data:
            The raw post data, as a file-like object.
        :upload_handler:
            An UploadHandler instance that performs operations on the uploaded
            data.
        :encoding:
            The encoding with which to treat the incoming data.

        Raises MultiPartParserError when the Content-Type is not multipart,
        the boundary is missing/invalid, or the Content-Length is not a
        positive integer.
        """

        #
        # Content-Type should contain multipart and the boundary information.
        #

        content_type = META.get('HTTP_CONTENT_TYPE', META.get('CONTENT_TYPE', ''))
        if not content_type.startswith('multipart/'):
            raise MultiPartParserError('Invalid Content-Type: %s' % content_type)

        # Parse the header to get the boundary to split the parts.
        ctypes, opts = parse_header(content_type)
        boundary = opts.get('boundary')
        if not boundary or not cgi.valid_boundary(boundary):
            raise MultiPartParserError('Invalid boundary in multipart: %s' % boundary)


        #
        # Content-Length should contain the length of the body we are about
        # to receive.
        #
        try:
            content_length = int(META.get('HTTP_CONTENT_LENGTH', META.get('CONTENT_LENGTH',0)))
        except (ValueError, TypeError):
            # For now set it to 0; we'll try again later on down.
            content_length = 0

        if content_length <= 0:
            # This means we shouldn't continue...raise an error.
            raise MultiPartParserError("Invalid content length: %r" % content_length)

        self._boundary = boundary
        self._input_data = input_data

        # For compatibility with low-level network APIs (with 32-bit integers),
        # the chunk size should be < 2^31, but still divisible by 4.
        possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size]
        self._chunk_size = min([2**31-4] + possible_sizes)

        self._meta = META
        self._encoding = encoding or settings.DEFAULT_CHARSET
        self._content_length = content_length
        self._upload_handlers = upload_handlers

    def parse(self):
        """
        Parse the POST data and break it into a FILES MultiValueDict and a POST
        MultiValueDict.

        Returns a tuple containing the POST and FILES dictionary, respectively.
        """
        # We have to import QueryDict down here to avoid a circular import.
        from django.http import QueryDict

        encoding = self._encoding
        handlers = self._upload_handlers

        limited_input_data = LimitBytes(self._input_data, self._content_length)

        # See if the handler will want to take care of the parsing.
        # This allows overriding everything if somebody wants it.
        for handler in handlers:
            result = handler.handle_raw_input(limited_input_data,
                                              self._meta,
                                              self._content_length,
                                              self._boundary,
                                              encoding)
            if result is not None:
                return result[0], result[1]

        # Create the data structures to be used later.
        self._post = QueryDict('', mutable=True)
        self._files = MultiValueDict()

        # Instantiate the parser and stream:
        stream = LazyStream(ChunkIter(limited_input_data, self._chunk_size))

        # Whether or not to signal a file-completion at the beginning of the loop.
        old_field_name = None
        counters = [0] * len(handlers)

        try:
            for item_type, meta_data, field_stream in Parser(stream, self._boundary):
                if old_field_name:
                    # We run this at the beginning of the next loop
                    # since we cannot be sure a file is complete until
                    # we hit the next boundary/part of the multipart content.
                    self.handle_file_complete(old_field_name, counters)
                    old_field_name = None

                try:
                    disposition = meta_data['content-disposition'][1]
                    field_name = disposition['name'].strip()
                except (KeyError, IndexError, AttributeError):
                    # Parts without a usable Content-Disposition are skipped.
                    continue

                transfer_encoding = meta_data.get('content-transfer-encoding')
                field_name = force_unicode(field_name, encoding, errors='replace')

                if item_type == FIELD:
                    # This is a post field, we can just set it in the post
                    if transfer_encoding == 'base64':
                        raw_data = field_stream.read()
                        try:
                            data = str(raw_data).decode('base64')
                        except:
                            # Undecodable base64 falls back to the raw bytes.
                            data = raw_data
                    else:
                        data = field_stream.read()

                    self._post.appendlist(field_name,
                                          force_unicode(data, encoding, errors='replace'))
                elif item_type == FILE:
                    # This is a file, use the handler...
                    file_name = disposition.get('filename')
                    if not file_name:
                        continue
                    file_name = force_unicode(file_name, encoding, errors='replace')
                    file_name = self.IE_sanitize(unescape_entities(file_name))

                    content_type = meta_data.get('content-type', ('',))[0].strip()
                    content_type_extra = meta_data.get('content-type', (0,{}))[1]
                    if content_type_extra is None:
                        content_type_extra = {}
                    try:
                        charset = content_type_extra.get('charset', None)
                    except:
                        charset = None

                    try:
                        content_length = int(meta_data.get('content-length')[0])
                    except (IndexError, TypeError, ValueError):
                        content_length = None

                    counters = [0] * len(handlers)
                    try:
                        for handler in handlers:
                            try:
                                handler.new_file(field_name, file_name,
                                                 content_type, content_length,
                                                 charset, content_type_extra.copy())
                            except StopFutureHandlers:
                                # This handler claimed the file; don't offer
                                # it to the remaining handlers.
                                break

                        for chunk in field_stream:
                            if transfer_encoding == 'base64':
                                # We only special-case base64 transfer encoding
                                # NOTE(review): decoding per-chunk assumes each
                                # chunk's length is a multiple of 4 — verify.
                                try:
                                    chunk = str(chunk).decode('base64')
                                except Exception, e:
                                    # Since this is only a chunk, any error is an unfixable error.
                                    raise MultiPartParserError("Could not decode base64 data: %r" % e)

                            for i, handler in enumerate(handlers):
                                chunk_length = len(chunk)
                                chunk = handler.receive_data_chunk(chunk,
                                                                   counters[i])
                                counters[i] += chunk_length
                                if chunk is None:
                                    # If the chunk received by the handler is None, then don't continue.
                                    break

                    except SkipFile, e:
                        # Just use up the rest of this file...
                        exhaust(field_stream)
                    else:
                        # Handle file upload completions on next iteration.
                        old_field_name = field_name
                else:
                    # If this is neither a FIELD or a FILE, just exhaust the stream.
                    exhaust(stream)
        except StopUpload, e:
            if not e.connection_reset:
                exhaust(limited_input_data)
        else:
            # Make sure that the request data is all fed
            exhaust(limited_input_data)

        # Signal that the upload has completed.
        for handler in handlers:
            retval = handler.upload_complete()
            if retval:
                break

        return self._post, self._files

    def handle_file_complete(self, old_field_name, counters):
        """
        Handle all the signalling that takes place when a file is complete.
        """
        for i, handler in enumerate(self._upload_handlers):
            file_obj = handler.file_complete(counters[i])
            if file_obj:
                # If it returns a file object, then set the files dict.
                self._files.appendlist(force_unicode(old_field_name,
                                                     self._encoding,
                                                     errors='replace'),
                                       file_obj)
                break

    def IE_sanitize(self, filename):
        """Cleanup filename from Internet Explorer full paths."""
        # IE sends "C:\dir\file.ext"; keep only the final component.
        return filename and filename[filename.rfind("\\")+1:].strip()
class LazyStream(object):
    """
    The LazyStream wrapper allows one to get and "unget" bytes from a stream.

    Given a producer object (an iterator that yields bytestrings), the
    LazyStream object will support iteration, reading, and keeping a "look-back"
    variable in case you need to "unget" some bytes.
    """
    def __init__(self, producer, length=None):
        """
        Every LazyStream must have a producer when instantiated.

        A producer is an iterable that returns a string each time it
        is called.
        """
        self._producer = producer
        self._empty = False
        # Bytes pushed back by unget(), consumed before the producer.
        self._leftover = ''
        self.length = length
        # Byte offset reported by tell(); rewound by unget().
        self.position = 0
        self._remaining = length
        self._unget_history = []

    def tell(self):
        return self.position

    def read(self, size=None):
        def parts():
            # With size given, read at most that many bytes; otherwise
            # read up to self._remaining (which may be None = unbounded).
            remaining = (size is not None and [size] or [self._remaining])[0]
            # do the whole thing in one shot if no limit was provided.
            if remaining is None:
                yield ''.join(self)
                return

            # otherwise do some bookkeeping to return exactly enough
            # of the stream and stashing any extra content we get from
            # the producer
            while remaining != 0:
                assert remaining > 0, 'remaining bytes to read should never go negative'

                chunk = self.next()

                # Emit only what was asked for; push the excess back.
                emitting = chunk[:remaining]
                self.unget(chunk[remaining:])
                remaining -= len(emitting)
                yield emitting

        out = ''.join(parts())
        return out

    def next(self):
        """
        Used when the exact number of bytes to read is unimportant.

        This procedure just returns whatever chunk is conveniently returned
        from the iterator instead. Useful to avoid unnecessary bookkeeping if
        performance is an issue.
        """
        if self._leftover:
            # Serve previously ungotten bytes first.
            output = self._leftover
            self._leftover = ''
        else:
            output = self._producer.next()
            # Fresh data from the producer resets the unget loop detector.
            self._unget_history = []
        self.position += len(output)
        return output

    def close(self):
        """
        Used to invalidate/disable this lazy stream.

        Replaces the producer with an empty list. Any leftover bytes that have
        already been read will still be reported upon read() and/or next().
        """
        self._producer = []

    def __iter__(self):
        return self

    def unget(self, bytes):
        """
        Places bytes back onto the front of the lazy stream.

        Future calls to read() will return those bytes first. The
        stream position and thus tell() will be rewound.
        """
        if not bytes:
            return
        self._update_unget_history(len(bytes))
        self.position -= len(bytes)
        self._leftover = ''.join([bytes, self._leftover])

    def _update_unget_history(self, num_bytes):
        """
        Updates the unget history as a sanity check to see if we've pushed
        back the same number of bytes in one chunk. If we keep ungetting the
        same number of bytes many times (here, 50), we're most likely in an
        infinite loop of some sort. This is usually caused by a
        maliciously-malformed MIME request.
        """
        # Keep the 50 most recent unget sizes.
        self._unget_history = [num_bytes] + self._unget_history[:49]
        number_equal = len([current_number for current_number in self._unget_history
                            if current_number == num_bytes])

        if number_equal > 40:
            raise SuspiciousOperation(
                "The multipart parser got stuck, which shouldn't happen with"
                " normal uploaded files. Check for malicious upload activity;"
                " if there is none, report this to the Django developers."
            )
class ChunkIter(object):
"""
An iterable that will yield chunks of data. Given a file-like object as the
constructor, this object will yield chunks of read operations from that
object.
"""
def __init__(self, flo, chunk_size=64 * 1024):
self.flo = flo
self.chunk_size = chunk_size
def next(self):
try:
data = self.flo.read(self.chunk_size)
except InputStreamExhausted:
raise StopIteration()
if data:
return data
else:
raise StopIteration()
def __iter__(self):
return self
class LimitBytes(object):
    """Wrap a file object, refusing to read past a fixed byte budget."""
    def __init__(self, fileobject, length):
        self._file = fileobject
        self.remaining = length

    def read(self, num_bytes=None):
        """
        Read up to num_bytes (or everything left) from the wrapped file.

        Once the budget is spent, raise InputStreamExhausted instead of
        returning further data.
        """
        if self.remaining <= 0:
            raise InputStreamExhausted()
        size = self.remaining if num_bytes is None else min(num_bytes, self.remaining)
        self.remaining -= size
        return self._file.read(size)
class InterBoundaryIter(object):
    """
    Produce one LazyStream per boundary-delimited section of the stream.
    """
    def __init__(self, stream, boundary):
        self._stream = stream
        self._boundary = boundary

    def next(self):
        try:
            # BoundaryIter raises InputStreamExhausted from its
            # constructor when no data remains.
            section = BoundaryIter(self._stream, self._boundary)
        except InputStreamExhausted:
            raise StopIteration()
        return LazyStream(section)

    def __iter__(self):
        return self
class BoundaryIter(object):
    """
    A Producer that is sensitive to boundaries.

    Will happily yield bytes until a boundary is found. Will yield the bytes
    before the boundary, throw away the boundary bytes themselves, and push the
    post-boundary bytes back on the stream.

    The future calls to .next() after locating the boundary will raise a
    StopIteration exception.
    """

    def __init__(self, stream, boundary):
        self._stream = stream
        self._boundary = boundary
        self._done = False
        # rollback an additional six bytes because the format is like
        # this: CRLF<boundary>[--CRLF]
        self._rollback = len(boundary) + 6

        # Try to use mx fast string search if available. Otherwise
        # use Python find. Wrap the latter for consistency.
        unused_char = self._stream.read(1)
        if not unused_char:
            # Empty stream: signal immediately so InterBoundaryIter stops.
            raise InputStreamExhausted()
        self._stream.unget(unused_char)
        try:
            from mx.TextTools import FS
            self._fs = FS(boundary).find
        except ImportError:
            self._fs = lambda data: data.find(boundary)

    def __iter__(self):
        return self

    def next(self):
        if self._done:
            raise StopIteration()

        stream = self._stream
        rollback = self._rollback

        # Accumulate a little more than `rollback` bytes so a boundary
        # straddling a chunk edge is fully contained in the joined buffer.
        bytes_read = 0
        chunks = []
        for bytes in stream:
            bytes_read += len(bytes)
            chunks.append(bytes)
            if bytes_read > rollback:
                break
            if not bytes:
                break
        else:
            # The stream ran dry; nothing further can follow.
            self._done = True

        if not chunks:
            raise StopIteration()

        chunk = ''.join(chunks)
        boundary = self._find_boundary(chunk, len(chunk) < self._rollback)

        if boundary:
            end, next = boundary
            # Push everything after the boundary back for the next part.
            stream.unget(chunk[next:])
            self._done = True
            return chunk[:end]
        else:
            # make sure we dont treat a partial boundary (and
            # its separators) as data
            if not chunk[:-rollback]:# and len(chunk) >= (len(self._boundary) + 6):
                # There's nothing left, we should just return and mark as done.
                self._done = True
                return chunk
            else:
                # Hold back the last `rollback` bytes: they may contain
                # the start of a boundary split across reads.
                stream.unget(chunk[-rollback:])
                return chunk[:-rollback]

    def _find_boundary(self, data, eof = False):
        """
        Finds a multipart boundary in data.

        Should no boundary exist in the data None is returned instead. Otherwise
        a tuple containing the indices of the following are returned:

         * the end of current encapsulation
         * the start of the next encapsulation
        """
        index = self._fs(data)
        if index < 0:
            return None
        else:
            end = index
            next = index + len(self._boundary)
            # backup over CRLF
            if data[max(0,end-1)] == '\n':
                end -= 1
            if data[max(0,end-1)] == '\r':
                end -= 1
            return end, next
def exhaust(stream_or_iterable):
    """
    Completely exhausts an iterator or stream.

    ``stream_or_iterable`` may be anything accepted by ``iter()``; a
    file-like object without ``__iter__`` is wrapped in a ``ChunkIter``
    and consumed in 16 KB chunks.
    """
    try:
        iterator = iter(stream_or_iterable)
    except TypeError:
        # Not directly iterable -- assume a stream and read it chunk-wise.
        iterator = ChunkIter(stream_or_iterable, 16384)
    # NOTE: the historical `if iterator is None: raise MultiPartParserError`
    # guard was unreachable (both branches above always bind `iterator`)
    # and has been removed; behavior is unchanged.
    for __ in iterator:
        pass
def parse_boundary_stream(stream, max_header_size):
    """
    Parses one and exactly one stream that encapsulates a boundary.

    Returns a ``(TYPE, outdict, stream)`` triple, where ``TYPE`` is one of
    the module-level markers ``RAW``, ``FIELD`` or ``FILE`` and ``outdict``
    maps lower-cased header names to ``(value, params)`` pairs.
    """
    # Stream at beginning of header, look for end of header
    # and parse it if found. The header must fit within one
    # chunk.
    chunk = stream.read(max_header_size)
    # 'find' returns the top of these four bytes, so we'll
    # need to munch them later to prevent them from polluting
    # the payload.
    header_end = chunk.find('\r\n\r\n')
    def _parse_header(line):
        # Split one "Name: value; params" header line.
        main_value_pair, params = parse_header(line)
        try:
            name, value = main_value_pair.split(':', 1)
        except ValueError:
            # Was a bare `except:`; only the unpack above can fail here
            # (no ':' in the line), so catch just ValueError.
            raise ValueError("Invalid header: %r" % line)
        return name, (value, params)
    if header_end == -1:
        # we find no header, so we just mark this fact and pass on
        # the stream verbatim
        stream.unget(chunk)
        return (RAW, {}, stream)
    header = chunk[:header_end]
    # here we place any excess chunk back onto the stream, as
    # well as throwing away the CRLFCRLF bytes from above.
    stream.unget(chunk[header_end + 4:])
    TYPE = RAW
    outdict = {}
    # Eliminate blank lines
    for line in header.split('\r\n'):
        # This terminology ("main value" and "dictionary of
        # parameters") is from the Python docs.
        try:
            name, (value, params) = _parse_header(line)
        except ValueError:
            # Malformed or blank lines are skipped. (Was a bare `except:`,
            # which would also have swallowed KeyboardInterrupt/SystemExit.)
            continue
        if name == 'content-disposition':
            TYPE = FIELD
            if params.get('filename'):
                TYPE = FILE
        outdict[name] = value, params
    if TYPE == RAW:
        # No content-disposition header: push the whole chunk back and
        # report the part as raw data.
        stream.unget(chunk)
    return (TYPE, outdict, stream)
class Parser(object):
    """Iterate over the parts of a multipart stream.

    Yields one (TYPE, headers, stream) triple -- as produced by
    parse_boundary_stream() -- per boundary-delimited part.
    """
    def __init__(self, stream, boundary):
        self._stream = stream
        self._separator = '--' + boundary

    def __iter__(self):
        # Split the raw stream on the separator, then hand each part off
        # to the header parser.
        for part in InterBoundaryIter(self._stream, self._separator):
            yield parse_boundary_stream(part, 1024)
def parse_header(line):
""" Parse the header into a key-value. """
plist = _parse_header_params(';' + line)
key = plist.pop(0).lower()
pdict = {}
for p in plist:
i = p.find('=')
if i >= 0:
name = p[:i].strip().lower()
value = p[i+1:].strip()
if len(value) >= 2 and value[0] == value[-1] == '"':
value = value[1:-1]
value = value.replace('\\\\', '\\').replace('\\"', '"')
pdict[name] = value
return key, pdict
def _parse_header_params(s):
plist = []
while s[:1] == ';':
s = s[1:]
end = s.find(';')
while end > 0 and s.count('"', 0, end) % 2:
end = s.find(';', end + 1)
if end < 0:
end = len(s)
f = s[:end]
plist.append(f.strip())
s = s[end:]
return plist
| bsd-3-clause |
JeyZeta/Dangerous | Dangerous/Golismero/thirdparty_libs/django/template/loaders/cached.py | 98 | 2592 | """
Wrapper class that takes a list of template loaders as an argument and attempts
to load templates from them in order, caching the result.
"""
import hashlib
from django.template.base import TemplateDoesNotExist
from django.template.loader import BaseLoader, get_template_from_string, find_template_loader, make_origin
from django.utils.encoding import force_bytes
class Loader(BaseLoader):
    """Template loader that delegates to a list of other loaders and
    caches the compiled result of each lookup."""
    is_usable = True
    def __init__(self, loaders):
        # `loaders` may contain dotted-path strings; they are resolved
        # lazily by the `loaders` property below.
        self.template_cache = {}
        self._loaders = loaders
        self._cached_loaders = []
    @property
    def loaders(self):
        # Resolve loaders on demand to avoid circular imports
        if not self._cached_loaders:
            # Set self._cached_loaders atomically. Otherwise, another thread
            # could see an incomplete list. See #17303.
            cached_loaders = []
            for loader in self._loaders:
                cached_loaders.append(find_template_loader(loader))
            self._cached_loaders = cached_loaders
        return self._cached_loaders
    def find_template(self, name, dirs=None):
        """Return (template, origin) from the first delegate loader that can
        load *name*; raise TemplateDoesNotExist if none can."""
        for loader in self.loaders:
            try:
                template, display_name = loader(name, dirs)
                return (template, make_origin(display_name, loader, name, dirs))
            except TemplateDoesNotExist:
                pass
        raise TemplateDoesNotExist(name)
    def load_template(self, template_name, template_dirs=None):
        """Load and compile *template_name*, caching the compiled template.

        The cache key is the template name, extended with a SHA1 of the
        directory list when *template_dirs* is given.
        """
        key = template_name
        if template_dirs:
            # If template directories were specified, use a hash to differentiate
            key = '-'.join([template_name, hashlib.sha1(force_bytes('|'.join(template_dirs))).hexdigest()])
        if key not in self.template_cache:
            template, origin = self.find_template(template_name, template_dirs)
            if not hasattr(template, 'render'):
                try:
                    template = get_template_from_string(template, origin, template_name)
                except TemplateDoesNotExist:
                    # If compiling the template we found raises TemplateDoesNotExist,
                    # back off to returning the source and display name for the template
                    # we were asked to load. This allows for correct identification (later)
                    # of the actual template that does not exist.
                    # NOTE: this path deliberately bypasses the cache.
                    return template, origin
            self.template_cache[key] = template
        return self.template_cache[key], None
    def reset(self):
        "Empty the template cache."
        self.template_cache.clear()
| mit |
kontais/EFI-MIPS | ToolKit/cmds/python/Lib/test/bad/test_trace.py | 12 | 17884 | # Testing the line trace facility.
from test import test_support
import unittest
import sys
import difflib
# Each example function below carries an `events` attribute listing the
# (line_offset, event_name) pairs the trace hook is expected to report;
# offsets are relative to the function's own `def` line (co_firstlineno).
# NOTE: these expectations are line-number sensitive -- no lines may be
# added or removed inside the function bodies.
# A very basic example. If this fails, we're in deep trouble.
def basic():
    return 1
basic.events = [(0, 'call'),
                (1, 'line'),
                (1, 'return')]
# Armin Rigo's failing example:
def arigo_example():
    x = 1
    del x
    while 0:
        pass
    x = 1
# The dead `while 0` body (offset 4) is never traced.
arigo_example.events = [(0, 'call'),
                        (1, 'line'),
                        (2, 'line'),
                        (3, 'line'),
                        (5, 'line'),
                        (5, 'return')]
# check that lines consisting of just one instruction get traced:
def one_instr_line():
    x = 1
    del x
    x = 1
one_instr_line.events = [(0, 'call'),
                         (1, 'line'),
                         (2, 'line'),
                         (3, 'line'),
                         (3, 'return')]
def no_pop_tops():      # 0
    x = 1               # 1
    for a in range(2):  # 2
        if a:           # 3
            x = 1       # 4
        else:           # 5
            x = 1       # 6
no_pop_tops.events = [(0, 'call'),
                      (1, 'line'),
                      (2, 'line'),
                      (3, 'line'),
                      (6, 'line'),
                      (2, 'line'),
                      (3, 'line'),
                      (4, 'line'),
                      (2, 'line'),
                      (2, 'return')]
def no_pop_blocks():
    while 0:
        bla
    x = 1
no_pop_blocks.events = [(0, 'call'),
                        (1, 'line'),
                        (3, 'line'),
                        (3, 'return')]
def called(): # line -3
    x = 1
def call():   # line 0
    called()
# `call` is the traced function; `called`, defined three lines earlier,
# therefore shows up with negative offsets.
call.events = [(0, 'call'),
               (1, 'line'),
               (-3, 'call'),
               (-2, 'line'),
               (-2, 'return'),
               (1, 'return')]
def raises():
    raise Exception
def test_raise():
    try:
        raises()
    except Exception, exc:
        x = 1
# Offsets <= -2 belong to the `raises` frame (defined three lines above);
# the exception is then re-reported in the caller at offset 2.
test_raise.events = [(0, 'call'),
                     (1, 'line'),
                     (2, 'line'),
                     (-3, 'call'),
                     (-2, 'line'),
                     (-2, 'exception'),
                     (-2, 'return'),
                     (2, 'exception'),
                     (3, 'line'),
                     (4, 'line'),
                     (4, 'return')]
def _settrace_and_return(tracefunc):
    sys.settrace(tracefunc)
    sys._getframe().f_back.f_trace = tracefunc
def settrace_and_return(tracefunc):
    _settrace_and_return(tracefunc)
# Tracing is switched on *inside* the helper call, so the only event seen
# in the outer frame is its 'return'.
settrace_and_return.events = [(1, 'return')]
def _settrace_and_raise(tracefunc):
    sys.settrace(tracefunc)
    sys._getframe().f_back.f_trace = tracefunc
    raise RuntimeError
def settrace_and_raise(tracefunc):
    try:
        _settrace_and_raise(tracefunc)
    except RuntimeError, exc:
        pass
settrace_and_raise.events = [(2, 'exception'),
                             (3, 'line'),
                             (4, 'line'),
                             (4, 'return')]
# implicit return example
def ireturn_example():
    a = 5
    b = 5
    if a == b:
        b = a+1
    else:
        pass
ireturn_example.events = [(0, 'call'),
                          (1, 'line'),
                          (2, 'line'),
                          (3, 'line'),
                          (4, 'line'),
                          (4, 'return')]
# Tight loop with while(1) example (SF #765624)
def tightloop_example():
    items = range(0, 3)
    try:
        i = 0
        while 1:
            b = items[i]; i+=1
    except IndexError:
        pass
# The body line fires once per iteration (3 hits + the one that raises).
tightloop_example.events = [(0, 'call'),
                            (1, 'line'),
                            (2, 'line'),
                            (3, 'line'),
                            (4, 'line'),
                            (5, 'line'),
                            (5, 'line'),
                            (5, 'line'),
                            (5, 'line'),
                            (5, 'exception'),
                            (6, 'line'),
                            (7, 'line'),
                            (7, 'return')]
def tighterloop_example():
    items = range(1, 4)
    try:
        i = 0
        while 1: i = items[i]
    except IndexError:
        pass
tighterloop_example.events = [(0, 'call'),
                              (1, 'line'),
                              (2, 'line'),
                              (3, 'line'),
                              (4, 'line'),
                              (4, 'line'),
                              (4, 'line'),
                              (4, 'line'),
                              (4, 'exception'),
                              (5, 'line'),
                              (6, 'line'),
                              (6, 'return')]
class Tracer:
    """Trace hook that records every (lineno, event) pair it sees."""
    def __init__(self):
        self.events = []
    def trace(self, frame, event, arg):
        # Record absolute line numbers; the tests rebase them against
        # co_firstlineno before comparing.
        self.events.append((frame.f_lineno, event))
        return self.trace
class TraceTestCase(unittest.TestCase):
    """Run each example function under Tracer and compare the recorded
    events with the function's `events` attribute."""
    def compare_events(self, line_offset, events, expected_events):
        # Rebase absolute line numbers to offsets from the def line.
        events = [(l - line_offset, e) for (l, e) in events]
        if events != expected_events:
            self.fail(
                "events did not match expectation:\n" +
                "\n".join(difflib.ndiff(map(str, expected_events),
                                        map(str, events))))
    def run_test(self, func):
        # Install the tracer around a plain call to *func*.
        tracer = Tracer()
        sys.settrace(tracer.trace)
        func()
        sys.settrace(None)
        self.compare_events(func.func_code.co_firstlineno,
                            tracer.events, func.events)
    def run_test2(self, func):
        # Variant for functions that install the trace hook themselves.
        tracer = Tracer()
        func(tracer.trace)
        sys.settrace(None)
        self.compare_events(func.func_code.co_firstlineno,
                            tracer.events, func.events)
    def test_01_basic(self):
        self.run_test(basic)
    def test_02_arigo(self):
        self.run_test(arigo_example)
    def test_03_one_instr(self):
        self.run_test(one_instr_line)
    def test_04_no_pop_blocks(self):
        self.run_test(no_pop_blocks)
    def test_05_no_pop_tops(self):
        self.run_test(no_pop_tops)
    def test_06_call(self):
        self.run_test(call)
    def test_07_raise(self):
        self.run_test(test_raise)
    def test_08_settrace_and_return(self):
        self.run_test2(settrace_and_return)
    def test_09_settrace_and_raise(self):
        self.run_test2(settrace_and_raise)
    def test_10_ireturn(self):
        self.run_test(ireturn_example)
    def test_11_tightloop(self):
        self.run_test(tightloop_example)
    def test_12_tighterloop(self):
        self.run_test(tighterloop_example)
class RaisingTraceFuncTestCase(unittest.TestCase):
    """Check that an exception raised *by the trace function itself* is
    handled cleanly (no recursion-counter corruption, no segfault)."""
    def trace(self, frame, event, arg):
        """A trace function that raises an exception in response to a
        specific trace event."""
        if event == self.raiseOnEvent:
            raise ValueError # just something that isn't RuntimeError
        else:
            return self.trace
    def f(self):
        """The function to trace; raises an exception if that's the case
        we're testing, so that the 'exception' trace event fires."""
        if self.raiseOnEvent == 'exception':
            x = 0
            y = 1/x
        else:
            return 1
    def run_test_for_event(self, event):
        """Tests that an exception raised in response to the given event is
        handled OK."""
        self.raiseOnEvent = event
        try:
            # Loop more times than the recursion limit: if the interpreter
            # failed to reset its recursion counter we'd see RuntimeError.
            for i in xrange(sys.getrecursionlimit() + 1):
                sys.settrace(self.trace)
                try:
                    self.f()
                except ValueError:
                    pass
                else:
                    self.fail("exception not thrown!")
        except RuntimeError:
            self.fail("recursion counter not reset")
    # Test the handling of exceptions raised by each kind of trace event.
    def test_call(self):
        self.run_test_for_event('call')
    def test_line(self):
        self.run_test_for_event('line')
    def test_return(self):
        self.run_test_for_event('return')
    def test_exception(self):
        self.run_test_for_event('exception')
    # Regression test: raising from a 'line' event must not trash the
    # interpreter stack (the real check is that gc.collect() below does
    # not segfault).
    def test_trash_stack(self):
        def f():
            for i in range(5):
                print i # line tracing will raise an exception at this line
        def g(frame, why, extra):
            if (why == 'line' and
                frame.f_lineno == f.func_code.co_firstlineno + 2):
                raise RuntimeError, "i am crashing"
            return g
        sys.settrace(g)
        try:
            f()
        except RuntimeError:
            # the test is really that this doesn't segfault:
            import gc
            gc.collect()
        else:
            self.fail("exception not propagated")
# 'Jump' tests: assigning to frame.f_lineno within a trace function
# moves the execution position - it's how debuggers implement a Jump
# command (aka. "Set next statement").
class JumpTracer:
    """Defines a trace function that jumps from one place to another,
    with the source and destination lines of the jump being defined by
    the 'jump' property of the function under test."""
    def __init__(self, function):
        self.function = function
        self.jumpFrom = function.jump[0]
        self.jumpTo = function.jump[1]
        self.done = False
    def trace(self, frame, event, arg):
        # Only fire once, and only in the frame of the function under test.
        if not self.done and frame.f_code == self.function.func_code:
            firstLine = frame.f_code.co_firstlineno
            if frame.f_lineno == firstLine + self.jumpFrom:
                # Cope with non-integer self.jumpTo (because of
                # no_jump_to_non_integers below).
                try:
                    frame.f_lineno = firstLine + self.jumpTo
                except TypeError:
                    frame.f_lineno = self.jumpTo
                self.done = True
        return self.trace
# Each function's `jump` attribute is (source_offset, dest_offset) relative
# to its `def` line, and `output` is the expected list contents after
# running under JumpTracer.  Line-number sensitive: no lines may be added
# or removed inside the function bodies.
# The first set of 'jump' tests are for things that are allowed:
def jump_simple_forwards(output):
    output.append(1)
    output.append(2)
    output.append(3)
jump_simple_forwards.jump = (1, 3)
jump_simple_forwards.output = [3]
def jump_simple_backwards(output):
    output.append(1)
    output.append(2)
jump_simple_backwards.jump = (2, 1)
jump_simple_backwards.output = [1, 1, 2]
def jump_out_of_block_forwards(output):
    for i in 1, 2:
        output.append(2)
        for j in [3]: # Also tests jumping over a block
            output.append(4)
    output.append(5)
jump_out_of_block_forwards.jump = (3, 5)
jump_out_of_block_forwards.output = [2, 5]
def jump_out_of_block_backwards(output):
    output.append(1)
    for i in [1]:
        output.append(3)
        for j in [2]: # Also tests jumping over a block
            output.append(5)
        output.append(6)
    output.append(7)
jump_out_of_block_backwards.jump = (6, 1)
jump_out_of_block_backwards.output = [1, 3, 5, 1, 3, 5, 6, 7]
def jump_to_codeless_line(output):
    output.append(1)
    # Jumping to this line should skip to the next one.
    output.append(3)
jump_to_codeless_line.jump = (1, 2)
jump_to_codeless_line.output = [3]
def jump_to_same_line(output):
    output.append(1)
    output.append(2)
    output.append(3)
jump_to_same_line.jump = (2, 2)
jump_to_same_line.output = [1, 2, 3]
# Tests jumping within a finally block, and over one.
def jump_in_nested_finally(output):
    try:
        output.append(2)
    finally:
        output.append(4)
        try:
            output.append(6)
        finally:
            output.append(8)
        output.append(9)
jump_in_nested_finally.jump = (4, 9)
jump_in_nested_finally.output = [2, 9]
# The second set of 'jump' tests are for things that are not allowed:
# here the jump attempt must raise ValueError inside the tracer, and the
# trailing True in each `output` records that the error message matched.
def no_jump_too_far_forwards(output):
    try:
        output.append(2)
        output.append(3)
    except ValueError, e:
        output.append('after' in str(e))
no_jump_too_far_forwards.jump = (3, 6)
no_jump_too_far_forwards.output = [2, True]
def no_jump_too_far_backwards(output):
    try:
        output.append(2)
        output.append(3)
    except ValueError, e:
        output.append('before' in str(e))
no_jump_too_far_backwards.jump = (3, -1)
no_jump_too_far_backwards.output = [2, True]
# Test each kind of 'except' line.
def no_jump_to_except_1(output):
    try:
        output.append(2)
    except:
        e = sys.exc_info()[1]
        output.append('except' in str(e))
no_jump_to_except_1.jump = (2, 3)
no_jump_to_except_1.output = [True]
def no_jump_to_except_2(output):
    try:
        output.append(2)
    except ValueError:
        e = sys.exc_info()[1]
        output.append('except' in str(e))
no_jump_to_except_2.jump = (2, 3)
no_jump_to_except_2.output = [True]
def no_jump_to_except_3(output):
    try:
        output.append(2)
    except ValueError, e:
        output.append('except' in str(e))
no_jump_to_except_3.jump = (2, 3)
no_jump_to_except_3.output = [True]
def no_jump_to_except_4(output):
    try:
        output.append(2)
    except (ValueError, RuntimeError), e:
        output.append('except' in str(e))
no_jump_to_except_4.jump = (2, 3)
no_jump_to_except_4.output = [True]
def no_jump_forwards_into_block(output):
    try:
        output.append(2)
        for i in 1, 2:
            output.append(4)
    except ValueError, e:
        output.append('into' in str(e))
no_jump_forwards_into_block.jump = (2, 4)
no_jump_forwards_into_block.output = [True]
def no_jump_backwards_into_block(output):
    try:
        for i in 1, 2:
            output.append(3)
        output.append(4)
    except ValueError, e:
        output.append('into' in str(e))
no_jump_backwards_into_block.jump = (4, 3)
no_jump_backwards_into_block.output = [3, 3, True]
def no_jump_into_finally_block(output):
    try:
        try:
            output.append(3)
            x = 1
        finally:
            output.append(6)
    except ValueError, e:
        output.append('finally' in str(e))
no_jump_into_finally_block.jump = (4, 6)
no_jump_into_finally_block.output = [3, 6, True] # The 'finally' still runs
def no_jump_out_of_finally_block(output):
    try:
        try:
            output.append(3)
        finally:
            output.append(5)
            output.append(6)
    except ValueError, e:
        output.append('finally' in str(e))
no_jump_out_of_finally_block.jump = (5, 1)
no_jump_out_of_finally_block.output = [3, True]
# This verifies the line-numbers-must-be-integers rule.
def no_jump_to_non_integers(output):
    try:
        output.append(2)
    except ValueError, e:
        output.append('integer' in str(e))
no_jump_to_non_integers.jump = (2, "Spam")
no_jump_to_non_integers.output = [True]
# This verifies that you can't set f_lineno via _getframe or similar
# trickery.
def no_jump_without_trace_function():
    try:
        previous_frame = sys._getframe().f_back
        previous_frame.f_lineno = previous_frame.f_lineno
    except ValueError, e:
        # This is the exception we wanted; make sure the error message
        # talks about trace functions.
        if 'trace' not in str(e):
            raise
    else:
        # Something's wrong - the expected exception wasn't raised.
        raise RuntimeError, "Trace-function-less jump failed to fail"
class JumpTestCase(unittest.TestCase):
    """Run each jump example under JumpTracer and compare the collected
    output list against the function's `output` attribute."""
    def compare_jump_output(self, expected, received):
        if received != expected:
            self.fail( "Outputs don't match:\n" +
                       "Expected: " + repr(expected) + "\n" +
                       "Received: " + repr(received))
    def run_test(self, func):
        # Trace *func* with a tracer configured from func.jump.
        tracer = JumpTracer(func)
        sys.settrace(tracer.trace)
        output = []
        func(output)
        sys.settrace(None)
        self.compare_jump_output(func.output, output)
    def test_01_jump_simple_forwards(self):
        self.run_test(jump_simple_forwards)
    def test_02_jump_simple_backwards(self):
        self.run_test(jump_simple_backwards)
    def test_03_jump_out_of_block_forwards(self):
        self.run_test(jump_out_of_block_forwards)
    def test_04_jump_out_of_block_backwards(self):
        self.run_test(jump_out_of_block_backwards)
    def test_05_jump_to_codeless_line(self):
        self.run_test(jump_to_codeless_line)
    def test_06_jump_to_same_line(self):
        self.run_test(jump_to_same_line)
    def test_07_jump_in_nested_finally(self):
        self.run_test(jump_in_nested_finally)
    def test_08_no_jump_too_far_forwards(self):
        self.run_test(no_jump_too_far_forwards)
    def test_09_no_jump_too_far_backwards(self):
        self.run_test(no_jump_too_far_backwards)
    def test_10_no_jump_to_except_1(self):
        self.run_test(no_jump_to_except_1)
    def test_11_no_jump_to_except_2(self):
        self.run_test(no_jump_to_except_2)
    def test_12_no_jump_to_except_3(self):
        self.run_test(no_jump_to_except_3)
    def test_13_no_jump_to_except_4(self):
        self.run_test(no_jump_to_except_4)
    def test_14_no_jump_forwards_into_block(self):
        self.run_test(no_jump_forwards_into_block)
    def test_15_no_jump_backwards_into_block(self):
        self.run_test(no_jump_backwards_into_block)
    def test_16_no_jump_into_finally_block(self):
        self.run_test(no_jump_into_finally_block)
    def test_17_no_jump_out_of_finally_block(self):
        self.run_test(no_jump_out_of_finally_block)
    def test_18_no_jump_to_non_integers(self):
        self.run_test(no_jump_to_non_integers)
    def test_19_no_jump_without_trace_function(self):
        # This one needs no tracer at all -- the function self-checks.
        no_jump_without_trace_function()
def test_main():
    # Entry point used by regrtest: run all three test case classes.
    test_support.run_unittest(
        TraceTestCase,
        RaisingTraceFuncTestCase,
        JumpTestCase
    )
if __name__ == "__main__":
    test_main()
| bsd-3-clause |
m1trix/Tetris-Wars | tetris_wars/sdl2/audio.py | 1 | 7936 | import sys
from ctypes import Structure, c_int, c_char_p, c_double, c_void_p, CFUNCTYPE, \
POINTER
from .dll import _bind
from .endian import SDL_BYTEORDER, SDL_LIL_ENDIAN
from .stdinc import Uint8, Uint16, Uint32
from .rwops import SDL_RWops, SDL_RWFromFile
__all__ = ["SDL_AudioFormat", "SDL_AUDIO_MASK_BITSIZE",
           "SDL_AUDIO_MASK_DATATYPE", "SDL_AUDIO_MASK_ENDIAN",
           "SDL_AUDIO_MASK_SIGNED", "SDL_AUDIO_BITSIZE", "SDL_AUDIO_ISFLOAT",
           "SDL_AUDIO_ISBIGENDIAN", "SDL_AUDIO_ISSIGNED", "SDL_AUDIO_ISINT",
           "SDL_AUDIO_ISLITTLEENDIAN", "SDL_AUDIO_ISUNSIGNED", "AUDIO_U8",
           "AUDIO_S8", "AUDIO_U16LSB", "AUDIO_S16LSB", "AUDIO_U16MSB",
           "AUDIO_S16MSB", "AUDIO_U16", "AUDIO_S16", "AUDIO_S32LSB",
           # BUGFIX: the last entry on the next line used to duplicate
           # "AUDIO_S32MSB", leaving AUDIO_F32MSB (defined below in this
           # module) out of the public API.
           "AUDIO_S32MSB", "AUDIO_S32", "AUDIO_F32LSB", "AUDIO_F32MSB",
           "AUDIO_F32", "AUDIO_U16SYS", "AUDIO_S16SYS", "AUDIO_S32SYS", "AUDIO_FORMATS",
           "AUDIO_F32SYS", "SDL_AUDIO_ALLOW_FREQUENCY_CHANGE",
           "SDL_AUDIO_ALLOW_FORMAT_CHANGE", "SDL_AUDIO_ALLOW_CHANNELS_CHANGE",
           "SDL_AUDIO_ALLOW_ANY_CHANGE", "SDL_AudioCallback", "SDL_AudioSpec",
           "SDL_AudioCVT", "SDL_AudioFilter", "SDL_GetNumAudioDrivers",
           "SDL_GetAudioDriver", "SDL_AudioInit", "SDL_AudioQuit",
           "SDL_GetCurrentAudioDriver", "SDL_OpenAudio", "SDL_AudioDeviceID",
           "SDL_GetNumAudioDevices", "SDL_GetAudioDeviceName",
           "SDL_OpenAudioDevice", "SDL_AUDIO_STOPPED", "SDL_AUDIO_PLAYING",
           "SDL_AUDIO_PAUSED", "SDL_AudioStatus", "SDL_GetAudioStatus",
           "SDL_GetAudioDeviceStatus", "SDL_PauseAudio", "SDL_PauseAudioDevice",
           "SDL_LoadWAV_RW", "SDL_LoadWAV", "SDL_FreeWAV", "SDL_BuildAudioCVT",
           "SDL_ConvertAudio", "SDL_MIX_MAXVOLUME", "SDL_MixAudio",
           "SDL_MixAudioFormat", "SDL_LockAudio", "SDL_LockAudioDevice",
           "SDL_UnlockAudio", "SDL_UnlockAudioDevice", "SDL_CloseAudio",
           "SDL_CloseAudioDevice"
           ]
# An SDL_AudioFormat is a 16 bit value: the low byte holds the sample bit
# size; bit 8 flags float samples, bit 12 big-endian byte order and
# bit 15 signed samples.
SDL_AudioFormat = Uint16
SDL_AUDIO_MASK_BITSIZE = 0xFF
SDL_AUDIO_MASK_DATATYPE = 1 << 8
SDL_AUDIO_MASK_ENDIAN = 1 << 12
SDL_AUDIO_MASK_SIGNED = 1 << 15
# Helpers that pick the individual properties back out of a format value.
SDL_AUDIO_BITSIZE = lambda x: (x & SDL_AUDIO_MASK_BITSIZE)
SDL_AUDIO_ISFLOAT = lambda x: (x & SDL_AUDIO_MASK_DATATYPE)
SDL_AUDIO_ISBIGENDIAN = lambda x: (x & SDL_AUDIO_MASK_ENDIAN)
SDL_AUDIO_ISSIGNED = lambda x: (x & SDL_AUDIO_MASK_SIGNED)
SDL_AUDIO_ISINT = lambda x: (not SDL_AUDIO_ISFLOAT(x))
SDL_AUDIO_ISLITTLEENDIAN = lambda x: (not SDL_AUDIO_ISBIGENDIAN(x))
SDL_AUDIO_ISUNSIGNED = lambda x: (not SDL_AUDIO_ISSIGNED(x))
# Concrete formats: U/S = unsigned/signed, number = bits per sample,
# LSB/MSB = little/big-endian, F = float.
AUDIO_U8 = 0x0008
AUDIO_S8 = 0x8008
AUDIO_U16LSB = 0x0010
AUDIO_S16LSB = 0x8010
AUDIO_U16MSB = 0x1010
AUDIO_S16MSB = 0x9010
AUDIO_U16 = AUDIO_U16LSB
AUDIO_S16 = AUDIO_S16LSB
AUDIO_S32LSB = 0x8020
AUDIO_S32MSB = 0x9020
AUDIO_S32 = AUDIO_S32LSB
AUDIO_F32LSB = 0x8120
AUDIO_F32MSB = 0x9120
AUDIO_F32 = AUDIO_F32LSB
# All of the audio formats should be in this set which is provided as a
# convenience to the end user for purposes of iteration and validation.
# (is the provided audio format in the supported set?)
AUDIO_FORMATS = set([AUDIO_U8, AUDIO_S8, AUDIO_U16LSB, AUDIO_S16LSB,
                     AUDIO_U16MSB, AUDIO_S16MSB, AUDIO_U16, AUDIO_S16,
                     AUDIO_S32LSB, AUDIO_S32MSB, AUDIO_S32, AUDIO_F32LSB,
                     AUDIO_F32MSB, AUDIO_F32])
# Native-endian aliases, chosen from the host byte order.
if SDL_BYTEORDER == SDL_LIL_ENDIAN:
    AUDIO_U16SYS = AUDIO_U16LSB
    AUDIO_S16SYS = AUDIO_S16LSB
    AUDIO_S32SYS = AUDIO_S32LSB
    AUDIO_F32SYS = AUDIO_F32LSB
else:
    AUDIO_U16SYS = AUDIO_U16MSB
    AUDIO_S16SYS = AUDIO_S16MSB
    AUDIO_S32SYS = AUDIO_S32MSB
    AUDIO_F32SYS = AUDIO_F32MSB
# Flags for SDL_OpenAudioDevice(): which deviations from the requested
# spec the caller is willing to accept.
SDL_AUDIO_ALLOW_FREQUENCY_CHANGE = 0x00000001
SDL_AUDIO_ALLOW_FORMAT_CHANGE = 0x00000002
SDL_AUDIO_ALLOW_CHANNELS_CHANGE = 0x00000004
SDL_AUDIO_ALLOW_ANY_CHANGE = (SDL_AUDIO_ALLOW_FREQUENCY_CHANGE |
                              SDL_AUDIO_ALLOW_FORMAT_CHANGE |
                              SDL_AUDIO_ALLOW_CHANNELS_CHANGE)
# C signature: void SDL_AudioCallback(void *userdata, Uint8 *stream, int len)
SDL_AudioCallback = CFUNCTYPE(None, c_void_p, POINTER(Uint8), c_int)
class SDL_AudioSpec(Structure):
    """ctypes mirror of the C ``SDL_AudioSpec`` struct: the desired or
    obtained parameters of an audio device/stream."""
    # Field order and types must match the C struct exactly.
    _fields_ = [("freq", c_int),
                ("format", SDL_AudioFormat),
                ("channels", Uint8),
                ("silence", Uint8),
                ("samples", Uint16),
                ("padding", Uint16),
                ("size", Uint32),
                ("callback", SDL_AudioCallback),
                ("userdata", c_void_p)
                ]
    def __init__(self, freq, aformat, channels, samples,
                 callback=SDL_AudioCallback(), userdata=c_void_p(0)):
        """Populate the caller-supplied fields; ``silence``, ``padding``
        and ``size`` are not set here (presumably filled in by SDL when a
        device is opened -- confirm against SDL docs)."""
        super(SDL_AudioSpec, self).__init__()
        self.freq = freq
        self.format = aformat
        self.channels = channels
        self.samples = samples
        self.callback = callback
        self.userdata = userdata
class SDL_AudioCVT(Structure):
    # Fields are assigned below, after SDL_AudioFilter exists, because the
    # filter callback type refers back to this struct.
    pass
# C signature: void SDL_AudioFilter(SDL_AudioCVT *cvt, SDL_AudioFormat fmt)
SDL_AudioFilter = CFUNCTYPE(POINTER(SDL_AudioCVT), SDL_AudioFormat)
# HACK: hack for an IronPython 2.7.2.1+ issue:
# ptrarray = (CFUNCTYPE() * int)
# is not supported properly
if sys.platform == "cli":
    _X_SDL_AudioFilter = POINTER(SDL_AudioFilter)
else:
    _X_SDL_AudioFilter = SDL_AudioFilter
# Deferred field assignment; order/types must match the C SDL_AudioCVT.
SDL_AudioCVT._fields_ = [("needed", c_int),
                         ("src_format", SDL_AudioFormat),
                         ("dst_format", SDL_AudioFormat),
                         ("rate_incr", c_double),
                         ("buf", POINTER(Uint8)),
                         ("len", c_int),
                         ("len_cvt", c_int),
                         ("len_mult", c_int),
                         ("len_ratio", c_double),
                         ("filters", (_X_SDL_AudioFilter * 10)),
                         ("filter_index", c_int)
                         ]
# Thin ctypes bindings: each _bind(name, argtypes, restype) resolves the
# C symbol of the same name from the loaded SDL2 library.
SDL_GetNumAudioDrivers = _bind("SDL_GetNumAudioDrivers", None, c_int)
SDL_GetAudioDriver = _bind("SDL_GetAudioDriver", [c_int], c_char_p)
SDL_AudioInit = _bind("SDL_AudioInit", [c_char_p], c_int)
SDL_AudioQuit = _bind("SDL_AudioQuit")
SDL_GetCurrentAudioDriver = _bind("SDL_GetCurrentAudioDriver", None, c_char_p)
SDL_OpenAudio = _bind("SDL_OpenAudio", [POINTER(SDL_AudioSpec), POINTER(SDL_AudioSpec)], c_int)
SDL_AudioDeviceID = Uint32
SDL_GetNumAudioDevices = _bind("SDL_GetNumAudioDevices", [c_int], c_int)
SDL_GetAudioDeviceName = _bind("SDL_GetAudioDeviceName", [c_int, c_int], c_char_p)
SDL_OpenAudioDevice = _bind("SDL_OpenAudioDevice", [c_char_p, c_int, POINTER(SDL_AudioSpec), POINTER(SDL_AudioSpec), c_int], SDL_AudioDeviceID)
# SDL_AudioStatus enumeration values.
SDL_AUDIO_STOPPED = 0
SDL_AUDIO_PLAYING = 1
SDL_AUDIO_PAUSED = 2
SDL_AudioStatus = c_int
SDL_GetAudioStatus = _bind("SDL_GetAudioStatus", None, SDL_AudioStatus)
SDL_GetAudioDeviceStatus = _bind("SDL_GetAudioDeviceStatus", [SDL_AudioDeviceID], SDL_AudioStatus)
SDL_PauseAudio = _bind("SDL_PauseAudio", [c_int])
SDL_PauseAudioDevice = _bind("SDL_PauseAudioDevice", [SDL_AudioDeviceID, c_int])
SDL_LoadWAV_RW = _bind("SDL_LoadWAV_RW", [POINTER(SDL_RWops), c_int, POINTER(SDL_AudioSpec), POINTER(POINTER(Uint8)), POINTER(Uint32)], POINTER(SDL_AudioSpec))
# Convenience wrapper mirroring the C SDL_LoadWAV macro (freesrc=1 so the
# RWops is closed for us).
SDL_LoadWAV = lambda f, s, ab, al: SDL_LoadWAV_RW(SDL_RWFromFile(f, b"rb"), 1, s, ab , al)
SDL_FreeWAV = _bind("SDL_FreeWAV", [POINTER(Uint8)])
SDL_BuildAudioCVT = _bind("SDL_BuildAudioCVT", [POINTER(SDL_AudioCVT), SDL_AudioFormat, Uint8, c_int, SDL_AudioFormat, Uint8, c_int], c_int)
SDL_ConvertAudio = _bind("SDL_ConvertAudio", [POINTER(SDL_AudioCVT)], c_int)
SDL_MIX_MAXVOLUME = 128
SDL_MixAudio = _bind("SDL_MixAudio", [POINTER(Uint8), POINTER(Uint8), Uint32, c_int])
SDL_MixAudioFormat = _bind("SDL_MixAudioFormat", [POINTER(Uint8), POINTER(Uint8), SDL_AudioFormat, Uint32, c_int])
SDL_LockAudio = _bind("SDL_LockAudio")
SDL_LockAudioDevice = _bind("SDL_LockAudioDevice", [SDL_AudioDeviceID])
SDL_UnlockAudio = _bind("SDL_UnlockAudio")
SDL_UnlockAudioDevice = _bind("SDL_UnlockAudioDevice", [SDL_AudioDeviceID])
SDL_CloseAudio = _bind("SDL_CloseAudio")
SDL_CloseAudioDevice = _bind("SDL_CloseAudioDevice", [SDL_AudioDeviceID])
| gpl-2.0 |
asnir/airflow | airflow/operators/s3_to_hive_operator.py | 7 | 10569 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import next
from builtins import zip
import logging
from tempfile import NamedTemporaryFile
from airflow.utils.file import TemporaryDirectory
import gzip
import bz2
import tempfile
import os
from airflow.exceptions import AirflowException
from airflow.hooks.S3_hook import S3Hook
from airflow.hooks.hive_hooks import HiveCliHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.utils.compression import uncompress_file
class S3ToHiveTransfer(BaseOperator):
    """
    Moves data from S3 to Hive. The operator downloads a file from S3,
    stores the file locally before loading it into a Hive table.
    If the ``create`` or ``recreate`` arguments are set to ``True``,
    a ``CREATE TABLE`` and ``DROP TABLE`` statements are generated.
    Hive data types are inferred from the cursor's metadata from.
    Note that the table generated in Hive uses ``STORED AS textfile``
    which isn't the most efficient serialization format. If a
    large amount of data is loaded and/or if the tables gets
    queried considerably, you may want to use this operator only to
    stage the data into a temporary table before loading it into its
    final destination using a ``HiveOperator``.
    :param s3_key: The key to be retrieved from S3
    :type s3_key: str
    :param field_dict: A dictionary of the fields name in the file
        as keys and their Hive types as values
    :type field_dict: dict
    :param hive_table: target Hive table, use dot notation to target a
        specific database
    :type hive_table: str
    :param create: whether to create the table if it doesn't exist
    :type create: bool
    :param recreate: whether to drop and recreate the table at every
        execution
    :type recreate: bool
    :param partition: target partition as a dict of partition columns
        and values
    :type partition: dict
    :param headers: whether the file contains column names on the first
        line
    :type headers: bool
    :param check_headers: whether the column names on the first line should be
        checked against the keys of field_dict
    :type check_headers: bool
    :param wildcard_match: whether the s3_key should be interpreted as a Unix
        wildcard pattern
    :type wildcard_match: bool
    :param delimiter: field delimiter in the file
    :type delimiter: str
    :param s3_conn_id: source s3 connection
    :type s3_conn_id: str
    :param hive_cli_conn_id: destination hive connection
    :type hive_cli_conn_id: str
    :param input_compressed: Boolean to determine if file decompression is
        required to process headers
    :type input_compressed: bool
    :param tblproperties: TBLPROPERTIES of the hive table being created
    :type tblproperties: dict
    """
    template_fields = ('s3_key', 'partition', 'hive_table')
    template_ext = ()
    ui_color = '#a0e08c'

    @apply_defaults
    def __init__(
            self,
            s3_key,
            field_dict,
            hive_table,
            delimiter=',',
            create=True,
            recreate=False,
            partition=None,
            headers=False,
            check_headers=False,
            wildcard_match=False,
            s3_conn_id='s3_default',
            hive_cli_conn_id='hive_cli_default',
            input_compressed=False,
            tblproperties=None,
            *args, **kwargs):
        super(S3ToHiveTransfer, self).__init__(*args, **kwargs)
        self.s3_key = s3_key
        self.field_dict = field_dict
        self.hive_table = hive_table
        self.delimiter = delimiter
        self.create = create
        self.recreate = recreate
        self.partition = partition
        self.headers = headers
        self.check_headers = check_headers
        self.wildcard_match = wildcard_match
        self.hive_cli_conn_id = hive_cli_conn_id
        self.s3_conn_id = s3_conn_id
        self.input_compressed = input_compressed
        self.tblproperties = tblproperties
        # Header verification only makes sense when both the expected
        # field names and a header line are available.
        if (self.check_headers and
                not (self.field_dict is not None and self.headers)):
            raise AirflowException("To check_headers provide " +
                                   "field_dict and headers")

    def execute(self, context):
        """Download the S3 key to a temp file, optionally verify/strip its
        header line, then load it into the target Hive table."""
        self.s3 = S3Hook(s3_conn_id=self.s3_conn_id)
        self.hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id)
        logging.info("Downloading S3 file")
        if self.wildcard_match:
            if not self.s3.check_for_wildcard_key(self.s3_key):
                raise AirflowException("No key matches {0}"
                                       .format(self.s3_key))
            s3_key_object = self.s3.get_wildcard_key(self.s3_key)
        else:
            if not self.s3.check_for_key(self.s3_key):
                raise AirflowException(
                    "The key {0} does not exists".format(self.s3_key))
            s3_key_object = self.s3.get_key(self.s3_key)
        # Keep the original extension so compression handling below can
        # recognize .gz / .bz2 downloads.
        root, file_ext = os.path.splitext(s3_key_object.key)
        # NOTE(review): mode="w" opens the temp file in text mode while boto
        # writes raw bytes into it -- presumably fine on py2; confirm "wb"
        # is not required under py3.
        with TemporaryDirectory(prefix='tmps32hive_') as tmp_dir,\
                NamedTemporaryFile(mode="w",
                                   dir=tmp_dir,
                                   suffix=file_ext) as f:
            logging.info("Dumping S3 key {0} contents to local"
                         " file {1}".format(s3_key_object.key, f.name))
            s3_key_object.get_contents_to_file(f)
            f.flush()
            self.s3.connection.close()
            if not self.headers:
                # No header handling needed: load the file as-is.
                logging.info("Loading file {0} into Hive".format(f.name))
                self.hive.load_file(
                    f.name,
                    self.hive_table,
                    field_dict=self.field_dict,
                    create=self.create,
                    partition=self.partition,
                    delimiter=self.delimiter,
                    recreate=self.recreate,
                    tblproperties=self.tblproperties)
            else:
                # Decompressing file
                if self.input_compressed:
                    logging.info("Uncompressing file {0}".format(f.name))
                    fn_uncompressed = uncompress_file(f.name,
                                                      file_ext,
                                                      tmp_dir)
                    logging.info("Uncompressed to {0}".format(fn_uncompressed))
                    # uncompressed file available now so deleting
                    # compressed file to save disk space
                    f.close()
                else:
                    fn_uncompressed = f.name
                # Testing if header matches field_dict
                if self.check_headers:
                    logging.info("Matching file header against field_dict")
                    header_list = self._get_top_row_as_list(fn_uncompressed)
                    if not self._match_headers(header_list):
                        raise AirflowException("Header check failed")
                # Deleting top header row
                logging.info("Removing header from file {0}".
                             format(fn_uncompressed))
                headless_file = (
                    self._delete_top_row_and_compress(fn_uncompressed,
                                                      file_ext,
                                                      tmp_dir))
                logging.info("Headless file {0}".format(headless_file))
                logging.info("Loading file {0} into Hive".format(headless_file))
                self.hive.load_file(headless_file,
                                    self.hive_table,
                                    field_dict=self.field_dict,
                                    create=self.create,
                                    partition=self.partition,
                                    delimiter=self.delimiter,
                                    recreate=self.recreate,
                                    tblproperties=self.tblproperties)

    def _get_top_row_as_list(self, file_name):
        """Return the first line of *file_name* split on the delimiter."""
        with open(file_name, 'rt') as f:
            header_line = f.readline().strip()
            header_list = header_line.split(self.delimiter)
            return header_list

    def _match_headers(self, header_list):
        """Case-insensitively compare *header_list* against the keys of
        ``self.field_dict``; return True when they match positionally.

        NOTE(review): the positional zip assumes ``field_dict`` preserves
        insertion order (e.g. an OrderedDict) -- confirm with callers.
        """
        if not header_list:
            raise AirflowException("Unable to retrieve header row from file")
        field_names = self.field_dict.keys()
        if len(field_names) != len(header_list):
            logging.warning("Headers count mismatch"
                            "File headers:\n {header_list}\n"
                            "Field names: \n {field_names}\n"
                            "".format(**locals()))
            return False
        test_field_match = [h1.lower() == h2.lower()
                            for h1, h2 in zip(header_list, field_names)]
        if not all(test_field_match):
            logging.warning("Headers do not match field names"
                            "File headers:\n {header_list}\n"
                            "Field names: \n {field_names}\n"
                            "".format(**locals()))
            return False
        else:
            return True

    def _delete_top_row_and_compress(
            self,
            input_file_name,
            output_file_ext,
            dest_dir):
        """Copy *input_file_name* minus its first line into a fresh temp
        file in *dest_dir*, (re)compressing per *output_file_ext*.

        Returns the path of the new file.
        """
        # When output_file_ext is not defined, file is not compressed
        open_fn = open
        if output_file_ext.lower() == '.gz':
            open_fn = gzip.GzipFile
        elif output_file_ext.lower() == '.bz2':
            open_fn = bz2.BZ2File
        os_fh_output, fn_output = \
            tempfile.mkstemp(suffix=output_file_ext, dir=dest_dir)
        # BUGFIX: mkstemp returns an already-open OS-level fd which was
        # previously never closed (fd leak); close it now -- the file is
        # reopened by name just below.
        os.close(os_fh_output)
        with open(input_file_name, 'rb') as f_in,\
                open_fn(fn_output, 'wb') as f_out:
            f_in.seek(0)
            next(f_in)  # drop the header line
            for line in f_in:
                f_out.write(line)
        return fn_output
jimi-c/ansible | lib/ansible/modules/cloud/google/gce.py | 43 | 27529 | #!/usr/bin/python
# Copyright 2013 Google Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gce
version_added: "1.4"
short_description: create or terminate GCE instances
description:
- Creates or terminates Google Compute Engine (GCE) instances. See
U(https://cloud.google.com/compute) for an overview.
Full install/configuration instructions for the gce* modules can
be found in the comments of ansible/test/gce_tests.py.
options:
image:
description:
- image string to use for the instance (default will follow latest
stable debian image)
default: "debian-8"
image_family:
description:
- image family from which to select the image. The most recent
non-deprecated image in the family will be used.
version_added: "2.4"
external_projects:
description:
- A list of other projects (accessible with the provisioning credentials)
to be searched for the image.
version_added: "2.4"
instance_names:
description:
- a comma-separated list of instance names to create or destroy
machine_type:
description:
- machine type to use for the instance, use 'n1-standard-1' by default
default: "n1-standard-1"
metadata:
description:
- a hash/dictionary of custom data for the instance;
'{"key":"value", ...}'
service_account_email:
version_added: "1.5.1"
description:
- service account email
service_account_permissions:
version_added: "2.0"
description:
- service account permissions (see
U(https://cloud.google.com/sdk/gcloud/reference/compute/instances/create),
--scopes section for detailed information)
choices: [
"bigquery", "cloud-platform", "compute-ro", "compute-rw",
"useraccounts-ro", "useraccounts-rw", "datastore", "logging-write",
"monitoring", "sql-admin", "storage-full", "storage-ro",
"storage-rw", "taskqueue", "userinfo-email"
]
pem_file:
version_added: "1.5.1"
description:
- path to the pem file associated with the service account email
This option is deprecated. Use 'credentials_file'.
credentials_file:
version_added: "2.1.0"
description:
- path to the JSON file associated with the service account email
project_id:
version_added: "1.5.1"
description:
- your GCE project ID
name:
description:
- either a name of a single instance or when used with 'num_instances',
the base name of a cluster of nodes
aliases: ['base_name']
num_instances:
description:
- can be used with 'name', specifies
the number of nodes to provision using 'name'
as a base name
version_added: "2.3"
network:
description:
- name of the network, 'default' will be used if not specified
default: "default"
subnetwork:
description:
- name of the subnetwork in which the instance should be created
version_added: "2.2"
persistent_boot_disk:
description:
- if set, create the instance with a persistent boot disk
type: bool
default: 'no'
disks:
description:
- a list of persistent disks to attach to the instance; a string value
gives the name of the disk; alternatively, a dictionary value can
define 'name' and 'mode' ('READ_ONLY' or 'READ_WRITE'). The first entry
will be the boot disk (which must be READ_WRITE).
version_added: "1.7"
state:
description:
- desired state of the resource
default: "present"
choices: ["active", "present", "absent", "deleted", "started", "stopped", "terminated"]
tags:
description:
- a comma-separated list of tags to associate with the instance
zone:
description:
- the GCE zone to use. The list of available zones is at U(https://cloud.google.com/compute/docs/regions-zones/regions-zones#available).
required: true
default: "us-central1-a"
ip_forward:
version_added: "1.9"
description:
- set to C(yes) if the instance can forward ip packets (useful for
gateways)
type: bool
default: 'no'
external_ip:
version_added: "1.9"
description:
- type of external ip, ephemeral by default; alternatively, a fixed gce ip or ip name can be given. Specify 'none' if no external ip is desired.
default: "ephemeral"
disk_auto_delete:
version_added: "1.9"
description:
- if set boot disk will be removed after instance destruction
type: bool
default: 'yes'
preemptible:
version_added: "2.1"
description:
- if set to C(yes), instances will be preemptible and time-limited.
(requires libcloud >= 0.20.0)
type: bool
default: 'no'
disk_size:
description:
- The size of the boot disk created for this instance (in GB)
default: 10
version_added: "2.3"
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.13.3, >= 0.17.0 if using JSON credentials,
>= 0.20.0 if using preemptible option"
notes:
- Either I(instance_names) or I(name) is required.
- JSON credentials strongly preferred.
author: "Eric Johnson (@erjohnso) <erjohnso@google.com>, Tom Melendez (@supertom) <supertom@google.com>"
'''
EXAMPLES = '''
# Basic provisioning example. Create a single Debian 8 instance in the
# us-central1-a Zone of the n1-standard-1 machine type.
# Create multiple instances by specifying multiple names, separated by
# commas in the instance_names field
# (e.g. my-test-instance1,my-test-instance2)
- gce:
instance_names: my-test-instance1
zone: us-central1-a
machine_type: n1-standard-1
image: debian-8
state: present
service_account_email: "your-sa@your-project-name.iam.gserviceaccount.com"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
disk_size: 32
# Create a single instance of an image from the "my-base-image" image family
# in the us-central1-a Zone of the n1-standard-1 machine type.
# This image family is in the "my-other-project" GCP project.
- gce:
instance_names: my-test-instance1
zone: us-central1-a
machine_type: n1-standard-1
image_family: my-base-image
external_projects:
- my-other-project
state: present
service_account_email: "your-sa@your-project-name.iam.gserviceaccount.com"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
disk_size: 32
# Create a single Debian 8 instance in the us-central1-a Zone
# Use existing disks, custom network/subnetwork, set service account permissions
# add tags and metadata.
- gce:
instance_names: my-test-instance
zone: us-central1-a
machine_type: n1-standard-1
state: present
metadata: '{"db":"postgres", "group":"qa", "id":500}'
tags:
- http-server
- my-other-tag
disks:
- name: disk-2
mode: READ_WRITE
- name: disk-3
mode: READ_ONLY
disk_auto_delete: false
network: foobar-network
subnetwork: foobar-subnetwork-1
preemptible: true
ip_forward: true
service_account_permissions:
- storage-full
- taskqueue
- bigquery
- https://www.googleapis.com/auth/ndev.clouddns.readwrite
service_account_email: "your-sa@your-project-name.iam.gserviceaccount.com"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
---
# Example Playbook
- name: Compute Engine Instance Examples
hosts: localhost
vars:
service_account_email: "your-sa@your-project-name.iam.gserviceaccount.com"
credentials_file: "/path/to/your-key.json"
project_id: "your-project-name"
tasks:
- name: create multiple instances
# Basic provisioning example. Create multiple Debian 8 instances in the
# us-central1-a Zone of n1-standard-1 machine type.
gce:
instance_names: test1,test2,test3
zone: us-central1-a
machine_type: n1-standard-1
image: debian-8
state: present
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
metadata : '{ "startup-script" : "apt-get update" }'
register: gce
- name: Save host data
add_host:
hostname: "{{ item.public_ip }}"
groupname: gce_instances_ips
with_items: "{{ gce.instance_data }}"
- name: Wait for SSH for instances
wait_for:
delay: 1
host: "{{ item.public_ip }}"
port: 22
state: started
timeout: 30
with_items: "{{ gce.instance_data }}"
- name: Configure Hosts
hosts: gce_instances_ips
become: yes
become_method: sudo
roles:
- my-role-one
- my-role-two
tags:
- config
- name: delete test-instances
# Basic termination of instance.
gce:
service_account_email: "{{ service_account_email }}"
credentials_file: "{{ credentials_file }}"
project_id: "{{ project_id }}"
instance_names: "{{ gce.instance_names }}"
zone: us-central1-a
state: absent
tags:
- delete
'''
import socket
import logging
try:
from ast import literal_eval
HAS_PYTHON26 = True
except ImportError:
HAS_PYTHON26 = False
try:
import libcloud
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
ResourceExistsError, ResourceInUseError, ResourceNotFoundError
from libcloud.compute.drivers.gce import GCEAddress
_ = Provider.GCE
HAS_LIBCLOUD = True
except ImportError:
HAS_LIBCLOUD = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.gce import gce_connect, unexpected_error_msg
from ansible.module_utils.gcp import get_valid_location
from ansible.module_utils.six.moves import reduce
def get_instance_info(inst):
    """Retrieves instance information from an instance object and returns it
    as a dictionary.

    inst: a libcloud GCE Node object

    Returns a dict with the instance's image, disks, machine type, metadata,
    name, network/subnetwork, private/public IPs, status, tags and zone.
    """
    metadata = {}
    if 'metadata' in inst.extra and 'items' in inst.extra['metadata']:
        for md in inst.extra['metadata']['items']:
            metadata[md['key']] = md['value']
    # A network interface may be absent entirely, or lack a (sub)network
    # entry; treat either as "unknown".  The bare ``except:`` previously used
    # here also swallowed KeyboardInterrupt/SystemExit, so catch only the
    # lookup errors these expressions can actually raise.
    try:
        netname = inst.extra['networkInterfaces'][0]['network'].split('/')[-1]
    except (KeyError, IndexError):
        netname = None
    try:
        subnetname = inst.extra['networkInterfaces'][0]['subnetwork'].split('/')[-1]
    except (KeyError, IndexError):
        subnetname = None
    if 'disks' in inst.extra:
        # Report disks ordered by their attachment index, boot disk first.
        disk_names = [disk_info['source'].split('/')[-1]
                      for disk_info
                      in sorted(inst.extra['disks'],
                                key=lambda disk_info: disk_info['index'])]
    else:
        disk_names = []
    if len(inst.public_ips) == 0:
        public_ip = None
    else:
        public_ip = inst.public_ips[0]
    return ({
        'image': inst.image is not None and inst.image.split('/')[-1] or None,
        'disks': disk_names,
        'machine_type': inst.size,
        'metadata': metadata,
        'name': inst.name,
        'network': netname,
        'subnetwork': subnetname,
        'private_ip': inst.private_ips[0],
        'public_ip': public_ip,
        'status': ('status' in inst.extra) and inst.extra['status'] or None,
        'tags': ('tags' in inst.extra) and inst.extra['tags'] or [],
        'zone': ('zone' in inst.extra) and inst.extra['zone'].name or None,
    })
def create_instances(module, gce, instance_names, number, lc_zone):
    """Creates new instances. Attributes other than instance_names are picked
    up from 'module'
    module : AnsibleModule object
    gce: authenticated GCE libcloud driver
    instance_names: python list of instance names to create
    number: number of instances to create
    lc_zone: GCEZone object
    Returns:
        A list of dictionaries with instance information
        about the instances that were launched.
    """
    image = module.params.get('image')
    image_family = module.params.get('image_family')
    external_projects = module.params.get('external_projects')
    machine_type = module.params.get('machine_type')
    metadata = module.params.get('metadata')
    network = module.params.get('network')
    subnetwork = module.params.get('subnetwork')
    persistent_boot_disk = module.params.get('persistent_boot_disk')
    disks = module.params.get('disks')
    tags = module.params.get('tags')
    ip_forward = module.params.get('ip_forward')
    external_ip = module.params.get('external_ip')
    disk_auto_delete = module.params.get('disk_auto_delete')
    preemptible = module.params.get('preemptible')
    disk_size = module.params.get('disk_size')
    service_account_permissions = module.params.get('service_account_permissions')
    # Resolve the external_ip option: None (no external IP), a literal
    # address / reserved-address name, or the default 'ephemeral' string
    # which libcloud understands as-is.
    if external_ip == "none":
        instance_external_ip = None
    elif external_ip != "ephemeral":
        instance_external_ip = external_ip
        try:
            # check if instance_external_ip is an ip or a name
            try:
                socket.inet_aton(instance_external_ip)
                instance_external_ip = GCEAddress(id='unknown', name='unknown', address=instance_external_ip, region='unknown', driver=gce)
            except socket.error:
                instance_external_ip = gce.ex_get_address(instance_external_ip)
        except GoogleBaseError as e:
            module.fail_json(msg='Unexpected error attempting to get a static ip %s, error: %s' % (external_ip, e.value))
    else:
        instance_external_ip = external_ip
    new_instances = []
    changed = False
    # Resolve every requested disk to a libcloud volume, remembering the
    # attach mode for each (first disk is the boot disk).
    lc_disks = []
    disk_modes = []
    for i, disk in enumerate(disks or []):
        if isinstance(disk, dict):
            lc_disks.append(gce.ex_get_volume(disk['name'], lc_zone))
            disk_modes.append(disk['mode'])
        else:
            lc_disks.append(gce.ex_get_volume(disk, lc_zone))
            # boot disk is implicitly READ_WRITE
            disk_modes.append('READ_ONLY' if i > 0 else 'READ_WRITE')
    lc_network = gce.ex_get_network(network)
    lc_machine_type = gce.ex_get_size(machine_type, lc_zone)
    # Try to convert the user's metadata value into the format expected
    # by GCE.  First try to ensure user has proper quoting of a
    # dictionary-like syntax using 'literal_eval', then convert the python
    # dict into a python list of 'key' / 'value' dicts.  Should end up
    # with:
    # [ {'key': key1, 'value': value1}, {'key': key2, 'value': value2}, ...]
    if metadata:
        if isinstance(metadata, dict):
            md = metadata
        else:
            try:
                md = literal_eval(str(metadata))
                if not isinstance(md, dict):
                    raise ValueError('metadata must be a dict')
            except ValueError as e:
                module.fail_json(msg='bad metadata: %s' % str(e))
            except SyntaxError as e:
                module.fail_json(msg='bad metadata syntax')
        # Older libcloud (< 0.15) expects the explicit items-list form.
        if hasattr(libcloud, '__version__') and libcloud.__version__ < '0.15':
            items = []
            for k, v in md.items():
                items.append({"key": k, "value": v})
            metadata = {'items': items}
        else:
            metadata = md
    lc_image = LazyDiskImage(module, gce, image, lc_disks, family=image_family, projects=external_projects)
    # Validate requested service-account scopes; anything that is neither a
    # known alias nor a full googleapis auth URL is rejected up front.
    ex_sa_perms = []
    bad_perms = []
    if service_account_permissions:
        for perm in service_account_permissions:
            if perm not in gce.SA_SCOPES_MAP and not perm.startswith('https://www.googleapis.com/auth'):
                bad_perms.append(perm)
        if len(bad_perms) > 0:
            module.fail_json(msg='bad permissions: %s' % str(bad_perms))
        ex_sa_perms.append({'email': "default"})
        ex_sa_perms[0]['scopes'] = service_account_permissions
    # These variables all have default values but check just in case
    if not lc_network or not lc_machine_type or not lc_zone:
        module.fail_json(msg='Missing required create instance variable',
                         changed=False)
    gce_args = dict(
        location=lc_zone,
        ex_network=network, ex_tags=tags, ex_metadata=metadata,
        ex_can_ip_forward=ip_forward,
        external_ip=instance_external_ip, ex_disk_auto_delete=disk_auto_delete,
        ex_service_accounts=ex_sa_perms
    )
    if preemptible is not None:
        gce_args['ex_preemptible'] = preemptible
    if subnetwork is not None:
        gce_args['ex_subnetwork'] = subnetwork
    if isinstance(instance_names, str) and not number:
        instance_names = [instance_names]
    # Bulk path: a base name plus a count goes through
    # ex_create_multiple_nodes; otherwise create (or find) each node
    # individually by name.
    if isinstance(instance_names, str) and number:
        instance_responses = gce.ex_create_multiple_nodes(instance_names, lc_machine_type,
                                                          lc_image(), number, **gce_args)
        for resp in instance_responses:
            n = resp
            if isinstance(resp, libcloud.compute.drivers.gce.GCEFailedNode):
                try:
                    n = gce.ex_get_node(n.name, lc_zone)
                except ResourceNotFoundError:
                    pass
            else:
                # Assure that at least one node has been created to set changed=True
                changed = True
            new_instances.append(n)
    else:
        for instance in instance_names:
            pd = None
            if lc_disks:
                pd = lc_disks[0]
            elif persistent_boot_disk:
                try:
                    pd = gce.ex_get_volume("%s" % instance, lc_zone)
                except ResourceNotFoundError:
                    pd = gce.create_volume(disk_size, "%s" % instance, image=lc_image())
            gce_args['ex_boot_disk'] = pd
            inst = None
            try:
                inst = gce.ex_get_node(instance, lc_zone)
            except ResourceNotFoundError:
                inst = gce.create_node(
                    instance, lc_machine_type, lc_image(), **gce_args
                )
                changed = True
            except GoogleBaseError as e:
                module.fail_json(msg='Unexpected error attempting to create ' +
                                 'instance %s, error: %s' % (instance, e.value))
            if inst:
                new_instances.append(inst)
    # Attach any extra requested disks, verifying that disks already attached
    # (e.g. on a pre-existing node) match both source and mode.
    for inst in new_instances:
        for i, lc_disk in enumerate(lc_disks):
            # Check whether the disk is already attached
            if (len(inst.extra['disks']) > i):
                attached_disk = inst.extra['disks'][i]
                if attached_disk['source'] != lc_disk.extra['selfLink']:
                    module.fail_json(
                        msg=("Disk at index %d does not match: requested=%s found=%s" % (
                            i, lc_disk.extra['selfLink'], attached_disk['source'])))
                elif attached_disk['mode'] != disk_modes[i]:
                    module.fail_json(
                        msg=("Disk at index %d is in the wrong mode: requested=%s found=%s" % (
                            i, disk_modes[i], attached_disk['mode'])))
                else:
                    continue
            gce.attach_volume(inst, lc_disk, ex_mode=disk_modes[i])
            # Work around libcloud bug: attached volumes don't get added
            # to the instance metadata. get_instance_info() only cares about
            # source and index.
            if len(inst.extra['disks']) != i + 1:
                inst.extra['disks'].append(
                    {'source': lc_disk.extra['selfLink'], 'index': i})
    instance_names = []
    instance_json_data = []
    for inst in new_instances:
        d = get_instance_info(inst)
        instance_names.append(d['name'])
        instance_json_data.append(d)
    return (changed, instance_json_data, instance_names)
def change_instance_state(module, gce, instance_names, number, zone, state):
    """Transition a set of instances to the requested state.

    For example, change from started to stopped, or started to absent.
    module: Ansible module object
    gce: authenticated GCE connection object
    instance_names: base name (str) or list of instance names
    number: with a string base name, the count of numbered instances
        ('<name>-000', '<name>-001', ...) to target
    zone: GCEZone object where the instances reside prior to the change
    state: 'state' parameter passed into module as argument

    Returns a (changed, names) tuple listing the instances acted upon.
    """
    # Expand the requested name(s) into a concrete list of node names.
    if isinstance(instance_names, str):
        if number:
            node_names = ['%s-%03d' % (instance_names, idx)
                          for idx in range(number)]
        else:
            node_names = [instance_names]
    else:
        node_names = instance_names
    changed = False
    nodes = []
    state_instance_names = []
    for node_name in node_names:
        try:
            found_node = gce.ex_get_node(node_name, zone)
        except ResourceNotFoundError:
            # Already gone: nothing to do, but still report the name.
            state_instance_names.append(node_name)
        except Exception as e:
            module.fail_json(msg=unexpected_error_msg(e), changed=False)
        else:
            nodes.append(found_node)
            state_instance_names.append(node_name)
    if state in ['absent', 'deleted'] and number:
        # Numbered clusters are torn down through the batch API.
        changed_nodes = gce.ex_destroy_multiple_nodes(nodes) or [False]
        changed = reduce(lambda x, y: x or y, changed_nodes)
    else:
        for node in nodes:
            if state in ['absent', 'deleted']:
                gce.destroy_node(node)
                changed = True
            elif state == 'started' and node.state == libcloud.compute.types.NodeState.STOPPED:
                gce.ex_start_node(node)
                changed = True
            elif state in ['stopped', 'terminated'] and node.state == libcloud.compute.types.NodeState.RUNNING:
                gce.ex_stop_node(node)
                changed = True
    return (changed, state_instance_names)
def main():
    """Module entry point: parse parameters, connect to GCE, then create or
    change the state of the requested instances and exit with JSON results."""
    module = AnsibleModule(
        argument_spec=dict(
            image=dict(default='debian-8'),
            image_family=dict(),
            external_projects=dict(type='list'),
            instance_names=dict(),
            machine_type=dict(default='n1-standard-1'),
            metadata=dict(),
            name=dict(aliases=['base_name']),
            num_instances=dict(type='int'),
            network=dict(default='default'),
            subnetwork=dict(),
            persistent_boot_disk=dict(type='bool', default=False),
            disks=dict(type='list'),
            state=dict(choices=['active', 'present', 'absent', 'deleted',
                                'started', 'stopped', 'terminated'],
                       default='present'),
            tags=dict(type='list'),
            zone=dict(default='us-central1-a'),
            service_account_email=dict(),
            service_account_permissions=dict(type='list'),
            pem_file=dict(type='path'),
            credentials_file=dict(type='path'),
            project_id=dict(),
            ip_forward=dict(type='bool', default=False),
            external_ip=dict(default='ephemeral'),
            disk_auto_delete=dict(type='bool', default=True),
            disk_size=dict(type='int', default=10),
            preemptible=dict(type='bool', default=None),
        ),
        mutually_exclusive=[('instance_names', 'name')]
    )
    if not HAS_PYTHON26:
        module.fail_json(msg="GCE module requires python's 'ast' module, python v2.6+")
    if not HAS_LIBCLOUD:
        module.fail_json(msg='libcloud with GCE support (0.17.0+) required for this module')
    gce = gce_connect(module)
    image = module.params.get('image')
    image_family = module.params.get('image_family')
    external_projects = module.params.get('external_projects')
    instance_names = module.params.get('instance_names')
    name = module.params.get('name')
    number = module.params.get('num_instances')
    subnetwork = module.params.get('subnetwork')
    state = module.params.get('state')
    zone = module.params.get('zone')
    preemptible = module.params.get('preemptible')
    changed = False
    # Normalize instance_names / name into 'inames': either a list of
    # explicit names, or (when 'name' is used) a base-name string.
    inames = None
    if isinstance(instance_names, list):
        inames = instance_names
    elif isinstance(instance_names, str):
        inames = instance_names.split(',')
    if name:
        inames = name
    if not inames:
        module.fail_json(msg='Must specify a "name" or "instance_names"',
                         changed=False)
    if not zone:
        module.fail_json(msg='Must specify a "zone"', changed=False)
    lc_zone = get_valid_location(module, gce, zone)
    # Feature gates that depend on the installed libcloud version.
    if preemptible is not None and hasattr(libcloud, '__version__') and libcloud.__version__ < '0.20':
        module.fail_json(msg="Apache Libcloud 0.20.0+ is required to use 'preemptible' option",
                         changed=False)
    if subnetwork is not None and not hasattr(gce, 'ex_get_subnetwork'):
        module.fail_json(msg="Apache Libcloud 1.0.0+ is required to use 'subnetwork' option",
                         changed=False)
    json_output = {'zone': zone}
    if state in ['absent', 'deleted', 'started', 'stopped', 'terminated']:
        json_output['state'] = state
        (changed, state_instance_names) = change_instance_state(
            module, gce, inames, number, lc_zone, state)
        # based on what user specified, return the same variable, although
        # value could be different if an instance could not be destroyed
        # NOTE(review): this parses as `instance_names or (name and number)`
        # due to operator precedence — confirm that is the intended grouping.
        if instance_names or name and number:
            json_output['instance_names'] = state_instance_names
        elif name:
            json_output['name'] = name
    elif state in ['active', 'present']:
        json_output['state'] = 'present'
        (changed, instance_data, instance_name_list) = create_instances(
            module, gce, inames, number, lc_zone)
        json_output['instance_data'] = instance_data
        if instance_names:
            json_output['instance_names'] = instance_name_list
        elif name:
            json_output['name'] = name
    json_output['changed'] = changed
    module.exit_json(**json_output)
class LazyDiskImage:
    """Lazily resolved disk image.

    gce.ex_get_image is a very expensive call, so resolution is deferred
    until the image is first needed; the result (possibly None) is then
    cached and returned for every subsequent call.
    """
    def __init__(self, module, gce, name, has_pd, family=None, projects=None):
        self.module = module
        self.gce = gce
        self.name = name
        self.has_pd = has_pd
        self.family = family
        self.projects = projects
        self.image = None
        self.was_called = False
    def __call__(self):
        # Only hit the API on the first invocation.
        if not self.was_called:
            self.was_called = True
            # A persistent boot disk makes a source image unnecessary.
            if not self.has_pd:
                if self.family:
                    self.image = self.gce.ex_get_image_from_family(
                        self.family, ex_project_list=self.projects)
                else:
                    self.image = self.gce.ex_get_image(
                        self.name, ex_project_list=self.projects)
                if not self.image:
                    self.module.fail_json(
                        msg='image or disks missing for create instance',
                        changed=False)
        return self.image
# Standard Ansible module entry point: run main() only when executed
# directly (Ansible invokes modules as standalone scripts).
if __name__ == '__main__':
    main()
| gpl-3.0 |
nolanliou/tensorflow | tensorflow/compiler/tests/categorical_op_test.py | 24 | 5227 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for multinomial generation ops in the XLA JIT compiler."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
from tensorflow.compiler.tests.xla_test import XLATestCase
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import googletest
# TODO(srvasude): Merge this with
# third_party/tensorflow/python/kernel_tests/random/multinomial_op_test.py.
class CategoricalTest(XLATestCase):
  """Test cases for random-number generating operators."""
  def output_dtypes(self):
    # Only the integer output dtypes the XLA backend under test supports.
    return set(self.int_types).intersection([np.int32, np.int64])
  def _chi2(self, expected, actual):
    """Returns Chi2 GOF statistic."""
    actual = np.asarray(actual)
    expected = np.asarray(expected)
    diff = actual - expected
    # Pearson chi-squared: sum((observed - expected)^2 / expected).
    chi2 = np.sum(diff * diff / expected)
    return chi2
  def _do_sampling(self, logits, num_samples):
    """Categorical samples from given input.
    Args:
      logits: Numpy ndarray of shape [batch_size, num_classes].
      num_samples: Int; number of samples to draw.
    Returns:
      Frequencies from sampled classes; shape [batch_size, num_classes].
    """
    with self.test_session() as sess, self.test_scope():
      random_seed.set_random_seed(1618)
      op = random_ops.multinomial(logits, num_samples,
                                  output_dtype=dtypes.int32)
      d = sess.run(op)
    batch_size, num_classes = logits.shape
    freqs_mat = []
    for i in range(batch_size):
      cnts = dict(collections.Counter(d[i, :]))
      # Requires drawn class labels be in range.
      self.assertLess(max(cnts.keys()), num_classes)
      self.assertGreaterEqual(min(cnts.keys()), 0)
      # Normalize raw counts into per-class sample frequencies.
      freqs = [(cnts[k] * 1. / num_samples if k in cnts else 0)
               for k in range(num_classes)]
      freqs_mat.append(freqs)
    return freqs_mat
  def _testRngIsNotConstant(self, rng, dtype, output_dtype):
    # Tests that 'rng' does not always return the same value.
    with self.test_session() as sess:
      with self.test_scope():
        x = rng(dtype, output_dtype)
      # The random-number generator, if working correctly, should produce the
      # same output multiple times with low probability.
      y = sess.run(x)
      z = sess.run(x)
      w = sess.run(x)
      # We use exact equality here. If the random-number generator is producing
      # deterministic output, all three outputs will be bitwise identical.
      self.assertTrue((not np.array_equal(y, z)) or
                      (not np.array_equal(z, w)) or
                      (not np.array_equal(y, w)))
  def testCategoricalIsNotConstant(self):
    # Uniform logits over three classes; successive runs should differ.
    def rng(dtype, output_dtype):
      return random_ops.multinomial(np.array([[1., 1., 1.]], dtype=dtype), 10,
                                    output_dtype=output_dtype)
    dtype = np.float32
    for output_dtype in self.output_dtypes():
      self._testRngIsNotConstant(rng, dtype, output_dtype)
  def testCategoricalIsInRange(self):
    # Every drawn label from 20 equally weighted classes must be in [0, 20).
    for dtype in self.float_types:
      for output_dtype in self.output_dtypes():
        with self.test_session() as sess:
          with self.test_scope():
            x = random_ops.multinomial(
                array_ops.ones(shape=[1, 20], dtype=dtype), 1000,
                output_dtype=output_dtype)
          y = sess.run(x)
          self.assertTrue((y >= 0).sum() == 1000)
          self.assertTrue((y < 20).sum() == 1000)
  def testSamplingCorrectness(self):
    # Goodness-of-fit check: sampled frequencies should match the requested
    # probabilities under a chi-squared test.
    np.random.seed(1618)  # Make it reproducible.
    num_samples = 21000
    rand_probs = np.random.dirichlet([1., 1., 2., 3.])
    rand_probs2 = np.random.dirichlet([1., 4., 5.], size=3)  # batched
    for probs in [[.5, .5], [.85, .05, .1], rand_probs, rand_probs2]:
      probs = np.asarray(probs)
      if len(probs.shape) == 1:
        probs = probs.reshape(1, probs.size)  # singleton batch
      logits = np.log(probs).astype(np.float32)
      freqs = self._do_sampling(logits, num_samples)
      # the test here is similar to
      # python/kernel_tests/random/multinomial_op_test.py
      # Note that df >= 1 in all these cases. Choosing a cutoff of 1e-3
      # corresponds to an alpha value of 2.5% for df = 1, and smaller for larger
      # df.
      chi2 = self._chi2(probs, freqs)
      self.assertLess(chi2, 1e-3)
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
  googletest.main()
| apache-2.0 |
barraponto/scrapy | scrapy/core/downloader/handlers/s3.py | 34 | 3870 | from six.moves.urllib.parse import unquote
from scrapy.exceptions import NotConfigured
from scrapy.utils.httpobj import urlparse_cached
from scrapy.utils.boto import is_botocore
from .http import HTTPDownloadHandler
def _get_boto_connection():
    """Return an S3Connection subclass suitable for request *signing* only.
    The returned class overrides boto's request executor (``_mexe``) so that
    no synchronous network I/O happens: Scrapy only needs boto to compute
    the signed headers and performs the actual download itself.
    """
    # Imported lazily so boto is only required when S3 downloads are used.
    from boto.s3.connection import S3Connection
    class _v19_S3Connection(S3Connection):
        """A dummy S3Connection wrapper that doesn't do any synchronous download"""
        def _mexe(self, method, bucket, key, headers, *args, **kwargs):
            # boto < 2.0 _mexe signature; headers are already signed here.
            return headers
    class _v20_S3Connection(S3Connection):
        """A dummy S3Connection wrapper that doesn't do any synchronous download"""
        def _mexe(self, http_request, *args, **kwargs):
            # boto >= 2.0 signature: sign the request, then return its headers.
            http_request.authorize(connection=self)
            return http_request.headers
    # boto.auth only exists in boto >= 2.0; its absence identifies the
    # legacy API and therefore which wrapper class to use.
    try:
        import boto.auth
    except ImportError:
        _S3Connection = _v19_S3Connection
    else:
        _S3Connection = _v20_S3Connection
    return _S3Connection
class S3DownloadHandler(object):
    """Download handler for ``s3://`` URLs.
    Translates an s3:// request into a signed https/http request against
    s3.amazonaws.com and delegates the transfer to the plain HTTP handler.
    Signing is done with botocore when available, otherwise with boto;
    with no credentials at all the connection is anonymous.
    """
    def __init__(self, settings, aws_access_key_id=None, aws_secret_access_key=None, \
            httpdownloadhandler=HTTPDownloadHandler, **kw):
        # Fall back to project settings when explicit credentials
        # are not passed in.
        if not aws_access_key_id:
            aws_access_key_id = settings['AWS_ACCESS_KEY_ID']
        if not aws_secret_access_key:
            aws_secret_access_key = settings['AWS_SECRET_ACCESS_KEY']
        # If no credentials could be found anywhere,
        # consider this an anonymous connection request by default;
        # unless 'anon' was set explicitly (True/False).
        anon = kw.get('anon')
        if anon is None and not aws_access_key_id and not aws_secret_access_key:
            kw['anon'] = True
        self.anon = kw.get('anon')
        self._signer = None
        if is_botocore():
            import botocore.auth
            import botocore.credentials
            # botocore does not understand boto-style kwargs; 'anon' is
            # consumed above and anything left over is an error.
            kw.pop('anon', None)
            if kw:
                raise TypeError('Unexpected keyword arguments: %s' % kw)
            if not self.anon:
                SignerCls = botocore.auth.AUTH_TYPE_MAPS['s3']
                self._signer = SignerCls(botocore.credentials.Credentials(
                    aws_access_key_id, aws_secret_access_key))
        else:
            # Legacy boto path: keep a signing-only connection (see
            # _get_boto_connection) around for download_request().
            _S3Connection = _get_boto_connection()
            try:
                self.conn = _S3Connection(
                    aws_access_key_id, aws_secret_access_key, **kw)
            except Exception as ex:
                raise NotConfigured(str(ex))
        self._download_http = httpdownloadhandler(settings).download_request
    def download_request(self, request, spider):
        """Sign *request* (unless anonymous) and fetch it over plain HTTP(S)."""
        p = urlparse_cached(request)
        scheme = 'https' if request.meta.get('is_secure') else 'http'
        bucket = p.hostname
        path = p.path + '?' + p.query if p.query else p.path
        # Virtual-hosted-style URL used for the actual download.
        url = '%s://%s.s3.amazonaws.com%s' % (scheme, bucket, path)
        if self.anon:
            request = request.replace(url=url)
        elif self._signer is not None:
            import botocore.awsrequest
            # Sign against the path-style URL, then copy the resulting
            # auth headers onto the virtual-hosted-style request.
            awsrequest = botocore.awsrequest.AWSRequest(
                method=request.method,
                url='%s://s3.amazonaws.com/%s%s' % (scheme, bucket, path),
                headers=request.headers.to_unicode_dict(),
                data=request.body)
            self._signer.add_auth(awsrequest)
            request = request.replace(
                url=url, headers=awsrequest.headers.items())
        else:
            # boto path: make_request() is the signing-only stub, so this
            # returns signed headers without performing any I/O.
            signed_headers = self.conn.make_request(
                method=request.method,
                bucket=bucket,
                key=unquote(p.path),
                query_args=unquote(p.query),
                headers=request.headers,
                data=request.body)
            request = request.replace(url=url, headers=signed_headers)
        return self._download_http(request, spider)
| bsd-3-clause |
alianmohammad/pd-gem5-latest | src/mem/ruby/network/BasicLink.py | 53 | 2758 | # Copyright (c) 2011 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
# Brad Beckmann
from m5.params import *
from m5.SimObject import SimObject
class BasicLink(SimObject):
    """Base SimObject for a Ruby network link (common link parameters)."""
    type = 'BasicLink'
    cxx_header = "mem/ruby/network/BasicLink.hh"
    link_id = Param.Int("ID in relation to other links")
    latency = Param.Cycles(1, "latency")
    # The following bandwidth factor does not translate to the same value for
    # both the simple and Garnet models. For the most part, the bandwidth
    # factor is the width of the link in bytes, except for certain situations
    # with regard to the simple network.
    bandwidth_factor = Param.Int("generic bandwidth factor, usually in bytes")
    weight = Param.Int(1, "used to restrict routing in shortest path analysis")
class BasicExtLink(BasicLink):
    """Link between an external node (controller) and an internal router."""
    type = 'BasicExtLink'
    cxx_header = "mem/ruby/network/BasicLink.hh"
    ext_node = Param.RubyController("External node")
    int_node = Param.BasicRouter("ID of internal node")
    # Default width: 16 bytes = 128 bits.
    bandwidth_factor = 16
class BasicIntLink(BasicLink):
    """Link between two internal routers of the network."""
    type = 'BasicIntLink'
    cxx_header = "mem/ruby/network/BasicLink.hh"
    node_a = Param.BasicRouter("Router on one end")
    node_b = Param.BasicRouter("Router on other end")
    # Default width: 16 bytes = 128 bits.
    bandwidth_factor = 16
| bsd-3-clause |
philogb/jit | webpy/web/browser.py | 16 | 7579 | """Browser to test web applications.
(from web.py)
"""
from utils import re_compile
from net import htmlunquote
import httplib, urllib, urllib2
import cookielib
import copy
from StringIO import StringIO
# When True, every request's method and URL is echoed to stdout.
DEBUG = False

# Public API of this module.
__all__ = [
    "BrowserError",
    "Browser", "AppBrowser",
    "AppHandler"
]
class BrowserError(Exception):
    """Raised when a browser operation (following a link, selecting or
    submitting a form) cannot be carried out."""
class Browser:
def __init__(self):
self.cookiejar = cookielib.CookieJar()
self._cookie_processor = urllib2.HTTPCookieProcessor(self.cookiejar)
self.form = None
self.url = "http://0.0.0.0:8080/"
self.path = "/"
self.status = None
self.data = None
self._response = None
self._forms = None
def reset(self):
"""Clears all cookies and history."""
self.cookiejar.clear()
def build_opener(self):
"""Builds the opener using urllib2.build_opener.
Subclasses can override this function to prodive custom openers.
"""
return urllib2.build_opener()
def do_request(self, req):
if DEBUG:
print 'requesting', req.get_method(), req.get_full_url()
opener = self.build_opener()
opener.add_handler(self._cookie_processor)
try:
self._response = opener.open(req)
except urllib2.HTTPError, e:
self._response = e
self.url = self._response.geturl()
self.path = urllib2.Request(self.url).get_selector()
self.data = self._response.read()
self.status = self._response.code
self._forms = None
self.form = None
return self.get_response()
def open(self, url, data=None, headers={}):
"""Opens the specified url."""
url = urllib.basejoin(self.url, url)
req = urllib2.Request(url, data, headers)
return self.do_request(req)
def show(self):
"""Opens the current page in real web browser."""
f = open('page.html', 'w')
f.write(self.data)
f.close()
import webbrowser, os
url = 'file://' + os.path.abspath('page.html')
webbrowser.open(url)
def get_response(self):
"""Returns a copy of the current response."""
return urllib.addinfourl(StringIO(self.data), self._response.info(), self._response.geturl())
def get_soup(self):
"""Returns beautiful soup of the current document."""
import BeautifulSoup
return BeautifulSoup.BeautifulSoup(self.data)
def get_text(self, e=None):
"""Returns content of e or the current document as plain text."""
e = e or self.get_soup()
return ''.join([htmlunquote(c) for c in e.recursiveChildGenerator() if isinstance(c, unicode)])
def _get_links(self):
soup = self.get_soup()
return [a for a in soup.findAll(name='a')]
def get_links(self, text=None, text_regex=None, url=None, url_regex=None, predicate=None):
"""Returns all links in the document."""
return self._filter_links(self._get_links(),
text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate)
def follow_link(self, link=None, text=None, text_regex=None, url=None, url_regex=None, predicate=None):
if link is None:
links = self._filter_links(self.get_links(),
text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate)
link = links and links[0]
if link:
return self.open(link['href'])
else:
raise BrowserError("No link found")
def find_link(self, text=None, text_regex=None, url=None, url_regex=None, predicate=None):
links = self._filter_links(self.get_links(),
text=text, text_regex=text_regex, url=url, url_regex=url_regex, predicate=predicate)
return links and links[0] or None
def _filter_links(self, links,
text=None, text_regex=None,
url=None, url_regex=None,
predicate=None):
predicates = []
if text is not None:
predicates.append(lambda link: link.string == text)
if text_regex is not None:
predicates.append(lambda link: re_compile(text_regex).search(link.string or ''))
if url is not None:
predicates.append(lambda link: link.get('href') == url)
if url_regex is not None:
predicates.append(lambda link: re_compile(url_regex).search(link.get('href', '')))
if predicate:
predicate.append(predicate)
def f(link):
for p in predicates:
if not p(link):
return False
return True
return [link for link in links if f(link)]
def get_forms(self):
"""Returns all forms in the current document.
The returned form objects implement the ClientForm.HTMLForm interface.
"""
if self._forms is None:
import ClientForm
self._forms = ClientForm.ParseResponse(self.get_response(), backwards_compat=False)
return self._forms
def select_form(self, name=None, predicate=None, index=0):
"""Selects the specified form."""
forms = self.get_forms()
if name is not None:
forms = [f for f in forms if f.name == name]
if predicate:
forms = [f for f in forms if predicate(f)]
if forms:
self.form = forms[index]
return self.form
else:
raise BrowserError("No form selected.")
def submit(self):
"""submits the currently selected form."""
if self.form is None:
raise BrowserError("No form selected.")
req = self.form.click()
return self.do_request(req)
def __getitem__(self, key):
return self.form[key]
def __setitem__(self, key, value):
self.form[key] = value
class AppBrowser(Browser):
    """Browser interface to test web.py apps.

    b = AppBrowser(app)
    b.open('/')
    b.follow_link(text='Login')

    b.select_form(name='login')
    b['username'] = 'joe'
    b['password'] = 'secret'
    b.submit()

    assert b.path == '/'
    assert 'Welcome joe' in b.get_text()
    """

    def __init__(self, app):
        Browser.__init__(self)
        # The web.py application under test; requests are routed to it
        # in-process instead of over the network.
        self.app = app

    def build_opener(self):
        # Install AppHandler so urllib2 dispatches into self.app.
        return urllib2.build_opener(AppHandler(self.app))
class AppHandler(urllib2.HTTPHandler):
    """urllib2 handler to handle requests using web.py application."""
    # Run before the default handlers (which have order >= 400).
    handler_order = 100

    def __init__(self, app):
        self.app = app

    def http_open(self, req):
        # Dispatch the urllib2 request straight into the web.py test app
        # instead of touching the network.
        result = self.app.request(
            localpart=req.get_selector(),
            method=req.get_method(),
            host=req.get_host(),
            data=req.get_data(),
            headers=dict(req.header_items()),
            https=req.get_type() == "https"
        )
        return self._make_response(result, req.get_full_url())

    def https_open(self, req):
        return self.http_open(req)

    # Reuse HTTPHandler's request preprocessing for https as well.
    https_request = urllib2.HTTPHandler.do_request_

    def _make_response(self, result, url):
        """Convert the app's storage result into a urllib2-style response."""
        # NOTE(review): result.header_items is used without calling it --
        # confirm whether app.request() exposes it as a list attribute or
        # a method; iterating a bound method here would raise TypeError.
        data = "\r\n".join(["%s: %s" % (k, v) for k, v in result.header_items])
        headers = httplib.HTTPMessage(StringIO(data))
        response = urllib.addinfourl(StringIO(result.data), headers, url)
        # result.status looks like "200 OK": split into int code and text.
        code, msg = result.status.split(None, 1)
        response.code, response.msg = int(code), msg
        return response
| mit |
flingone/rk3066-kernel | tools/perf/scripts/python/sched-migration.py | 11215 | 11670 | #!/usr/bin/python
#
# Cpu task migration overview toy
#
# Copyright (C) 2010 Frederic Weisbecker <fweisbec@gmail.com>
#
# perf script event handlers have been generated by perf script -g python
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
import os
import sys
from collections import defaultdict
from UserList import UserList
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
sys.path.append('scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from SchedGui import *
# pid -> command name, seeded with the swapper/idle task.
threads = {0: "idle"}

def thread_name(pid):
    """Return a human-readable "comm:pid" label for *pid*."""
    comm = threads[pid]
    return "{0}:{1:d}".format(comm, pid)
class RunqueueEventUnknown:
    """Placeholder used when nothing notable happened on a runqueue."""

    @staticmethod
    def color():
        # No dedicated color: callers fall back to the default shade.
        return None

    def __repr__(self):
        return "unknown"
class RunqueueEventSleep:
    """Event: a task on this runqueue went to sleep."""

    @staticmethod
    def color():
        # Pure blue marker.
        return (0, 0, 0xff)

    def __init__(self, sleeper):
        # pid of the task that blocked.
        self.sleeper = sleeper

    def __repr__(self):
        return "{0} gone to sleep".format(thread_name(self.sleeper))
class RunqueueEventWakeup:
    """Event: a task was woken up on this runqueue."""

    @staticmethod
    def color():
        # Yellow marker.
        return (0xff, 0xff, 0)

    def __init__(self, wakee):
        # pid of the task that was woken.
        self.wakee = wakee

    def __repr__(self):
        return "{0} woke up".format(thread_name(self.wakee))
class RunqueueEventFork:
    """Event: a freshly forked task appeared on this runqueue."""

    @staticmethod
    def color():
        # Pure green marker.
        return (0, 0xff, 0)

    def __init__(self, child):
        # pid of the newly created task.
        self.child = child

    def __repr__(self):
        return "new forked task {0}".format(thread_name(self.child))
class RunqueueMigrateIn:
    """Event: a task migrated onto this runqueue from another CPU."""

    @staticmethod
    def color():
        # Cyan-ish marker.
        return (0, 0xf0, 0xff)

    def __init__(self, new):
        # pid of the incoming task.
        self.new = new

    def __repr__(self):
        return "task migrated in {0}".format(thread_name(self.new))
class RunqueueMigrateOut:
    """Event: a task left this runqueue for another CPU."""

    @staticmethod
    def color():
        # Magenta marker.
        return (0xff, 0, 0xff)

    def __init__(self, old):
        # pid of the departing task.
        self.old = old

    def __repr__(self):
        return "task migrated out {0}".format(thread_name(self.old))
class RunqueueSnapshot:
    """Snapshot of one CPU runqueue after a scheduler event.

    Each event produces a new snapshot (or returns the current one when
    nothing changed) tagged with the event object that created it.
    """

    def __init__(self, tasks = [0], event = RunqueueEventUnknown()):
        # NOTE: the default list/event are shared between calls, but tasks
        # is copied into a tuple immediately, so the mutable-default
        # pitfall is harmless here.
        self.tasks = tuple(tasks)
        self.event = event

    def sched_switch(self, prev, prev_state, next):
        """Return the snapshot after a context switch prev -> next."""
        event = RunqueueEventUnknown()

        # taskState() comes from Core (star import at module top);
        # "R" means the task stayed runnable.
        if taskState(prev_state) == "R" and next in self.tasks \
            and prev in self.tasks:
            return self

        if taskState(prev_state) != "R":
            event = RunqueueEventSleep(prev)

        next_tasks = list(self.tasks[:])
        if prev in self.tasks:
            if taskState(prev_state) != "R":
                next_tasks.remove(prev)
        elif taskState(prev_state) == "R":
            next_tasks.append(prev)

        if next not in next_tasks:
            next_tasks.append(next)

        return RunqueueSnapshot(next_tasks, event)

    def migrate_out(self, old):
        """Return the snapshot after *old* migrated to another CPU."""
        if old not in self.tasks:
            return self
        next_tasks = [task for task in self.tasks if task != old]

        return RunqueueSnapshot(next_tasks, RunqueueMigrateOut(old))

    def __migrate_in(self, new, event):
        # If the task is already here, just retag this snapshot in place.
        if new in self.tasks:
            self.event = event
            return self
        next_tasks = self.tasks[:] + tuple([new])

        return RunqueueSnapshot(next_tasks, event)

    def migrate_in(self, new):
        """Return the snapshot after *new* migrated onto this CPU."""
        return self.__migrate_in(new, RunqueueMigrateIn(new))

    def wake_up(self, new):
        """Return the snapshot after *new* was woken on this CPU."""
        return self.__migrate_in(new, RunqueueEventWakeup(new))

    def wake_up_new(self, new):
        """Return the snapshot after freshly forked *new* landed here."""
        return self.__migrate_in(new, RunqueueEventFork(new))

    def load(self):
        """ Provide the number of tasks on the runqueue.
        Don't count idle"""
        return len(self.tasks) - 1

    def __repr__(self):
        ret = self.tasks.__repr__()
        # NOTE(review): origin_tostring() is not defined anywhere in this
        # file, so repr() on a snapshot raises AttributeError -- confirm
        # the intended output (probably the event repr) before fixing.
        ret += self.origin_tostring()

        return ret
class TimeSlice:
    """State of every runqueue between two consecutive scheduler events."""

    def __init__(self, start, prev):
        self.start = start
        self.prev = prev
        self.end = start
        # cpus that triggered the event
        self.event_cpus = []
        if prev is not None:
            # Inherit load and runqueue map from the previous slice.
            self.total_load = prev.total_load
            self.rqs = prev.rqs.copy()
        else:
            self.rqs = defaultdict(RunqueueSnapshot)
            self.total_load = 0

    def __update_total_load(self, old_rq, new_rq):
        # Track the aggregate load delta caused by swapping runqueues.
        diff = new_rq.load() - old_rq.load()
        self.total_load += diff

    def sched_switch(self, ts_list, prev, prev_state, next, cpu):
        """Apply a context switch on *cpu*; record self in ts_list if the
        runqueue actually changed."""
        old_rq = self.prev.rqs[cpu]
        new_rq = old_rq.sched_switch(prev, prev_state, next)
        if old_rq is new_rq:
            return

        self.rqs[cpu] = new_rq
        self.__update_total_load(old_rq, new_rq)
        ts_list.append(self)
        self.event_cpus = [cpu]

    def migrate(self, ts_list, new, old_cpu, new_cpu):
        """Move task *new* from old_cpu's runqueue to new_cpu's."""
        if old_cpu == new_cpu:
            return
        old_rq = self.prev.rqs[old_cpu]
        out_rq = old_rq.migrate_out(new)
        self.rqs[old_cpu] = out_rq
        self.__update_total_load(old_rq, out_rq)

        new_rq = self.prev.rqs[new_cpu]
        in_rq = new_rq.migrate_in(new)
        self.rqs[new_cpu] = in_rq
        self.__update_total_load(new_rq, in_rq)

        ts_list.append(self)

        # The source CPU only counts as an event CPU when the task was
        # actually removed from it.
        if old_rq is not out_rq:
            self.event_cpus.append(old_cpu)
        self.event_cpus.append(new_cpu)

    def wake_up(self, ts_list, pid, cpu, fork):
        """Wake *pid* on *cpu*; *fork* is truthy for a newly forked task."""
        old_rq = self.prev.rqs[cpu]
        if fork:
            new_rq = old_rq.wake_up_new(pid)
        else:
            new_rq = old_rq.wake_up(pid)

        if new_rq is old_rq:
            return
        self.rqs[cpu] = new_rq
        self.__update_total_load(old_rq, new_rq)
        ts_list.append(self)
        self.event_cpus = [cpu]

    def next(self, t):
        """Close this slice at time *t* and return the successor slice."""
        self.end = t
        return TimeSlice(t, self)
class TimeSliceList(UserList):
    """Chronologically ordered list of TimeSlice objects, plus the
    callbacks the SchedGui widget uses to draw and inspect them."""

    def __init__(self, arg = []):
        self.data = arg

    def get_time_slice(self, ts):
        """Return a fresh slice starting at timestamp *ts*."""
        if len(self.data) == 0:
            slice = TimeSlice(ts, TimeSlice(-1, None))
        else:
            slice = self.data[-1].next(ts)
        return slice

    def find_time_slice(self, ts):
        """Binary-search the slice containing timestamp *ts*; -1 if none."""
        start = 0
        end = len(self.data)
        found = -1
        searching = True
        while searching:
            if start == end or start == end - 1:
                searching = False

            # NOTE: relies on Python 2 integer division; under Python 3
            # this would need // to remain a valid list index.
            i = (end + start) / 2
            if self.data[i].start <= ts and self.data[i].end >= ts:
                found = i
                end = i
                continue

            if self.data[i].end < ts:
                start = i

            elif self.data[i].start > ts:
                end = i

        return found

    def set_root_win(self, win):
        # GUI root frame used by the drawing callbacks below.
        self.root_win = win

    def mouse_down(self, cpu, t):
        """Show a textual summary of the slice under the mouse click."""
        idx = self.find_time_slice(t)
        if idx == -1:
            return

        ts = self[idx]
        rq = ts.rqs[cpu]
        raw = "CPU: %d\n" % cpu
        raw += "Last event : %s\n" % rq.event.__repr__()
        raw += "Timestamp : %d.%06d\n" % (ts.start / (10 ** 9), (ts.start % (10 ** 9)) / 1000)
        raw += "Duration : %6d us\n" % ((ts.end - ts.start) / (10 ** 6))
        raw += "Load = %d\n" % rq.load()
        for t in rq.tasks:
            raw += "%s \n" % thread_name(t)

        self.root_win.update_summary(raw)

    def update_rectangle_cpu(self, slice, cpu):
        """Paint one CPU's rectangle; redder means a larger share of the
        total load, with the triggering event's color on top."""
        rq = slice.rqs[cpu]

        if slice.total_load != 0:
            load_rate = rq.load() / float(slice.total_load)
        else:
            load_rate = 0

        red_power = int(0xff - (0xff * load_rate))
        color = (0xff, red_power, red_power)

        top_color = None

        if cpu in slice.event_cpus:
            top_color = rq.event.color()

        self.root_win.paint_rectangle_zone(cpu, color, top_color, slice.start, slice.end)

    def fill_zone(self, start, end):
        """Redraw every slice overlapping the [start, end] window."""
        i = self.find_time_slice(start)
        if i == -1:
            return

        for i in xrange(i, len(self.data)):
            timeslice = self.data[i]
            if timeslice.start > end:
                return

            for cpu in timeslice.rqs:
                self.update_rectangle_cpu(timeslice, cpu)

    def interval(self):
        """Return the (first_start, last_end) timestamp span."""
        if len(self.data) == 0:
            return (0, 0)

        return (self.data[0].start, self.data[-1].end)

    def nr_rectangles(self):
        """Return the highest CPU index seen in the final slice."""
        last_ts = self.data[-1]
        max_cpu = 0
        for cpu in last_ts.rqs:
            if cpu > max_cpu:
                max_cpu = cpu
        return max_cpu
class SchedEventProxy:
    """Receives raw perf events and folds them into the timeslice list."""

    def __init__(self):
        # Per-CPU pid currently on the CPU; -1 means "not yet known".
        self.current_tsk = defaultdict(lambda : -1)
        self.timeslices = TimeSliceList()

    def sched_switch(self, headers, prev_comm, prev_pid, prev_prio, prev_state,
            next_comm, next_pid, next_prio):
        """ Ensure the task we sched out this cpu is really the one
        we logged. Otherwise we may have missed traces """

        on_cpu_task = self.current_tsk[headers.cpu]
        if on_cpu_task != -1 and on_cpu_task != prev_pid:
            print "Sched switch event rejected ts: %s cpu: %d prev: %s(%d) next: %s(%d)" % \
                (headers.ts_format(), headers.cpu, prev_comm, prev_pid, next_comm, next_pid)

        # Remember the comm names so thread_name() can label these pids.
        threads[prev_pid] = prev_comm
        threads[next_pid] = next_comm
        self.current_tsk[headers.cpu] = next_pid

        ts = self.timeslices.get_time_slice(headers.ts())
        ts.sched_switch(self.timeslices, prev_pid, prev_state, next_pid, headers.cpu)

    def migrate(self, headers, pid, prio, orig_cpu, dest_cpu):
        """Record a task migration between two CPUs."""
        ts = self.timeslices.get_time_slice(headers.ts())
        ts.migrate(self.timeslices, pid, orig_cpu, dest_cpu)

    def wake_up(self, headers, comm, pid, success, target_cpu, fork):
        """Record a (successful) wakeup; *fork* marks wakeup of a new task."""
        if success == 0:
            return
        ts = self.timeslices.get_time_slice(headers.ts())
        ts.wake_up(self.timeslices, pid, target_cpu, fork)
def trace_begin():
    """perf-script hook: called once before any event is processed."""
    global parser
    parser = SchedEventProxy()
def trace_end():
    """perf-script hook: trace is finished, launch the wx GUI."""
    app = wx.App(False)
    timeslices = parser.timeslices
    # Keep a reference to the frame while the event loop runs.
    frame = RootFrame(timeslices, "Migration")
    app.MainLoop()
# The handlers below were generated by "perf script -g python" for every
# scheduler event in the trace; events this tool does not visualize are
# deliberately left as no-op stubs.

def sched__sched_stat_runtime(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid, runtime, vruntime):
    pass

def sched__sched_stat_iowait(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid, delay):
    pass

def sched__sched_stat_sleep(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid, delay):
    pass

def sched__sched_stat_wait(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid, delay):
    pass

def sched__sched_process_fork(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    parent_comm, parent_pid, child_comm, child_pid):
    pass

def sched__sched_process_wait(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid, prio):
    pass

def sched__sched_process_exit(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid, prio):
    pass

def sched__sched_process_free(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid, prio):
    pass
# Handlers for the events this tool actually visualizes: each one packs
# the common fields into an EventHeaders and forwards to the global
# SchedEventProxy created in trace_begin().

def sched__sched_migrate_task(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid, prio, orig_cpu,
    dest_cpu):
    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    parser.migrate(headers, pid, prio, orig_cpu, dest_cpu)

def sched__sched_switch(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    prev_comm, prev_pid, prev_prio, prev_state,
    next_comm, next_pid, next_prio):

    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    parser.sched_switch(headers, prev_comm, prev_pid, prev_prio, prev_state,
        next_comm, next_pid, next_prio)

def sched__sched_wakeup_new(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid, prio, success,
    target_cpu):
    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    # Final argument 1 marks this as the wakeup of a freshly forked task.
    parser.wake_up(headers, comm, pid, success, target_cpu, 1)

def sched__sched_wakeup(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid, prio, success,
    target_cpu):
    headers = EventHeaders(common_cpu, common_secs, common_nsecs,
        common_pid, common_comm)
    # Final argument 0: ordinary wakeup of an existing task.
    parser.wake_up(headers, comm, pid, success, target_cpu, 0)
# Remaining generated handlers: not needed for the migration view.

def sched__sched_wait_task(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid, prio):
    pass

def sched__sched_kthread_stop_ret(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    ret):
    pass

def sched__sched_kthread_stop(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    comm, pid):
    pass

def trace_unhandled(event_name, context, common_cpu, common_secs, common_nsecs,
    common_pid, common_comm):
    pass
| gpl-2.0 |
dkluffy/dkluff-code | code/work/lazyed_bluecoat/liblaz/dklib.py | 1 | 2154 | import re
# Matches names that look like public hosts: a 1-9 letter TLD suffix,
# or a string beginning with a digit (IP-like address).
REG_MATCH_PUBLIC_DNS = re.compile(r'\.[A-Za-z]{1,9}$|^[1-9]')
REG_REMOVESPACE = re.compile(r'".*?"')  # matches double-quoted runs (used to drop spaces inside quotes)

# (protocol name, port, port, ...) lookup rows for checkProtocol().
# Every entry must be a string: checkProtocol() compares the upper-cased
# textual token against these tuples, so an int member can never match.
PROTO_NAME_LIST = (
    ("HTTP", "80", "8080"),
    ("HTTPS", "443"),
    ("SSH", "22"),
    ("FTP", "21"),
    ("RDP", "3389"),
    ("VNC", "5800", "5900"),  # BUG FIX: was the int 5900, which never matched a string port
    ("OTHER", "-"),
    # ("CIFS", "139", "445"),
)
def checkProtocol(s):
    """Map a port/protocol token to a canonical protocol name.

    The token is upper-cased and looked up in PROTO_NAME_LIST; the
    first row containing it wins.  Unknown tokens map to "OTHER".
    """
    token = s.upper()
    return next((row[0] for row in PROTO_NAME_LIST if token in row), "OTHER")
def humanRead(n):
    """Format a (decimal, 1 MB = 1e6) byte count as a short string.

    < 1 MB   -> "<1MB"
    < 1 GB   -> e.g. "12.34MB"
    >= 1 GB  -> e.g. "1.23GB"
    """
    if n < 1000000:
        return "<1MB"
    # BUG FIX: use elif so boundary values land in the right bucket; the
    # original's "n < 1e9 and n > 1e6" dropped n == 1000000 into the GB
    # branch, printing "0.00GB".
    elif n < 1000000000:
        return "{0:.2f}MB".format(n / 1000000.00)
    else:
        return "{0:.2f}GB".format(n / 1000000000.00)
def dic_topten(dic, f, p=False):
    """Return entries of *dic* sorted descending by field *f* of the value.

    *dic* maps keys to indexable stat records, e.g.
    hostname -> [requests, bytes_up, bytes_down]; *f* selects the field
    to rank by.  When more than ten entries exist and *p* is False only
    the top ten are returned, otherwise the full sorted list comes back.
    """
    # .items() works on both Python 2 and 3; the original .iteritems()
    # was Python-2 only.
    top = sorted(dic.items(), key=lambda x: x[1][f], reverse=True)
    if len(top) >= 10 and p == False:
        return top[0:10]
    else:
        return top
def drawBar(i, t):
    """Render an ASCII progress bar for *i* out of *t*.

    A zero total is treated as 1 to avoid division by zero; each ">>"
    tick represents 5%.
    """
    total = t if t != 0 else 1
    percent = i * 100.00 / total
    ticks = ">>" * int(percent / 5)
    return "[" + ticks + "]{0:.2f}%".format(percent)
def drawTab(c):
    """Build an HTML table-row template with *c* "<td>{i}</td>" cells.

    The returned string still contains positional format fields so the
    caller can .format() the actual cell values in afterwards.
    """
    row_template = "<tr>{0}</tr>\n"
    cells = ""
    col = 0
    while col < c:
        cells += "<td>{" + str(col) + "}</td>"
        col += 1
    return row_template.format(cells)
def drawList(s):
    """Render up to five items of *s* as an HTML <ul>; any extra items
    are tucked into a <select> drop-down prefixed with "--More--".

    Items are consumed from the END of list(s), so for a list input the
    rendered order is reversed (for a set it is arbitrary, as before).
    """
    remaining = list(s)
    shown = ""
    count = 0
    while remaining and count < 5:
        shown += "<li>" + str(remaining.pop()) + "</li>\n"
        count += 1
    if remaining:
        options = "<option>--More--</option>"
        while remaining:
            options += "<option>" + str(remaining.pop()) + "</option>"
        shown += "<li><select>{0}</select></li>".format(options)
    return "<ul>{0}</ul>".format(shown)
import time
def drawTime():
    """Return the report's date range "YYYY.MM.DD - YYYY.MM.DD" covering
    the seven days up to now.

    BUG FIX: the original computed titletime but never returned it, so
    every caller received None.
    """
    e_curtime = time.time()
    s_curtime = e_curtime - 7 * 24 * 3600
    _e = time.strftime("%Y.%m.%d", time.localtime(e_curtime))
    _s = time.strftime("%Y.%m.%d", time.localtime(s_curtime))
    return _s + " - " + _e
| apache-2.0 |
jimberlage/servo | tests/wpt/web-platform-tests/tools/third_party/pytest/src/_pytest/assertion/rewrite.py | 32 | 35779 | """Rewrite assertion AST to produce nice error messages"""
from __future__ import absolute_import, division, print_function
import ast
import errno
import itertools
import imp
import marshal
import os
import re
import six
import struct
import sys
import types
import atomicwrites
import py
from _pytest.assertion import util
# pytest caches rewritten pycs in __pycache__.
if hasattr(imp, "get_tag"):
    # CPython 3.2+/PEP 3147: derive the cache tag from the interpreter.
    PYTEST_TAG = imp.get_tag() + "-PYTEST"
else:
    # Older interpreters: build an equivalent tag by hand.
    if hasattr(sys, "pypy_version_info"):
        impl = "pypy"
    elif sys.platform == "java":
        impl = "jython"
    else:
        impl = "cpython"
    ver = sys.version_info
    PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1])
    del ver, impl

# ".pyc" normally, ".pyo" when running with -O (__debug__ is False).
PYC_EXT = ".py" + (__debug__ and "c" or "o")
PYC_TAIL = "." + PYTEST_TAG + PYC_EXT

# Python 2 defaults source decoding to ASCII; see _rewrite_test().
ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3
# ast.Call lost its starargs/kwargs slots in Python 3.5; this shim gives
# both eras a uniform three-argument constructor.
if sys.version_info >= (3, 5):
    ast_Call = ast.Call
else:

    def ast_Call(a, b, c):
        return ast.Call(a, b, c, None, None)
class AssertionRewritingHook(object):
    """PEP302 Import hook which rewrites asserts."""

    def __init__(self, config):
        self.config = config
        # Glob patterns identifying test files (python_files ini option).
        self.fnpats = config.getini("python_files")
        self.session = None
        # name -> (code object, pyc path) staged by find_module for
        # load_module to consume.
        self.modules = {}
        self._rewritten_names = set()
        self._register_with_pkg_resources()
        # Names explicitly registered via mark_rewrite().
        self._must_rewrite = set()

    def set_session(self, session):
        """Attach the current test session (used to match initial paths)."""
        self.session = session

    def find_module(self, name, path=None):
        """PEP302 finder: return self if *name* should be rewritten.

        Locates the module source, rewrites and caches it, and stages the
        code object for load_module(); returns None to fall back to the
        normal import machinery.
        """
        state = self.config._assertstate
        state.trace("find_module called for: %s" % name)
        names = name.rsplit(".", 1)
        lastname = names[-1]
        pth = None
        if path is not None:
            # Starting with Python 3.3, path is a _NamespacePath(), which
            # causes problems if not converted to list.
            path = list(path)
            if len(path) == 1:
                pth = path[0]
        if pth is None:
            try:
                fd, fn, desc = imp.find_module(lastname, path)
            except ImportError:
                return None
            if fd is not None:
                fd.close()
            tp = desc[2]
            if tp == imp.PY_COMPILED:
                if hasattr(imp, "source_from_cache"):
                    try:
                        fn = imp.source_from_cache(fn)
                    except ValueError:
                        # Python 3 doesn't like orphaned but still-importable
                        # .pyc files.
                        fn = fn[:-1]
                else:
                    fn = fn[:-1]
            elif tp != imp.PY_SOURCE:
                # Don't know what this is.
                return None
        else:
            fn = os.path.join(pth, name.rpartition(".")[2] + ".py")

        fn_pypath = py.path.local(fn)
        if not self._should_rewrite(name, fn_pypath, state):
            return None

        self._rewritten_names.add(name)

        # The requested module looks like a test file, so rewrite it. This is
        # the most magical part of the process: load the source, rewrite the
        # asserts, and load the rewritten source. We also cache the rewritten
        # module code in a special pyc. We must be aware of the possibility of
        # concurrent pytest processes rewriting and loading pycs. To avoid
        # tricky race conditions, we maintain the following invariant: The
        # cached pyc is always a complete, valid pyc. Operations on it must be
        # atomic. POSIX's atomic rename comes in handy.
        write = not sys.dont_write_bytecode
        cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
        if write:
            try:
                os.mkdir(cache_dir)
            except OSError:
                e = sys.exc_info()[1].errno
                if e == errno.EEXIST:
                    # Either the __pycache__ directory already exists (the
                    # common case) or it's blocked by a non-dir node. In the
                    # latter case, we'll ignore it in _write_pyc.
                    pass
                elif e in [errno.ENOENT, errno.ENOTDIR]:
                    # One of the path components was not a directory, likely
                    # because we're in a zip file.
                    write = False
                elif e in [errno.EACCES, errno.EROFS, errno.EPERM]:
                    state.trace("read only directory: %r" % fn_pypath.dirname)
                    write = False
                else:
                    raise
        cache_name = fn_pypath.basename[:-3] + PYC_TAIL
        pyc = os.path.join(cache_dir, cache_name)
        # Notice that even if we're in a read-only directory, I'm going
        # to check for a cached pyc. This may not be optimal...
        co = _read_pyc(fn_pypath, pyc, state.trace)
        if co is None:
            state.trace("rewriting %r" % (fn,))
            source_stat, co = _rewrite_test(self.config, fn_pypath)
            if co is None:
                # Probably a SyntaxError in the test.
                return None
            if write:
                _write_pyc(state, co, source_stat, pyc)
        else:
            state.trace("found cached rewritten pyc for %r" % (fn,))
        self.modules[name] = co, pyc
        return self

    def _should_rewrite(self, name, fn_pypath, state):
        """Decide whether module *name* at *fn_pypath* gets rewritten."""
        # always rewrite conftest files
        fn = str(fn_pypath)
        if fn_pypath.basename == "conftest.py":
            state.trace("rewriting conftest file: %r" % (fn,))
            return True

        if self.session is not None:
            if self.session.isinitpath(fn):
                state.trace("matched test file (was specified on cmdline): %r" % (fn,))
                return True

        # modules not passed explicitly on the command line are only
        # rewritten if they match the naming convention for test files
        for pat in self.fnpats:
            if fn_pypath.fnmatch(pat):
                state.trace("matched test file %r" % (fn,))
                return True

        # Names (or parents of names) registered via mark_rewrite().
        for marked in self._must_rewrite:
            if name == marked or name.startswith(marked + "."):
                state.trace("matched marked file %r (from %r)" % (name, marked))
                return True

        return False

    def mark_rewrite(self, *names):
        """Mark import names as needing to be rewritten.

        The named module or package as well as any nested modules will
        be rewritten on import.
        """
        already_imported = (
            set(names).intersection(sys.modules).difference(self._rewritten_names)
        )
        for name in already_imported:
            if not AssertionRewriter.is_rewrite_disabled(
                sys.modules[name].__doc__ or ""
            ):
                self._warn_already_imported(name)
        self._must_rewrite.update(names)

    def _warn_already_imported(self, name):
        # Too late to rewrite a module that is already in sys.modules.
        self.config.warn(
            "P1", "Module already imported so cannot be rewritten: %s" % name
        )

    def load_module(self, name):
        """PEP302 loader: execute the staged rewritten code object."""
        # If there is an existing module object named 'fullname' in
        # sys.modules, the loader must use that existing module. (Otherwise,
        # the reload() builtin will not work correctly.)
        if name in sys.modules:
            return sys.modules[name]

        co, pyc = self.modules.pop(name)
        # I wish I could just call imp.load_compiled here, but __file__ has to
        # be set properly. In Python 3.2+, this all would be handled correctly
        # by load_compiled.
        mod = sys.modules[name] = imp.new_module(name)
        try:
            mod.__file__ = co.co_filename
            # Normally, this attribute is 3.2+.
            mod.__cached__ = pyc
            mod.__loader__ = self
            py.builtin.exec_(co, mod.__dict__)
        # Bare except on purpose: roll back sys.modules on ANY failure,
        # then re-raise.
        except: # noqa
            if name in sys.modules:
                del sys.modules[name]
            raise
        return sys.modules[name]

    def is_package(self, name):
        """PEP302 optional API: is *name* a package directory?"""
        try:
            fd, fn, desc = imp.find_module(name)
        except ImportError:
            return False
        if fd is not None:
            fd.close()
        tp = desc[2]
        return tp == imp.PKG_DIRECTORY

    @classmethod
    def _register_with_pkg_resources(cls):
        """
        Ensure package resources can be loaded from this loader. May be called
        multiple times, as the operation is idempotent.
        """
        try:
            import pkg_resources

            # access an attribute in case a deferred importer is present
            pkg_resources.__name__
        except ImportError:
            return

        # Since pytest tests are always located in the file system, the
        # DefaultProvider is appropriate.
        pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider)

    def get_data(self, pathname):
        """Optional PEP302 get_data API.
        """
        with open(pathname, "rb") as f:
            return f.read()
def _write_pyc(state, co, source_stat, pyc):
    """Atomically write code object *co* to cache file *pyc*.

    Returns True on success, False when the cache could not be written
    (which is always tolerated).
    """
    # Technically, we don't have to have the same pyc format as
    # (C)Python, since these "pycs" should never be seen by builtin
    # import. However, there's little reason deviate, and I hope
    # sometime to be able to use imp.load_compiled to load them. (See
    # the comment in load_module above.)
    try:
        with atomicwrites.atomic_write(pyc, mode="wb", overwrite=True) as fp:
            fp.write(imp.get_magic())
            # Header: source mtime and size, used by _read_pyc for
            # staleness checks (size truncated to 32 bits, like CPython).
            mtime = int(source_stat.mtime)
            size = source_stat.size & 0xFFFFFFFF
            fp.write(struct.pack("<ll", mtime, size))
            fp.write(marshal.dumps(co))
    except EnvironmentError as e:
        state.trace("error writing pyc file at %s: errno=%s" % (pyc, e.errno))
        # we ignore any failure to write the cache file
        # there are many reasons, permission-denied, __pycache__ being a
        # file etc.
        return False
    return True
# Line-ending byte sequences used when scanning source text.
RN = "\r\n".encode("utf-8")
N = "\n".encode("utf-8")

# PEP 263 coding-cookie pattern, e.g. "# -*- coding: utf-8 -*-".
cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+")
# UTF-8 byte-order mark (as a Python 2 str literal).
BOM_UTF8 = "\xef\xbb\xbf"
def _rewrite_test(config, fn):
    """Try to read and rewrite *fn* and return the code object."""
    state = config._assertstate
    try:
        stat = fn.stat()
        source = fn.read("rb")
    except EnvironmentError:
        return None, None
    if ASCII_IS_DEFAULT_ENCODING:
        # ASCII is the default encoding in Python 2. Without a coding
        # declaration, Python 2 will complain about any bytes in the file
        # outside the ASCII range. Sadly, this behavior does not extend to
        # compile() or ast.parse(), which prefer to interpret the bytes as
        # latin-1. (At least they properly handle explicit coding cookies.) To
        # preserve this error behavior, we could force ast.parse() to use ASCII
        # as the encoding by inserting a coding cookie. Unfortunately, that
        # messes up line numbers. Thus, we have to check ourselves if anything
        # is outside the ASCII range in the case no encoding is explicitly
        # declared. For more context, see issue #269. Yay for Python 3 which
        # gets this right.
        end1 = source.find("\n")
        end2 = source.find("\n", end1 + 1)
        # A coding cookie is only honored on the first two lines.
        if (
            not source.startswith(BOM_UTF8)
            and cookie_re.match(source[0:end1]) is None
            and cookie_re.match(source[end1 + 1:end2]) is None
        ):
            if hasattr(state, "_indecode"):
                # encodings imported us again, so don't rewrite.
                return None, None
            state._indecode = True
            try:
                try:
                    source.decode("ascii")
                except UnicodeDecodeError:
                    # Let it fail in real import.
                    return None, None
            finally:
                del state._indecode
    try:
        tree = ast.parse(source)
    except SyntaxError:
        # Let this pop up again in the real import.
        state.trace("failed to parse: %r" % (fn,))
        return None, None
    # Transform assert statements in-place, then compile the new tree.
    rewrite_asserts(tree, fn, config)
    try:
        co = compile(tree, fn.strpath, "exec", dont_inherit=True)
    except SyntaxError:
        # It's possible that this error is from some bug in the
        # assertion rewriting, but I don't know of a fast way to tell.
        state.trace("failed to compile: %r" % (fn,))
        return None, None
    return stat, co
def _read_pyc(source, pyc, trace=lambda x: None):
"""Possibly read a pytest pyc containing rewritten code.
Return rewritten code if successful or None if not.
"""
try:
fp = open(pyc, "rb")
except IOError:
return None
with fp:
try:
mtime = int(source.mtime())
size = source.size()
data = fp.read(12)
except EnvironmentError as e:
trace("_read_pyc(%s): EnvironmentError %s" % (source, e))
return None
# Check for invalid or out of date pyc file.
if (
len(data) != 12
or data[:4] != imp.get_magic()
or struct.unpack("<ll", data[4:]) != (mtime, size)
):
trace("_read_pyc(%s): invalid or out of date pyc" % source)
return None
try:
co = marshal.load(fp)
except Exception as e:
trace("_read_pyc(%s): marshal.load error %s" % (source, e))
return None
if not isinstance(co, types.CodeType):
trace("_read_pyc(%s): not a code object" % source)
return None
return co
def rewrite_asserts(mod, module_path=None, config=None):
    """Rewrite the assert statements in *mod* in place."""
    rewriter = AssertionRewriter(module_path, config)
    rewriter.run(mod)
def _saferepr(obj):
    """Get a safe repr of an object for assertion error messages.

    The assertion formatting (util.format_explanation()) requires
    newlines to be escaped since they are a special character for it.
    Normally assertion.util.format_explanation() does this but for a
    custom repr it is possible to contain one of the special escape
    sequences, especially '\n{' and '\n}' are likely to be present in
    JSON reprs.
    """
    # Renamed from ``repr`` -- the original shadowed the builtin of the
    # same name inside this function.
    obj_repr = py.io.saferepr(obj)
    # Pick the matching string type so the replace works on both text and
    # bytes reprs (Python 2 compatibility).
    if isinstance(obj_repr, six.text_type):
        t = six.text_type
    else:
        t = six.binary_type
    return obj_repr.replace(t("\n"), t("\\n"))
from _pytest.assertion.util import format_explanation as _format_explanation # noqa
def _format_assertmsg(obj):
    """Format the custom assertion message given.

    Strings get their newlines replaced with '\n~' so that
    util.format_explanation() preserves them instead of escaping them.
    Any other object is first converted with py.io.saferepr().
    """
    # reprlib appears to have a bug which means that if a string
    # contains a newline it gets escaped, however if an object has a
    # .__repr__() which contains newlines it does not get escaped.
    # However in either case we want to preserve the newline.
    is_repr = not isinstance(obj, (six.text_type, six.binary_type))
    s = py.io.saferepr(obj) if is_repr else obj
    t = six.text_type if isinstance(s, six.text_type) else six.binary_type
    s = s.replace(t("\n"), t("\n~")).replace(t("%"), t("%%"))
    if is_repr:
        # The repr escaped the newline, so undo that escaping too.
        s = s.replace(t("\\n"), t("\n~"))
    return s
def _should_repr_global_name(obj):
return not hasattr(obj, "__name__") and not callable(obj)
def _format_boolop(explanations, is_or):
    """Join sub-explanations with 'or'/'and', escaping '%' for later
    %-formatting of the assembled assertion message."""
    joiner = " or " if is_or else " and "
    explanation = "(" + joiner.join(explanations) + ")"
    t = six.text_type if isinstance(explanation, six.text_type) else six.binary_type
    return explanation.replace(t("%"), t("%%"))
def _call_reprcompare(ops, results, expls, each_obj):
    """Return the explanation for the first failing comparison in a chain.

    ``ops``/``results``/``expls`` are parallel sequences generated for a
    chained comparison; ``each_obj`` holds the N+1 operand values.  A
    plugin-provided ``util._reprcompare`` hook may override the default
    explanation.
    """
    for i, res, expl in zip(range(len(ops)), results, expls):
        try:
            done = not res
        except Exception:
            # A truthiness check on an exotic object may itself raise;
            # treat that as a failure point.
            done = True
        if done:
            break
    # NOTE: ``i`` and ``expl`` intentionally leak out of the loop -- they
    # refer to the first failing comparison, or the last one if all passed.
    if util._reprcompare is not None:
        custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])
        if custom is not None:
            return custom
    return expl
# %-format templates for unary operators, keyed by AST operator class.
unary_map = {ast.Not: "not %s", ast.Invert: "~%s", ast.USub: "-%s", ast.UAdd: "+%s"}
# Display symbols for binary and comparison operators, keyed by AST class.
binop_map = {
    ast.BitOr: "|",
    ast.BitXor: "^",
    ast.BitAnd: "&",
    ast.LShift: "<<",
    ast.RShift: ">>",
    ast.Add: "+",
    ast.Sub: "-",
    ast.Mult: "*",
    ast.Div: "/",
    ast.FloorDiv: "//",
    ast.Mod: "%%",  # escaped for string formatting
    ast.Eq: "==",
    ast.NotEq: "!=",
    ast.Lt: "<",
    ast.LtE: "<=",
    ast.Gt: ">",
    ast.GtE: ">=",
    ast.Pow: "**",
    ast.Is: "is",
    ast.IsNot: "is not",
    ast.In: "in",
    ast.NotIn: "not in",
}
# Python 3.5+ compatibility: the matrix-multiply operator only exists there.
try:
    binop_map[ast.MatMult] = "@"
except AttributeError:
    pass
# Python 3.4+ compatibility: NameConstant replaced Name for True/False/None.
if hasattr(ast, "NameConstant"):
    _NameConstant = ast.NameConstant
else:
    def _NameConstant(c):
        # Fallback for older ASTs: reference the constant by name instead.
        return ast.Name(str(c), ast.Load())
def set_location(node, lineno, col_offset):
    """Set node location information recursively.

    Stamps *lineno* and *col_offset* onto *node* and every descendant that
    carries location attributes, then returns *node*.
    """
    # ast.walk yields the node itself followed by all of its descendants,
    # which covers exactly the same set as an explicit recursion.
    for descendant in ast.walk(node):
        if "lineno" in descendant._attributes:
            descendant.lineno = lineno
        if "col_offset" in descendant._attributes:
            descendant.col_offset = col_offset
    return node
class AssertionRewriter(ast.NodeVisitor):
    """Assertion rewriting implementation.

    The main entrypoint is to call .run() with an ast.Module instance,
    this will then find all the assert statements and rewrite them to
    provide intermediate values and a detailed assertion error.  See
    http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
    for an overview of how this works.

    The entry point here is .run() which will iterate over all the
    statements in an ast.Module and for each ast.Assert statement it
    finds call .visit() with it.  Then .visit_Assert() takes over and
    is responsible for creating new ast statements to replace the
    original assert statement: it rewrites the test of an assertion
    to provide intermediate values and replace it with an if statement
    which raises an assertion error with a detailed explanation in
    case the expression is false.

    For this .visit_Assert() uses the visitor pattern to visit all the
    AST nodes of the ast.Assert.test field, each visit call returning
    an AST node and the corresponding explanation string.  During this
    state is kept in several instance attributes:

    :statements: All the AST statements which will replace the assert
       statement.

    :variables: This is populated by .variable() with each variable
       used by the statements so that they can all be set to None at
       the end of the statements.

    :variable_counter: Counter to create new unique variables needed
       by statements.  Variables are created using .variable() and
       have the form of "@py_assert0".

    :on_failure: The AST statements which will be executed if the
       assertion test fails.  This is the code which will construct
       the failure message and raises the AssertionError.

    :explanation_specifiers: A dict filled by .explanation_param()
       with %-formatting placeholders and their corresponding
       expressions to use in the building of an assertion message.
       This is used by .pop_format_context() to build a message.

    :stack: A stack of the explanation_specifiers dicts maintained by
       .push_format_context() and .pop_format_context() which allows
       to build another %-formatted string while already building one.

    This state is reset on every new assert statement visited and used
    by the other visitors.
    """

    def __init__(self, module_path, config):
        super(AssertionRewriter, self).__init__()
        # Path of the module being rewritten (used for warning locations)
        # and the pytest config; both may be None outside a test run.
        self.module_path = module_path
        self.config = config

    def run(self, mod):
        """Find all assert statements in *mod* and rewrite them."""
        if not mod.body:
            # Nothing to do.
            return
        # Insert some special imports at the top of the module but after any
        # docstrings and __future__ imports.
        aliases = [
            ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
            ast.alias("_pytest.assertion.rewrite", "@pytest_ar"),
        ]
        doc = getattr(mod, "docstring", None)
        expect_docstring = doc is None
        if doc is not None and self.is_rewrite_disabled(doc):
            return
        pos = 0
        lineno = 1
        # Scan past the module docstring and any __future__ imports to find
        # the insertion point for our imports.
        for item in mod.body:
            if (
                expect_docstring
                and isinstance(item, ast.Expr)
                and isinstance(item.value, ast.Str)
            ):
                doc = item.value.s
                if self.is_rewrite_disabled(doc):
                    return
                expect_docstring = False
            elif (
                not isinstance(item, ast.ImportFrom)
                or item.level > 0
                or item.module != "__future__"
            ):
                lineno = item.lineno
                break
            pos += 1
        else:
            # The module consists only of docstring/__future__ imports.
            lineno = item.lineno
        imports = [
            ast.Import([alias], lineno=lineno, col_offset=0) for alias in aliases
        ]
        mod.body[pos:pos] = imports
        # Collect asserts.
        nodes = [mod]
        while nodes:
            node = nodes.pop()
            for name, field in ast.iter_fields(node):
                if isinstance(field, list):
                    new = []
                    for i, child in enumerate(field):
                        if isinstance(child, ast.Assert):
                            # Transform assert.
                            new.extend(self.visit(child))
                        else:
                            new.append(child)
                            if isinstance(child, ast.AST):
                                nodes.append(child)
                    setattr(node, name, new)
                elif (
                    isinstance(field, ast.AST)
                    and
                    # Don't recurse into expressions as they can't contain
                    # asserts.
                    not isinstance(field, ast.expr)
                ):
                    nodes.append(field)

    @staticmethod
    def is_rewrite_disabled(docstring):
        """Return True when the docstring opts the module out of rewriting."""
        return "PYTEST_DONT_REWRITE" in docstring

    def variable(self):
        """Get a new variable."""
        # Use a character invalid in python identifiers to avoid clashing.
        name = "@py_assert" + str(next(self.variable_counter))
        self.variables.append(name)
        return name

    def assign(self, expr):
        """Give *expr* a name."""
        name = self.variable()
        self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))
        return ast.Name(name, ast.Load())

    def display(self, expr):
        """Call py.io.saferepr on the expression."""
        return self.helper("saferepr", expr)

    def helper(self, name, *args):
        """Call a helper in this module."""
        # Helpers live on the "@pytest_ar" alias imported by .run(); they are
        # the module-level "_"-prefixed functions above.
        py_name = ast.Name("@pytest_ar", ast.Load())
        attr = ast.Attribute(py_name, "_" + name, ast.Load())
        return ast_Call(attr, list(args), [])

    def builtin(self, name):
        """Return the builtin called *name*."""
        builtin_name = ast.Name("@py_builtins", ast.Load())
        return ast.Attribute(builtin_name, name, ast.Load())

    def explanation_param(self, expr):
        """Return a new named %-formatting placeholder for expr.

        This creates a %-formatting placeholder for expr in the
        current formatting context, e.g. ``%(py0)s``.  The placeholder
        and expr are placed in the current format context so that it
        can be used on the next call to .pop_format_context().
        """
        specifier = "py" + str(next(self.variable_counter))
        self.explanation_specifiers[specifier] = expr
        return "%(" + specifier + ")s"

    def push_format_context(self):
        """Create a new formatting context.

        The format context is used for when an explanation wants to
        have a variable value formatted in the assertion message.  In
        this case the value required can be added using
        .explanation_param().  Finally .pop_format_context() is used
        to format a string of %-formatted values as added by
        .explanation_param().
        """
        self.explanation_specifiers = {}
        self.stack.append(self.explanation_specifiers)

    def pop_format_context(self, expl_expr):
        """Format the %-formatted string with current format context.

        The expl_expr should be an ast.Str instance constructed from
        the %-placeholders created by .explanation_param().  This will
        add the required code to format said string to .on_failure and
        return the ast.Name instance of the formatted string.
        """
        current = self.stack.pop()
        if self.stack:
            self.explanation_specifiers = self.stack[-1]
        keys = [ast.Str(key) for key in current.keys()]
        format_dict = ast.Dict(keys, list(current.values()))
        # Emits: "@py_formatN = <template> % {<specifier>: <expr>, ...}"
        form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
        name = "@py_format" + str(next(self.variable_counter))
        self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form))
        return ast.Name(name, ast.Load())

    def generic_visit(self, node):
        """Handle expressions we don't have custom code for."""
        assert isinstance(node, ast.expr)
        res = self.assign(node)
        return res, self.explanation_param(self.display(res))

    def visit_Assert(self, assert_):
        """Return the AST statements to replace the ast.Assert instance.

        This rewrites the test of an assertion to provide
        intermediate values and replace it with an if statement which
        raises an assertion error with a detailed explanation in case
        the expression is false.
        """
        if isinstance(assert_.test, ast.Tuple) and self.config is not None:
            # "assert (x, msg)" is a non-empty tuple and thus always true.
            fslocation = (self.module_path, assert_.lineno)
            self.config.warn(
                "R1",
                "assertion is always true, perhaps " "remove parentheses?",
                fslocation=fslocation,
            )
        # Reset the per-assert rewriting state (see class docstring).
        self.statements = []
        self.variables = []
        self.variable_counter = itertools.count()
        self.stack = []
        self.on_failure = []
        self.push_format_context()
        # Rewrite assert into a bunch of statements.
        top_condition, explanation = self.visit(assert_.test)
        # Create failure message.
        body = self.on_failure
        negation = ast.UnaryOp(ast.Not(), top_condition)
        self.statements.append(ast.If(negation, body, []))
        if assert_.msg:
            assertmsg = self.helper("format_assertmsg", assert_.msg)
            explanation = "\n>assert " + explanation
        else:
            assertmsg = ast.Str("")
            explanation = "assert " + explanation
        template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))
        msg = self.pop_format_context(template)
        fmt = self.helper("format_explanation", msg)
        err_name = ast.Name("AssertionError", ast.Load())
        exc = ast_Call(err_name, [fmt], [])
        if sys.version_info[0] >= 3:
            raise_ = ast.Raise(exc, None)
        else:
            # Python 2 Raise nodes take a third (traceback) argument.
            raise_ = ast.Raise(exc, None, None)
        body.append(raise_)
        # Clear temporary variables by setting them to None.
        if self.variables:
            variables = [ast.Name(name, ast.Store()) for name in self.variables]
            clear = ast.Assign(variables, _NameConstant(None))
            self.statements.append(clear)
        # Fix line numbers.
        for stmt in self.statements:
            set_location(stmt, assert_.lineno, assert_.col_offset)
        return self.statements

    def visit_Name(self, name):
        # Display the repr of the name if it's a local variable or
        # _should_repr_global_name() thinks it's acceptable.
        locs = ast_Call(self.builtin("locals"), [], [])
        inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
        dorepr = self.helper("should_repr_global_name", name)
        test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
        expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
        return name, self.explanation_param(expr)

    def visit_BoolOp(self, boolop):
        res_var = self.variable()
        expl_list = self.assign(ast.List([], ast.Load()))
        app = ast.Attribute(expl_list, "append", ast.Load())
        is_or = int(isinstance(boolop.op, ast.Or))
        body = save = self.statements
        fail_save = self.on_failure
        levels = len(boolop.values) - 1
        self.push_format_context()
        # Process each operand, short-circuting if needed.
        for i, v in enumerate(boolop.values):
            if i:
                fail_inner = []
                # cond is set in a prior loop iteration below
                self.on_failure.append(ast.If(cond, fail_inner, []))  # noqa
                self.on_failure = fail_inner
            self.push_format_context()
            res, expl = self.visit(v)
            body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
            expl_format = self.pop_format_context(ast.Str(expl))
            call = ast_Call(app, [expl_format], [])
            self.on_failure.append(ast.Expr(call))
            if i < levels:
                # Nest the remaining operands under an "if" so evaluation
                # short-circuits exactly like the original and/or would.
                cond = res
                if is_or:
                    cond = ast.UnaryOp(ast.Not(), cond)
                inner = []
                self.statements.append(ast.If(cond, inner, []))
                self.statements = body = inner
        self.statements = save
        self.on_failure = fail_save
        expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
        expl = self.pop_format_context(expl_template)
        return ast.Name(res_var, ast.Load()), self.explanation_param(expl)

    def visit_UnaryOp(self, unary):
        pattern = unary_map[unary.op.__class__]
        operand_res, operand_expl = self.visit(unary.operand)
        res = self.assign(ast.UnaryOp(unary.op, operand_res))
        return res, pattern % (operand_expl,)

    def visit_BinOp(self, binop):
        symbol = binop_map[binop.op.__class__]
        left_expr, left_expl = self.visit(binop.left)
        right_expr, right_expl = self.visit(binop.right)
        explanation = "(%s %s %s)" % (left_expl, symbol, right_expl)
        res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))
        return res, explanation

    def visit_Call_35(self, call):
        """
        visit `ast.Call` nodes on Python3.5 and after
        """
        new_func, func_expl = self.visit(call.func)
        arg_expls = []
        new_args = []
        new_kwargs = []
        for arg in call.args:
            res, expl = self.visit(arg)
            arg_expls.append(expl)
            new_args.append(res)
        for keyword in call.keywords:
            res, expl = self.visit(keyword.value)
            new_kwargs.append(ast.keyword(keyword.arg, res))
            if keyword.arg:
                arg_expls.append(keyword.arg + "=" + expl)
            else:  # **args have `arg` keywords with an .arg of None
                arg_expls.append("**" + expl)
        expl = "%s(%s)" % (func_expl, ", ".join(arg_expls))
        new_call = ast.Call(new_func, new_args, new_kwargs)
        res = self.assign(new_call)
        res_expl = self.explanation_param(self.display(res))
        outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
        return res, outer_expl

    def visit_Starred(self, starred):
        # From Python 3.5, a Starred node can appear in a function call
        res, expl = self.visit(starred.value)
        return starred, "*" + expl

    def visit_Call_legacy(self, call):
        """
        visit `ast.Call nodes on 3.4 and below`
        """
        new_func, func_expl = self.visit(call.func)
        arg_expls = []
        new_args = []
        new_kwargs = []
        new_star = new_kwarg = None
        for arg in call.args:
            res, expl = self.visit(arg)
            new_args.append(res)
            arg_expls.append(expl)
        for keyword in call.keywords:
            res, expl = self.visit(keyword.value)
            new_kwargs.append(ast.keyword(keyword.arg, res))
            arg_expls.append(keyword.arg + "=" + expl)
        # Older ASTs carry *args/**kwargs as separate Call fields.
        if call.starargs:
            new_star, expl = self.visit(call.starargs)
            arg_expls.append("*" + expl)
        if call.kwargs:
            new_kwarg, expl = self.visit(call.kwargs)
            arg_expls.append("**" + expl)
        expl = "%s(%s)" % (func_expl, ", ".join(arg_expls))
        new_call = ast.Call(new_func, new_args, new_kwargs, new_star, new_kwarg)
        res = self.assign(new_call)
        res_expl = self.explanation_param(self.display(res))
        outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
        return res, outer_expl

    # ast.Call signature changed on 3.5,
    # conditionally change which methods is named
    # visit_Call depending on Python version
    if sys.version_info >= (3, 5):
        visit_Call = visit_Call_35
    else:
        visit_Call = visit_Call_legacy

    def visit_Attribute(self, attr):
        if not isinstance(attr.ctx, ast.Load):
            # Only attribute *reads* are rewritten; stores/deletes pass through.
            return self.generic_visit(attr)
        value, value_expl = self.visit(attr.value)
        res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))
        res_expl = self.explanation_param(self.display(res))
        pat = "%s\n{%s = %s.%s\n}"
        expl = pat % (res_expl, res_expl, value_expl, attr.attr)
        return res, expl

    def visit_Compare(self, comp):
        self.push_format_context()
        left_res, left_expl = self.visit(comp.left)
        if isinstance(comp.left, (ast.Compare, ast.BoolOp)):
            left_expl = "({})".format(left_expl)
        res_variables = [self.variable() for i in range(len(comp.ops))]
        load_names = [ast.Name(v, ast.Load()) for v in res_variables]
        store_names = [ast.Name(v, ast.Store()) for v in res_variables]
        it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
        expls = []
        syms = []
        results = [left_res]
        # Unroll a chained comparison (a < b < c) into individual pairwise
        # comparisons whose results are all captured.
        for i, op, next_operand in it:
            next_res, next_expl = self.visit(next_operand)
            if isinstance(next_operand, (ast.Compare, ast.BoolOp)):
                next_expl = "({})".format(next_expl)
            results.append(next_res)
            sym = binop_map[op.__class__]
            syms.append(ast.Str(sym))
            expl = "%s %s %s" % (left_expl, sym, next_expl)
            expls.append(ast.Str(expl))
            res_expr = ast.Compare(left_res, [op], [next_res])
            self.statements.append(ast.Assign([store_names[i]], res_expr))
            left_res, left_expl = next_res, next_expl
        # Use pytest.assertion.util._reprcompare if that's available.
        expl_call = self.helper(
            "call_reprcompare",
            ast.Tuple(syms, ast.Load()),
            ast.Tuple(load_names, ast.Load()),
            ast.Tuple(expls, ast.Load()),
            ast.Tuple(results, ast.Load()),
        )
        if len(comp.ops) > 1:
            res = ast.BoolOp(ast.And(), load_names)
        else:
            res = load_names[0]
        return res, self.explanation_param(self.pop_format_context(expl_call))
| mpl-2.0 |
tedmeeds/tcga_encoder | tcga_encoder/models/vae/batcher_dna_prediction_from_sources.py | 1 | 20697 | from tcga_encoder.models.vae.batcher_ABC import *
class DnaBatcher( TCGABatcherABC ):
def PostInitInit(self):
if self.data_dict.has_key("dna_genes"):
self.dna_genes = self.data_dict["dna_genes"]
self.dna_store = self.dna_store[self.dna_genes]
self.dna_dim = len(self.dna_genes)
self.dims_dict[DNA] = self.dna_dim
def CallBack( self, function_name, sess, cb_info ):
if function_name == "everything":
self.FillDna( sess, cb_info )
#self.TestFillZ( sess, cb_info )
#self.TrainFillZ( sess, cb_info )
self.SaveModel( sess, cb_info )
self.BatchEpoch( sess, cb_info )
self.TestEpoch( sess, cb_info )
self.ValEpoch( sess, cb_info )
self.VizEpochs( sess, cb_info )
self.VizModel( sess, cb_info )
    def FillDna( self, sess, info_dict ):
        """Impute DNA predictions for the validation, batch and train splits.

        Validation and batch splits use the feed dictionaries prepared in
        *info_dict*; the training split is processed in chunks of 5000
        barcodes, building a fresh feed dict per chunk.  Side effect:
        ``self.batch_ids`` is overwritten for each split processed.
        """
        epoch = info_dict[EPOCH]
        # feed_dict = info_dict[TEST_FEED_DICT]
        # impute_dict = info_dict[TEST_FEED_IMPUTATION]
        #
        # self.RunFillZ( epoch, sess, feed_dict, impute_dict, mode="TEST" )
        feed_dict = info_dict[VAL_FEED_DICT]
        impute_dict = info_dict[VAL_FEED_IMPUTATION]
        self.RunFillDna( epoch, sess, feed_dict, impute_dict, mode="VAL" )
        feed_dict = info_dict[BATCH_FEED_DICT]
        impute_dict = info_dict[BATCH_FEED_IMPUTATION]
        self.batch_ids = info_dict["batch_ids"]
        self.RunFillDna( epoch, sess, feed_dict, impute_dict, mode="BATCH" )
        # Chunk the training set so a single sess.run never sees more than
        # 5000 samples at once.
        for batch_ids in chunks( np.arange(len(self.train_barcodes)), 5000 ):
            barcodes = self.train_barcodes[batch_ids]
            impute_dict = self.FillBatch( barcodes, mode = "TRAIN" ) #self.NextBatch(batch_ids)
            self.batch_ids = batch_ids
            train_feed_dict={}
            self.network.FillFeedDict( train_feed_dict, impute_dict )
            #batch = self.FillBatch( impute_dict[BARCODES], mode )
            self.RunFillDna( epoch, sess, train_feed_dict, impute_dict, mode="TRAIN" )
    def GetAlgoDictStuff(self):
        """Hook for algorithm-specific setup; intentionally a no-op here."""
        pass
def SummarizeData(self):
print "Running: SummarizeData()"
self.dna_mean = self.dna_store.loc[self.train_barcodes].mean(0)
self.dna_std = self.dna_store.loc[self.train_barcodes].std(0)
self.dna_order = np.argsort( self.dna_mean.values )
self.tissue_statistics = {}
#pdb.set_trace()
tissue_names = self.train_tissue.columns
stats = np.zeros( (5,len(tissue_names)))
for t_idx, tissue in zip( range(len(tissue_names)),tissue_names ):
bcs = self.train_tissue.loc[self.train_tissue[tissue]==1].index.values
#pdb.set_trace()
#mirna=self.data_store[self.miRNA_key].loc[ bcs ]
self.tissue_statistics[ tissue ] = {}
self.tissue_statistics[ tissue ][ DNA ] = {}
self.tissue_statistics[ tissue ][ DNA ][ "mean"] = self.dna_store.mean(0).fillna(0)
self.tissue_statistics[ tissue ][ DNA ][ "var"] = self.dna_store.var(0).fillna(0)
try:
dna=self.dna_store.loc[ bcs ]
self.tissue_statistics[ tissue ][ DNA ][ "mean"] = dna.mean(0).fillna(0)
self.tissue_statistics[ tissue ][ DNA ][ "var"] = dna.var(0).fillna(0)
except:
print "No DNA for %s"%(tissue)
# def MakeBarcodes(self):
# obs_dna = self.data_store["/CLINICAL/observed"]["DNA"][ self.data_store["/CLINICAL/observed"]["DNA"] ==1 ]
# dna_barcodes = obs_dna.index.values
#
# self.train_barcodes = np.intersect1d( self.train_barcodes, dna_barcodes)
# self.validation_barcodes = np.intersect1d( self.validation_barcodes, dna_barcodes)
#
    def InitializeAnythingYouWant(self, sess, network ):
        """Initialize the "dna_predictions" layer from training statistics.

        Biases are set to the logit of the (smoothed) per-gene mutation rate,
        per tissue when the layer has tissue-specific bias weights, so the
        layer starts at the empirical base rates rather than at zero.
        """
        print "Running : InitializeAnythingYouWant"
        self.selected_aucs = {}
        input_sources = ["DNA"]
        layers = ["dna_predictions"]
        n_tissues = len(self.data_store[self.TISSUE_key].columns)
        #self.data_store[self.TISSUE_key].loc[ batch_barcodes ]
        # Logit of the smoothed global per-gene mutation rate.
        m = self.dna_mean.values + 1e-5
        beta_0 = np.log( m ) - np.log( 1.0 - m )
        # Drop into the debugger rather than silently propagating NaN/inf
        # initial weights.
        if np.any(np.isnan(beta_0)) or np.any(np.isinf(beta_0)):
            pdb.set_trace()
        # get log_alpha and log_beta values
        for layer_name, input_name in zip( layers, input_sources ):
            n_dims = self.dims_dict[ input_name ]
            alpha = np.zeros( (self.n_z, n_dims ), dtype = float )
            beta = np.zeros( (n_tissues, n_dims ), dtype = float )
            for t_idx, tissue in zip( range( n_tissues), self.data_store[self.TISSUE_key].columns):
                n_samples = self.train_tissue[ tissue ].sum()
                # Per-tissue smoothed mutation rate -> logit bias row.
                m = self.tissue_statistics[ tissue ][ DNA ][ "mean"].values
                beta[t_idx,:] = np.log( m + 1e-3 ) - np.log( 1.0 - m + 1e-3)
                if np.any(np.isnan(beta[t_idx,:])) or np.any(np.isinf(beta[t_idx,:])):
                    pdb.set_trace()
            #log_alpha = np.log( alpha + 0.001 ).astype(np.float32)
            #log_beta = np.log( beta + 0.001).astype(np.float32)
            #layer = network.GetLayer( layer_name )
            #sess.run( tf.assign(layer.weights[0][0], log_alpha) )
            #sess.run( tf.assign(layer.weights[1][0], log_beta) )
            if 1:
                if len(network.GetLayer( layer_name ).weights) == 2:
                    #
                    print "initialize as if log reg and tissue specific biases"
                    #pdb.set_trace()
                    try:
                        network.GetLayer( layer_name ).SetWeights( sess, [alpha, beta ])
                    except:
                        # NOTE(review): bare except deliberately makes init
                        # best-effort, but it also hides real errors --
                        # consider "except Exception".
                        print "could not init bias weights"
                else:
                    if network.GetLayer( layer_name ).biases is not None:
                        print "initialize with tissue specific biases"
                        try:
                            network.GetLayer( layer_name ).SetBiases( sess, [beta_0])
                        except:
                            # NOTE(review): same bare-except caveat as above.
                            print "could not init bias biases"
#
# def StoreNames(self):
# self.model_store_name = self.network_name + "_DNA_" + MODEL
# self.model_store = OpenHdfStore(self.savedir, self.model_store_name, mode="a" )
#
# self.epoch_store_name = self.network_name + "_DNA_" + EPOCH
# self.epoch_store = OpenHdfStore(self.savedir, self.epoch_store_name, mode=self.default_store_mode )
#
# self.fill_store_dna_name = self.network_name + "_DNA_" + FILL
# self.fill_store_dna = OpenHdfStore(self.savedir, self.fill_store_name, mode="a")
#
# self.fill_store_dna.close()
# self.model_store.close()
# self.epoch_store.close()
# def CloseAll(self):
# self.data_store.close()
# self.fill_store_dna.close()
# self.model_store.close()
# self.epoch_store.close()
def MakeVizFilenames(self):
self.viz_filename_dna_batch_target = os.path.join( self.savedir, "dna_batch_target" )
self.viz_filename_dna_batch_predict = os.path.join( self.savedir, "dna_batch_predict" )
self.viz_filename_dna_aucs = os.path.join( self.savedir, "dna_aucs" )
self.viz_filename_lower_bound = os.path.join( self.savedir, "dna_lower_bound.png" )
self.viz_filename_error_sources_per_gene_fill = os.path.join( self.savedir, "dna_errors_fill.png" )
#self.viz_filename_weights = os.path.join( self.savedir, "weights_" )
self.viz_dna_weights = os.path.join( self.savedir, "dna_weights" )
def PlotLogPdf(self):
f = pp.figure()
#pdb.set_trace()
pp.plot( self.epoch_store["Batch"]["Epoch"].values, self.epoch_store["Batch"]["log p(x)"], 'bo-', lw=2 , label="Batch")
if self.n_test > 0:
pp.plot( self.epoch_store["Test"]["Epoch"].values, self.epoch_store["Test"]["log p(x)"], 'ro-', lw=2, label="Test" )
if self.n_val > 0:
pp.plot( self.epoch_store["Val"]["Epoch"].values, self.epoch_store["Val"]["log p(x)"], 'ro-', lw=2, label="Val" )
pp.legend( loc="lower right")
pp.xlabel("Epoch")
pp.ylabel("log p(x)")
pp.grid('on')
pp.savefig( self.viz_filename_lower_bound, dpi = 300, fmt="png", bbox_inches = "tight")
pp.close(f)
    def FillDerivedPlaceholder( self, batch, layer_name, mode ):
        """Hook for filling derived placeholders; this model has none."""
        pass
    def PlotFillError(self,main_sources):
        """Plot fill-error-vs-epoch curves per target source and save to disk.

        One subplot per entry in *main_sources*; test and validation series
        are drawn when those splits are non-empty.  The last error value is
        shown in each legend label.
        """
        f = pp.figure(figsize=(12,10))
        legends = []
        n_sources = len(main_sources)
        for idx,target_source in zip( range(n_sources),main_sources):
            s = f.add_subplot(1,n_sources,idx+1)
            inputs = "RNA+DNA+METH"
            if self.n_test > 0:
                query1 = self.epoch_store[TEST_FILL_ERROR]["Target"] == target_source
                query = query1#&query2
                df = self.epoch_store[TEST_FILL_ERROR][query]
                epochs = df["Epoch"].values
                loglik = df["Error"].values
                # Nothing recorded yet for this source -- skip the subplot.
                if len(loglik) == 0:
                    continue
                pp.plot( epochs, loglik, 'o-', \
                         color=self.source2darkcolor[target_source],\
                         mec=self.source2darkcolor[target_source], mew=1, \
                         mfc=self.source2lightcolor[target_source], lw=2, \
                         ms = 8, \
                         label="Test (%0.6f)"%(loglik[-1]) )
            if self.n_val > 0:
                query1 = self.epoch_store[VAL_FILL_ERROR]["Target"] == target_source
                query = query1#&query2
                df = self.epoch_store[VAL_FILL_ERROR][query]
                epochs = df["Epoch"].values
                loglik = df["Error"].values
                if len(loglik) == 0:
                    continue
                pp.plot( epochs, loglik, 'v-', \
                         color=self.source2mediumcolor[target_source],\
                         mec=self.source2darkcolor[target_source], mew=1, \
                         mfc=self.source2lightcolor[target_source], lw=2, \
                         ms = 8, \
                         label="Val (%0.6f)"%(loglik[-1]) )
            if idx==0:
                pp.ylabel("Error") #%(target_source))
            pp.legend(loc="upper right")
            pp.title( "%s"%(target_source))
            pp.xlabel("Epoch")
            pp.grid('on')
        #pdb.set_trace()
        pp.savefig( self.viz_filename_error_sources_per_gene_fill, dpi = 300, fmt="png", bbox_inches = "tight")
        pp.close(f)
    def VizEpochs(self, sess, info_dict ):
        """Regenerate all epoch-level diagnostic plots from the epoch store."""
        print "** VIZ Epochs"
        main_sources = [DNA]
        #prior_sources = [miRNA+"_b", RNA+"_b", METH+"_b"]
        # Plot routines read from the HDF epoch store; open/close around them.
        self.epoch_store.open()
        self.PlotLogPdf()
        #self.PlotLogPdf(main_sources,prior_sources)
        #self.PlotFillLogPdf(main_sources,prior_sources)
        self.PlotFillError(main_sources)
        self.PlotAucs("VAL")
        self.PlotAucs("TRAIN")
        self.epoch_store.close()
        # Release all matplotlib figures to cap memory across epochs.
        pp.close('all')
def RunFillDna( self, epoch, sess, feed_dict, impute_dict, mode ):
print "COMPUTE Z-SPACE"
use_dna = False
use_rna = True
use_meth = True
use_mirna = True
barcodes = impute_dict[BARCODES]
batch = self.FillBatch( impute_dict[BARCODES], mode )
#pdb.set_trace()
dna_expectation_tensor = self.network.GetLayer( "dna_predictions" ).expectation
dna_data = self.dna_store.loc[ barcodes ].fillna( 0 ).values
dna_data = np.minimum(1.0,dna_data)
loglikes_data_as_matrix = self.network.loglikes_data_as_matrix
tensors = [dna_expectation_tensor]
tensor_names = ["dna_predictions"]
assert len(tensor_names)==len(tensors), "should be same number"
self.network.FillFeedDict( feed_dict, impute_dict )
#pdb.set_trace()
rna_observed_query = batch[ INPUT_OBSERVATIONS ][:,self.observed_batch_order[RNA]] == 1
# meth_observed_query = batch[ INPUT_OBSERVATIONS ][:,self.observed_batch_order[METH]] == 1
# mirna_observed_query = batch[ INPUT_OBSERVATIONS ][:,self.observed_batch_order[miRNA]] == 1
dna_observed_query = batch[ INPUT_OBSERVATIONS ][:,self.observed_batch_order[DNA]] == 1
tensor2fill = []
tensor2fill.extend( [dna_expectation_tensor, loglikes_data_as_matrix["dna_predictions"] ] )
z_ids = [0,1]
# ---------
# RUN SESS
# ---------
self.network.FillFeedDict( feed_dict, batch )
#pdb.set_trace()
tensor2fill_eval = sess.run( tensor2fill, feed_dict = feed_dict )
# ------
# FILL EVALUATION
# -----
dna_expectation = tensor2fill_eval[0]
dna_loglikelihood = tensor2fill_eval[1]
#pdb.set_trace()
self.WriteRunFillExpectation( epoch, DNA, barcodes, self.dna_genes, dna_observed_query, dna_expectation, dna_data, mode )
self.WriteRunFillLoglikelihood( epoch, DNA, barcodes[dna_observed_query], self.dna_genes, dna_loglikelihood[dna_observed_query,:], mode )
self.WriteAucs( epoch, DNA, barcodes, self.dna_genes, dna_observed_query, dna_expectation, dna_data, mode )
#pdb.set_trace()
    def WriteAucs( self, epoch, target, barcodes, columns, obs_query, X, Y, mode ):
        """Compute per-gene ROC AUCs over observed samples and store them.

        X are predicted probabilities, Y the binary targets; only rows where
        *obs_query* is True are scored.  Genes whose AUC cannot be computed
        get 1.0, and the kept column indices are remembered in
        ``self.selected_aucs`` so PlotAucs can align means with AUCs.
        """
        self.fill_store.open()
        if target == DNA:
            #for channel in range(self.n_dna_channels):
            s = "/AUC/%s/%s/"%(mode,target )
            #self.fill_store[ s ] = pd.DataFrame( X, index = barcodes, columns = columns )
            x_obs = X[obs_query,:] #.flatten()
            y_obs = Y[obs_query,:] # .flatten()
            auc = np.zeros( x_obs.shape[1] )
            ok = np.zeros( x_obs.shape[1] )
            for d_idx in xrange( x_obs.shape[1] ):
                # NOTE(review): "sum()>0 and sum() != 0" is redundant (the
                # second clause is implied by the first); the intent was
                # probably "!= len(y_obs)" to require both classes present.
                if y_obs[:,d_idx].sum()>0 and y_obs[:,d_idx].sum() != 0:
                    try:
                        auc[d_idx] = roc_auc_score(y_obs[:,d_idx],x_obs[:,d_idx])
                    except:
                        # Single-class column: roc_auc_score raises; default
                        # to a perfect score so the gene is not penalized.
                        auc[d_idx] = 1.0
                    ok[d_idx] = 1
                else:
                    auc[d_idx] = 1
                    ok[d_idx] = 1
            # Remember which columns were scored for this mode/target.
            self.selected_aucs[s] = pp.find(ok)
            #ok = pp.find(ok)
            auc = auc[ self.selected_aucs[s] ]
            columns = [columns[ idx ] for idx in self.selected_aucs[s] ]
            I = np.argsort( auc )
            print mode, [ ["%s %0.2f"%(columns[i],auc[i]) for i in I]]
            self.fill_store[ s ] = pd.DataFrame( auc.reshape((1,len(auc))), columns = columns )
            #pdb.set_trace()
        self.fill_store.close()
    def PlotAucs( self, mode ):
        """Bar-plot per-gene mutation rates with the AUC curve overlaid.

        Reads the AUCs stored by WriteAucs for *mode*, sorts genes by AUC,
        and overlays the tissue-specific and whole-cohort mutation means so
        AUC quality can be compared against gene frequency.
        """
        self.fill_store.open()
        #pdb.set_trace()
        s = "/AUC/%s/%s/"%(mode,DNA )
        f = pp.figure(figsize=(14,4))
        ax=f.add_subplot(111)
        df = self.fill_store[s]
        I_local = np.argsort( np.squeeze(df.values))
        #print s
        #print "len(I_local) = ", len(I_local)
        #pdb.set_trace()
        # Map positions in the stored AUC frame back to original gene indices.
        I_global = self.selected_aucs[s][ I_local ]
        #I = self.dna_order
        mean = self.tissue_statistics[ self.validation_tissues[0] ][ DNA ][ "mean"]
        sorted_mean = pd.DataFrame( np.squeeze(mean.values)[I_global].reshape((1,len(I_global))), columns = np.array(self.dna_mean.index.values)[I_global] )
        sorted_all_mean = pd.DataFrame( np.squeeze(self.dna_mean.values)[I_global].reshape((1,len(I_global))), columns = np.array(self.dna_mean.index.values)[I_global] )
        # NOTE(review): "sorted" shadows the builtin of the same name within
        # this method.
        sorted = pd.DataFrame( np.squeeze(df.values)[I_local].reshape((1,len(I_local))), columns = np.array(df.columns)[I_local] )
        #pdb.set_trace()
        sorted_mean.T.plot(kind='bar',ax=ax, sharex=True)
        sorted.T.plot(ax=ax)
        sorted_all_mean.T.plot(kind='bar',ax=ax, fontsize=6, sharex=True)
        sorted_mean.T.plot(kind='bar',ax=ax, fontsize=6, sharex=True)
        pp.title( "mean = %0.3f median = %0.3f"%(df.values.mean(), np.median(df.values)))
        pp.savefig( self.viz_filename_dna_aucs + "_%s.png"%(mode), fmt="png", bbox_inches = "tight", dpi=600)
        self.fill_store.close()
def Epoch( self, epoch_key, sess, info_dict, epoch, feed_dict, impute_dict, mode ):
"""Evaluate the network's batch log tensors for one epoch and append a row
to epoch_store under epoch_key, plus a mode-specific series (BATCH/TEST/VAL).

The first logged value (index 1, after the prepended epoch number) is
normalized by the batch size taken from impute_dict[BARCODES].

NOTE(review): BARCODES, BATCH_SOURCE_LOGPDF, TEST_SOURCE_LOGPDF and
VAL_SOURCE_LOGPDF are module-level constants defined elsewhere -- confirm.
"""
# NOTE(review): 'barcodes' is assigned but never used below.
barcodes = impute_dict[BARCODES]
batch_tensor_evals = sess.run( self.network.batch_log_tensors, feed_dict = feed_dict )
# batch_counts = self.CountSourcesInDict( impute_dict )
#
# n_batch = []
# for source in self.arch_dict[TARGET_SOURCES]:
# n_batch.append( batch_counts[source] )
# n_batch = np.array(n_batch).astype(float)
#
n_batch_size = len(impute_dict[BARCODES])
#
# log_p_z = batch_tensor_evals[2]/float(n_batch_size)
# log_q_z = batch_tensor_evals[3]/float(n_batch_size)
#
# # normalize by nbr observed for each source
# log_p_source_z_values = batch_tensor_evals[4:]/n_batch
#
# #print np.sort(info_dict[BATCH_IDS])
# new_log_p_x_given_z = log_p_source_z_values.sum()
# lower_bound = log_p_z-log_q_z + new_log_p_x_given_z
# row layout: [epoch, <batch log tensor values...>]
new_values = [epoch]
new_values.extend( batch_tensor_evals )
# normalize the first tensor value to a per-sample quantity
new_values[1]/=n_batch_size
self.AddSeries( self.epoch_store, epoch_key, values = new_values, columns = self.network.batch_log_columns )
# a second, un-normalized copy is logged under the mode-specific key below
epoch_values = [epoch]
epoch_values.extend( batch_tensor_evals )
#epoch_columns = ['Epoch']
epoch_columns = self.network.batch_log_columns
#pdb.set_trace()
if mode == "BATCH":
self.AddSeries( self.epoch_store, BATCH_SOURCE_LOGPDF, values = epoch_values, columns = epoch_columns )
self.PrintRow( self.epoch_store, epoch_key )
elif mode == "TEST" and self.n_test>0:
self.AddSeries( self.epoch_store, TEST_SOURCE_LOGPDF, values = epoch_values, columns = epoch_columns )
self.PrintRow( self.epoch_store, epoch_key )
elif mode == "VAL" and self.n_val>0:
self.AddSeries( self.epoch_store, VAL_SOURCE_LOGPDF, values = epoch_values, columns = epoch_columns )
self.PrintRow( self.epoch_store, epoch_key )
def VizWeightsGeneric( self, sess, info_dict ):
"""Plot every weight matrix stored in model_store, one two-panel figure
per layer (left: histogram, right: sorted flattened values), saved as
"<viz_dna_weights><layer>.png".

Keys are expected to look like "/<layer_name>/<W-or-b>/<id>"; bias ("b")
entries are skipped. A figure is opened when the layer name changes and
closed/saved when the next layer begins (or at the end of the key list).
"""
print " -> Generic Viz"
self.model_store.open()
keys = self.model_store.keys()
old_layer = ""
needs_closing=False
for k in keys:
# split "/<layer_name>/<W|b>/<id>"; the leading empty field lands in 'dum'
dum,layer_name, W_or_b, W_or_b_id = k.split("/")
if W_or_b == "b":
continue
#print "processing %s"%(k)
if old_layer != layer_name:
# finish the previous layer's figure before starting a new one
if needs_closing is True:
#print " closing figure, ",old_layer
pp.legend()
pp.suptitle(old_layer)
pp.savefig( self.viz_dna_weights + "%s.png"%old_layer, fmt="png", bbox_inches = "tight")
pp.close(fig_)
needs_closing = False
if W_or_b == "W":
#print " new figure"
fig_ = pp.figure()
ax1_ = fig_.add_subplot(121)
ax2_ = fig_.add_subplot(122)
needs_closing = True
if W_or_b == "W":
#print " adding weights, ",layer_name
W = np.squeeze( self.model_store[k].values ).flatten()
# NOTE(review): hist(normed=True) is deprecated in newer matplotlib;
# use density=True when upgrading.
ax1_.hist( W, 20, normed=True, alpha=0.5, label = "%s/%s"%(layer_name,W_or_b_id) )
pp.grid('on')
ax2_.plot( np.sort(W), lw=2, alpha=0.85, label = "%s/%s"%(layer_name,W_or_b_id) )
pp.grid('on')
needs_closing = True
#pdb.set_trace()
old_layer = layer_name
# flush the last layer's figure, if any
if needs_closing:
#print " closing figure, ",old_layer
pp.legend()
pp.suptitle(old_layer)
pp.savefig( self.viz_dna_weights + "%s.png"%old_layer, fmt="png", bbox_inches = "tight")
pp.close(fig_)
needs_closing = False
# try:
# rec_rna_weights = self.model_store[ "/rec_hidden1/W/0" ].values.flatten()
# f = pp.figure()
# pp.hist( rec_rna_weights, 50, normed=True, alpha=0.5 )
# pp.grid('on')
# pp.savefig( self.viz_filename_weights_rec_rna, dpi = 300, fmt="png", bbox_inches = "tight")
# pp.close(f)
# except:
# print "** could not viz any model"
self.model_store.close()
pp.close('all')
def VizModel( self, sess, info_dict ):
print "** VIZ Model"
self.VizWeightsGeneric(sess, info_dict )
# def InitializeAnythingYouWant(self, sess, network ):
# pass
# print "Running : InitializeAnythingYouWant"
# input_sources = ["METH","RNA","miRNA"]
# layers = ["gen_meth_space_basic","gen_rna_space_basic","gen_mirna_space_basic"]
#
# n_tissues = len(self.data_store[self.TISSUE_key].columns)
# #self.data_store[self.TISSUE_key].loc[ batch_barcodes ]
#
# # get log_alpha and log_beta values
# for layer_name, input_name in zip( layers, input_sources ):
# n_dims = self.dims_dict[ input_name ]
#
# alpha = np.zeros( (n_tissues, n_dims ), dtype = float )
# beta = np.zeros( (n_tissues, n_dims ), dtype = float )
#
# for t_idx, tissue in zip( range( n_tissues), self.data_store[self.TISSUE_key].columns):
#
# n_samples = self.train_tissue[ tissue ].sum()
# alpha[t_idx,:] = self.tissue_statistics[ tissue ][ input_name ][ "alpha"]
# beta[t_idx,:] = self.tissue_statistics[ tissue ][ input_name ][ "beta"]
#
# log_alpha = np.log( alpha + 0.001 ).astype(np.float32)
# log_beta = np.log( beta + 0.001).astype(np.float32)
#
# #layer = network.GetLayer( layer_name )
#
# #sess.run( tf.assign(layer.weights[0][0], log_alpha) )
# #sess.run( tf.assign(layer.weights[1][0], log_beta) )
# network.GetLayer( layer_name ).SetWeights( sess, [log_alpha, log_beta ])
# #pdb.set_trace()
| mit |
lento/cortex | test/IECoreGL/Camera.py | 7 | 6252 | ##########################################################################
#
# Copyright (c) 2008-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
import IECoreGL
IECoreGL.init( False )
import os.path
import os
import shutil
class CameraTest( unittest.TestCase ) :
"""Exercises IECoreGL camera placement: that camera transforms are honoured
and that the rendered image has the expected x/y orientation. Each test
renders to test/IECoreGL/output/testCamera.tif and probes pixels back via
a PrimitiveEvaluator."""
def testPositioning( self ) :
"""A plane at z=0 is invisible to the default camera (also at z=0) but
visible once the camera is translated back along +z."""
# render a plane at z = 0 with the default camera
r = IECoreGL.Renderer()
r.setOption( "gl:mode", IECore.StringData( "immediate" ) )
r.setOption( "gl:searchPath:shader", IECore.StringData( os.path.dirname( __file__ ) + "/shaders" ) )
r.display( os.path.dirname( __file__ ) + "/output/testCamera.tif", "tiff", "rgba", {} )
r.camera( "main", { "resolution" : IECore.V2iData( IECore.V2i( 512 ) ), "projection" : IECore.StringData( "perspective" ) } )
r.worldBegin()
r.shader( "surface", "color", { "colorValue" : IECore.Color3fData( IECore.Color3f( 1, 0, 0 ) ) } )
IECore.MeshPrimitive.createPlane( IECore.Box2f( IECore.V2f( -0.1 ), IECore.V2f( 0.1 ) ) ).render( r )
r.worldEnd()
# check that nothing appears in the output image
i = IECore.Reader.create( os.path.dirname( __file__ ) + "/output/testCamera.tif" ).read()
e = IECore.PrimitiveEvaluator.create( i )
result = e.createResult()
a = e.G()
# probe the image centre: green channel must be empty (plane not rendered)
e.pointAtUV( IECore.V2f( 0.5, 0.5 ), result )
self.assertEqual( result.floatPrimVar( a ), 0 )
# render a plane at z = 0 with the camera moved back a touch to see it
r = IECoreGL.Renderer()
r.setOption( "gl:mode", IECore.StringData( "immediate" ) )
r.setOption( "gl:searchPath:shader", IECore.StringData( os.path.dirname( __file__ ) + "/shaders" ) )
r.display( os.path.dirname( __file__ ) + "/output/testCamera.tif", "tiff", "rgba", {} )
r.transformBegin()
r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, 1 ) ) )
r.camera( "main", { "resolution" : IECore.V2iData( IECore.V2i( 512 ) ), "projection" : IECore.StringData( "perspective" ) } )
r.transformEnd()
r.worldBegin()
r.shader( "surface", "color", { "colorValue" : IECore.Color3fData( IECore.Color3f( 1, 0, 0 ) ) } )
IECore.MeshPrimitive.createPlane( IECore.Box2f( IECore.V2f( -0.1 ), IECore.V2f( 0.1 ) ) ).render( r )
r.worldEnd()
# check that something appears in the output image
i = IECore.Reader.create( os.path.dirname( __file__ ) + "/output/testCamera.tif" ).read()
e = IECore.PrimitiveEvaluator.create( i )
result = e.createResult()
a = e.A()
# centre pixel must now be opaque (alpha == 1)
e.pointAtUV( IECore.V2f( 0.5, 0.5 ), result )
self.assertEqual( result.floatPrimVar( a ), 1 )
def testXYOrientation( self ) :
"""A red square at x==1 and a green one at y==1 must land on the right
and bottom (in UV space) edges respectively, proving axis orientation."""
# render a red square at x==1, and a green one at y==1
r = IECoreGL.Renderer()
r.setOption( "gl:mode", IECore.StringData( "immediate" ) )
r.setOption( "gl:searchPath:shader", IECore.StringData( os.path.dirname( __file__ ) + "/shaders" ) )
r.display( os.path.dirname( __file__ ) + "/output/testCamera.tif", "tiff", "rgba", {} )
r.transformBegin()
r.concatTransform( IECore.M44f.createTranslated( IECore.V3f( 0, 0, 1 ) ) )
r.camera( "main", { "resolution" : IECore.V2iData( IECore.V2i( 512 ) ) } )
r.transformEnd()
r.worldBegin()
r.shader( "surface", "color", { "colorValue" : IECore.Color3fData( IECore.Color3f( 1, 0, 0 ) ) } )
IECore.MeshPrimitive.createPlane( IECore.Box2f( IECore.V2f( 0.75, -0.25 ), IECore.V2f( 1.25, 0.25 ) ) ).render( r )
r.shader( "surface", "color", { "colorValue" : IECore.Color3fData( IECore.Color3f( 0, 1, 0 ) ) } )
IECore.MeshPrimitive.createPlane( IECore.Box2f( IECore.V2f( -0.25, 0.75 ), IECore.V2f( 0.25, 1.25 ) ) ).render( r )
r.worldEnd()
# check we get the colors we'd expect where we expect them
i = IECore.Reader.create( os.path.dirname( __file__ ) + "/output/testCamera.tif" ).read()
e = IECore.PrimitiveEvaluator.create( i )
result = e.createResult()
a = e.A()
r = e.R()
g = e.G()
b = e.B()
# UV (1, 0.5): right edge of the image -> the red square
e.pointAtUV( IECore.V2f( 1, 0.5 ), result )
self.assertEqual( result.floatPrimVar( a ), 1 )
self.assertEqual( result.floatPrimVar( r ), 1 )
self.assertEqual( result.floatPrimVar( g ), 0 )
self.assertEqual( result.floatPrimVar( b ), 0 )
# UV (0.5, 0): top of UV space -> the green square at y==1
e.pointAtUV( IECore.V2f( 0.5, 0 ), result )
self.assertEqual( result.floatPrimVar( a ), 1 )
self.assertEqual( result.floatPrimVar( r ), 0 )
self.assertEqual( result.floatPrimVar( g ), 1 )
self.assertEqual( result.floatPrimVar( b ), 0 )
def setUp( self ) :
"""Create the output directory used by the render targets."""
if not os.path.isdir( "test/IECoreGL/output" ) :
os.makedirs( "test/IECoreGL/output" )
def tearDown( self ) :
"""Remove the output directory and any rendered images."""
if os.path.isdir( "test/IECoreGL/output" ) :
shutil.rmtree( "test/IECoreGL/output" )
# Allow running this test file directly, outside the full test-suite runner.
if __name__ == "__main__":
unittest.main()
| bsd-3-clause |
jeremypogue/ansible | lib/ansible/executor/task_executor.py | 2 | 34767 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import base64
import subprocess
import sys
import time
import traceback
from ansible.compat.six import iteritems, string_types, binary_type
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError, AnsibleUndefinedVariable, AnsibleConnectionFailure
from ansible.executor.task_result import TaskResult
from ansible.module_utils._text import to_bytes, to_text
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.template import Templar
from ansible.utils.encrypt import key_for_hostname
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.vars.unsafe_proxy import UnsafeProxy, wrap_var
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
__all__ = ['TaskExecutor']
class TaskExecutor:
'''
This is the main worker class for the executor pipeline, which
handles loading an action plugin to actually dispatch the task to
a given host. This class roughly corresponds to the old Runner()
class.
'''
# Modules that we optimize by squashing loop items into a single call to
# the module
SQUASH_ACTIONS = frozenset(C.DEFAULT_SQUASH_ACTIONS)
def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj, rslt_q):
    """
    Stash everything needed to execute a single task against a single host:
    the host and task objects, the variables in play, the play context,
    loaders, and the results queue used to report intermediate results.
    """
    # no connection yet -- one is established lazily during _execute()
    self._connection = None

    self._host = host
    self._task = task
    self._job_vars = job_vars
    self._play_context = play_context
    self._new_stdin = new_stdin
    self._loader = loader
    self._shared_loader_obj = shared_loader_obj
    self._rslt_q = rslt_q

    # NOTE(review): Task.squash() is defined in ansible.playbook.task;
    # it is invoked up-front here, before any execution -- confirm semantics there.
    self._task.squash()
def run(self):
'''
The main executor entrypoint, where we determine if the specified
task requires looping and either runs the task with self._run_loop()
or self._execute(). After that, the returned results are parsed and
returned as a dict.
'''
display.debug("in run()")
try:
# get search path for this task to pass to lookup plugins
self._job_vars['ansible_search_path'] = self._task.get_search_path()
items = self._get_loop_items()
if items is not None:
if len(items) > 0:
item_results = self._run_loop(items)
# loop through the item results, and remember the changed/failed
# result flags based on any item there.
changed = False
failed = False
for item in item_results:
if 'changed' in item and item['changed']:
changed = True
if 'failed' in item and item['failed']:
failed = True
# create the overall result item, and set the changed/failed
# flags there to reflect the overall result of the loop
res = dict(results=item_results)
if changed:
res['changed'] = True
if failed:
res['failed'] = True
res['msg'] = 'One or more items failed'
else:
res['msg'] = 'All items completed'
else:
# an empty loop still yields a (skipped) result
res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
else:
display.debug("calling self._execute()")
res = self._execute()
display.debug("_execute() done")
# make sure changed is set in the result, if it's not present
if 'changed' not in res:
res['changed'] = False
# recursively unwrap UnsafeProxy objects and normalize bytes to text
# so the result is plainly serializable
def _clean_res(res):
if isinstance(res, UnsafeProxy):
return res._obj
elif isinstance(res, binary_type):
return to_text(res, errors='surrogate_or_strict')
elif isinstance(res, dict):
for k in res:
res[k] = _clean_res(res[k])
elif isinstance(res, list):
for idx,item in enumerate(res):
res[idx] = _clean_res(item)
return res
display.debug("dumping result to json")
res = _clean_res(res)
display.debug("done dumping result, returning")
return res
except AnsibleError as e:
return dict(failed=True, msg=to_text(e, nonstring='simplerepr'))
except Exception as e:
return dict(failed=True, msg='Unexpected failure during module execution.', exception=to_text(traceback.format_exc()), stdout='')
finally:
try:
# best-effort cleanup: the connection may never have been created
self._connection.close()
except AttributeError:
pass
except Exception as e:
display.debug(u"error closing connection: %s" % to_text(e))
def _get_loop_items(self):
'''
Loads a lookup plugin to handle the with_* portion of a task (if specified),
and returns the items result.
'''
# save the play context variables to a temporary dictionary,
# so that we can modify the job vars without doing a full copy
# and later restore them to avoid modifying things too early
play_context_vars = dict()
self._play_context.update_vars(play_context_vars)
old_vars = dict()
for k in play_context_vars:
if k in self._job_vars:
old_vars[k] = self._job_vars[k]
self._job_vars[k] = play_context_vars[k]
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=self._job_vars)
items = None
if self._task.loop:
if self._task.loop in self._shared_loader_obj.lookup_loader:
# TODO: remove convert_bare true and deprecate this in with_
if self._task.loop == 'first_found':
# first_found loops are special. If the item is undefined
# then we want to fall through to the next value rather
# than failing.
loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar,
loader=self._loader, fail_on_undefined=False, convert_bare=True)
loop_terms = [t for t in loop_terms if not templar._contains_vars(t)]
else:
try:
loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar,
loader=self._loader, fail_on_undefined=True, convert_bare=True)
except AnsibleUndefinedVariable as e:
# deprecated behavior: an undefined loop expression skips the task
display.deprecated("Skipping task due to undefined Error, in the future this will be a fatal error.: %s" % to_bytes(e))
return None
# get lookup
mylookup = self._shared_loader_obj.lookup_loader.get(self._task.loop, loader=self._loader, templar=templar)
# give lookup task 'context' for subdir (mostly needed for first_found)
for subdir in ['template', 'var', 'file']: # TODO: move this to constants?
if subdir in self._task.action:
break
setattr(mylookup,'_subdir', subdir + 's')
# run lookup
items = mylookup.run(terms=loop_terms, variables=self._job_vars, wantlist=True)
else:
raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop)
# now we restore any old job variables that may have been modified,
# and delete them if they were in the play context vars but not in
# the old variables dictionary
for k in play_context_vars:
if k in old_vars:
self._job_vars[k] = old_vars[k]
else:
del self._job_vars[k]
if items:
# wrap each item so templating treats loop values as unsafe (no re-templating)
from ansible.vars.unsafe_proxy import UnsafeProxy
for idx, item in enumerate(items):
if item is not None and not isinstance(item, UnsafeProxy):
items[idx] = UnsafeProxy(item)
return items
def _run_loop(self, items):
'''
Runs the task with the loop items specified and collates the result
into an array named 'results' which is inserted into the final result
along with the item for which the loop ran.
'''
results = []
# make copies of the job vars and task so we can add the item to
# the variables and re-validate the task with the item variable
#task_vars = self._job_vars.copy()
task_vars = self._job_vars
loop_var = 'item'
label = None
loop_pause = 0
if self._task.loop_control:
# the value may be 'None', so we still need to default it back to 'item'
loop_var = self._task.loop_control.loop_var or 'item'
label = self._task.loop_control.label or ('{{' + loop_var + '}}')
loop_pause = self._task.loop_control.pause or 0
if loop_var in task_vars:
display.warning(u"The loop variable '%s' is already in use."
u"You should set the `loop_var` value in the `loop_control` option for the task"
u" to something else to avoid variable collisions and unexpected behavior." % loop_var)
ran_once = False
# possibly collapse the whole loop into a single squashed invocation
items = self._squash_items(items, loop_var, task_vars)
for item in items:
task_vars[loop_var] = item
# pause between loop iterations
if loop_pause and ran_once:
time.sleep(loop_pause)
else:
ran_once = True
try:
tmp_task = self._task.copy(exclude_parent=True, exclude_tasks=True)
tmp_task._parent = self._task._parent
tmp_play_context = self._play_context.copy()
except AnsibleParserError as e:
results.append(dict(failed=True, msg=to_text(e)))
continue
# now we swap the internal task and play context with their copies,
# execute, and swap them back so we can do the next iteration cleanly
(self._task, tmp_task) = (tmp_task, self._task)
(self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
res = self._execute(variables=task_vars)
(self._task, tmp_task) = (tmp_task, self._task)
(self._play_context, tmp_play_context) = (tmp_play_context, self._play_context)
# now update the result with the item info, and append the result
# to the list of results
res[loop_var] = item
res['_ansible_item_result'] = True
if label is not None:
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=self._job_vars)
res['_ansible_item_label'] = templar.template(label, fail_on_undefined=False)
# report each per-item result immediately so callbacks see progress
self._rslt_q.put(TaskResult(self._host.name, self._task._uuid, res), block=False)
results.append(res)
del task_vars[loop_var]
return results
def _squash_items(self, items, loop_var, variables):
    '''
    Squash items down to a comma-separated list for certain modules which support it
    (typically package management modules).

    :arg items: the evaluated loop items
    :arg loop_var: name of the loop variable (usually ``item``)
    :arg variables: task vars used when templating the name/pkg argument
    :returns: either the original ``items`` (when squashing is not possible) or a
        single-element list wrapping the squashed item list, so the calling loop
        executes exactly once.
    '''
    name = None
    try:
        # _task.action could contain templatable strings (via action: and
        # local_action:) Template it before comparing.  If we don't end up
        # optimizing it here, the templatable string might use template vars
        # that aren't available until later (it could even use vars from the
        # with_items loop) so don't make the templated string permanent yet.
        templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
        task_action = self._task.action
        if templar._contains_vars(task_action):
            task_action = templar.template(task_action, fail_on_undefined=False)

        if len(items) > 0 and task_action in self.SQUASH_ACTIONS:
            if all(isinstance(o, string_types) for o in items):
                final_items = []

                # the package argument may be spelled name, pkg or package
                for allowed in ['name', 'pkg', 'package']:
                    name = self._task.args.pop(allowed, None)
                    if name is not None:
                        break

                # This gets the information to check whether the name field
                # contains a template that we can squash for
                template_no_item = template_with_item = None
                if name:
                    if templar._contains_vars(name):
                        # template once with a sentinel item and once with a
                        # different sentinel: if the results differ, the name
                        # argument really depends on the loop variable
                        variables[loop_var] = '\0$'
                        template_no_item = templar.template(name, variables, cache=False)
                        variables[loop_var] = '\0@'
                        template_with_item = templar.template(name, variables, cache=False)
                        del variables[loop_var]

                    # Check if the user is doing some operation that doesn't take
                    # name/pkg or the name/pkg field doesn't have any variables
                    # and thus the items can't be squashed
                    if template_no_item != template_with_item:
                        for item in items:
                            variables[loop_var] = item
                            if self._task.evaluate_conditional(templar, variables):
                                new_item = templar.template(name, cache=False)
                                final_items.append(new_item)
                        self._task.args['name'] = final_items
                        # Wrap this in a list so that the calling function loop
                        # executes exactly once
                        return [final_items]
                    else:
                        # Restore the name parameter
                        self._task.args['name'] = name
            #elif:
                # Right now we only optimize single entries.  In the future we
                # could optimize more types:
                # * lists can be squashed together
                # * dicts could squash entries that match in all cases except the
                #   name or pkg field.
    except Exception:
        # BUGFIX: was a bare "except:", which also swallowed BaseException
        # subclasses such as KeyboardInterrupt and SystemExit (e.g. a Ctrl-C
        # arriving during templating would be silently eaten here).
        # Squashing is an optimization.  If it fails for any other reason,
        # simply fall through and use the unoptimized list of items.

        # Restore the name parameter
        if name is not None:
            self._task.args['name'] = name
    return items
def _execute(self, variables=None):
'''
The primary workhorse of the executor system, this runs the task
on the specified host (which may be the delegated_to host) and handles
the retry/until and block rescue/always execution
'''
if variables is None:
variables = self._job_vars
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
context_validation_error = None
try:
# apply the given task's information to the connection info,
# which may override some fields already set by the play or
# the options specified on the command line
self._play_context = self._play_context.set_task_and_variable_override(task=self._task, variables=variables, templar=templar)
# fields set from the play/task may be based on variables, so we have to
# do the same kind of post validation step on it here before we use it.
self._play_context.post_validate(templar=templar)
# now that the play context is finalized, if the remote_addr is not set
# default to using the host's address field as the remote address
if not self._play_context.remote_addr:
self._play_context.remote_addr = self._host.address
# We also add "magic" variables back into the variables dict to make sure
# a certain subset of variables exist.
self._play_context.update_vars(variables)
except AnsibleError as e:
# save the error, which we'll raise later if we don't end up
# skipping this task during the conditional evaluation step
context_validation_error = e
# Evaluate the conditional (if any) for this task, which we do before running
# the final task post-validation. We do this before the post validation due to
# the fact that the conditional may specify that the task be skipped due to a
# variable not being present which would otherwise cause validation to fail
try:
if not self._task.evaluate_conditional(templar, variables):
display.debug("when evaluation failed, skipping this task")
return dict(changed=False, skipped=True, skip_reason='Conditional check failed', _ansible_no_log=self._play_context.no_log)
except AnsibleError:
# skip conditional exception in the case of includes as the vars needed might not be avaiable except in the included tasks or due to tags
if self._task.action not in ['include', 'include_role']:
raise
# if we ran into an error while setting up the PlayContext, raise it now
if context_validation_error is not None:
raise context_validation_error
# if this task is a TaskInclude, we just return now with a success code so the
# main thread can expand the task list for the given host
if self._task.action == 'include':
include_variables = self._task.args.copy()
include_file = include_variables.pop('_raw_params', None)
if not include_file:
return dict(failed=True, msg="No include file was specified to the include")
include_file = templar.template(include_file)
return dict(include=include_file, include_variables=include_variables)
# TODO: not needed?
# if this task is a IncludeRole, we just return now with a success code so the main thread can expand the task list for the given host
elif self._task.action == 'include_role':
include_variables = self._task.args.copy()
role = templar.template(self._task._role_name)
if not role:
return dict(failed=True, msg="No role was specified to include")
return dict(include_role=role, include_variables=include_variables)
# Now we do final validation on the task, which sets all fields to their final values.
self._task.post_validate(templar=templar)
if '_variable_params' in self._task.args:
variable_params = self._task.args.pop('_variable_params')
if isinstance(variable_params, dict):
display.deprecated("Using variables for task params is unsafe, especially if the variables come from an external source like facts")
variable_params.update(self._task.args)
self._task.args = variable_params
# get the connection and the handler for this execution
if not self._connection or not getattr(self._connection, 'connected', False) or self._play_context.remote_addr != self._connection._play_context.remote_addr:
self._connection = self._get_connection(variables=variables, templar=templar)
self._connection.set_host_overrides(host=self._host, hostvars=variables.get('hostvars', {}).get(self._host.name, {}))
else:
# if connection is reused, its _play_context is no longer valid and needs
# to be replaced with the one templated above, in case other data changed
self._connection._play_context = self._play_context
self._handler = self._get_action_handler(connection=self._connection, templar=templar)
# And filter out any fields which were set to default(omit), and got the omit token value
omit_token = variables.get('omit')
if omit_token is not None:
self._task.args = dict((i[0], i[1]) for i in iteritems(self._task.args) if i[1] != omit_token)
# Read some values from the task, so that we can modify them if need be
if self._task.until:
retries = self._task.retries
if retries is None:
retries = 3
elif retries <= 0:
retries = 1
else:
retries += 1
else:
retries = 1
delay = self._task.delay
if delay < 0:
delay = 1
# make a copy of the job vars here, in case we need to update them
# with the registered variable value later on when testing conditions
vars_copy = variables.copy()
display.debug("starting attempt loop")
result = None
for attempt in range(1, retries + 1):
display.debug("running the handler")
try:
result = self._handler.run(task_vars=variables)
except AnsibleConnectionFailure as e:
return dict(unreachable=True, msg=to_text(e))
display.debug("handler run complete")
# preserve no log
result["_ansible_no_log"] = self._play_context.no_log
# update the local copy of vars with the registered value, if specified,
# or any facts which may have been generated by the module execution
if self._task.register:
vars_copy[self._task.register] = wrap_var(result.copy())
# NOTE(review): 'async' is a task attribute name here; it became a reserved
# keyword in Python 3.7, so this line only parses on the interpreters this
# Ansible version targeted (Python 2 / early Python 3).
if self._task.async > 0:
if self._task.poll > 0:
result = self._poll_async_result(result=result, templar=templar, task_vars=vars_copy)
# ensure no log is preserved
result["_ansible_no_log"] = self._play_context.no_log
# helper methods for use below in evaluating changed/failed_when
def _evaluate_changed_when_result(result):
if self._task.changed_when is not None and self._task.changed_when:
cond = Conditional(loader=self._loader)
cond.when = self._task.changed_when
result['changed'] = cond.evaluate_conditional(templar, vars_copy)
def _evaluate_failed_when_result(result):
if self._task.failed_when:
cond = Conditional(loader=self._loader)
cond.when = self._task.failed_when
failed_when_result = cond.evaluate_conditional(templar, vars_copy)
result['failed_when_result'] = result['failed'] = failed_when_result
else:
failed_when_result = False
return failed_when_result
if 'ansible_facts' in result:
vars_copy.update(result['ansible_facts'])
# set the failed property if the result has a non-zero rc. This will be
# overridden below if the failed_when property is set
if result.get('rc', 0) != 0:
result['failed'] = True
# if we didn't skip this task, use the helpers to evaluate the changed/
# failed_when properties
if 'skipped' not in result:
_evaluate_changed_when_result(result)
_evaluate_failed_when_result(result)
if retries > 1:
cond = Conditional(loader=self._loader)
cond.when = self._task.until
if cond.evaluate_conditional(templar, vars_copy):
break
else:
# no conditional check, or it failed, so sleep for the specified time
if attempt < retries:
result['attempts'] = attempt
result['_ansible_retry'] = True
result['retries'] = retries
display.debug('Retrying task, attempt %d of %d' % (attempt, retries))
self._rslt_q.put(TaskResult(self._host.name, self._task._uuid, result), block=False)
time.sleep(delay)
else:
if retries > 1:
# we ran out of attempts, so mark the result as failed
result['failed'] = True
# do the final update of the local variables here, for both registered
# values and any facts which may have been created
if self._task.register:
variables[self._task.register] = wrap_var(result)
if 'ansible_facts' in result:
variables.update(result['ansible_facts'])
# save the notification target in the result, if it was specified, as
# this task may be running in a loop in which case the notification
# may be item-specific, ie. "notify: service {{item}}"
if self._task.notify is not None:
result['_ansible_notify'] = self._task.notify
# add the delegated vars to the result, so we can reference them
# on the results side without having to do any further templating
# FIXME: we only want a limited set of variables here, so this is currently
# hardcoded but should be possibly fixed if we want more or if
# there is another source of truth we can use
delegated_vars = variables.get('ansible_delegated_vars', dict()).get(self._task.delegate_to, dict()).copy()
if len(delegated_vars) > 0:
result["_ansible_delegated_vars"] = dict()
for k in ('ansible_host', ):
result["_ansible_delegated_vars"][k] = delegated_vars.get(k)
# and return
display.debug("attempt loop complete, returning result")
return result
    def _poll_async_result(self, result, templar, task_vars=None):
        '''
        Poll the async job id found in ``result`` until the job finishes,
        fails with a parsed result, or the task's async time limit expires.
        Returns the final result dict from the async_status module, or a
        ``failed`` dict if no job id was returned / the job timed out.
        '''
        if task_vars is None:
            task_vars = self._job_vars
        async_jid = result.get('ansible_job_id')
        if async_jid is None:
            return dict(failed=True, msg="No job id was returned by the async task")
        # Create a new psuedo-task to run the async_status module, and run
        # that (with a sleep for "poll" seconds between each retry) until the
        # async time limit is exceeded.
        async_task = Task().load(dict(action='async_status jid=%s' % async_jid))
        # Because this is an async task, the action handler is async. However,
        # we need the 'normal' action handler for the status check, so get it
        # now via the action_loader
        normal_handler = self._shared_loader_obj.action_loader.get(
            'normal',
            task=async_task,
            connection=self._connection,
            play_context=self._play_context,
            loader=self._loader,
            templar=templar,
            shared_loader_obj=self._shared_loader_obj,
        )
        # NOTE(review): 'async' became a reserved keyword in Python 3.7; this
        # attribute access only parses on older interpreters.
        # NOTE(review): if self._task.async were <= 0, the loop body would never
        # run and 'async_result' below would be unbound -- presumably callers
        # guarantee async > 0 here; confirm upstream.
        time_left = self._task.async
        while time_left > 0:
            # Sleep first: the job was only just kicked off, so an immediate
            # status check would almost always report "not finished".
            time.sleep(self._task.poll)
            async_result = normal_handler.run(task_vars=task_vars)
            # We do not bail out of the loop in cases where the failure
            # is associated with a parsing error. The async_runner can
            # have issues which result in a half-written/unparseable result
            # file on disk, which manifests to the user as a timeout happening
            # before it's time to timeout.
            if int(async_result.get('finished', 0)) == 1 or ('failed' in async_result and async_result.get('_ansible_parsed', False)) or 'skipped' in async_result:
                break
            time_left -= self._task.poll
        if int(async_result.get('finished', 0)) != 1:
            if async_result.get('_ansible_parsed'):
                # Job genuinely did not finish in time.
                return dict(failed=True, msg="async task did not complete within the requested time")
            else:
                # Result file never became parseable; surface it for debugging.
                return dict(failed=True, msg="async task produced unparseable results", async_result=async_result)
        else:
            return async_result
    def _get_connection(self, variables, templar):
        '''
        Reads the connection property for the host, and returns the
        correct connection object from the list of connection plugins
        '''
        if self._task.delegate_to is not None:
            # since we're delegating, we don't want to use interpreter values
            # which would have been set for the original target host
            for i in list(variables.keys()):
                if isinstance(i, string_types) and i.startswith('ansible_') and i.endswith('_interpreter'):
                    del variables[i]
            # now replace the interpreter values with those that may have come
            # from the delegated-to host
            delegated_vars = variables.get('ansible_delegated_vars', dict()).get(self._task.delegate_to, dict())
            if isinstance(delegated_vars, dict):
                for i in delegated_vars:
                    if isinstance(i, string_types) and i.startswith("ansible_") and i.endswith("_interpreter"):
                        variables[i] = delegated_vars[i]
        conn_type = self._play_context.connection
        # 'smart' resolves to ssh unless the platform/ssh build can't support
        # the features we need, in which case fall back to paramiko.
        if conn_type == 'smart':
            conn_type = 'ssh'
            if sys.platform.startswith('darwin') and self._play_context.password:
                # due to a current bug in sshpass on OSX, which can trigger
                # a kernel panic even for non-privileged users, we revert to
                # paramiko on that OS when a SSH password is specified
                conn_type = "paramiko"
            else:
                # see if SSH can support ControlPersist if not use paramiko
                try:
                    ssh_executable = C.ANSIBLE_SSH_EXECUTABLE
                    cmd = subprocess.Popen([ssh_executable, '-o', 'ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                    (out, err) = cmd.communicate()
                    err = to_text(err)
                    # An ssh that rejects the option prints one of these.
                    if u"Bad configuration option" in err or u"Usage:" in err:
                        conn_type = "paramiko"
                except OSError:
                    # ssh binary not runnable at all.
                    conn_type = "paramiko"
        connection = self._shared_loader_obj.connection_loader.get(conn_type, self._play_context, self._new_stdin)
        if not connection:
            raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
        if self._play_context.accelerate:
            # accelerate is deprecated as of 2.1...
            display.deprecated('Accelerated mode is deprecated. Consider using SSH with ControlPersist and pipelining enabled instead')
            # launch the accelerated daemon here
            # Keep the plain (ssh) connection around: it is needed to push the
            # accelerate daemon to the host if connecting to it fails below.
            ssh_connection = connection
            handler = self._shared_loader_obj.action_loader.get(
                'normal',
                task=self._task,
                connection=ssh_connection,
                play_context=self._play_context,
                loader=self._loader,
                templar=templar,
                shared_loader_obj=self._shared_loader_obj,
            )
            key = key_for_hostname(self._play_context.remote_addr)
            accelerate_args = dict(
                password=base64.b64encode(key.__str__()),
                port=self._play_context.accelerate_port,
                minutes=C.ACCELERATE_DAEMON_TIMEOUT,
                ipv6=self._play_context.accelerate_ipv6,
                debug=self._play_context.verbosity,
            )
            connection = self._shared_loader_obj.connection_loader.get('accelerate', self._play_context, self._new_stdin)
            if not connection:
                # NOTE(review): this message interpolates the *previous*
                # conn_type (e.g. 'ssh') although the plugin that was not
                # found is 'accelerate' -- misleading error text.
                raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
            try:
                connection._connect()
            except AnsibleConnectionFailure:
                # Daemon probably not running yet: push/start it over the
                # plain connection, then try the accelerate connect again.
                display.debug('connection failed, fallback to accelerate')
                res = handler._execute_module(module_name='accelerate', module_args=accelerate_args, task_vars=variables, delete_remote_tmp=False)
                display.debug(res)
                connection._connect()
        return connection
    def _get_action_handler(self, connection, templar):
        '''
        Returns the correct action plugin to handle the requested task action
        '''
        # NOTE(review): 'async' became a reserved keyword in Python 3.7; this
        # attribute access only parses on older interpreters.
        if self._task.action in self._shared_loader_obj.action_loader:
            # A dedicated action plugin exists for this action, but action
            # plugins cannot be run asynchronously.
            if self._task.async != 0:
                raise AnsibleError("async mode is not supported with the %s module" % self._task.action)
            handler_name = self._task.action
        elif self._task.async == 0:
            # Ordinary synchronous module execution.
            handler_name = 'normal'
        else:
            # Fire-and-forget execution via the async wrapper.
            handler_name = 'async'
        handler = self._shared_loader_obj.action_loader.get(
            handler_name,
            task=self._task,
            connection=connection,
            play_context=self._play_context,
            loader=self._loader,
            templar=templar,
            shared_loader_obj=self._shared_loader_obj,
        )
        if not handler:
            raise AnsibleError("the handler '%s' was not found" % handler_name)
        return handler
| gpl-3.0 |
linjoahow/w17g | static/Brython3.1.3-20150514-095342/Lib/encodings/aliases.py | 726 | 15414 | """ Encoding Aliases Support
This module is used by the encodings package search function to
map encodings names to module names.
Note that the search function normalizes the encoding names before
doing the lookup, so the mapping will have to map normalized
encoding names to module names.
Contents:
The following aliases dictionary contains mappings of all IANA
character set names for which the Python core library provides
codecs. In addition to these, a few Python specific codec
aliases have also been added.
"""
aliases = {
# Please keep this list sorted alphabetically by value !
# ascii codec
'646' : 'ascii',
'ansi_x3.4_1968' : 'ascii',
'ansi_x3_4_1968' : 'ascii', # some email headers use this non-standard name
'ansi_x3.4_1986' : 'ascii',
'cp367' : 'ascii',
'csascii' : 'ascii',
'ibm367' : 'ascii',
'iso646_us' : 'ascii',
'iso_646.irv_1991' : 'ascii',
'iso_ir_6' : 'ascii',
'us' : 'ascii',
'us_ascii' : 'ascii',
# base64_codec codec
'base64' : 'base64_codec',
'base_64' : 'base64_codec',
# big5 codec
'big5_tw' : 'big5',
'csbig5' : 'big5',
# big5hkscs codec
'big5_hkscs' : 'big5hkscs',
'hkscs' : 'big5hkscs',
# bz2_codec codec
'bz2' : 'bz2_codec',
# cp037 codec
'037' : 'cp037',
'csibm037' : 'cp037',
'ebcdic_cp_ca' : 'cp037',
'ebcdic_cp_nl' : 'cp037',
'ebcdic_cp_us' : 'cp037',
'ebcdic_cp_wt' : 'cp037',
'ibm037' : 'cp037',
'ibm039' : 'cp037',
# cp1026 codec
'1026' : 'cp1026',
'csibm1026' : 'cp1026',
'ibm1026' : 'cp1026',
# cp1125 codec
'1125' : 'cp1125',
'ibm1125' : 'cp1125',
'cp866u' : 'cp1125',
'ruscii' : 'cp1125',
# cp1140 codec
'1140' : 'cp1140',
'ibm1140' : 'cp1140',
# cp1250 codec
'1250' : 'cp1250',
'windows_1250' : 'cp1250',
# cp1251 codec
'1251' : 'cp1251',
'windows_1251' : 'cp1251',
# cp1252 codec
'1252' : 'cp1252',
'windows_1252' : 'cp1252',
# cp1253 codec
'1253' : 'cp1253',
'windows_1253' : 'cp1253',
# cp1254 codec
'1254' : 'cp1254',
'windows_1254' : 'cp1254',
# cp1255 codec
'1255' : 'cp1255',
'windows_1255' : 'cp1255',
# cp1256 codec
'1256' : 'cp1256',
'windows_1256' : 'cp1256',
# cp1257 codec
'1257' : 'cp1257',
'windows_1257' : 'cp1257',
# cp1258 codec
'1258' : 'cp1258',
'windows_1258' : 'cp1258',
# cp273 codec
'273' : 'cp273',
'ibm273' : 'cp273',
'csibm273' : 'cp273',
# cp424 codec
'424' : 'cp424',
'csibm424' : 'cp424',
'ebcdic_cp_he' : 'cp424',
'ibm424' : 'cp424',
# cp437 codec
'437' : 'cp437',
'cspc8codepage437' : 'cp437',
'ibm437' : 'cp437',
# cp500 codec
'500' : 'cp500',
'csibm500' : 'cp500',
'ebcdic_cp_be' : 'cp500',
'ebcdic_cp_ch' : 'cp500',
'ibm500' : 'cp500',
# cp775 codec
'775' : 'cp775',
'cspc775baltic' : 'cp775',
'ibm775' : 'cp775',
# cp850 codec
'850' : 'cp850',
'cspc850multilingual' : 'cp850',
'ibm850' : 'cp850',
# cp852 codec
'852' : 'cp852',
'cspcp852' : 'cp852',
'ibm852' : 'cp852',
# cp855 codec
'855' : 'cp855',
'csibm855' : 'cp855',
'ibm855' : 'cp855',
# cp857 codec
'857' : 'cp857',
'csibm857' : 'cp857',
'ibm857' : 'cp857',
# cp858 codec
'858' : 'cp858',
'csibm858' : 'cp858',
'ibm858' : 'cp858',
# cp860 codec
'860' : 'cp860',
'csibm860' : 'cp860',
'ibm860' : 'cp860',
# cp861 codec
'861' : 'cp861',
'cp_is' : 'cp861',
'csibm861' : 'cp861',
'ibm861' : 'cp861',
# cp862 codec
'862' : 'cp862',
'cspc862latinhebrew' : 'cp862',
'ibm862' : 'cp862',
# cp863 codec
'863' : 'cp863',
'csibm863' : 'cp863',
'ibm863' : 'cp863',
# cp864 codec
'864' : 'cp864',
'csibm864' : 'cp864',
'ibm864' : 'cp864',
# cp865 codec
'865' : 'cp865',
'csibm865' : 'cp865',
'ibm865' : 'cp865',
# cp866 codec
'866' : 'cp866',
'csibm866' : 'cp866',
'ibm866' : 'cp866',
# cp869 codec
'869' : 'cp869',
'cp_gr' : 'cp869',
'csibm869' : 'cp869',
'ibm869' : 'cp869',
# cp932 codec
'932' : 'cp932',
'ms932' : 'cp932',
'mskanji' : 'cp932',
'ms_kanji' : 'cp932',
# cp949 codec
'949' : 'cp949',
'ms949' : 'cp949',
'uhc' : 'cp949',
# cp950 codec
'950' : 'cp950',
'ms950' : 'cp950',
# euc_jis_2004 codec
'jisx0213' : 'euc_jis_2004',
'eucjis2004' : 'euc_jis_2004',
'euc_jis2004' : 'euc_jis_2004',
# euc_jisx0213 codec
'eucjisx0213' : 'euc_jisx0213',
# euc_jp codec
'eucjp' : 'euc_jp',
'ujis' : 'euc_jp',
'u_jis' : 'euc_jp',
# euc_kr codec
'euckr' : 'euc_kr',
'korean' : 'euc_kr',
'ksc5601' : 'euc_kr',
'ks_c_5601' : 'euc_kr',
'ks_c_5601_1987' : 'euc_kr',
'ksx1001' : 'euc_kr',
'ks_x_1001' : 'euc_kr',
# gb18030 codec
'gb18030_2000' : 'gb18030',
# gb2312 codec
'chinese' : 'gb2312',
'csiso58gb231280' : 'gb2312',
'euc_cn' : 'gb2312',
'euccn' : 'gb2312',
'eucgb2312_cn' : 'gb2312',
'gb2312_1980' : 'gb2312',
'gb2312_80' : 'gb2312',
'iso_ir_58' : 'gb2312',
# gbk codec
'936' : 'gbk',
'cp936' : 'gbk',
'ms936' : 'gbk',
# hex_codec codec
'hex' : 'hex_codec',
# hp_roman8 codec
'roman8' : 'hp_roman8',
'r8' : 'hp_roman8',
'csHPRoman8' : 'hp_roman8',
# hz codec
'hzgb' : 'hz',
'hz_gb' : 'hz',
'hz_gb_2312' : 'hz',
# iso2022_jp codec
'csiso2022jp' : 'iso2022_jp',
'iso2022jp' : 'iso2022_jp',
'iso_2022_jp' : 'iso2022_jp',
# iso2022_jp_1 codec
'iso2022jp_1' : 'iso2022_jp_1',
'iso_2022_jp_1' : 'iso2022_jp_1',
# iso2022_jp_2 codec
'iso2022jp_2' : 'iso2022_jp_2',
'iso_2022_jp_2' : 'iso2022_jp_2',
# iso2022_jp_2004 codec
'iso_2022_jp_2004' : 'iso2022_jp_2004',
'iso2022jp_2004' : 'iso2022_jp_2004',
# iso2022_jp_3 codec
'iso2022jp_3' : 'iso2022_jp_3',
'iso_2022_jp_3' : 'iso2022_jp_3',
# iso2022_jp_ext codec
'iso2022jp_ext' : 'iso2022_jp_ext',
'iso_2022_jp_ext' : 'iso2022_jp_ext',
# iso2022_kr codec
'csiso2022kr' : 'iso2022_kr',
'iso2022kr' : 'iso2022_kr',
'iso_2022_kr' : 'iso2022_kr',
# iso8859_10 codec
'csisolatin6' : 'iso8859_10',
'iso_8859_10' : 'iso8859_10',
'iso_8859_10_1992' : 'iso8859_10',
'iso_ir_157' : 'iso8859_10',
'l6' : 'iso8859_10',
'latin6' : 'iso8859_10',
# iso8859_11 codec
'thai' : 'iso8859_11',
'iso_8859_11' : 'iso8859_11',
'iso_8859_11_2001' : 'iso8859_11',
# iso8859_13 codec
'iso_8859_13' : 'iso8859_13',
'l7' : 'iso8859_13',
'latin7' : 'iso8859_13',
# iso8859_14 codec
'iso_8859_14' : 'iso8859_14',
'iso_8859_14_1998' : 'iso8859_14',
'iso_celtic' : 'iso8859_14',
'iso_ir_199' : 'iso8859_14',
'l8' : 'iso8859_14',
'latin8' : 'iso8859_14',
# iso8859_15 codec
'iso_8859_15' : 'iso8859_15',
'l9' : 'iso8859_15',
'latin9' : 'iso8859_15',
# iso8859_16 codec
'iso_8859_16' : 'iso8859_16',
'iso_8859_16_2001' : 'iso8859_16',
'iso_ir_226' : 'iso8859_16',
'l10' : 'iso8859_16',
'latin10' : 'iso8859_16',
# iso8859_2 codec
'csisolatin2' : 'iso8859_2',
'iso_8859_2' : 'iso8859_2',
'iso_8859_2_1987' : 'iso8859_2',
'iso_ir_101' : 'iso8859_2',
'l2' : 'iso8859_2',
'latin2' : 'iso8859_2',
# iso8859_3 codec
'csisolatin3' : 'iso8859_3',
'iso_8859_3' : 'iso8859_3',
'iso_8859_3_1988' : 'iso8859_3',
'iso_ir_109' : 'iso8859_3',
'l3' : 'iso8859_3',
'latin3' : 'iso8859_3',
# iso8859_4 codec
'csisolatin4' : 'iso8859_4',
'iso_8859_4' : 'iso8859_4',
'iso_8859_4_1988' : 'iso8859_4',
'iso_ir_110' : 'iso8859_4',
'l4' : 'iso8859_4',
'latin4' : 'iso8859_4',
# iso8859_5 codec
'csisolatincyrillic' : 'iso8859_5',
'cyrillic' : 'iso8859_5',
'iso_8859_5' : 'iso8859_5',
'iso_8859_5_1988' : 'iso8859_5',
'iso_ir_144' : 'iso8859_5',
# iso8859_6 codec
'arabic' : 'iso8859_6',
'asmo_708' : 'iso8859_6',
'csisolatinarabic' : 'iso8859_6',
'ecma_114' : 'iso8859_6',
'iso_8859_6' : 'iso8859_6',
'iso_8859_6_1987' : 'iso8859_6',
'iso_ir_127' : 'iso8859_6',
# iso8859_7 codec
'csisolatingreek' : 'iso8859_7',
'ecma_118' : 'iso8859_7',
'elot_928' : 'iso8859_7',
'greek' : 'iso8859_7',
'greek8' : 'iso8859_7',
'iso_8859_7' : 'iso8859_7',
'iso_8859_7_1987' : 'iso8859_7',
'iso_ir_126' : 'iso8859_7',
# iso8859_8 codec
'csisolatinhebrew' : 'iso8859_8',
'hebrew' : 'iso8859_8',
'iso_8859_8' : 'iso8859_8',
'iso_8859_8_1988' : 'iso8859_8',
'iso_ir_138' : 'iso8859_8',
# iso8859_9 codec
'csisolatin5' : 'iso8859_9',
'iso_8859_9' : 'iso8859_9',
'iso_8859_9_1989' : 'iso8859_9',
'iso_ir_148' : 'iso8859_9',
'l5' : 'iso8859_9',
'latin5' : 'iso8859_9',
# johab codec
'cp1361' : 'johab',
'ms1361' : 'johab',
# koi8_r codec
'cskoi8r' : 'koi8_r',
# latin_1 codec
#
# Note that the latin_1 codec is implemented internally in C and a
# lot faster than the charmap codec iso8859_1 which uses the same
# encoding. This is why we discourage the use of the iso8859_1
# codec and alias it to latin_1 instead.
#
'8859' : 'latin_1',
'cp819' : 'latin_1',
'csisolatin1' : 'latin_1',
'ibm819' : 'latin_1',
'iso8859' : 'latin_1',
'iso8859_1' : 'latin_1',
'iso_8859_1' : 'latin_1',
'iso_8859_1_1987' : 'latin_1',
'iso_ir_100' : 'latin_1',
'l1' : 'latin_1',
'latin' : 'latin_1',
'latin1' : 'latin_1',
# mac_cyrillic codec
'maccyrillic' : 'mac_cyrillic',
# mac_greek codec
'macgreek' : 'mac_greek',
# mac_iceland codec
'maciceland' : 'mac_iceland',
# mac_latin2 codec
'maccentraleurope' : 'mac_latin2',
'maclatin2' : 'mac_latin2',
# mac_roman codec
'macintosh' : 'mac_roman',
'macroman' : 'mac_roman',
# mac_turkish codec
'macturkish' : 'mac_turkish',
# mbcs codec
'dbcs' : 'mbcs',
# ptcp154 codec
'csptcp154' : 'ptcp154',
'pt154' : 'ptcp154',
'cp154' : 'ptcp154',
'cyrillic_asian' : 'ptcp154',
# quopri_codec codec
'quopri' : 'quopri_codec',
'quoted_printable' : 'quopri_codec',
'quotedprintable' : 'quopri_codec',
# rot_13 codec
'rot13' : 'rot_13',
# shift_jis codec
'csshiftjis' : 'shift_jis',
'shiftjis' : 'shift_jis',
'sjis' : 'shift_jis',
's_jis' : 'shift_jis',
# shift_jis_2004 codec
'shiftjis2004' : 'shift_jis_2004',
'sjis_2004' : 'shift_jis_2004',
's_jis_2004' : 'shift_jis_2004',
# shift_jisx0213 codec
'shiftjisx0213' : 'shift_jisx0213',
'sjisx0213' : 'shift_jisx0213',
's_jisx0213' : 'shift_jisx0213',
# tactis codec
'tis260' : 'tactis',
# tis_620 codec
'tis620' : 'tis_620',
'tis_620_0' : 'tis_620',
'tis_620_2529_0' : 'tis_620',
'tis_620_2529_1' : 'tis_620',
'iso_ir_166' : 'tis_620',
# utf_16 codec
'u16' : 'utf_16',
'utf16' : 'utf_16',
# utf_16_be codec
'unicodebigunmarked' : 'utf_16_be',
'utf_16be' : 'utf_16_be',
# utf_16_le codec
'unicodelittleunmarked' : 'utf_16_le',
'utf_16le' : 'utf_16_le',
# utf_32 codec
'u32' : 'utf_32',
'utf32' : 'utf_32',
# utf_32_be codec
'utf_32be' : 'utf_32_be',
# utf_32_le codec
'utf_32le' : 'utf_32_le',
# utf_7 codec
'u7' : 'utf_7',
'utf7' : 'utf_7',
'unicode_1_1_utf_7' : 'utf_7',
# utf_8 codec
'u8' : 'utf_8',
'utf' : 'utf_8',
'utf8' : 'utf_8',
'utf8_ucs2' : 'utf_8',
'utf8_ucs4' : 'utf_8',
# uu_codec codec
'uu' : 'uu_codec',
# zlib_codec codec
'zip' : 'zlib_codec',
'zlib' : 'zlib_codec',
# temporary mac CJK aliases, will be replaced by proper codecs in 3.1
'x_mac_japanese' : 'shift_jis',
'x_mac_korean' : 'euc_kr',
'x_mac_simp_chinese' : 'gb2312',
'x_mac_trad_chinese' : 'big5',
}
| gpl-3.0 |
iamyooon/study_linux | translate/kerneldoc/Documentation/sphinx/cdomain.py | 296 | 5596 | # -*- coding: utf-8; mode: python -*-
# pylint: disable=W0141,C0113,C0103,C0325
u"""
cdomain
~~~~~~~
Replacement for the sphinx c-domain.
:copyright: Copyright (C) 2016 Markus Heiser
:license: GPL Version 2, June 1991 see Linux/COPYING for details.
List of customizations:
* Moved the *duplicate C object description* warnings for function
declarations in the nitpicky mode. See Sphinx documentation for
the config values for ``nitpick`` and ``nitpick_ignore``.
* Add option 'name' to the "c:function:" directive. With option 'name' the
ref-name of a function can be modified. E.g.::
.. c:function:: int ioctl( int fd, int request )
:name: VIDIOC_LOG_STATUS
The func-name (e.g. ioctl) remains in the output but the ref-name changed
from 'ioctl' to 'VIDIOC_LOG_STATUS'. The function is referenced by::
* :c:func:`VIDIOC_LOG_STATUS` or
* :any:`VIDIOC_LOG_STATUS` (``:any:`` needs sphinx 1.3)
* Handle signatures of function-like macros well. Don't try to deduce
arguments types of function-like macros.
"""
from docutils import nodes
from docutils.parsers.rst import directives
import sphinx
from sphinx import addnodes
from sphinx.domains.c import c_funcptr_sig_re, c_sig_re
from sphinx.domains.c import CObject as Base_CObject
from sphinx.domains.c import CDomain as Base_CDomain
__version__ = '1.0'
# Get Sphinx version
major, minor, patch = sphinx.version_info[:3]
def setup(app):
    """Sphinx extension entry point: install the customized C domain.

    Returns the extension metadata dict (version and parallel-safety flags)
    expected by Sphinx.
    """
    app.override_domain(CDomain)

    return {
        'version': __version__,
        'parallel_read_safe': True,
        'parallel_write_safe': True,
    }
class CObject(Base_CObject):

    """
    Description of a C language object.

    Extends the stock c-domain CObject with function-like-macro signature
    handling and an optional ':name:' directive option that overrides the
    cross-reference name of a function.
    """

    # Directive options accepted in addition to the base class's; the value
    # of ':name:' is taken verbatim.
    option_spec = {
        "name" : directives.unchanged
    }

    def handle_func_like_macro(self, sig, signode):
        u"""Handles signatures of function-like macros.

        If the objtype is 'function' and the signature ``sig`` is a
        function-like macro, the name of the macro is returned. Otherwise
        ``False`` is returned.  """

        if not self.objtype == 'function':
            return False

        m = c_funcptr_sig_re.match(sig)
        if m is None:
            m = c_sig_re.match(sig)
            if m is None:
                raise ValueError('no match')

        rettype, fullname, arglist, _const = m.groups()
        arglist = arglist.strip()
        # A return type or an empty argument list means this is a real
        # function declaration, not a function-like macro.
        if rettype or not arglist:
            return False

        arglist = arglist.replace('`', '').replace('\\ ', '') # remove markup
        arglist = [a.strip() for a in arglist.split(",")]

        # has the first argument a type?
        if len(arglist[0].split(" ")) > 1:
            return False

        # This is a function-like macro, it's arguments are typeless!
        signode += addnodes.desc_name(fullname, fullname)
        paramlist = addnodes.desc_parameterlist()
        signode += paramlist

        for argname in arglist:
            param = addnodes.desc_parameter('', '', noemph=True)
            # separate by non-breaking space in the output
            param += nodes.emphasis(argname, argname)
            paramlist += param

        return fullname

    def handle_signature(self, sig, signode):
        """Transform a C signature into RST nodes."""
        fullname = self.handle_func_like_macro(sig, signode)
        if not fullname:
            # Not a function-like macro: defer to the base c-domain parser.
            fullname = super(CObject, self).handle_signature(sig, signode)

        if "name" in self.options:
            if self.objtype == 'function':
                # ':name:' replaces the ref-name; the rendered signature
                # keeps the real function name.
                fullname = self.options["name"]
            else:
                # FIXME: handle :name: value of other declaration types?
                pass
        return fullname

    def add_target_and_index(self, name, sig, signode):
        # for C API items we add a prefix since names are usually not qualified
        # by a module name and so easily clash with e.g. section titles
        targetname = 'c.' + name
        if targetname not in self.state.document.ids:
            signode['names'].append(targetname)
            signode['ids'].append(targetname)
            signode['first'] = (not self.names)
            self.state.document.note_explicit_target(signode)
            inv = self.env.domaindata['c']['objects']
            # Duplicate-description warnings are only emitted in nitpicky
            # mode, and can be silenced per-symbol via nitpick_ignore.
            if (name in inv and self.env.config.nitpicky):
                if self.objtype == 'function':
                    if ('c:func', name) not in self.env.config.nitpick_ignore:
                        self.state_machine.reporter.warning(
                            'duplicate C object description of %s, ' % name +
                            'other instance in ' + self.env.doc2path(inv[name][0]),
                            line=self.lineno)
            inv[name] = (self.env.docname, self.objtype)

        indextext = self.get_index_text(name)
        if indextext:
            if major == 1 and minor < 4:
                # indexnode's tuple changed in 1.4
                # https://github.com/sphinx-doc/sphinx/commit/e6a5a3a92e938fcd75866b4227db9e0524d58f7c
                self.indexnode['entries'].append(
                    ('single', indextext, targetname, ''))
            else:
                self.indexnode['entries'].append(
                    ('single', indextext, targetname, '', None))
class CDomain(Base_CDomain):

    """Customized C language domain.

    Same name and label as the stock Sphinx c-domain, but every declaration
    directive is handled by the customized :class:`CObject` above.
    """
    name = 'c'
    label = 'C'
    # All declaration directives share one implementation.
    directives = dict.fromkeys(
        ('function', 'member', 'macro', 'type', 'var'), CObject)
| apache-2.0 |
mapr/sahara | sahara/plugins/vanilla/v2_3_0/config_helper.py | 3 | 2505 | # Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from sahara.openstack.common import log as logging
from sahara.plugins.vanilla.hadoop2 import config_helper as c_helper
from sahara.utils import xmlutils as x
CONF = cfg.CONF
CONF.import_opt("enable_data_locality", "sahara.topology.topology_helper")
LOG = logging.getLogger(__name__)
CORE_DEFAULT = x.load_hadoop_xml_defaults(
'plugins/vanilla/v2_3_0/resources/core-default.xml')
HDFS_DEFAULT = x.load_hadoop_xml_defaults(
'plugins/vanilla/v2_3_0/resources/hdfs-default.xml')
MAPRED_DEFAULT = x.load_hadoop_xml_defaults(
'plugins/vanilla/v2_3_0/resources/mapred-default.xml')
YARN_DEFAULT = x.load_hadoop_xml_defaults(
'plugins/vanilla/v2_3_0/resources/yarn-default.xml')
OOZIE_DEFAULT = x.load_hadoop_xml_defaults(
'plugins/vanilla/v2_3_0/resources/oozie-default.xml')
XML_CONFS = {
"Hadoop": [CORE_DEFAULT],
"HDFS": [HDFS_DEFAULT],
"YARN": [YARN_DEFAULT],
"MapReduce": [MAPRED_DEFAULT],
"JobFlow": [OOZIE_DEFAULT]
}
ENV_CONFS = {
"YARN": {
'ResourceManager Heap Size': 1024,
'NodeManager Heap Size': 1024
},
"HDFS": {
'NameNode Heap Size': 1024,
'SecondaryNameNode Heap Size': 1024,
'DataNode Heap Size': 1024
},
"MapReduce": {
'JobHistoryServer Heap Size': 1024
},
"JobFlow": {
'Oozie Heap Size': 1024
}
}
# Initialise plugin Hadoop configurations
PLUGIN_XML_CONFIGS = c_helper.init_xml_configs(XML_CONFS)
PLUGIN_ENV_CONFIGS = c_helper.init_env_configs(ENV_CONFS)
def _init_all_configs():
    """Concatenate every plugin config group into one flat list."""
    all_groups = (
        PLUGIN_XML_CONFIGS,
        PLUGIN_ENV_CONFIGS,
        c_helper.PLUGIN_GENERAL_CONFIGS,
    )
    configs = []
    for group in all_groups:
        configs.extend(group)
    return configs
PLUGIN_CONFIGS = _init_all_configs()
def get_plugin_configs():
    """Return the full, precomputed list of plugin configs (XML + env + general)."""
    return PLUGIN_CONFIGS
def get_xml_configs():
    """Return only the XML-file-backed plugin configs."""
    return PLUGIN_XML_CONFIGS
def get_env_configs():
    """Return the raw environment config defaults mapping.

    NOTE(review): this returns the ENV_CONFS dict, not the initialized
    PLUGIN_ENV_CONFIGS list -- asymmetric with get_xml_configs(); confirm
    callers expect the dict form.
    """
    return ENV_CONFS
| apache-2.0 |
omni5cience/django-inlineformfield | .tox/py27/lib/python2.7/site-packages/IPython/__init__.py | 10 | 5677 | # encoding: utf-8
"""
IPython: tools for interactive and parallel computing in Python.
http://ipython.org
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2008-2011, IPython Development Team.
# Copyright (c) 2001-2007, Fernando Perez <fernando.perez@colorado.edu>
# Copyright (c) 2001, Janko Hauser <jhauser@zscout.de>
# Copyright (c) 2001, Nathaniel Gray <n8gray@caltech.edu>
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import
import os
import sys
#-----------------------------------------------------------------------------
# Setup everything
#-----------------------------------------------------------------------------
# Don't forget to also update setup.py when this changes!
v = sys.version_info
if v[:2] < (2,7) or (v[0] >= 3 and v[:2] < (3,3)):
raise ImportError('IPython requires Python version 2.7 or 3.3 or above.')
del v
# Make it easy to import extensions - they are always directly on pythonpath.
# Therefore, non-IPython modules can be added to extensions directory.
# This should probably be in ipapp.py.
sys.path.append(os.path.join(os.path.dirname(__file__), "extensions"))
#-----------------------------------------------------------------------------
# Setup the top level names
#-----------------------------------------------------------------------------
from .config.loader import Config
from .core.getipython import get_ipython
from .core import release
from .core.application import Application
from .terminal.embed import embed
from .core.error import TryNext
from .core.interactiveshell import InteractiveShell
from .testing import test
from .utils.sysinfo import sys_info
from .utils.frame import extract_module_locals
# Release data
__author__ = '%s <%s>' % (release.author, release.author_email)
__license__ = release.license
__version__ = release.version
version_info = release.version_info
def embed_kernel(module=None, local_ns=None, **kwargs):
    """Start an IPython kernel embedded in the given scope.

    Use `IPython.start_kernel()` instead if you want full IPython
    configuration, or if the kernel should not inherit the namespace of the
    surrounding function.

    Parameters
    ----------
    module : ModuleType, optional
        Module whose globals are exposed to IPython (default: the caller's).
    local_ns : dict, optional
        Namespace loaded into the IPython user namespace (default: the
        caller's locals).
    kwargs : various, optional
        Forwarded to the IPKernelApp constructor to configure the kernel;
        only honored on the first embed_kernel call in a process.
    """
    # extract_module_locals(1) inspects the *caller's* frame, so it must be
    # invoked directly from this function body (stack depth 1).
    (caller_module, caller_locals) = extract_module_locals(1)
    module = caller_module if module is None else module
    local_ns = caller_locals if local_ns is None else local_ns

    # Defer the .zmq import until a kernel is actually requested.
    from IPython.kernel.zmq.embed import embed_kernel as real_embed_kernel
    real_embed_kernel(module=module, local_ns=local_ns, **kwargs)
def start_ipython(argv=None, **kwargs):
    """Run a full, regular IPython instance (not an embedded one).

    Unlike `IPython.embed()`, which drops a shell into a particular calling
    scope for debugging, this performs complete IPython initialization:
    startup files, configuration loading, etc.

    This is a public API method, and will survive implementation changes.

    Parameters
    ----------
    argv : list or None, optional
        Command-line arguments to parse. None means parse sys.argv; pass an
        empty list (`argv=[]`) to suppress all command-line parsing.
    user_ns : dict, optional
        Initial contents of the IPython user namespace.
    kwargs : various, optional
        Forwarded to the Application constructor (e.g. `config`).
    """
    from IPython.terminal import ipapp
    return ipapp.launch_new_instance(argv=argv, **kwargs)
def start_kernel(argv=None, **kwargs):
    """Run a full, regular IPython kernel instance (not an embedded one).

    Unlike `IPython.embed_kernel()`, which puts a kernel in a particular
    calling scope for debugging, this performs complete IPython
    initialization: startup files, configuration loading, etc.

    Parameters
    ----------
    argv : list or None, optional
        Command-line arguments to parse. None means parse sys.argv; pass an
        empty list (`argv=[]`) to suppress all command-line parsing.
    user_ns : dict, optional
        Initial contents of the IPython user namespace.
    kwargs : various, optional
        Forwarded to the Application constructor (e.g. `config`).
    """
    from IPython.kernel.zmq import kernelapp
    return kernelapp.launch_new_instance(argv=argv, **kwargs)
| mit |
jhuapl-marti/marti | env-crits/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/connection.py | 252 | 3265 | import socket
try:
from select import poll, POLLIN
except ImportError: # `poll` doesn't exist on OSX and other platforms
poll = False
try:
from select import select
except ImportError: # `select` doesn't exist on AppEngine.
select = False
def is_connection_dropped(conn):  # Platform-specific
    """
    Tell whether the connection's socket was dropped and should be closed.

    :param conn:
        An :class:`httplib.HTTPConnection`-like object exposing a ``sock``
        attribute.

    On platforms without real sockets (such as AppEngine) this always
    reports ``False`` so the platform can recycle connections itself.
    """
    sock_obj = getattr(conn, 'sock', False)
    if sock_obj is False:  # Platform-specific: AppEngine exposes no socket.
        return False
    if sock_obj is None:
        # httplib has already torn the connection down.
        return True

    if not poll:
        if not select:  # Platform-specific: AppEngine lacks select too.
            return False
        try:
            return select([sock_obj], [], [], 0.0)[0]
        except socket.error:
            return True

    # Preferred path on platforms that support it.
    pollster = poll()
    pollster.register(sock_obj, POLLIN)
    for (fileno, _event) in pollster.poll(0.0):
        if fileno == sock_obj.fileno():
            # Readable while nothing is expected: either stray buffered data
            # (bad) or the peer closed -- close the connection either way.
            return True
# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                      source_address=None, socket_options=None):
    """Connect to *address* and return the socket object.

    Convenience function.  Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object.  Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect.  If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used.  If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.
    """

    host, port = address
    err = None
    # Try each (family, socktype) candidate getaddrinfo returns -- this is
    # what makes the function work for both IPv4 and IPv6 hosts.
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)

            # If provided, set socket level options before connecting.
            # This is the only addition urllib3 makes to this function.
            _set_socket_options(sock, socket_options)

            # socket._GLOBAL_DEFAULT_TIMEOUT is a private sentinel object;
            # identity comparison is deliberate -- None is a valid timeout
            # (blocking mode) and must still be applied.
            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            return sock

        except socket.error as _:
            # Remember the most recent failure and move on to the next
            # candidate address; the partially-set-up socket is closed.
            err = _
            if sock is not None:
                sock.close()

    if err is not None:
        # Every candidate failed: surface the last connection error.
        raise err
    else:
        # getaddrinfo returned nothing at all, so no attempt was made.
        raise socket.error("getaddrinfo returns an empty list")
def _set_socket_options(sock, options):
if options is None:
return
for opt in options:
sock.setsockopt(*opt)
| mit |
allanino/nupic | nupic/regions/ImageSensorFilters/Rotation2D.py | 17 | 4436 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from PIL import Image
from nupic.regions.ImageSensorFilters.BaseFilter import BaseFilter
class Rotation2D(BaseFilter):
  """
  Create rotated versions of the image.
  """

  def __init__(self, angles=(0,), expand=False, targetRatio=None,
               highQuality=True):
    """
    angles -- Sequence of angles by which to rotate, in degrees.
    expand -- Whether to expand the output image to contain the entire
      rotated image. If False, the output image will match the dimensions of
      the input image, but cropping may occur.
    targetRatio -- Ratio of the sensor. If specified, used if expand == False
      to grow the image to the target ratio to avoid unnecessary clipping.
    highQuality -- Whether to use bicubic interpolation for rotating,
      instead of nearest neighbor.
    """
    BaseFilter.__init__(self)
    # BUG FIX: copy the angles. The previous code stored the caller's list
    # (and a mutable [0] default) and then modified it in place below, which
    # mutated the caller's data and could corrupt a shared default.
    self.angles = list(angles)
    self.expand = expand
    self.targetRatio = targetRatio
    self.highQuality = highQuality
    if not expand:
      for i, angle in enumerate(self.angles):
        if angle != 0 and angle % 90 == 0:
          # PIL misbehaves on exact nonzero multiples of 90 degrees when
          # expand is False; nudge the angle slightly to work around it.
          self.angles[i] -= .01  # Oh, PIL...

  def process(self, image):
    """
    image -- The image to process.

    Returns a single image, or a list containing one or more images.
    """
    BaseFilter.process(self, image)
    if not self.expand and self.targetRatio:
      # Pad the image to the aspect ratio of the sensor.
      # This allows us to rotate with expand=False without cutting off parts
      # of the image unnecessarily.
      # Unlike expand=True, the object doesn't get smaller.
      ratio = (image.size[0] / float(image.size[1]))
      if ratio < self.targetRatio:
        # Make image wider
        size = (int(image.size[0] * self.targetRatio / ratio), image.size[1])
        newImage = Image.new('LA', size, (self.background, 0))
        # Floor division keeps the paste offset an int on both Py2 and Py3.
        newImage.paste(image, ((newImage.size[0] - image.size[0]) // 2, 0))
        image = newImage
      elif ratio > self.targetRatio:
        # Make image taller
        size = (image.size[0], int(image.size[1] * ratio / self.targetRatio))
        newImage = Image.new('LA', size, (self.background, 0))
        newImage.paste(image, (0, (newImage.size[1] - image.size[1]) // 2))
        image = newImage
    if self.highQuality:
      resample = Image.BICUBIC
    else:
      resample = Image.NEAREST
    outputs = []
    for angle in self.angles:
      # Rotate the image, which expands it and pads it with black and a 0
      # alpha value.
      rotatedImage = image.rotate(angle,
                                  resample=resample,
                                  expand=self.expand)
      # Create a new larger image to hold the rotated image.
      # It is filled with the background color and an alpha value of 0.
      outputImage = Image.new('LA', rotatedImage.size, (self.background, 0))
      # Paste the rotated image into the new image, using the rotated image's
      # alpha channel as a mask.
      # This effectively just fills the area around the rotation with the
      # background color, and imports the alpha channel from the rotated image.
      outputImage.paste(rotatedImage, None, rotatedImage.split()[1])
      outputs.append(outputImage)
    return outputs

  def getOutputCount(self):
    """
    Return the number of images returned by each call to process().

    If the filter creates multiple simultaneous outputs, return a tuple:
    (outputCount, simultaneousOutputCount).
    """
    return len(self.angles)
| agpl-3.0 |
thuehn/RegMon | analysis_scripts/parse_traces/one-column-ewma.py | 1 | 2520 | #!/usr/bin/env python2.7
import sys, os
import pprint
import numpy as np
from optparse import OptionParser
# Command-line interface for the one-column EWMA trace parser: input/output
# delimiters, the data column to smooth, the smoothing factor alpha, the
# timestamp column, the undefined-cell marker and the output file prefix.
getopt = OptionParser()
getopt.add_option('-d', '--inputdelimiter', dest='inputsep', help='specify the input delimiter used in trace files', default="\t")
getopt.add_option('-D', '--outputdelimiter', dest='outputsep', help='specify the output delimiter for merged traces', default="\t")
getopt.add_option('-H', '--headers', dest='headers', help='file has headers', action='store_true', default=False)
getopt.add_option('-c', '--column', dest='column', help='column of interest', default=2)
#getopt.add_option('-w', '--window', dest='window', help='moving average time window', default=1)
getopt.add_option('-a', '--alpha', dest='alpha', help='exponential moving average with alpha', default=0.4)
getopt.add_option('-o', '--output', dest='output', help='output file name', default="result")
getopt.add_option('-t', '--timestamp', dest='timestamp', help='index of timestamp column', default=1)
getopt.add_option('-u', '--undefined', dest='undefined', help='string that is filled into undefined cells', default="")
#getopt.add_option('-v', '--verbosity', action='count', dest='verbosity', help='set verbosity level', default=1)
(sopts, sargs) = getopt.parse_args()
def print_err(msg):
    """Write an error message to stderr.

    Uses sys.stderr.write instead of the Python-2-only `print >>sys.stderr`
    statement, so the helper works unchanged on Python 2 and Python 3.
    Output is identical: "Error: <msg>" followed by a newline.
    """
    sys.stderr.write("Error: %s\n" % msg)
def near(a, b, eps=0.0000001):
    """Return True when the numerical values a and b lie within eps of each other."""
    return abs(a - b) < eps
# check if a readable trace file was specified as the last argument
try:
    fname = sys.argv[-1]
    if (not os.path.isfile(fname) or len(sys.argv) <= 1):
        raise Exception("")
except Exception:
    # No usable file argument: show usage and bail out.
    getopt.print_help()
    sys.exit(-1)

fh = open(fname, "r")
inputsep = str(sopts.inputsep)
outputsep = str(sopts.outputsep)

# Best-effort conversion of numeric options; leave them untouched when the
# user supplied something unparseable.
try:
    sopts.alpha = float(sopts.alpha)
    sopts.column = int(sopts.column)
    sopts.timestamp = int(sopts.timestamp)
except (TypeError, ValueError):
    pass

lf = fh.readline()
if (sopts.headers):
    # Skip the header row.
    lf = fh.readline()

fh_ewma = open("%s-ewma.csv" % sopts.output, "w")

# ewma holds [current, previous]; seeded from the first defined sample below.
ewma = [0]
while (lf != ""):
    values = lf.replace("\n", "").split(inputsep)
    raw = values[sopts.column - 1]
    # BUG FIX: compare the raw cell text against the undefined marker BEFORE
    # converting to float. The old code ran float() first (which crashed on
    # empty cells) and then compared a float to the marker string, which
    # could never match.
    if (raw == sopts.undefined):
        lf = fh.readline()
        continue
    currentval = float(raw)
    if (len(ewma) < 2):
        # First defined sample: seed both current and previous with it.
        ewma.insert(0, currentval)
        ewma.insert(0, currentval)
        ewma.pop()
    ts = float(values[sopts.timestamp - 1])
    # Standard exponential moving average update.
    ewma[0] = sopts.alpha * currentval + (1 - sopts.alpha) * ewma[1]
    ewma[1] = ewma[0]
    fh_ewma.write("%f%s%f\n" % (ts, outputsep, ewma[0]))
    lf = fh.readline()

fh_ewma.close()
fh.close()
| gpl-2.0 |
cgstudiomap/cgstudiomap | main/parts/odoo/addons/hr_payroll_account/__openerp__.py | 260 | 1739 | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# d$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Payroll Accounting',
'version': '1.0',
'category': 'Human Resources',
'description': """
Generic Payroll system Integrated with Accounting.
==================================================
* Expense Encoding
* Payment Encoding
* Company Contribution Management
""",
'author':'OpenERP SA',
'website': 'https://www.odoo.com/page/employees',
'depends': [
'hr_payroll',
'account',
'hr_expense'
],
'data': ['hr_payroll_account_view.xml'],
'demo': ['hr_payroll_account_demo.xml'],
'test': ['test/hr_payroll_account.yml'],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
axelxod/braincoin | qa/rpc-tests/bipdersig.py | 136 | 3261 | #!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test the BIP66 changeover logic
#
from test_framework import BitcoinTestFramework
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
import os
import shutil
class BIP66Test(BitcoinTestFramework):
    """Exercise the BIP66 (strict DER signature) version-3 block changeover.

    Mines version=2 and version=3 blocks across three nodes and checks the
    supermajority rules: version=2 blocks are still accepted at 949/1000
    version=3 blocks, and rejected at 950/1000.
    """

    def setup_network(self):
        # Node 0: default version; node 1 mines version=2; node 2 mines version=3.
        self.nodes = []
        self.nodes.append(start_node(0, self.options.tmpdir, []))
        self.nodes.append(start_node(1, self.options.tmpdir, ["-blockversion=2"]))
        self.nodes.append(start_node(2, self.options.tmpdir, ["-blockversion=3"]))
        connect_nodes(self.nodes[1], 0)
        connect_nodes(self.nodes[2], 0)
        self.is_network_split = False
        self.sync_all()

    def run_test(self):
        cnt = self.nodes[0].getblockcount()

        # Mine some old-version blocks
        self.nodes[1].setgenerate(True, 100)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 100):
            raise AssertionError("Failed to mine 100 version=2 blocks")

        # Mine 750 new-version blocks
        for i in xrange(15):
            self.nodes[2].setgenerate(True, 50)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 850):
            raise AssertionError("Failed to mine 750 version=3 blocks")
        # TODO: check that new DERSIG rules are not enforced

        # Mine 1 new-version block
        self.nodes[2].setgenerate(True, 1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 851):
            # BUG FIX: this used to raise the undefined name 'AssertionFailure',
            # which would have surfaced as a NameError rather than a test failure.
            raise AssertionError("Failed to mine a version=3 blocks")
        # TODO: check that new DERSIG rules are enforced

        # Mine 198 new-version blocks
        for i in xrange(2):
            self.nodes[2].setgenerate(True, 99)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1049):
            raise AssertionError("Failed to mine 198 version=3 blocks")

        # Mine 1 old-version block
        self.nodes[1].setgenerate(True, 1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1050):
            raise AssertionError("Failed to mine a version=2 block after 949 version=3 blocks")

        # Mine 1 new-version blocks
        self.nodes[2].setgenerate(True, 1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1051):
            raise AssertionError("Failed to mine a version=3 block")

        # Mine 1 old-version blocks: must now be rejected by the node.
        try:
            self.nodes[1].setgenerate(True, 1)
            raise AssertionError("Succeeded to mine a version=2 block after 950 version=3 blocks")
        except JSONRPCException:
            pass
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1051):
            raise AssertionError("Accepted a version=2 block after 950 version=3 blocks")

        # Mine 1 new-version blocks
        self.nodes[2].setgenerate(True, 1)
        self.sync_all()
        if (self.nodes[0].getblockcount() != cnt + 1052):
            raise AssertionError("Failed to mine a version=3 block")
# Script entry point: run the BIP66 changeover test directly.
if __name__ == '__main__':
    BIP66Test().main()
| mit |
materialsproject/pymatgen | pymatgen/io/tests/test_zeopp.py | 5 | 11239 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
# Module authorship metadata.
__author__ = "Bharat Medasani"
__copyright__ = "Copyright 2013, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "bkmedasani@lbl.gov"
__date__ = "Aug 2, 2013"
import os
import re
import unittest
from pymatgen.analysis.bond_valence import BVAnalyzer
from pymatgen.core.periodic_table import Species
from pymatgen.core.structure import Molecule, Structure
from pymatgen.io.cif import CifParser
from pymatgen.io.vasp.inputs import Poscar
from pymatgen.io.zeopp import (
ZeoCssr,
ZeoVoronoiXYZ,
get_free_sphere_params,
get_high_accuracy_voronoi_nodes,
get_void_volume_surfarea,
get_voronoi_nodes,
)
from pymatgen.util.testing import PymatgenTest
try:
import zeo
except ImportError:
zeo = None
@unittest.skipIf(not zeo, "zeo not present.")
class ZeoCssrTest(unittest.TestCase):
    """Tests for ZeoCssr, the Zeo++-flavoured CSSR writer/reader."""

    def setUp(self):
        # Build a ZeoCssr from the standard POSCAR test structure.
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "POSCAR")
        p = Poscar.from_file(filepath)
        self.zeocssr = ZeoCssr(p.structure)

    def test_str(self):
        # Exact expected CSSR serialization of the POSCAR fixture; any change
        # to the ZeoCssr output format will break this byte-for-byte check.
        expected_string = """4.7595 10.4118 6.0672
90.00 90.00 90.00 SPGR = 1 P 1 OPT = 1
24 0
0 Fe4 P4 O16
1 Fe 0.4749 0.2187 0.7500 0 0 0 0 0 0 0 0 0.0000
2 Fe 0.9749 0.2813 0.2500 0 0 0 0 0 0 0 0 0.0000
3 Fe 0.0251 0.7187 0.7500 0 0 0 0 0 0 0 0 0.0000
4 Fe 0.5251 0.7813 0.2500 0 0 0 0 0 0 0 0 0.0000
5 P 0.4182 0.0946 0.2500 0 0 0 0 0 0 0 0 0.0000
6 P 0.9182 0.4054 0.7500 0 0 0 0 0 0 0 0 0.0000
7 P 0.0818 0.5946 0.2500 0 0 0 0 0 0 0 0 0.0000
8 P 0.5818 0.9054 0.7500 0 0 0 0 0 0 0 0 0.0000
9 O 0.7071 0.0434 0.7500 0 0 0 0 0 0 0 0 0.0000
10 O 0.7413 0.0966 0.2500 0 0 0 0 0 0 0 0 0.0000
11 O 0.2854 0.1657 0.0461 0 0 0 0 0 0 0 0 0.0000
12 O 0.2854 0.1657 0.4539 0 0 0 0 0 0 0 0 0.0000
13 O 0.7854 0.3343 0.5461 0 0 0 0 0 0 0 0 0.0000
14 O 0.7854 0.3343 0.9539 0 0 0 0 0 0 0 0 0.0000
15 O 0.2413 0.4034 0.7500 0 0 0 0 0 0 0 0 0.0000
16 O 0.2071 0.4566 0.2500 0 0 0 0 0 0 0 0 0.0000
17 O 0.7929 0.5434 0.7500 0 0 0 0 0 0 0 0 0.0000
18 O 0.7587 0.5966 0.2500 0 0 0 0 0 0 0 0 0.0000
19 O 0.2146 0.6657 0.0461 0 0 0 0 0 0 0 0 0.0000
20 O 0.2146 0.6657 0.4539 0 0 0 0 0 0 0 0 0.0000
21 O 0.7146 0.8343 0.5461 0 0 0 0 0 0 0 0 0.0000
22 O 0.7146 0.8343 0.9539 0 0 0 0 0 0 0 0 0.0000
23 O 0.2587 0.9034 0.7500 0 0 0 0 0 0 0 0 0.0000
24 O 0.2929 0.9566 0.2500 0 0 0 0 0 0 0 0 0.0000"""
        self.assertEqual(str(self.zeocssr), expected_string)

    def test_from_file(self):
        # Parse an existing Zeo++ CSSR file and confirm a Structure comes back.
        filename = os.path.join(PymatgenTest.TEST_FILES_DIR, "EDI.cssr")
        zeocssr = ZeoCssr.from_file(filename)
        self.assertIsInstance(zeocssr.structure, Structure)
# @unittest.skipIf(not zeo, "zeo not present.")
class ZeoCssrOxiTest(unittest.TestCase):
    """Same as ZeoCssrTest, but on an oxidation-state-decorated structure,
    so species carry charge suffixes (Fe3+, P5+, O2-) in the CSSR output."""

    def setUp(self):
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "POSCAR")
        p = Poscar.from_file(filepath)
        # Decorate sites with bond-valence-derived oxidation states first.
        structure = BVAnalyzer().get_oxi_state_decorated_structure(p.structure)
        self.zeocssr = ZeoCssr(structure)

    def test_str(self):
        # Exact expected CSSR serialization including oxidation-state labels.
        expected_string = """4.7595 10.4118 6.0672
90.00 90.00 90.00 SPGR = 1 P 1 OPT = 1
24 0
0 Fe4 P4 O16
1 Fe3+ 0.4749 0.2187 0.7500 0 0 0 0 0 0 0 0 0.0000
2 Fe3+ 0.9749 0.2813 0.2500 0 0 0 0 0 0 0 0 0.0000
3 Fe3+ 0.0251 0.7187 0.7500 0 0 0 0 0 0 0 0 0.0000
4 Fe3+ 0.5251 0.7813 0.2500 0 0 0 0 0 0 0 0 0.0000
5 P5+ 0.4182 0.0946 0.2500 0 0 0 0 0 0 0 0 0.0000
6 P5+ 0.9182 0.4054 0.7500 0 0 0 0 0 0 0 0 0.0000
7 P5+ 0.0818 0.5946 0.2500 0 0 0 0 0 0 0 0 0.0000
8 P5+ 0.5818 0.9054 0.7500 0 0 0 0 0 0 0 0 0.0000
9 O2- 0.7071 0.0434 0.7500 0 0 0 0 0 0 0 0 0.0000
10 O2- 0.7413 0.0966 0.2500 0 0 0 0 0 0 0 0 0.0000
11 O2- 0.2854 0.1657 0.0461 0 0 0 0 0 0 0 0 0.0000
12 O2- 0.2854 0.1657 0.4539 0 0 0 0 0 0 0 0 0.0000
13 O2- 0.7854 0.3343 0.5461 0 0 0 0 0 0 0 0 0.0000
14 O2- 0.7854 0.3343 0.9539 0 0 0 0 0 0 0 0 0.0000
15 O2- 0.2413 0.4034 0.7500 0 0 0 0 0 0 0 0 0.0000
16 O2- 0.2071 0.4566 0.2500 0 0 0 0 0 0 0 0 0.0000
17 O2- 0.7929 0.5434 0.7500 0 0 0 0 0 0 0 0 0.0000
18 O2- 0.7587 0.5966 0.2500 0 0 0 0 0 0 0 0 0.0000
19 O2- 0.2146 0.6657 0.0461 0 0 0 0 0 0 0 0 0.0000
20 O2- 0.2146 0.6657 0.4539 0 0 0 0 0 0 0 0 0.0000
21 O2- 0.7146 0.8343 0.5461 0 0 0 0 0 0 0 0 0.0000
22 O2- 0.7146 0.8343 0.9539 0 0 0 0 0 0 0 0 0.0000
23 O2- 0.2587 0.9034 0.7500 0 0 0 0 0 0 0 0 0.0000
24 O2- 0.2929 0.9566 0.2500 0 0 0 0 0 0 0 0 0.0000"""
        self.assertEqual(str(self.zeocssr), expected_string)

    def test_from_file(self):
        filename = os.path.join(PymatgenTest.TEST_FILES_DIR, "EDI_oxistate_decorated.cssr")
        zeocssr = ZeoCssr.from_file(filename)
        self.assertIsInstance(zeocssr.structure, Structure)
@unittest.skipIf(not zeo, "zeo not present.")
class ZeoVoronoiXYZTest(unittest.TestCase):
    """Tests for ZeoVoronoiXYZ: XYZ serialization carrying per-site
    voronoi_radius as a trailing column."""

    def setUp(self):
        # A methane-like molecule with a voronoi_radius site property.
        coords = [
            [0.000000, 0.000000, 0.000000],
            [0.000000, 0.000000, 1.089000],
            [1.026719, 0.000000, -0.363000],
            [-0.513360, -0.889165, -0.363000],
            [-0.513360, 0.889165, -0.363000],
        ]
        prop = [0.4, 0.2, 0.2, 0.2, 0.2]
        self.mol = Molecule(["C", "H", "H", "H", "H"], coords, site_properties={"voronoi_radius": prop})
        self.xyz = ZeoVoronoiXYZ(self.mol)

    def test_str(self):
        ans = """5
H4 C1
C 0.000000 0.000000 0.000000 0.400000
H 1.089000 0.000000 0.000000 0.200000
H -0.363000 1.026719 0.000000 0.200000
H -0.363000 -0.513360 -0.889165 0.200000
H -0.363000 -0.513360 0.889165 0.200000"""
        # BUG FIX: the identical assertEqual was previously duplicated on two
        # consecutive lines; one copy is sufficient.
        self.assertEqual(str(self.xyz), ans)

    def test_from_file(self):
        filename = os.path.join(PymatgenTest.TEST_FILES_DIR, "EDI_voro.xyz")
        vor = ZeoVoronoiXYZ.from_file(filename)
        self.assertIsInstance(vor.molecule, Molecule)
@unittest.skipIf(not zeo, "zeo not present.")
class GetVoronoiNodesTest(unittest.TestCase):
    """Smoke test for get_voronoi_nodes with BV-derived ionic radii."""

    def setUp(self):
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "POSCAR")
        p = Poscar.from_file(filepath)
        self.structure = p.structure
        # Build a {species_string: ionic_radius} map from bond-valence
        # oxidation states, as required by get_voronoi_nodes.
        bv = BVAnalyzer()
        valences = bv.get_valences(self.structure)
        el = [site.species_string for site in self.structure.sites]
        valence_dict = dict(zip(el, valences))
        self.rad_dict = {}
        for k, v in valence_dict.items():
            self.rad_dict[k] = float(Species(k, v).ionic_radius)
        # Sanity check: one radius entry per element in the composition.
        assert len(self.rad_dict) == len(self.structure.composition)

    def test_get_voronoi_nodes(self):
        (
            vor_node_struct,
            vor_edge_center_struct,
            vor_face_center_struct,
        ) = get_voronoi_nodes(self.structure, self.rad_dict)
        self.assertIsInstance(vor_node_struct, Structure)
        self.assertIsInstance(vor_edge_center_struct, Structure)
        self.assertIsInstance(vor_face_center_struct, Structure)
        # Debug output only; site counts can vary between zeo++ versions.
        print(len(vor_node_struct.sites))
        print(len(vor_face_center_struct.sites))
@unittest.skip("file free_sph.cif not present")
class GetFreeSphereParamsTest(unittest.TestCase):
    """Free-sphere parameter extraction (skipped: fixture CIF is missing)."""

    def setUp(self):
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "free_sph.cif")
        self.structure = Structure.from_file(filepath)
        # Hand-picked per-element radii for the fixture composition.
        self.rad_dict = {
            "Ge": 0.67,
            "P": 0.52,
            "S": 1.7,
            "La": 1.17,
            "Zr": 0.86,
            "O": 1.26,
        }

    def test_get_free_sphere_params(self):
        free_sph_params = get_free_sphere_params(self.structure, rad_dict=self.rad_dict)
        # Zeo results can change in future. Hence loose comparison
        self.assertAlmostEqual(free_sph_params["inc_sph_max_dia"], 2.58251, places=1)
        self.assertAlmostEqual(free_sph_params["free_sph_max_dia"], 1.29452, places=1)
        self.assertAlmostEqual(free_sph_params["inc_sph_along_free_sph_path_max_dia"], 2.58251, places=1)
@unittest.skipIf(not zeo, "zeo not present.")
class GetHighAccuracyVoronoiNodesTest(unittest.TestCase):
    """Smoke test for the high-accuracy variant of the Voronoi node finder."""

    def setUp(self):
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "POSCAR")
        p = Poscar.from_file(filepath)
        self.structure = p.structure
        # Same BV-derived radius dictionary construction as GetVoronoiNodesTest.
        bv = BVAnalyzer()
        valences = bv.get_valences(self.structure)
        el = [site.species_string for site in self.structure.sites]
        valence_dict = dict(zip(el, valences))
        self.rad_dict = {}
        for k, v in valence_dict.items():
            self.rad_dict[k] = float(Species(k, v).ionic_radius)
        assert len(self.rad_dict) == len(self.structure.composition)

    def test_get_voronoi_nodes(self):
        # Unlike get_voronoi_nodes, the high-accuracy API returns only the
        # node structure (edge/face variants are kept commented out).
        # vor_node_struct, vor_ec_struct, vor_fc_struct = \
        #     get_high_accuracy_voronoi_nodes(self.structure, self.rad_dict)
        vor_node_struct = get_high_accuracy_voronoi_nodes(self.structure, self.rad_dict)
        self.assertIsInstance(vor_node_struct, Structure)
        # self.assertIsInstance(vor_ec_struct, Structure)
        # self.assertIsInstance(vor_fc_struct, Structure)
        print(len(vor_node_struct.sites))
        # print(len(vor_fc_struct.sites))
@unittest.skipIf(not zeo, "zeo not present.")
class GetVoronoiNodesMultiOxiTest(unittest.TestCase):
    """get_voronoi_nodes on an oxidation-state-decorated structure, whose
    species strings therefore carry charge suffixes."""

    def setUp(self):
        filepath = os.path.join(PymatgenTest.TEST_FILES_DIR, "POSCAR")
        p = Poscar.from_file(filepath)
        self.structure = p.structure
        bv = BVAnalyzer()
        self.structure = bv.get_oxi_state_decorated_structure(self.structure)
        valences = bv.get_valences(self.structure)
        # Per-site radii, then zipped back into a per-species dictionary.
        radii = []
        for i in range(len(valences)):
            el = self.structure.sites[i].specie.symbol
            radius = Species(el, valences[i]).ionic_radius
            radii.append(radius)
        el = [site.species_string for site in self.structure.sites]
        self.rad_dict = dict(zip(el, radii))
        # Debug output of the radius table (radii may be complex-typed).
        for el in self.rad_dict.keys():
            print((el, self.rad_dict[el].real))

    def test_get_voronoi_nodes(self):
        (
            vor_node_struct,
            vor_edge_center_struct,
            vor_face_center_struct,
        ) = get_voronoi_nodes(self.structure, self.rad_dict)
        self.assertIsInstance(vor_node_struct, Structure)
        self.assertIsInstance(vor_edge_center_struct, Structure)
        self.assertIsInstance(vor_face_center_struct, Structure)
@unittest.skip("The function is deprecated")
class GetVoidVolumeSurfaceTest(unittest.TestCase):
    """Exercises the deprecated get_void_volume_surfarea helper on a Li2O
    structure with one vacancy (the class is skipped as a whole)."""

    def setUp(self):
        filepath1 = os.path.join(PymatgenTest.TEST_FILES_DIR, "Li2O.cif")
        p = CifParser(filepath1).get_structures(False)[0]
        bv = BVAnalyzer()
        valences = bv.get_valences(p)
        el = [site.species_string for site in p.sites]
        val_dict = dict(zip(el, valences))
        # Strip digits and charge signs (e.g. 'Li1+' -> 'Li') before looking
        # up ionic radii per bare element symbol.
        self._radii = {}
        for k, v in val_dict.items():
            k1 = re.sub(r"[1-9,+,\-]", "", k)
            self._radii[k1] = float(Species(k1, v).ionic_radius)
        # Remove one site to create the vacancy whose void is measured.
        p.remove(0)
        self._vac_struct = p

    def test_void_volume_surface_area(self):
        # BUG FIX: a leftover stub `pass` used to precede this code; the live
        # statements after it still executed, so it only obscured the intent.
        vol, sa = get_void_volume_surfarea(self._vac_struct, self._radii)
        # print "vol: ", vol, "sa: ", sa
        self.assertIsInstance(vol, float)
        self.assertIsInstance(sa, float)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| mit |
strets123/rdkit | rdkit/Chem/test_list.py | 1 | 2294 |
# Master list of (interpreter, script, kwargs) entries describing this
# package's test scripts. Entries carrying {'dir': ...} recurse into the
# named subpackage's own test_list.py.
tests=[
  ("python","UnitTestChem.py",{}),
  ("python","UnitTestChemv2.py",{}),
  ("python","UnitTestChemAtom.py",{}),
  ("python","UnitTestChemBond.py",{}),
  ("python","UnitTestChemSmarts.py",{}),
  ("python","UnitTestFragmentDescriptors.py",{}),
  ("python","UnitTestGraphDescriptors.2.py",{}),
  ("python","UnitTestLipinski.py",{}),
  ("python","MCS.py",{}),
  ("python","UnitTestMCS.py",{}),
  ("python","UnitTestOldBugs.py",{}),
  ("python","UnitTestSATIS.py",{}),
  ("python","UnitTestSmiles.py",{}),
  ("python","UnitTestSuppliers.py",{}),
  ("python","UnitTestSurf.py",{}),
  ("python","UnitTestMol3D.py",{}),
  ("python","FragmentMatcher.py",{}),
  ("python","MACCSkeys.py",{}),
  ("python","Descriptors.py",{}),
  ("python","UnitTestCatalog.py",{}),
  ("python","TemplateAlign.py",{}),
  ("python","Recap.py",{}),
  ("python","BRICS.py",{}),
  ("python","UnitTestDescriptors.py",{}),
  ("python","AllChem.py",{}),
  ("python","PropertyMol.py",{}),
  ("python","UnitTestInchi.py",{}),
  ("python","SaltRemover.py",{}),
  ("python","UnitTestFunctionalGroups.py",{}),
  ("python","UnitTestCrippen.py",{}),
  ("python","__init__.py",{}),
  ("python","PandasTools.py",{}),
  ("python","test_list.py",{'dir':'AtomPairs'}),
  ("python","test_list.py",{'dir':'ChemUtils'}),
  ("python","test_list.py",{'dir':'EState'}),
  ("python","test_list.py",{'dir':'FeatMaps'}),
  ("python","test_list.py",{'dir':'Fingerprints'}),
  ("python","test_list.py",{'dir':'Pharm2D'}),
  ("python","test_list.py",{'dir':'Pharm3D'}),
  #("python","test_list.py",{'dir':'Subshape'}),
  ("python","test_list.py",{'dir':'Suppliers'}),
  ("python","test_list.py",{'dir':'Scaffolds'}),
  ("python","test_list.py",{'dir':'Draw'}),
  ("python","test_list.py",{'dir':'Fraggle'}),
  ("python","test_list.py",{'dir':'SimpleEnum'}),
  ]

# only attempt the MolKey tests if we have the pre-reqs:
try:
  from rdkit.Chem.MolKey import MolKey
  tests.append(("python","test_list.py",{'dir':'MolKey'}))
except ImportError:
  pass

# Long-running variants, executed only when explicitly requested.
longTests=[
  ("python","UnitTestArom.py",{}),
  ("python","UnitTestGraphDescriptors.2.py -l",{}),
  ("python","UnitTestSurf.py -l",{}),
  ]

if __name__=='__main__':
  import sys
  from rdkit import TestRunner
  # Exit status is the number of failing scripts (0 means success).
  failed,tests = TestRunner.RunScript('test_list.py',0,1)
  sys.exit(len(failed))
| bsd-3-clause |
suneeth51/neutron | neutron/tests/unit/plugins/oneconvergence/test_nvsd_plugin.py | 28 | 4590 | # Copyright 2014 OneConvergence, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Test Library for OneConvergencePlugin."""
import uuid
import mock
from oslo_config import cfg
from neutron import context
from neutron.extensions import portbindings
from neutron import manager
from neutron.plugins.oneconvergence import plugin as nvsd_plugin
from neutron.tests.unit import _test_extension_portbindings as test_bindings
from neutron.tests.unit.db import test_db_base_plugin_v2 as test_plugin
from neutron.tests.unit.extensions import test_l3
PLUGIN_NAME = 'neutron.plugins.oneconvergence.plugin.OneConvergencePluginV2'
class OneConvergencePluginV2TestCase(test_plugin.NeutronDbPluginV2TestCase):
    """Base test case that loads the NVSD plugin with its REST client mocked."""

    _plugin_name = PLUGIN_NAME

    def setUp(self):
        # The NVSD backend does not support IPv6; skip any v6 test variants.
        if 'v6' in self._testMethodName:
            self.skipTest("NVSD Plugin does not support IPV6.")

        def mocked_oneconvergence_init(self):
            # Replacement for the plugin's backend initializer: installs a
            # mock REST library whose create_network returns a fresh fake id.
            def side_effect(*args, **kwargs):
                return {'id': str(uuid.uuid4())}
            self.nvsdlib = mock.Mock()
            self.nvsdlib.create_network.side_effect = side_effect

        with mock.patch.object(nvsd_plugin.OneConvergencePluginV2,
                               'oneconvergence_init',
                               new=mocked_oneconvergence_init):
            super(OneConvergencePluginV2TestCase,
                  self).setUp(self._plugin_name)
class TestOneConvergencePluginNetworksV2(test_plugin.TestNetworksV2,
                                         OneConvergencePluginV2TestCase):
    # Reuses the generic network API test suite against the NVSD plugin.
    pass
class TestOneConvergencePluginSubnetsV2(test_plugin.TestSubnetsV2,
                                        OneConvergencePluginV2TestCase):
    # Reuses the generic subnet API test suite against the NVSD plugin.
    pass
class TestOneConvergencePluginPortsV2(test_plugin.TestPortsV2,
                                      test_bindings.PortBindingsTestCase,
                                      OneConvergencePluginV2TestCase):
    """Port API tests plus checks that the plugin reports the OVS VIF type."""

    VIF_TYPE = portbindings.VIF_TYPE_OVS

    def test_port_vif_details(self):
        plugin = manager.NeutronManager.get_plugin()
        with self.port(name='name') as port1:
            ctx = context.get_admin_context()
            port = plugin.get_port(ctx, port1['port']['id'])
            self.assertEqual(port['binding:vif_type'],
                             portbindings.VIF_TYPE_OVS)

    def test_ports_vif_details(self):
        # Allow both test ports to be created on overlapping subnets.
        cfg.CONF.set_default('allow_overlapping_ips', True)
        plugin = manager.NeutronManager.get_plugin()
        with self.port(), self.port():
            ctx = context.get_admin_context()
            ports = plugin.get_ports(ctx)
            self.assertEqual(len(ports), 2)
            for port in ports:
                self.assertEqual(port['binding:vif_type'],
                                 portbindings.VIF_TYPE_OVS)
class TestOneConvergenceBasicGet(test_plugin.TestBasicGet,
                                 OneConvergencePluginV2TestCase):
    # Reuses the generic resource GET test suite against the NVSD plugin.
    pass
class TestOneConvergenceV2HTTPResponse(test_plugin.TestV2HTTPResponse,
                                       OneConvergencePluginV2TestCase):
    # Reuses the generic HTTP status-code test suite against the NVSD plugin.
    pass
class TestOneConvergenceL3NatTestCase(test_l3.L3NatDBIntTestCase):
    """L3/NAT extension tests against the NVSD plugin with a mocked client."""

    _plugin_name = PLUGIN_NAME

    def setUp(self):
        # The NVSD backend does not support IPv6; skip any v6 test variants.
        if 'v6' in self._testMethodName:
            self.skipTest("NVSD Plugin does not support IPV6.")

        def mocked_oneconvergence_init(self):
            # Same mocked backend initializer as OneConvergencePluginV2TestCase.
            def side_effect(*args, **kwargs):
                return {'id': str(uuid.uuid4())}
            self.nvsdlib = mock.Mock()
            self.nvsdlib.create_network.side_effect = side_effect

        ext_mgr = test_l3.L3TestExtensionManager()
        with mock.patch.object(nvsd_plugin.OneConvergencePluginV2,
                               'oneconvergence_init',
                               new=mocked_oneconvergence_init):
            super(TestOneConvergenceL3NatTestCase,
                  self).setUp(plugin=self._plugin_name, ext_mgr=ext_mgr)

    def test_floatingip_with_invalid_create_port(self):
        self._test_floatingip_with_invalid_create_port(self._plugin_name)
| apache-2.0 |
ridfrustum/lettuce | tests/integration/lib/Django-1.2.5/tests/regressiontests/forms/localflavor/utils.py | 38 | 2068 | from unittest import TestCase
from django.core.exceptions import ValidationError
from django.core.validators import EMPTY_VALUES
class LocalFlavorTestCase(TestCase):
def assertFieldOutput(self, fieldclass, valid, invalid, field_args=[],
field_kwargs={}, empty_value=u''):
"""
Asserts that a field behaves correctly with various inputs.
Args:
fieldclass: the class of the field to be tested.
valid: a dictionary mapping valid inputs to their expected
cleaned values.
invalid: a dictionary mapping invalid inputs to one or more
raised error messages.
field_args: the args passed to instantiate the field
field_kwargs: the kwargs passed to instantiate the field
empty_value: the expected clean output for inputs in EMPTY_VALUES
"""
required = fieldclass(*field_args, **field_kwargs)
optional = fieldclass(*field_args, **dict(field_kwargs, required=False))
# test valid inputs
for input, output in valid.items():
self.assertEqual(required.clean(input), output)
self.assertEqual(optional.clean(input), output)
# test invalid inputs
for input, errors in invalid.items():
try:
required.clean(input)
except ValidationError, e:
self.assertEqual(errors, e.messages)
else:
self.fail()
try:
optional.clean(input)
except ValidationError, e:
self.assertEqual(errors, e.messages)
else:
self.fail()
# test required inputs
error_required = [u'This field is required.']
for val in EMPTY_VALUES:
try:
required.clean(val)
except ValidationError, e:
self.assertEqual(error_required, e.messages)
else:
self.fail()
self.assertEqual(optional.clean(val), empty_value)
| gpl-3.0 |
nkgilley/home-assistant | tests/components/wsdot/test_sensor.py | 7 | 2293 | """The tests for the WSDOT platform."""
from datetime import datetime, timedelta, timezone
import re
import unittest
import requests_mock
import homeassistant.components.wsdot.sensor as wsdot
from homeassistant.components.wsdot.sensor import (
ATTR_DESCRIPTION,
ATTR_TIME_UPDATED,
CONF_API_KEY,
CONF_ID,
CONF_NAME,
CONF_TRAVEL_TIMES,
RESOURCE,
SCAN_INTERVAL,
)
from homeassistant.setup import setup_component
from tests.common import get_test_home_assistant, load_fixture
class TestWSDOT(unittest.TestCase):
    """Test the WSDOT platform."""

    def add_entities(self, new_entities, update_before_add=False):
        """Mock add entities."""
        if update_before_add:
            for entity in new_entities:
                entity.update()
        for entity in new_entities:
            self.entities.append(entity)

    def setUp(self):
        """Initialize values for this testcase class."""
        self.hass = get_test_home_assistant()
        self.config = {
            CONF_API_KEY: "foo",
            SCAN_INTERVAL: timedelta(seconds=120),
            CONF_TRAVEL_TIMES: [{CONF_ID: 96, CONF_NAME: "I90 EB"}],
        }
        self.entities = []
        self.addCleanup(self.tear_down_cleanup)

    def tear_down_cleanup(self):
        """Stop everything that was started."""
        self.hass.stop()

    def test_setup_with_config(self):
        """Test the platform setup with configuration."""
        assert setup_component(self.hass, "sensor", {"wsdot": self.config})

    @requests_mock.Mocker()
    def test_setup(self, mock_req):
        """Test for operational WSDOT sensor with proper attributes."""
        # NOTE(review): RESOURCE + "*" makes the regex's final character
        # optional rather than doing a prefix match; presumably a prefix
        # match was intended -- verify before relying on this pattern.
        uri = re.compile(RESOURCE + "*")
        mock_req.get(uri, text=load_fixture("wsdot.json"))
        wsdot.setup_platform(self.hass, self.config, self.add_entities)
        assert len(self.entities) == 1
        sensor = self.entities[0]
        assert sensor.name == "I90 EB"
        assert sensor.state == 11
        assert (
            sensor.device_state_attributes[ATTR_DESCRIPTION]
            == "Downtown Seattle to Downtown Bellevue via I-90"
        )
        # Timestamp from the fixture, in the US/Pacific (-8h) offset.
        assert sensor.device_state_attributes[ATTR_TIME_UPDATED] == datetime(
            2017, 1, 21, 15, 10, tzinfo=timezone(timedelta(hours=-8))
        )
| apache-2.0 |
ppanczyk/ansible | lib/ansible/modules/cloud/smartos/imgadm.py | 21 | 9835 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, 2017 Jasper Lievisse Adriaanse <j@jasper.la>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: imgadm
short_description: Manage SmartOS images
description:
- Manage SmartOS virtual machine images through imgadm(1M)
version_added: "2.3"
author: Jasper Lievisse Adriaanse (@jasperla)
options:
force:
required: false
choices: [ yes, no ]
description:
- Force a given operation (where supported by imgadm(1M)).
pool:
required: false
default: zones
description:
- zpool to import to or delete images from.
source:
required: false
description:
- URI for the image source.
state:
required: true
choices: [ present, absent, deleted, imported, updated, vacuumed ]
description:
- State the object operated on should be in. C(imported) is an alias for
for C(present) and C(deleted) for C(absent). When set to C(vacuumed)
and C(uuid) to C(*), it will remove all unused images.
type:
required: false
choices: [ imgapi, docker, dsapi ]
default: imgapi
description:
- Type for image sources.
uuid:
required: false
description:
- Image UUID. Can either be a full UUID or C(*) for all images.
requirements:
- python >= 2.6
'''
EXAMPLES = '''
- name: Import an image
imgadm:
uuid: '70e3ae72-96b6-11e6-9056-9737fd4d0764'
state: imported
- name: Delete an image
imgadm:
uuid: '70e3ae72-96b6-11e6-9056-9737fd4d0764'
state: deleted
- name: Update all images
imgadm:
uuid: '*'
state: updated
- name: Update a single image
imgadm:
uuid: '70e3ae72-96b6-11e6-9056-9737fd4d0764'
state: updated
- name: Add a source
imgadm:
source: 'https://datasets.project-fifo.net'
state: present
- name: Add a Docker source
imgadm:
source: 'https://docker.io'
type: docker
state: present
- name: Remove a source
imgadm:
source: 'https://docker.io'
state: absent
'''
RETURN = '''
source:
description: Source that is managed.
returned: When not managing an image.
type: string
sample: https://datasets.project-fifo.net
uuid:
description: UUID for an image operated on.
returned: When not managing an image source.
type: string
sample: 70e3ae72-96b6-11e6-9056-9737fd4d0764
state:
description: State of the target, after execution.
returned: success
type: string
sample: 'present'
'''
import re
from ansible.module_utils.basic import AnsibleModule
# Shortcut for the imgadm(1M) command. While imgadm(1M) supports a
# -E option to return any errors in JSON, the generated JSON does not play well
# with the JSON parsers of Python. The returned message contains '\n' as part of
# the stacktrace, which breaks the parsers.
class Imgadm(object):
    """Thin wrapper around the SmartOS imgadm(1M) command.
    Tracks whether anything was modified in ``self.changed``; fatal
    problems are reported through ``module.fail_json()``.
    """
    def __init__(self, module):
        self.module = module
        self.params = module.params
        # Resolve the imgadm binary up front; fail early if it is missing.
        self.cmd = module.get_bin_path('imgadm', required=True)
        self.changed = False
        self.uuid = module.params['uuid']
        # Since there are a number of (natural) aliases, prevent having to look
        # them up everytime we operate on `state`.
        if self.params['state'] in ['present', 'imported', 'updated']:
            self.present = True
        else:
            self.present = False
        # Perform basic UUID validation upfront.
        if self.uuid and self.uuid != '*':
            if not re.match('^[0-9a-f]{8}-([0-9a-f]{4}-){3}[0-9a-f]{12}$', self.uuid, re.IGNORECASE):
                module.fail_json(msg='Provided value for uuid option is not a valid UUID.')
    # Helper method to massage stderr
    def errmsg(self, stderr):
        """Extract the human-readable reason from imgadm's stderr output."""
        match = re.match('^imgadm .*?: error \(\w+\): (.*): .*', stderr)
        if match:
            return match.groups()[0]
        else:
            return 'Unexpected failure'
    def update_images(self):
        """Run 'imgadm update', for all images (uuid '*') or a single one."""
        if self.uuid == '*':
            cmd = '{0} update'.format(self.cmd)
        else:
            cmd = '{0} update {1}'.format(self.cmd, self.uuid)
        (rc, stdout, stderr) = self.module.run_command(cmd)
        if rc != 0:
            self.module.fail_json(msg='Failed to update images: {0}'.format(self.errmsg(stderr)))
        # There is no feedback from imgadm(1M) to determine if anything
        # was actually changed. So treat this as an 'always-changes' operation.
        # Note that 'imgadm -v' produces unparseable JSON...
        self.changed = True
    def manage_sources(self):
        """Add (when state implies presence) or remove an image source.
        The changed flag is derived by matching imgadm's stdout messages.
        """
        force = self.params['force']
        source = self.params['source']
        imgtype = self.params['type']
        cmd = '{0} sources'.format(self.cmd)
        if force:
            cmd += ' -f'
        if self.present:
            cmd = '{0} -a {1} -t {2}'.format(cmd, source, imgtype)
            (rc, stdout, stderr) = self.module.run_command(cmd)
            if rc != 0:
                self.module.fail_json(msg='Failed to add source: {0}'.format(self.errmsg(stderr)))
            # Check the various responses.
            # Note that trying to add a source with the wrong type is handled
            # above as it results in a non-zero status.
            regex = 'Already have "{0}" image source "{1}", no change'.format(imgtype, source)
            if re.match(regex, stdout):
                self.changed = False
            regex = 'Added "%s" image source "%s"' % (imgtype, source)
            if re.match(regex, stdout):
                self.changed = True
        else:
            # Type is ignored by imgadm(1M) here
            cmd += ' -d %s' % source
            (rc, stdout, stderr) = self.module.run_command(cmd)
            if rc != 0:
                self.module.fail_json(msg='Failed to remove source: {0}'.format(self.errmsg(stderr)))
            regex = 'Do not have image source "%s", no change' % source
            if re.match(regex, stdout):
                self.changed = False
            regex = 'Deleted ".*" image source "%s"' % source
            if re.match(regex, stdout):
                self.changed = True
    def manage_images(self):
        """Import, delete or vacuum images in the configured zpool."""
        pool = self.params['pool']
        state = self.params['state']
        if state == 'vacuumed':
            # Unconditionally pass '--force', otherwise we're prompted with 'y/N'
            cmd = '{0} vacuum -f'.format(self.cmd)
            (rc, stdout, stderr) = self.module.run_command(cmd)
            if rc != 0:
                self.module.fail_json(msg='Failed to vacuum images: {0}'.format(self.errmsg(stderr)))
            else:
                # Empty output means nothing was removed.
                if stdout == '':
                    self.changed = False
                else:
                    self.changed = True
        # NOTE(review): when state == 'vacuumed', self.present is False, so
        # control also reaches the delete branch below (with uuid '*', which
        # main() permits for 'vacuumed') -- confirm an 'elif' was not intended.
        if self.present:
            cmd = '{0} import -P {1} -q {2}'.format(self.cmd, pool, self.uuid)
            (rc, stdout, stderr) = self.module.run_command(cmd)
            if rc != 0:
                self.module.fail_json(msg='Failed to import image: {0}'.format(self.errmsg(stderr)))
            regex = 'Image {0} \(.*\) is already installed, skipping'.format(self.uuid)
            if re.match(regex, stdout):
                self.changed = False
            regex = '.*ActiveImageNotFound.*'
            if re.match(regex, stderr):
                self.changed = False
            regex = 'Imported image {0}.*'.format(self.uuid)
            # NOTE(review): splitlines()[-1] raises IndexError when stdout is
            # empty -- confirm imgadm always prints at least one line on a
            # successful import.
            if re.match(regex, stdout.splitlines()[-1]):
                self.changed = True
        else:
            cmd = '{0} delete -P {1} {2}'.format(self.cmd, pool, self.uuid)
            (rc, stdout, stderr) = self.module.run_command(cmd)
            regex = '.*ImageNotInstalled.*'
            if re.match(regex, stderr):
                # Even if the 'rc' was non-zero (3), we handled the situation
                # in order to determine if there was a change.
                self.changed = False
            regex = 'Deleted image {0}'.format(self.uuid)
            if re.match(regex, stdout):
                self.changed = True
def main():
    """Module entry point: parse arguments, dispatch to source or image
    management, and report the result back to Ansible."""
    module = AnsibleModule(
        argument_spec=dict(
            force=dict(default=None, type='bool'),
            pool=dict(default='zones'),
            # NOTE(review): 'default=None' is redundant when required=True.
            state=dict(default=None, required=True, choices=['present', 'absent', 'deleted', 'imported', 'updated', 'vacuumed']),
            type=dict(default='imgapi', choices=['imgapi', 'docker', 'dsapi']),
            uuid=dict(default=None)
        ),
        # This module relies largely on imgadm(1M) to enforce idempotency, which does not
        # provide a "noop" (or equivalent) mode to do a dry-run.
        supports_check_mode=False,
    )
    imgadm = Imgadm(module)
    uuid = module.params['uuid']
    source = module.params['source']
    state = module.params['state']
    result = {'state': state}
    # Either manage sources or images.
    if source:
        result['source'] = source
        imgadm.manage_sources()
    else:
        result['uuid'] = uuid
        if state == 'updated':
            imgadm.update_images()
        else:
            # Make sure operate on a single image for the following actions
            if (uuid == '*') and (state != 'vacuumed'):
                module.fail_json(msg='Can only specify uuid as "*" when updating image(s)')
            imgadm.manage_images()
    result['changed'] = imgadm.changed
    module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
chugunovyar/factoryForBuild | env/lib/python2.7/site-packages/numpy/core/getlimits.py | 35 | 9904 | """Machine limits for Float32 and Float64 and (long double) if available...
"""
from __future__ import division, absolute_import, print_function
__all__ = ['finfo', 'iinfo']
from .machar import MachAr
from . import numeric
from . import numerictypes as ntypes
from .numeric import array
def _frz(a):
"""fix rank-0 --> rank-1"""
if a.ndim == 0:
a.shape = (1,)
return a
# Map each complex scalar type to the real (floating) type of matching
# precision; finfo uses this so complex dtypes report component-wise limits.
_convert_to_float = {
    ntypes.csingle: ntypes.single,
    ntypes.complex_: ntypes.float_,
    ntypes.clongfloat: ntypes.longfloat
    }
class finfo(object):
    """
    finfo(dtype)
    Machine limits for floating point types.
    Attributes
    ----------
    bits : int
        The number of bits occupied by the type.
    eps : float
        The smallest representable positive number such that
        ``1.0 + eps != 1.0``.  Type of `eps` is an appropriate floating
        point type.
    epsneg : floating point number of the appropriate type
        The smallest representable positive number such that
        ``1.0 - epsneg != 1.0``.
    iexp : int
        The number of bits in the exponent portion of the floating point
        representation.
    machar : MachAr
        The object which calculated these parameters and holds more
        detailed information.
    machep : int
        The exponent that yields `eps`.
    max : floating point number of the appropriate type
        The largest representable number.
    maxexp : int
        The smallest positive power of the base (2) that causes overflow.
    min : floating point number of the appropriate type
        The smallest representable number, typically ``-max``.
    minexp : int
        The most negative power of the base (2) consistent with there
        being no leading 0's in the mantissa.
    negep : int
        The exponent that yields `epsneg`.
    nexp : int
        The number of bits in the exponent including its sign and bias.
    nmant : int
        The number of bits in the mantissa.
    precision : int
        The approximate number of decimal digits to which this kind of
        float is precise.
    resolution : floating point number of the appropriate type
        The approximate decimal resolution of this type, i.e.,
        ``10**-precision``.
    tiny : float
        The smallest positive usable number.  Type of `tiny` is an
        appropriate floating point type.
    Parameters
    ----------
    dtype : float, dtype, or instance
        Kind of floating point data-type about which to get information.
    See Also
    --------
    MachAr : The implementation of the tests that produce this information.
    iinfo : The equivalent for integer data types.
    Notes
    -----
    For developers of NumPy: do not instantiate this at the module level.
    The initial calculation of these parameters is expensive and negatively
    impacts import times.  These objects are cached, so calling ``finfo()``
    repeatedly inside your functions is not a problem.
    """
    # Class-level cache; instances are shared across every dtype alias that
    # resolves to the same canonical floating type.
    _finfo_cache = {}
    def __new__(cls, dtype):
        try:
            dtype = numeric.dtype(dtype)
        except TypeError:
            # In case a float instance was given
            dtype = numeric.dtype(type(dtype))
        obj = cls._finfo_cache.get(dtype, None)
        if obj is not None:
            return obj
        # The cache may be keyed under the user-supplied dtype, its canonical
        # scalar type, and (for complex input) the matching float type --
        # hence the repeated cache lookups after each normalization step.
        dtypes = [dtype]
        newdtype = numeric.obj2sctype(dtype)
        if newdtype is not dtype:
            dtypes.append(newdtype)
            dtype = newdtype
        if not issubclass(dtype, numeric.inexact):
            raise ValueError("data type %r not inexact" % (dtype))
        obj = cls._finfo_cache.get(dtype, None)
        if obj is not None:
            return obj
        if not issubclass(dtype, numeric.floating):
            # Complex type: report the limits of its real/imaginary component.
            newdtype = _convert_to_float[dtype]
            if newdtype is not dtype:
                dtypes.append(newdtype)
                dtype = newdtype
        obj = cls._finfo_cache.get(dtype, None)
        if obj is not None:
            return obj
        obj = object.__new__(cls)._init(dtype)
        # Register the instance under every dtype alias seen along the way.
        for dt in dtypes:
            cls._finfo_cache[dt] = obj
        return obj
    def _init(self, dtype):
        """Probe `dtype` with MachAr and copy its parameters onto self."""
        self.dtype = numeric.dtype(dtype)
        if dtype is ntypes.double:
            itype = ntypes.int64
            fmt = '%24.16e'
            precname = 'double'
        elif dtype is ntypes.single:
            itype = ntypes.int32
            fmt = '%15.7e'
            precname = 'single'
        elif dtype is ntypes.longdouble:
            itype = ntypes.longlong
            fmt = '%s'
            precname = 'long double'
        elif dtype is ntypes.half:
            itype = ntypes.int16
            fmt = '%12.5e'
            precname = 'half'
        else:
            raise ValueError(repr(dtype))
        # MachAr determines the parameters empirically; the lambdas adapt
        # scalars to/from 1-element arrays of the probed dtype.
        machar = MachAr(lambda v:array([v], dtype),
                        lambda v:_frz(v.astype(itype))[0],
                        lambda v:array(_frz(v)[0], dtype),
                        lambda v: fmt % array(_frz(v)[0], dtype),
                        'numpy %s precision floating point number' % precname)
        for word in ['precision', 'iexp',
                     'maxexp', 'minexp', 'negep',
                     'machep']:
            setattr(self, word, getattr(machar, word))
        for word in ['tiny', 'resolution', 'epsneg']:
            setattr(self, word, getattr(machar, word).flat[0])
        self.bits = self.dtype.itemsize * 8
        self.max = machar.huge.flat[0]
        self.min = -self.max
        self.eps = machar.eps.flat[0]
        self.nexp = machar.iexp
        self.nmant = machar.it
        self.machar = machar
        # Pre-formatted strings used by __str__/__repr__.
        self._str_tiny = machar._str_xmin.strip()
        self._str_max = machar._str_xmax.strip()
        self._str_epsneg = machar._str_epsneg.strip()
        self._str_eps = machar._str_eps.strip()
        self._str_resolution = machar._str_resolution.strip()
        return self
    def __str__(self):
        fmt = (
            'Machine parameters for %(dtype)s\n'
            '---------------------------------------------------------------\n'
            'precision = %(precision)3s resolution = %(_str_resolution)s\n'
            'machep = %(machep)6s eps = %(_str_eps)s\n'
            'negep = %(negep)6s epsneg = %(_str_epsneg)s\n'
            'minexp = %(minexp)6s tiny = %(_str_tiny)s\n'
            'maxexp = %(maxexp)6s max = %(_str_max)s\n'
            'nexp = %(nexp)6s min = -max\n'
            '---------------------------------------------------------------\n'
            )
        return fmt % self.__dict__
    def __repr__(self):
        c = self.__class__.__name__
        d = self.__dict__.copy()
        d['klass'] = c
        return (("%(klass)s(resolution=%(resolution)s, min=-%(_str_max)s,"
                 " max=%(_str_max)s, dtype=%(dtype)s)") % d)
class iinfo(object):
    """
    iinfo(type)
    Machine limits for integer types.
    Attributes
    ----------
    bits : int
        The number of bits occupied by the type.
    min : int
        The smallest integer expressible by the type.
    max : int
        The largest integer expressible by the type.
    Parameters
    ----------
    int_type : integer type, dtype, or instance
        The kind of integer data type to get information about.
    See Also
    --------
    finfo : The equivalent for floating point data types.
    Examples
    --------
    With types:
    >>> ii16 = np.iinfo(np.int16)
    >>> ii16.min
    -32768
    >>> ii16.max
    32767
    >>> ii32 = np.iinfo(np.int32)
    >>> ii32.min
    -2147483648
    >>> ii32.max
    2147483647
    With instances:
    >>> ii32 = np.iinfo(np.int32(10))
    >>> ii32.min
    -2147483648
    >>> ii32.max
    2147483647
    """
    # Class-level caches of computed bounds, keyed by e.g. 'i32' or 'u8'.
    _min_vals = {}
    _max_vals = {}
    def __init__(self, int_type):
        try:
            self.dtype = numeric.dtype(int_type)
        except TypeError:
            # An instance (rather than a type) was given.
            self.dtype = numeric.dtype(type(int_type))
        self.kind = self.dtype.kind
        self.bits = self.dtype.itemsize * 8
        self.key = "%s%d" % (self.kind, self.bits)
        if self.kind not in 'iu':
            raise ValueError("Invalid integer data type.")
    def min(self):
        """Minimum value of given dtype."""
        if self.kind == 'u':
            return 0
        else:
            try:
                val = iinfo._min_vals[self.key]
            except KeyError:
                val = int(-(1 << (self.bits-1)))
                iinfo._min_vals[self.key] = val
            return val
    # Pre-decorator property idiom: this assignment must follow 'def min'.
    min = property(min)
    def max(self):
        """Maximum value of given dtype."""
        try:
            val = iinfo._max_vals[self.key]
        except KeyError:
            if self.kind == 'u':
                val = int((1 << self.bits) - 1)
            else:
                val = int((1 << (self.bits-1)) - 1)
            iinfo._max_vals[self.key] = val
        return val
    # Pre-decorator property idiom: this assignment must follow 'def max'.
    max = property(max)
    def __str__(self):
        """String representation."""
        fmt = (
            'Machine parameters for %(dtype)s\n'
            '---------------------------------------------------------------\n'
            'min = %(min)s\n'
            'max = %(max)s\n'
            '---------------------------------------------------------------\n'
            )
        return fmt % {'dtype': self.dtype, 'min': self.min, 'max': self.max}
    def __repr__(self):
        return "%s(min=%s, max=%s, dtype=%s)" % (self.__class__.__name__,
                                                 self.min, self.max, self.dtype)
if __name__ == '__main__':
    # Smoke test: print eps/tiny for a few precisions when run as a script.
    # NOTE(review): ntypes.float / ntypes.longfloat are legacy aliases --
    # confirm they still exist in the numerictypes module in use.
    f = finfo(ntypes.single)
    print('single epsilon:', f.eps)
    print('single tiny:', f.tiny)
    f = finfo(ntypes.float)
    print('float epsilon:', f.eps)
    print('float tiny:', f.tiny)
    f = finfo(ntypes.longfloat)
    print('longfloat epsilon:', f.eps)
    print('longfloat tiny:', f.tiny)
| gpl-3.0 |
davidecaminati/Handcart-lift-rotary | Python/digits.py | 1 | 5470 | #!/usr/bin/env python
'''
SVM and KNearest digit recognition.
Sample loads a dataset of handwritten digits from 'digits.png'.
Then it trains a SVM and KNearest classifiers on it and evaluates
their accuracy.
Following preprocessing is applied to the dataset:
- Moment-based image deskew (see deskew())
- Digit images are split into 4 10x10 cells and 16-bin
histogram of oriented gradients is computed for each
cell
- Transform histograms to space with Hellinger metric (see [1] (RootSIFT))
[1] R. Arandjelovic, A. Zisserman
"Three things everyone should know to improve object retrieval"
http://www.robots.ox.ac.uk/~vgg/publications/2012/Arandjelovic12/arandjelovic12.pdf
Usage:
digits.py
'''
import numpy as np
import cv2
from multiprocessing.pool import ThreadPool
from common import clock, mosaic
from numpy.linalg import norm
#SZ = 20 # size of each digit is SZ x SZ
SZ = 100 # size of each digit is SZ x SZ
CLASS_N = 3
#DIGITS_FN = 'data/digits.png'
DIGITS_FN = 'out.png'
def split2d(img, cell_size, flatten=True):
    """Cut *img* into a grid of cells of size *cell_size* = (sx, sy).

    Returns a 4-D array indexed (row, col, y, x); when *flatten* is true,
    all cells are stacked along the first axis into a 3-D array instead.
    """
    height, width = img.shape[:2]
    cell_w, cell_h = cell_size
    grid = np.array([np.hsplit(band, width // cell_w)
                     for band in np.vsplit(img, height // cell_h)])
    if flatten:
        return grid.reshape(-1, cell_h, cell_w)
    return grid
def load_digits(fn):
    """Load the digit mosaic image *fn* (grayscale) and split it into
    SZ x SZ cells; labels assume CLASS_N equally sized, consecutive groups.
    Returns (digits, labels).
    """
    print 'loading "%s" ...' % fn
    digits_img = cv2.imread(fn, 0)
    digits = split2d(digits_img, (SZ, SZ))
    labels = np.repeat(np.arange(CLASS_N), len(digits)/CLASS_N)
    return digits, labels
def deskew(img):
    """Remove the slant of a digit image with an affine shear.

    The skew is estimated from second-order image moments; images with
    (near) zero vertical variance are returned as an unmodified copy.
    """
    moments = cv2.moments(img)
    if abs(moments['mu02']) < 1e-2:
        return img.copy()
    skew = moments['mu11'] / moments['mu02']
    shear = np.float32([[1, skew, -0.5 * SZ * skew], [0, 1, 0]])
    return cv2.warpAffine(img, shear, (SZ, SZ),
                          flags=cv2.WARP_INVERSE_MAP | cv2.INTER_LINEAR)
class StatModel(object):
    """Base class adding file (de)serialisation to OpenCV statistical models."""
    def load(self, fn):
        # Restore a previously trained model from file `fn`.
        self.model.load(fn)
    def save(self, fn):
        # Persist the trained model to file `fn`.
        self.model.save(fn)
class KNearest(StatModel):
    """k-nearest-neighbours classifier wrapping cv2.KNearest."""
    #def __init__(self, k = 3):
    def __init__(self, k = 1):
        self.k = k
        self.model = cv2.KNearest()
    def train(self, samples, responses):
        # Re-create the model so repeated training starts from scratch.
        self.model = cv2.KNearest()
        self.model.train(samples, responses)
    def predict(self, samples):
        # find_nearest returns (retval, results, neighbours, distances);
        # only the per-sample results are needed.
        retval, results, neigh_resp, dists = self.model.find_nearest(samples, self.k)
        return results.ravel()
class SVM(StatModel):
    """RBF-kernel C-SVC classifier wrapping cv2.SVM."""
    def __init__(self, C = 1, gamma = 0.5):
        self.params = dict( kernel_type = cv2.SVM_RBF,
                            svm_type = cv2.SVM_C_SVC,
                            C = C,
                            gamma = gamma )
        self.model = cv2.SVM()
    def train(self, samples, responses):
        # Re-create the model so repeated training starts from scratch.
        self.model = cv2.SVM()
        self.model.train(samples, responses, params = self.params)
    def predict(self, samples):
        return self.model.predict_all(samples).ravel()
def evaluate_model(model, digits, samples, labels):
    """Print *model*'s error rate and confusion matrix on the given samples
    and return a mosaic visualisation (misclassified digits tinted red)."""
    resp = model.predict(samples)
    err = (labels != resp).mean()
    print 'error: %.2f %%' % (err*100)
    #confusion = np.zeros((10, 10), np.int32)
    confusion = np.zeros((100, 100), np.int32)
    # NOTE(review): labels only range over CLASS_N (= 3) classes, so a
    # 100x100 confusion matrix is mostly empty -- presumably CLASS_N x
    # CLASS_N suffices; confirm before shrinking.
    for i, j in zip(labels, resp):
        confusion[i, j] += 1
    print 'confusion matrix:'
    print confusion
    print
    vis = []
    for img, flag in zip(digits, resp == labels):
        img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR)
        if not flag:
            # Zero the blue and green channels to mark errors in red.
            img[...,:2] = 0
        vis.append(img)
    return mosaic(20, vis)
def preprocess_simple(digits):
    """Flatten each digit image into a float row vector scaled to [0, 1]."""
    scaled = np.float32(digits) / 255.0
    return scaled.reshape(-1, SZ*SZ)
def preprocess_hog(digits):
    """Compute a 64-dimensional HOG descriptor (4 cells x 16 orientation
    bins, Hellinger-normalised) for every digit image."""
    samples = []
    for img in digits:
        gx = cv2.Sobel(img, cv2.CV_32F, 1, 0)
        gy = cv2.Sobel(img, cv2.CV_32F, 0, 1)
        mag, ang = cv2.cartToPolar(gx, gy)
        bin_n = 16
        # Quantise gradient direction into bin_n orientation bins.
        bin = np.int32(bin_n*ang/(2*np.pi))
        # NOTE(review): with SZ = 100, splitting at index 100 puts the whole
        # image in the first cell and leaves the other three empty; the
        # original 20x20 version split at SZ/2. Presumably 50 was intended
        # here -- confirm before changing.
        bin_cells = bin[:100,:100], bin[100:,:100], bin[:100,100:], bin[100:,100:]
        mag_cells = mag[:100,:100], mag[100:,:100], mag[:100,100:], mag[100:,100:]
        hists = [np.bincount(b.ravel(), m.ravel(), bin_n) for b, m in zip(bin_cells, mag_cells)]
        hist = np.hstack(hists)
        # transform to Hellinger kernel
        eps = 1e-7
        hist /= hist.sum() + eps
        hist = np.sqrt(hist)
        hist /= norm(hist) + eps
        samples.append(hist)
    return np.float32(samples)
if __name__ == '__main__':
    print __doc__
    digits, labels = load_digits(DIGITS_FN)
    print 'preprocessing...'
    # shuffle digits
    rand = np.random.RandomState(321)
    shuffle = rand.permutation(len(digits))
    digits, labels = digits[shuffle], labels[shuffle]
    digits2 = map(deskew, digits)
    samples = preprocess_hog(digits2)
    # 90/10 train/test split.
    train_n = int(0.9*len(samples))
    cv2.imshow('test set', mosaic(20, digits[train_n:]))
    digits_train, digits_test = np.split(digits2, [train_n])
    samples_train, samples_test = np.split(samples, [train_n])
    labels_train, labels_test = np.split(labels, [train_n])
    print 'training KNearest...'
    model = KNearest(k=4)
    model.train(samples_train, labels_train)
    vis = evaluate_model(model, digits_test, samples_test, labels_test)
    cv2.imshow('KNearest test', vis)
    print 'training SVM...'
    model = SVM(C=2.67, gamma=5.383)
    model.train(samples_train, labels_train)
    vis = evaluate_model(model, digits_test, samples_test, labels_test)
    cv2.imshow('SVM test', vis)
    print 'saving SVM as "digits_svm.dat"...'
    model.save('digits_svm.dat')
    cv2.waitKey(0)
| gpl-2.0 |
promptworks/keystone | keystone/exception.py | 5 | 15721 | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log
from oslo_utils import encodeutils
import six
from keystone.i18n import _, _LW
CONF = cfg.CONF
LOG = log.getLogger(__name__)
# Tests use this to make exception message format errors fatal
_FATAL_EXCEPTION_FORMAT_ERRORS = False
class Error(Exception):
    """Base error class.
    Child classes should define an HTTP status code, title, and a
    message_format.
    """
    # HTTP status code returned to the client.
    code = None
    # Short human-readable status title.
    title = None
    # %-style template interpolated with the constructor kwargs.
    message_format = None
    def __init__(self, message=None, **kwargs):
        try:
            message = self._build_message(message, **kwargs)
        except KeyError:
            # if you see this warning in your logs, please raise a bug report
            if _FATAL_EXCEPTION_FORMAT_ERRORS:
                raise
            else:
                LOG.warning(_LW('missing exception kwargs (programmer error)'))
                message = self.message_format
        super(Error, self).__init__(message)
    def _build_message(self, message, **kwargs):
        """Builds and returns an exception message.
        :raises: KeyError given insufficient kwargs
        """
        if not message:
            try:
                message = self.message_format % kwargs
            except UnicodeDecodeError:
                # Retry interpolation after safely decoding all kwargs.
                try:
                    kwargs = {k: encodeutils.safe_decode(v)
                              for k, v in six.iteritems(kwargs)}
                except UnicodeDecodeError:
                    # NOTE(jamielennox): This is the complete failure case
                    # at least by showing the template we have some idea
                    # of where the error is coming from
                    message = self.message_format
                else:
                    message = self.message_format % kwargs
        return message
# --- 400 "Bad Request" / 403 "Forbidden" validation-style errors ------------
class ValidationError(Error):
    message_format = _("Expecting to find %(attribute)s in %(target)s -"
                       " the server could not comply with the request"
                       " since it is either malformed or otherwise"
                       " incorrect. The client is assumed to be in error.")
    code = 400
    title = 'Bad Request'
class SchemaValidationError(ValidationError):
    # NOTE(lbragstad): For whole OpenStack message consistency, this error
    # message has been written in a format consistent with WSME.
    message_format = _("%(detail)s")
class ValidationTimeStampError(Error):
    message_format = _("Timestamp not in expected format."
                       " The server could not comply with the request"
                       " since it is either malformed or otherwise"
                       " incorrect. The client is assumed to be in error.")
    code = 400
    title = 'Bad Request'
class StringLengthExceeded(ValidationError):
    # NOTE(review): the message below is missing a space after the first
    # period; fixing it would change an API-visible (and translated) string.
    message_format = _("String length exceeded.The length of"
                       " string '%(string)s' exceeded the limit"
                       " of column %(type)s(CHAR(%(length)d)).")
class ValidationSizeError(Error):
    message_format = _("Request attribute %(attribute)s must be"
                       " less than or equal to %(size)i. The server"
                       " could not comply with the request because"
                       " the attribute size is invalid (too large)."
                       " The client is assumed to be in error.")
    code = 400
    title = 'Bad Request'
class CircularRegionHierarchyError(Error):
    message_format = _("The specified parent region %(parent_region_id)s "
                       "would create a circular region hierarchy.")
    code = 400
    title = 'Bad Request'
class PasswordVerificationError(Error):
    message_format = _("The password length must be less than or equal "
                       "to %(size)i. The server could not comply with the "
                       "request because the password is invalid.")
    code = 403
    title = 'Forbidden'
class RegionDeletionError(Error):
    message_format = _("Unable to delete region %(region_id)s because it or "
                       "its child regions have associated endpoints.")
    code = 403
    title = 'Forbidden'
class PKITokenExpected(Error):
    message_format = _('The certificates you requested are not available. '
                       'It is likely that this server does not use PKI tokens '
                       'otherwise this is the result of misconfiguration.')
    code = 403
    title = 'Cannot retrieve certificates'
class SecurityError(Error):
    """Avoids exposing details of security failures, unless in debug mode."""
    amendment = _('(Disable debug mode to suppress these details.)')
    def _build_message(self, message, **kwargs):
        """Only returns detailed messages in debug mode."""
        if CONF.debug:
            # Debug mode: include the caller-supplied detail plus a hint
            # on how to suppress it.
            return _('%(message)s %(amendment)s') % {
                'message': message or self.message_format % kwargs,
                'amendment': self.amendment}
        else:
            # Production: fall back to the generic template only.
            return self.message_format % kwargs
# --- 401 "Unauthorized" / 403 "Forbidden" authentication errors -------------
class Unauthorized(SecurityError):
    message_format = _("The request you have made requires authentication.")
    code = 401
    title = 'Unauthorized'
class AuthPluginException(Unauthorized):
    message_format = _("Authentication plugin error.")
    def __init__(self, *args, **kwargs):
        super(AuthPluginException, self).__init__(*args, **kwargs)
        # Plugin-specific payload returned to the client; subclasses fill it.
        self.authentication = {}
class MissingGroups(Unauthorized):
    message_format = _("Unable to find valid groups while using "
                       "mapping %(mapping_id)s")
class AuthMethodNotSupported(AuthPluginException):
    message_format = _("Attempted to authenticate with an unsupported method.")
    def __init__(self, *args, **kwargs):
        super(AuthMethodNotSupported, self).__init__(*args, **kwargs)
        # Advertise the methods the server does support.
        self.authentication = {'methods': CONF.auth.methods}
class AdditionalAuthRequired(AuthPluginException):
    message_format = _("Additional authentications steps required.")
    def __init__(self, auth_response=None, **kwargs):
        super(AdditionalAuthRequired, self).__init__(message=None, **kwargs)
        self.authentication = auth_response
class Forbidden(SecurityError):
    message_format = _("You are not authorized to perform the"
                       " requested action.")
    code = 403
    title = 'Forbidden'
class ForbiddenAction(Forbidden):
    message_format = _("You are not authorized to perform the"
                       " requested action: %(action)s")
class ImmutableAttributeError(Forbidden):
    message_format = _("Could not change immutable attribute(s) "
                       "'%(attributes)s' in target %(target)s")
class CrossBackendNotAllowed(Forbidden):
    message_format = _("Group membership across backend boundaries is not "
                       "allowed, group in question is %(group_id)s, "
                       "user is %(user_id)s")
class InvalidPolicyAssociation(Forbidden):
    message_format = _("Invalid mix of entities for policy association - "
                       "only Endpoint, Service or Region+Service allowed. "
                       "Request was - Endpoint: %(endpoint_id)s, "
                       "Service: %(service_id)s, Region: %(region_id)s")
class InvalidDomainConfig(Forbidden):
    message_format = _("Invalid domain specific configuration: %(reason)s")
# --- 404 "Not Found" family -------------------------------------------------
class NotFound(Error):
    message_format = _("Could not find: %(target)s")
    code = 404
    title = 'Not Found'
class EndpointNotFound(NotFound):
    message_format = _("Could not find endpoint: %(endpoint_id)s")
class MetadataNotFound(NotFound):
    """(dolph): metadata is not a user-facing concept,
    so this exception should not be exposed
    """
    message_format = _("An unhandled exception has occurred:"
                       " Could not find metadata.")
class PolicyNotFound(NotFound):
    message_format = _("Could not find policy: %(policy_id)s")
class PolicyAssociationNotFound(NotFound):
    message_format = _("Could not find policy association")
class RoleNotFound(NotFound):
    message_format = _("Could not find role: %(role_id)s")
class RoleAssignmentNotFound(NotFound):
    message_format = _("Could not find role assignment with role: "
                       "%(role_id)s, user or group: %(actor_id)s, "
                       "project or domain: %(target_id)s")
class RegionNotFound(NotFound):
    message_format = _("Could not find region: %(region_id)s")
class ServiceNotFound(NotFound):
    message_format = _("Could not find service: %(service_id)s")
class DomainNotFound(NotFound):
    message_format = _("Could not find domain: %(domain_id)s")
class ProjectNotFound(NotFound):
    message_format = _("Could not find project: %(project_id)s")
class InvalidParentProject(NotFound):
    message_format = _("Cannot create project with parent: %(project_id)s")
class TokenNotFound(NotFound):
    message_format = _("Could not find token: %(token_id)s")
class UserNotFound(NotFound):
    message_format = _("Could not find user: %(user_id)s")
class GroupNotFound(NotFound):
    message_format = _("Could not find group: %(group_id)s")
class MappingNotFound(NotFound):
    message_format = _("Could not find mapping: %(mapping_id)s")
class TrustNotFound(NotFound):
    message_format = _("Could not find trust: %(trust_id)s")
class TrustUseLimitReached(Forbidden):
    message_format = _("No remaining uses for trust: %(trust_id)s")
class CredentialNotFound(NotFound):
    message_format = _("Could not find credential: %(credential_id)s")
class VersionNotFound(NotFound):
    message_format = _("Could not find version: %(version)s")
class EndpointGroupNotFound(NotFound):
    message_format = _("Could not find Endpoint Group: %(endpoint_group_id)s")
class IdentityProviderNotFound(NotFound):
    message_format = _("Could not find Identity Provider: %(idp_id)s")
class ServiceProviderNotFound(NotFound):
    message_format = _("Could not find Service Provider: %(sp_id)s")
class FederatedProtocolNotFound(NotFound):
    message_format = _("Could not find federated protocol %(protocol_id)s for"
                       " Identity Provider: %(idp_id)s")
class PublicIDNotFound(NotFound):
    # This is used internally and mapped to either User/GroupNotFound or,
    # Assertion before the exception leaves Keystone.
    message_format = "%(id)s"
class DomainConfigNotFound(NotFound):
    message_format = _('Could not find %(group_or_option)s in domain '
                       'configuration for domain %(domain_id)s')
class Conflict(Error):
    message_format = _("Conflict occurred attempting to store %(type)s -"
                       " %(details)s")
    code = 409
    title = 'Conflict'
class UnexpectedError(SecurityError):
    """Avoids exposing details of failures, unless in debug mode."""
    _message_format = _("An unexpected error prevented the server "
                        "from fulfilling your request.")
    debug_message_format = _("An unexpected error prevented the server "
                             "from fulfilling your request: %(exception)s")
    @property
    def message_format(self):
        """Return the generic message format string unless debug is enabled."""
        if CONF.debug:
            return self.debug_message_format
        return self._message_format
    def _build_message(self, message, **kwargs):
        if CONF.debug and 'exception' not in kwargs:
            # Ensure that exception has a value to be extra defensive for
            # substitutions and make sure the exception doesn't raise an
            # exception.
            kwargs['exception'] = ''
        return super(UnexpectedError, self)._build_message(message, **kwargs)
    code = 500
    title = 'Internal Server Error'
# --- 500-family and miscellaneous errors ------------------------------------
class TrustConsumeMaximumAttempt(UnexpectedError):
    debug_message_format = _("Unable to consume trust %(trust_id)s, unable to "
                             "acquire lock.")
class CertificateFilesUnavailable(UnexpectedError):
    debug_message_format = _("Expected signing certificates are not available "
                             "on the server. Please check Keystone "
                             "configuration.")
class MalformedEndpoint(UnexpectedError):
    debug_message_format = _("Malformed endpoint URL (%(endpoint)s),"
                             " see ERROR log for details.")
class MappedGroupNotFound(UnexpectedError):
    debug_message_format = _("Group %(group_id)s returned by mapping "
                             "%(mapping_id)s was not found in the backend.")
class MetadataFileError(UnexpectedError):
    message_format = _("Error while reading metadata file, %(reason)s")
class AssignmentTypeCalculationError(UnexpectedError):
    message_format = _(
        'Unexpected combination of grant attributes - '
        'User: %(user_id)s, Group: %(group_id)s, Project: %(project_id)s, '
        'Domain: %(domain_id)s')
# NOTE(review): 'NotImplemented' shadows the builtin constant inside this
# module; renaming would break the public API, so it is left as-is.
class NotImplemented(Error):
    message_format = _("The action you have requested has not"
                       " been implemented.")
    code = 501
    title = 'Not Implemented'
class Gone(Error):
    message_format = _("The service you have requested is no"
                       " longer available on this server.")
    code = 410
    title = 'Gone'
class ConfigFileNotFound(UnexpectedError):
    debug_message_format = _("The Keystone configuration file %(config_file)s "
                             "could not be found.")
class KeysNotFound(UnexpectedError):
    message_format = _('No encryption keys found; run keystone-manage '
                       'fernet_setup to bootstrap one.')
class MultipleSQLDriversInConfig(UnexpectedError):
    message_format = _('The Keystone domain-specific configuration has '
                       'specified more than one SQL driver (only one is '
                       'permitted): %(source)s.')
class MigrationNotProvided(Exception):
    def __init__(self, mod_name, path):
        super(MigrationNotProvided, self).__init__(_(
            "%(mod_name)s doesn't provide database migrations. The migration"
            " repository path at %(path)s doesn't exist or isn't a directory."
        ) % {'mod_name': mod_name, 'path': path})
class UnsupportedTokenVersionException(Exception):
    """Token version is unrecognizable or unsupported."""
    pass
class SAMLSigningError(UnexpectedError):
    debug_message_format = _('Unable to sign SAML assertion. It is likely '
                             'that this server does not have xmlsec1 '
                             'installed, or this is the result of '
                             'misconfiguration. Reason %(reason)s')
    title = 'Error signing SAML assertion'
class OAuthHeadersMissingError(UnexpectedError):
    debug_message_format = _('No Authorization headers found, cannot proceed '
                             'with OAuth related calls, if running under '
                             'HTTPd or Apache, ensure WSGIPassAuthorization '
                             'is set to On.')
    title = 'Error retrieving OAuth headers'
| apache-2.0 |
imsukmin/codingTheMatrix | chap2_the_field/image.py | 1 | 5149 | # Copyright 2013 Philip N. Klein
"""
Basic types:
file - a png file on disk
image - a list of list of pixels. pixels can be triples of RGB intensities,
or single grayscale values.
display - not a type per se, but rather causing the type to be shown on screen
Functions convert between these formats, and also can write to temporary files
and display them with a web browser.
"""
# To do: check types of arguments, check that image has no alpha channel
# Note that right now, we ignore the alpha channel, but allow it. - @dbp
import png
import numbers
import collections
import collections.abc
# Native imports
import webbrowser
import tempfile
import os
import atexit
# Round color coordinate to nearest int and clamp to [0, 255]
def _color_int(col):
return max(min(round(col), 255), 0)
# utility conversions, between boxed pixel and flat pixel formats
# the png library uses flat, we use boxed.
def _boxed2flat(row):
    """Flatten a row of boxed pixels into one flat list of clamped channels."""
    flat = []
    for box in row:
        flat.extend(_color_int(channel) for channel in box)
    return flat
def _flat2boxed(row):
# Note we skip every 4th element, thus eliminating the alpha channel
return [tuple(row[i:i+3]) for i in range(0, len(row), 4)]
## Image conversions
def isgray(image):
    """Return True if ``image`` is grayscale, False if it is color.

    A grayscale image stores one number per pixel; a color image stores a
    3-iterable per pixel.  Only the top-left pixel is inspected, so the
    image is assumed to be homogeneous.

    Raises:
        TypeError: if the first pixel is neither a number nor a length-3
            iterable.
    """
    # `collections.Iterable` was removed in Python 3.10; the ABCs live in
    # `collections.abc`.  Import locally to keep this fix self-contained.
    from collections import abc
    col = image[0][0]
    if isinstance(col, numbers.Number):
        return True
    elif isinstance(col, abc.Iterable) and len(col) == 3:
        return False
    else:
        raise TypeError('Unrecognized image type')
def color2gray(image):
    """Convert a color (RGB) image to grayscale.

    Uses the HDTV (ITU-R BT.709) luma coefficients, as per
    https://en.wikipedia.org/wiki/Grayscale, truncating each value to int.
    """
    # The previous implementation first built a full copy of the image; the
    # comprehension below already creates fresh rows, so the copy was wasted.
    return [[int(0.2126*p[0] + 0.7152*p[1] + 0.0722*p[2]) for p in row]
            for row in image]
def gray2color(image):
    """Convert a grayscale image to color by replicating each value into RGB."""
    return [[(value, value, value) for value in row] for row in image]
#extracting and combining color channels
def rgbsplit(image):
    """Split an RGB image into three grayscale images, one per color channel."""
    channels = []
    for ch in (0, 1, 2):
        channels.append([[pixel[ch] for pixel in row] for row in image])
    return channels
def rgpsplice(R, G, B):
    """Splice three single-channel (grayscale) images into one RGB image."""
    rows = range(len(R))
    return [[(R[i][j], G[i][j], B[i][j]) for j in range(len(R[0]))]
            for i in rows]
## To and from files
def file2image(path):
    """ Reads an image into a list of lists of pixel values (tuples with
    three values). This is a color image. """
    # asRGBA() normalizes the file to RGBA rows; width, height and metadata
    # are unused because the row iterator `p` carries everything we keep.
    (w, h, p, m) = png.Reader(filename = path).asRGBA() # force RGB and alpha
    # _flat2boxed boxes each pixel into an (R, G, B) tuple, dropping alpha.
    return [_flat2boxed(r) for r in p]
def image2file(image, path):
    """ Writes an image in list of lists format to a file. Will work with
    either color or grayscale. """
    # Grayscale input is expanded to RGB triples so a single writer code
    # path can serve both formats.
    if isgray(image):
        img = gray2color(image)
    else:
        img = image
    with open(path, 'wb') as f:
        png.Writer(width=len(image[0]), height=len(image)).write(f,
                [_boxed2flat(r) for r in img])
## Display functions
def image2display(image, browser=None):
    """ Stores an image in a temporary location and displays it on screen
    using a web browser. """
    path = _create_temp('.png')
    image2file(image, path)
    # Wrap the PNG in a minimal HTML page so the browser renders it inline.
    hpath = _create_temp('.html')
    with open(hpath, 'w') as h:
        h.writelines(["<html><body><img src='file://%s'/></body></html>" % path])
    openinbrowser('file://%s' % hpath, browser)
    # print("Hit Enter once the image is displayed.... ", end="")
    # NOTE(review): blocks on stdin with no visible prompt (the print above is
    # commented out) -- confirm this pause is still intended.
    input()
_browser = None
def setbrowser(browser=None):
    """ Registers the given browser and saves it as the module default.
    This is used to control which browser is used to display the plot.
    The argument should be a value that can be passed to webbrowser.get()
    to obtain a browser.  If no argument is given, the default is reset
    to the system default.

    webbrowser provides some predefined browser names, including:
    'firefox'
    'opera'

    If the browser string contains '%s', it is interpreted as a literal
    browser command line.  The URL will be substituted for '%s' in the command.
    For example:
    'google-chrome %s'
    'cmd "start iexplore.exe %s"'

    See the webbrowser documentation for more detailed information.

    Note: Safari does not reliably work with the webbrowser module,
    so we recommend using a different browser.
    """
    global _browser
    if browser is None:
        _browser = None # Use system default
    else:
        # Registering under the same name makes later webbrowser.get(browser)
        # calls (see openinbrowser) resolve to this choice.
        webbrowser.register(browser, None, webbrowser.get(browser))
        _browser = browser
def getbrowser():
    """ Returns the module's default browser """
    # May be None, meaning "use the system default browser".
    return _browser
def openinbrowser(url, browser=None):
    """Open `url` in `browser`, falling back to the module default browser.

    `browser` is a name usable with webbrowser.get(); None selects the
    module default (set via setbrowser) or the system default.
    """
    if browser is None:
        browser = _browser
    webbrowser.get(browser).open(url)
# Create a temporary file that will be removed at exit
# Returns a path to the file
def _create_temp(suffix='', prefix='tmp', dir=None):
    """Create an empty temp file, schedule deletion at exit, return its path."""
    handle, tmp_path = tempfile.mkstemp(suffix, prefix, dir)
    os.close(handle)
    _remove_at_exit(tmp_path)
    return tmp_path
# Register a file to be removed at exit
def _remove_at_exit(path):
    # os.remove runs during interpreter shutdown; NOTE(review): if the file is
    # already gone by then, atexit will report the OSError at exit time.
    atexit.register(os.remove, path)
| mit |
ychfan/tensorflow | tensorflow/contrib/graph_editor/util.py | 42 | 17095 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions for the graph_editor.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import re
from six import iteritems
from tensorflow.python.framework import ops as tf_ops
from tensorflow.python.ops import array_ops as tf_array_ops
__all__ = [
"make_list_of_op",
"get_tensors",
"make_list_of_t",
"get_generating_ops",
"get_consuming_ops",
"ControlOutputs",
"placeholder_name",
"make_placeholder_from_tensor",
"make_placeholder_from_dtype_and_shape",
]
def concatenate_unique(la, lb):
  """Append to `la` each element of `lb` that is not already in `la`.

  The appended elements keep their relative order from `lb`.

  Args:
    la: List of Python objects.
    lb: List of Python objects.

  Returns:
    `la`, mutated in place.
  """
  seen = set(la)
  for item in lb:
    if item in seen:
      continue
    seen.add(item)
    la.append(item)
  return la
# TODO(fkp): very generic code, it should be moved in a more generic place.
class ListView(object):
  """Read-only wrapper around a Python list.

  Strongly inspired by the list view used in tf.Operation: supports
  iteration, length, truthiness, indexing and concatenation, but no
  mutation.
  """

  def __init__(self, list_):
    if not isinstance(list_, list):
      raise TypeError("Expected a list, got: {}.".format(type(list_)))
    self._list = list_

  def __iter__(self):
    return iter(self._list)

  def __len__(self):
    return len(self._list)

  def __bool__(self):
    return bool(self._list)

  # Python 3 wants __bool__, Python 2.7 wants __nonzero__
  __nonzero__ = __bool__

  def __getitem__(self, i):
    return self._list[i]

  def __add__(self, other):
    rhs = other if isinstance(other, list) else list(other)
    return self._list + rhs
# TODO(fkp): very generic code, it should be moved in a more generic place.
def is_iterable(obj):
  """Return True if `obj` can be iterated over, False otherwise."""
  try:
    iter(obj)
  except Exception:  # pylint: disable=broad-except
    return False
  return True
def flatten_tree(tree, leaves=None):
  """Collect the leaves of `tree` into a flat list.

  Args:
    tree: iterable or not. If iterable, its elements (child) can also be
      iterable or not.
    leaves: list to which the tree leaves are appended (None by default).

  Returns:
    A list of all the leaves in the tree.
  """
  if leaves is None:
    leaves = []
  if isinstance(tree, dict):
    children = (child for _, child in iteritems(tree))
  elif is_iterable(tree):
    children = tree
  else:
    leaves.append(tree)
    return leaves
  for child in children:
    flatten_tree(child, leaves)
  return leaves
def transform_tree(tree, fn, iterable_type=tuple):
  """Transform all the nodes of a tree.

  Args:
    tree: iterable or not. If iterable, its elements (child) can also be
      iterable or not.
    fn: function to apply to each leaves.
    iterable_type: type use to construct the resulting tree for unknown
      iterable, typically `list` or `tuple`.
  Returns:
    A tree whose leaves has been transformed by `fn`.
    The hierarchy of the output tree mimics the one of the input tree.
  """
  # Bug fix: `collections.Sequence` was removed in Python 3.10; the ABCs now
  # live only in `collections.abc`. Import locally to keep the fix contained.
  from collections import abc as _collections_abc
  if is_iterable(tree):
    if isinstance(tree, dict):
      # Rebuild a dict of the same concrete type, transforming each value.
      res = tree.__new__(type(tree))
      res.__init__(
          (k, transform_tree(child, fn)) for k, child in iteritems(tree))
      return res
    elif isinstance(tree, tuple):
      # NamedTuple? Rebuild from the field dict so field names survive.
      if hasattr(tree, "_asdict"):
        res = tree.__new__(type(tree), **transform_tree(tree._asdict(), fn))
      else:
        res = tree.__new__(type(tree),
                           (transform_tree(child, fn) for child in tree))
      return res
    elif isinstance(tree, _collections_abc.Sequence):
      res = tree.__new__(type(tree))
      res.__init__(transform_tree(child, fn) for child in tree)
      return res
    else:
      # Unknown iterable: fall back to the caller-provided container type.
      return iterable_type(transform_tree(child, fn) for child in tree)
  else:
    return fn(tree)
def check_graphs(*args):
  """Check that all the element in args belong to the same graph.

  Args:
    *args: a list of object with a obj.graph property (possibly None).

  Raises:
    ValueError: if all the elements do not belong to the same graph.
  """
  graph = None
  for index, sgv in enumerate(args):
    current = sgv.graph
    if current is None:
      continue
    if graph is None:
      graph = current
    elif current is not graph:
      raise ValueError("Argument[{}]: Wrong graph!".format(index))
def get_unique_graph(tops, check_types=None, none_if_empty=False):
  """Return the unique graph used by the all the elements in tops.

  Args:
    tops: list of elements to check (usually a list of tf.Operation and/or
      tf.Tensor). Or a tf.Graph.
    check_types: check that the element in tops are of given type(s). If None,
      the types (tf.Operation, tf.Tensor) are used.
    none_if_empty: don't raise an error if tops is an empty list, just return
      None.
  Returns:
    The unique graph used by all the tops.
  Raises:
    TypeError: if tops is not a iterable of tf.Operation.
    ValueError: if the graph is not unique.
  """
  # A graph trivially is its own unique graph.
  if isinstance(tops, tf_ops.Graph):
    return tops
  if not is_iterable(tops):
    raise TypeError("{} is not iterable".format(type(tops)))
  if check_types is None:
    check_types = (tf_ops.Operation, tf_ops.Tensor)
  elif not is_iterable(check_types):
    # Wrap a single type in a tuple so isinstance below accepts it.
    check_types = (check_types,)
  g = None
  for op in tops:
    if not isinstance(op, check_types):
      raise TypeError("Expected a type in ({}), got: {}".format(", ".join([str(
          t) for t in check_types]), type(op)))
    # First element fixes the graph; every later one must match it by identity.
    if g is None:
      g = op.graph
    elif g is not op.graph:
      raise ValueError("Operation {} does not belong to given graph".format(op))
  if g is None and not none_if_empty:
    raise ValueError("Can't find the unique graph of an empty list")
  return g
def make_list_of_op(ops, check_graph=True, allow_graph=True, ignore_ts=False):
  """Convert ops to a list of `tf.Operation`.

  Args:
    ops: can be an iterable of `tf.Operation`, a `tf.Graph` or a single
      operation.
    check_graph: if `True` check if all the operations belong to the same graph.
    allow_graph: if `False` a `tf.Graph` cannot be converted.
    ignore_ts: if True, silently ignore `tf.Tensor`.
  Returns:
    A newly created list of `tf.Operation`.
  Raises:
    TypeError: if ops cannot be converted to a list of `tf.Operation` or,
     if `check_graph` is `True`, if all the ops do not belong to the
     same graph.
  """
  if isinstance(ops, tf_ops.Graph):
    if allow_graph:
      return ops.get_operations()
    else:
      raise TypeError("allow_graph is False: cannot convert a tf.Graph.")
  else:
    if not is_iterable(ops):
      ops = [ops]
    if not ops:
      return []
    if check_graph:
      # check_types=None lets get_unique_graph accept tensors too, which is
      # what we want when they are merely being ignored (ignore_ts=True).
      check_types = None if ignore_ts else tf_ops.Operation
      get_unique_graph(ops, check_types=check_types)
    # Any tensors present are silently filtered out here.
    return [op for op in ops if isinstance(op, tf_ops.Operation)]
# TODO(fkp): move this function in tf.Graph?
def get_tensors(graph):
  """Return every tensor that is an output of some op in `graph`.

  Args:
    graph: a `tf.Graph`.

  Returns:
    A list of `tf.Tensor`.

  Raises:
    TypeError: if graph is not a `tf.Graph`.
  """
  if not isinstance(graph, tf_ops.Graph):
    raise TypeError("Expected a graph, got: {}".format(type(graph)))
  return [t for op in graph.get_operations() for t in op.outputs]
def make_list_of_t(ts, check_graph=True, allow_graph=True, ignore_ops=False):
  """Convert ts to a list of `tf.Tensor`.

  Args:
    ts: can be an iterable of `tf.Tensor`, a `tf.Graph` or a single tensor.
    check_graph: if `True` check if all the tensors belong to the same graph.
    allow_graph: if `False` a `tf.Graph` cannot be converted.
    ignore_ops: if `True`, silently ignore `tf.Operation`.
  Returns:
    A newly created list of `tf.Tensor`.
  Raises:
    TypeError: if `ts` cannot be converted to a list of `tf.Tensor` or,
     if `check_graph` is `True`, if all the ops do not belong to the same graph.
  """
  if isinstance(ts, tf_ops.Graph):
    if allow_graph:
      return get_tensors(ts)
    else:
      raise TypeError("allow_graph is False: cannot convert a tf.Graph.")
  else:
    if not is_iterable(ts):
      ts = [ts]
    if not ts:
      return []
    if check_graph:
      # check_types=None lets get_unique_graph accept ops too, which is what
      # we want when they are merely being ignored (ignore_ops=True).
      check_types = None if ignore_ops else tf_ops.Tensor
      get_unique_graph(ts, check_types=check_types)
    # Any operations present are silently filtered out here.
    return [t for t in ts if isinstance(t, tf_ops.Tensor)]
def get_generating_ops(ts):
  """Return the op that produces each tensor in `ts`.

  Args:
    ts: a list of `tf.Tensor`

  Returns:
    A list of all the generating `tf.Operation` of the tensors in `ts`.

  Raises:
    TypeError: if `ts` cannot be converted to a list of `tf.Tensor`.
  """
  tensors = make_list_of_t(ts, allow_graph=False)
  return [tensor.op for tensor in tensors]
def get_consuming_ops(ts):
  """Return all the consuming ops of the tensors in ts.

  Args:
    ts: a list of `tf.Tensor`
  Returns:
    A list of all the consuming `tf.Operation` of the tensors in `ts`,
    deduplicated, in first-seen order.
  Raises:
    TypeError: if ts cannot be converted to a list of `tf.Tensor`.
  """
  ts = make_list_of_t(ts, allow_graph=False)
  ops = []
  # Track membership in a set: the previous `op not in ops` list scan made
  # deduplication quadratic in the total number of consumers.
  seen = set()
  for t in ts:
    for op in t.consumers():
      if op not in seen:
        seen.add(op)
        ops.append(op)
  return ops
class ControlOutputs(object):
  """The control outputs topology.

  Maintains the inverse of the per-op `control_inputs` relation: for each op,
  the list of ops that name it as a control-input dependency.
  """

  def __init__(self, graph):
    """Create a dictionary of control-output dependencies.

    Args:
      graph: a `tf.Graph`.
    Returns:
      A dictionary where a key is a `tf.Operation` instance and the
       corresponding value is a list of all the ops which have the key
       as one of their control-input dependencies.
    Raises:
      TypeError: graph is not a `tf.Graph`.
    """
    if not isinstance(graph, tf_ops.Graph):
      raise TypeError("Expected a tf.Graph, got: {}".format(type(graph)))
    self._control_outputs = {}
    self._graph = graph
    # Graph version at the time of the last _build(); used as a dirty flag.
    self._version = None
    self._build()

  def update(self):
    """Update the control outputs if the graph has changed."""
    # Rebuild only when the graph's version counter has moved.
    if self._version != self._graph.version:
      self._build()
    return self

  def _build(self):
    """Build the control outputs dictionary."""
    self._control_outputs.clear()
    ops = self._graph.get_operations()
    # Invert the control_inputs relation: op depends-on control_input becomes
    # control_input -> [op, ...], with duplicates skipped.
    for op in ops:
      for control_input in op.control_inputs:
        if control_input not in self._control_outputs:
          self._control_outputs[control_input] = []
        if op not in self._control_outputs[control_input]:
          self._control_outputs[control_input].append(op)
    self._version = self._graph.version

  def get_all(self):
    # Full mapping: op -> list of ops that control-depend on it.
    return self._control_outputs

  def get(self, op):
    """return the control outputs of op."""
    if op in self._control_outputs:
      return self._control_outputs[op]
    else:
      # Empty tuple keeps the return iterable for ops with no control outputs.
      return ()

  @property
  def graph(self):
    return self._graph
def scope_finalize(scope):
  """Ensure a non-empty scope string ends with a trailing '/'."""
  if scope and not scope.endswith("/"):
    return scope + "/"
  return scope
def scope_dirname(scope):
  """Return the scope up to and including the last '/', or '' if none."""
  idx = scope.rfind("/")
  return "" if idx == -1 else scope[:idx + 1]
def scope_basename(scope):
  """Return the part of the scope after the last '/' (the whole scope if none)."""
  return scope.rpartition("/")[2]
def placeholder_name(t=None, scope=None):
  """Create placeholder name for the graph editor.

  Args:
    t: optional tensor on which the placeholder operation's name will be based
      on
    scope: absolute scope with which to prefix the placeholder's name. None
      means that the scope of t is preserved. "" means the root scope.
  Returns:
    A new placeholder name prefixed by "geph". Note that "geph" stands for
      Graph Editor PlaceHolder. This convention allows to quickly identify the
      placeholder generated by the Graph Editor.
  Raises:
    TypeError: if t is not None or a tf.Tensor.
  """
  if scope is not None:
    scope = scope_finalize(scope)
  if t is not None:
    if not isinstance(t, tf_ops.Tensor):
      # Fixed error-message typo: it previously read "tf.Tenfor".
      raise TypeError("Expected a tf.Tensor, got: {}".format(type(t)))
    op_dirname = scope_dirname(t.op.name)
    op_basename = scope_basename(t.op.name)
    if scope is None:
      scope = op_dirname
    # Don't re-prefix a name that is already a graph-editor placeholder name.
    if op_basename.startswith("geph__"):
      ph_name = op_basename
    else:
      ph_name = "geph__{}_{}".format(op_basename, t.value_index)
    return scope + ph_name
  else:
    if scope is None:
      scope = ""
    return scope + "geph"
def make_placeholder_from_tensor(t, scope=None):
  """Create a `tf.placeholder` for the Graph Editor.

  Note that the correct graph scope must be set by the calling function.

  Args:
    t: a `tf.Tensor` whose name will be used to create the placeholder
      (see function placeholder_name).
    scope: absolute scope within which to create the placeholder. None
      means that the scope of `t` is preserved. `""` means the root scope.
  Returns:
    A newly created `tf.placeholder`.
  Raises:
    TypeError: if `t` is not `None` or a `tf.Tensor`.
  """
  # The placeholder mirrors t's dtype and (static) shape; its name comes from
  # placeholder_name, which derives a "geph__..." name from t.
  return tf_array_ops.placeholder(
      dtype=t.dtype, shape=t.get_shape(), name=placeholder_name(
          t, scope=scope))
def make_placeholder_from_dtype_and_shape(dtype, shape=None, scope=None):
  """Create a tf.placeholder for the Graph Editor.

  Note that the correct graph scope must be set by the calling function.
  The placeholder is named using the function placeholder_name (with no
  tensor argument).

  Args:
    dtype: the tensor type.
    shape: the tensor shape (optional).
    scope: absolute scope within which to create the placeholder. None
      means that the scope of t is preserved. "" means the root scope.
  Returns:
    A newly created tf.placeholder.
  """
  # With no tensor argument, placeholder_name yields the generic "geph" name
  # (prefixed by `scope` when one is given).
  return tf_array_ops.placeholder(
      dtype=dtype, shape=shape, name=placeholder_name(scope=scope))
_INTERNAL_VARIABLE_RE = re.compile(r"^__\w+__$")
def get_predefined_collection_names():
  """Return all the predefined collection names."""
  # Enumerate tf.GraphKeys attributes, skipping dunder names (e.g. __doc__)
  # via _INTERNAL_VARIABLE_RE.
  return [getattr(tf_ops.GraphKeys, key) for key in dir(tf_ops.GraphKeys)
          if not _INTERNAL_VARIABLE_RE.match(key)]
def find_corresponding_elem(target, dst_graph, dst_scope="", src_scope=""):
  """Find corresponding op/tensor in a different graph.

  Args:
    target: A `tf.Tensor` or a `tf.Operation` belonging to the original graph.
    dst_graph: The graph in which the corresponding graph element must be found.
    dst_scope: A scope which is prepended to the name to look for.
    src_scope: A scope which is removed from the original of `target` name.

  Returns:
    The corresponding `tf.Tensor` or `tf.Operation`.

  Raises:
    ValueError: if the name of `target` does not start with `src_scope`.
    TypeError: if `target` is not a `tf.Tensor` or a `tf.Operation`
    KeyError: If the corresponding graph element cannot be found.
  """
  src_name = target.name
  if src_scope:
    src_scope = scope_finalize(src_scope)
    # Bug fix: this used to call the non-existent `str.startswidth`, which
    # raised AttributeError whenever a non-empty `src_scope` was passed.
    if not src_name.startswith(src_scope):
      raise ValueError("{} does not start with {}".format(src_name, src_scope))
    src_name = src_name[len(src_scope):]
  dst_name = src_name
  if dst_scope:
    dst_scope = scope_finalize(dst_scope)
    dst_name = dst_scope + dst_name
  if isinstance(target, tf_ops.Tensor):
    return dst_graph.get_tensor_by_name(dst_name)
  if isinstance(target, tf_ops.Operation):
    return dst_graph.get_operation_by_name(dst_name)
  # Bug fix: the "{}" placeholder was never substituted (the type was passed
  # as a second TypeError argument instead of through str.format).
  raise TypeError("Expected tf.Tensor or tf.Operation, got: {}".format(
      type(target)))
def find_corresponding(targets, dst_graph, dst_scope="", src_scope=""):
  """Find corresponding ops/tensors in a different graph.

  `targets` is a Python tree, that is, a nested structure of iterable
  (list, tuple, dictionary) whose leaves are instances of
  `tf.Tensor` or `tf.Operation`

  Args:
    targets: A Python tree containing `tf.Tensor` or `tf.Operation`
      belonging to the original graph.
    dst_graph: The graph in which the corresponding graph element must be found.
    dst_scope: A scope which is prepended to the name to look for.
    src_scope: A scope which is removed from the original of `top` name.

  Returns:
    A Python tree containing the corresponding `tf.Tensor` or `tf.Operation`.

  Raises:
    ValueError: if `src_name` does not start with `src_scope`.
    TypeError: if `top` is not a `tf.Tensor` or a `tf.Operation`
    KeyError: If the corresponding graph element cannot be found.
  """
  def func(top):
    # Per-leaf lookup; transform_tree preserves the nesting structure.
    return find_corresponding_elem(top, dst_graph, dst_scope, src_scope)
  return transform_tree(targets, func)
| apache-2.0 |
mrev11/ccc3 | jt/jtpython/jtlib/jttabpane.py | 2 | 1447 | ##! /usr/bin/env python
# _*_ coding: latin-1 _*_
import jtdom
import jtutil
from jtelem import jtelem
class new(jtelem):
    # A tab-pane GUI element: a jtelem holding a list of child items (one per
    # tab page) plus the currently selected tab (1-based index).
    def __init__(self,top=None,left=None,bottom=None,right=None):
        jtelem.__init__(self,top,left,bottom,right)
        self.itemlist=[]        # child elements, one per tab page
        self.selectedindex=1    # 1-based index of the selected tab
        self.placement=None     # optional tab-placement hint (serialized in xmladd)
    def classname(self):
        # Widget type name used by the XML protocol.
        return "jttabpane"
    def setdialogid(self,id):
        # Record the owning dialog's id and propagate it to every child.
        self.dialogid=id
        for c in self.itemlist:
            c.setdialogid(self.dialogid)
    def additem(self,item):
        """Add a child item (tab page), wire it to this dialog, return it."""
        item.setdialogid(self.dialogid)
        self.itemlist.append(item)
        return item
    def xmladd(self):
        # Serialize the children plus any non-default state as XML fragments.
        x=""
        for c in self.itemlist:
            x+=c.xmlout()+jtutil.EOL
        if self.selectedindex!=1:
            x+="<selectedindex>"+str(self.selectedindex)+"</selectedindex>"+jtutil.EOL
        if self.placement:
            x+="<placement>"+self.placement+"</placement>"+jtutil.EOL
        return x
    def xmlput(self,x):
        # Update the selection from an incoming XML DOM node.
        self.selectedindex=int(jtdom.domtext(x))
    def xmlget(self):
        return str(self.selectedindex)
    def changed(self):
        # True when the selection differs from the last saved state.
        return self.laststate!=self.selectedindex
    def savestate(self):
        self.laststate=self.selectedindex
    def varput(self,x):
        # Programmatic setter used by the variable-binding machinery.
        self.selectedindex=x
        return x
    def varget(self):
        return self.selectedindex
    def select(self,x):
        # Select tab `x` (1-based).
        self.selectedindex=x
| lgpl-2.1 |
dhruve/spark | python/pyspark/ml/clustering.py | 35 | 41548 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pyspark import since, keyword_only
from pyspark.ml.util import *
from pyspark.ml.wrapper import JavaEstimator, JavaModel, JavaWrapper
from pyspark.ml.param.shared import *
from pyspark.ml.common import inherit_doc
__all__ = ['BisectingKMeans', 'BisectingKMeansModel', 'BisectingKMeansSummary',
'KMeans', 'KMeansModel',
'GaussianMixture', 'GaussianMixtureModel', 'GaussianMixtureSummary',
'LDA', 'LDAModel', 'LocalLDAModel', 'DistributedLDAModel']
class ClusteringSummary(JavaWrapper):
    """
    .. note:: Experimental

    Clustering results for a given model.

    .. versionadded:: 2.1.0
    """
    # Every property below delegates to the Scala-side summary object through
    # the JVM gateway (`_call_java`); nothing is computed in Python.
    @property
    @since("2.1.0")
    def predictionCol(self):
        """
        Name for column of predicted clusters in `predictions`.
        """
        return self._call_java("predictionCol")
    @property
    @since("2.1.0")
    def predictions(self):
        """
        DataFrame produced by the model's `transform` method.
        """
        return self._call_java("predictions")
    @property
    @since("2.1.0")
    def featuresCol(self):
        """
        Name for column of features in `predictions`.
        """
        return self._call_java("featuresCol")
    @property
    @since("2.1.0")
    def k(self):
        """
        The number of clusters the model was trained with.
        """
        return self._call_java("k")
    @property
    @since("2.1.0")
    def cluster(self):
        """
        DataFrame of predicted cluster centers for each training data point.
        """
        return self._call_java("cluster")
    @property
    @since("2.1.0")
    def clusterSizes(self):
        """
        Size of (number of data points in) each cluster.
        """
        return self._call_java("clusterSizes")
class GaussianMixtureModel(JavaModel, JavaMLWritable, JavaMLReadable):
    """
    Model fitted by GaussianMixture.

    .. versionadded:: 2.0.0
    """
    # Accessors delegate to the fitted Scala-side model via `_call_java`.
    @property
    @since("2.0.0")
    def weights(self):
        """
        Weight for each Gaussian distribution in the mixture.
        This is a multinomial probability distribution over the k Gaussians,
        where weights[i] is the weight for Gaussian i, and weights sum to 1.
        """
        return self._call_java("weights")
    @property
    @since("2.0.0")
    def gaussiansDF(self):
        """
        Retrieve Gaussian distributions as a DataFrame.
        Each row represents a Gaussian Distribution.
        The DataFrame has two columns: mean (Vector) and cov (Matrix).
        """
        return self._call_java("gaussiansDF")
    @property
    @since("2.1.0")
    def hasSummary(self):
        """
        Indicates whether a training summary exists for this model
        instance.
        """
        return self._call_java("hasSummary")
    @property
    @since("2.1.0")
    def summary(self):
        """
        Gets summary (e.g. cluster assignments, cluster sizes) of the model trained on the
        training set. An exception is thrown if no summary exists.
        """
        # A summary only exists on a freshly fitted model (not after load()),
        # hence the explicit hasSummary guard.
        if self.hasSummary:
            return GaussianMixtureSummary(self._call_java("summary"))
        else:
            raise RuntimeError("No training summary available for this %s" %
                               self.__class__.__name__)
@inherit_doc
class GaussianMixture(JavaEstimator, HasFeaturesCol, HasPredictionCol, HasMaxIter, HasTol, HasSeed,
                      HasProbabilityCol, JavaMLWritable, JavaMLReadable):
    """
    GaussianMixture clustering.
    This class performs expectation maximization for multivariate Gaussian
    Mixture Models (GMMs). A GMM represents a composite distribution of
    independent Gaussian distributions with associated "mixing" weights
    specifying each's contribution to the composite.
    Given a set of sample points, this class will maximize the log-likelihood
    for a mixture of k Gaussians, iterating until the log-likelihood changes by
    less than convergenceTol, or until it has reached the max number of iterations.
    While this process is generally guaranteed to converge, it is not guaranteed
    to find a global optimum.

    .. note:: For high-dimensional data (with many features), this algorithm may perform poorly.
        This is due to high-dimensional data (a) making it difficult to cluster at all
        (based on statistical/theoretical arguments) and (b) numerical issues with
        Gaussian distributions.

    >>> from pyspark.ml.linalg import Vectors
    >>> data = [(Vectors.dense([-0.1, -0.05 ]),),
    ...         (Vectors.dense([-0.01, -0.1]),),
    ...         (Vectors.dense([0.9, 0.8]),),
    ...         (Vectors.dense([0.75, 0.935]),),
    ...         (Vectors.dense([-0.83, -0.68]),),
    ...         (Vectors.dense([-0.91, -0.76]),)]
    >>> df = spark.createDataFrame(data, ["features"])
    >>> gm = GaussianMixture(k=3, tol=0.0001,
    ...                      maxIter=10, seed=10)
    >>> model = gm.fit(df)
    >>> model.hasSummary
    True
    >>> summary = model.summary
    >>> summary.k
    3
    >>> summary.clusterSizes
    [2, 2, 2]
    >>> summary.logLikelihood
    8.14636...
    >>> weights = model.weights
    >>> len(weights)
    3
    >>> model.gaussiansDF.select("mean").head()
    Row(mean=DenseVector([0.825, 0.8675]))
    >>> model.gaussiansDF.select("cov").head()
    Row(cov=DenseMatrix(2, 2, [0.0056, -0.0051, -0.0051, 0.0046], False))
    >>> transformed = model.transform(df).select("features", "prediction")
    >>> rows = transformed.collect()
    >>> rows[4].prediction == rows[5].prediction
    True
    >>> rows[2].prediction == rows[3].prediction
    True
    >>> gmm_path = temp_path + "/gmm"
    >>> gm.save(gmm_path)
    >>> gm2 = GaussianMixture.load(gmm_path)
    >>> gm2.getK()
    3
    >>> model_path = temp_path + "/gmm_model"
    >>> model.save(model_path)
    >>> model2 = GaussianMixtureModel.load(model_path)
    >>> model2.hasSummary
    False
    >>> model2.weights == model.weights
    True
    >>> model2.gaussiansDF.select("mean").head()
    Row(mean=DenseVector([0.825, 0.8675]))
    >>> model2.gaussiansDF.select("cov").head()
    Row(cov=DenseMatrix(2, 2, [0.0056, -0.0051, -0.0051, 0.0046], False))

    .. versionadded:: 2.0.0
    """
    # Estimator-specific param; the shared ones (maxIter, tol, seed, ...) come
    # from the Has* mixins.
    k = Param(Params._dummy(), "k", "Number of independent Gaussians in the mixture model. " +
              "Must be > 1.", typeConverter=TypeConverters.toInt)
    @keyword_only
    def __init__(self, featuresCol="features", predictionCol="prediction", k=2,
                 probabilityCol="probability", tol=0.01, maxIter=100, seed=None):
        """
        __init__(self, featuresCol="features", predictionCol="prediction", k=2, \
                 probabilityCol="probability", tol=0.01, maxIter=100, seed=None)
        """
        super(GaussianMixture, self).__init__()
        # Create the Java-side estimator peer that backs this Python wrapper.
        self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.GaussianMixture",
                                            self.uid)
        self._setDefault(k=2, tol=0.01, maxIter=100)
        kwargs = self._input_kwargs
        self.setParams(**kwargs)
    def _create_model(self, java_model):
        # Called by JavaEstimator.fit() to wrap the fitted Java model.
        return GaussianMixtureModel(java_model)
    @keyword_only
    @since("2.0.0")
    def setParams(self, featuresCol="features", predictionCol="prediction", k=2,
                  probabilityCol="probability", tol=0.01, maxIter=100, seed=None):
        """
        setParams(self, featuresCol="features", predictionCol="prediction", k=2, \
                  probabilityCol="probability", tol=0.01, maxIter=100, seed=None)

        Sets params for GaussianMixture.
        """
        kwargs = self._input_kwargs
        return self._set(**kwargs)
    @since("2.0.0")
    def setK(self, value):
        """
        Sets the value of :py:attr:`k`.
        """
        return self._set(k=value)
    @since("2.0.0")
    def getK(self):
        """
        Gets the value of `k`
        """
        return self.getOrDefault(self.k)
class GaussianMixtureSummary(ClusteringSummary):
    """
    .. note:: Experimental

    Gaussian mixture clustering results for a given model.

    .. versionadded:: 2.1.0
    """
    # Adds GMM-specific accessors on top of the generic ClusteringSummary;
    # all of them delegate to the JVM-side summary via `_call_java`.
    @property
    @since("2.1.0")
    def probabilityCol(self):
        """
        Name for column of predicted probability of each cluster in `predictions`.
        """
        return self._call_java("probabilityCol")
    @property
    @since("2.1.0")
    def probability(self):
        """
        DataFrame of probabilities of each cluster for each training data point.
        """
        return self._call_java("probability")
    @property
    @since("2.2.0")
    def logLikelihood(self):
        """
        Total log-likelihood for this model on the given data.
        """
        return self._call_java("logLikelihood")
class KMeansSummary(ClusteringSummary):
    """
    .. note:: Experimental

    Summary of KMeans.

    .. versionadded:: 2.1.0
    """
    # No k-means-specific fields; everything is inherited from
    # ClusteringSummary.
    pass
class KMeansModel(JavaModel, JavaMLWritable, JavaMLReadable):
    """
    Model fitted by KMeans.

    .. versionadded:: 1.5.0
    """
    @since("1.5.0")
    def clusterCenters(self):
        """Get the cluster centers, represented as a list of NumPy arrays."""
        # The JVM returns Vectors; toArray() converts each to a NumPy array.
        return [c.toArray() for c in self._call_java("clusterCenters")]
    @since("2.0.0")
    def computeCost(self, dataset):
        """
        Return the K-means cost (sum of squared distances of points to their nearest center)
        for this model on the given data.
        """
        return self._call_java("computeCost", dataset)
    @property
    @since("2.1.0")
    def hasSummary(self):
        """
        Indicates whether a training summary exists for this model instance.
        """
        return self._call_java("hasSummary")
    @property
    @since("2.1.0")
    def summary(self):
        """
        Gets summary (e.g. cluster assignments, cluster sizes) of the model trained on the
        training set. An exception is thrown if no summary exists.
        """
        # A summary only exists on a freshly fitted model (not after load()),
        # hence the explicit hasSummary guard.
        if self.hasSummary:
            return KMeansSummary(self._call_java("summary"))
        else:
            raise RuntimeError("No training summary available for this %s" %
                               self.__class__.__name__)
@inherit_doc
class KMeans(JavaEstimator, HasFeaturesCol, HasPredictionCol, HasMaxIter, HasTol, HasSeed,
             JavaMLWritable, JavaMLReadable):
    """
    K-means clustering with a k-means++ like initialization mode
    (the k-means|| algorithm by Bahmani et al).

    >>> from pyspark.ml.linalg import Vectors
    >>> data = [(Vectors.dense([0.0, 0.0]),), (Vectors.dense([1.0, 1.0]),),
    ...         (Vectors.dense([9.0, 8.0]),), (Vectors.dense([8.0, 9.0]),)]
    >>> df = spark.createDataFrame(data, ["features"])
    >>> kmeans = KMeans(k=2, seed=1)
    >>> model = kmeans.fit(df)
    >>> centers = model.clusterCenters()
    >>> len(centers)
    2
    >>> model.computeCost(df)
    2.000...
    >>> transformed = model.transform(df).select("features", "prediction")
    >>> rows = transformed.collect()
    >>> rows[0].prediction == rows[1].prediction
    True
    >>> rows[2].prediction == rows[3].prediction
    True
    >>> model.hasSummary
    True
    >>> summary = model.summary
    >>> summary.k
    2
    >>> summary.clusterSizes
    [2, 2]
    >>> kmeans_path = temp_path + "/kmeans"
    >>> kmeans.save(kmeans_path)
    >>> kmeans2 = KMeans.load(kmeans_path)
    >>> kmeans2.getK()
    2
    >>> model_path = temp_path + "/kmeans_model"
    >>> model.save(model_path)
    >>> model2 = KMeansModel.load(model_path)
    >>> model2.hasSummary
    False
    >>> model.clusterCenters()[0] == model2.clusterCenters()[0]
    array([ True,  True], dtype=bool)
    >>> model.clusterCenters()[1] == model2.clusterCenters()[1]
    array([ True,  True], dtype=bool)

    .. versionadded:: 1.5.0
    """
    # Estimator-specific params; shared ones (maxIter, tol, seed, ...) come
    # from the Has* mixins.
    k = Param(Params._dummy(), "k", "The number of clusters to create. Must be > 1.",
              typeConverter=TypeConverters.toInt)
    initMode = Param(Params._dummy(), "initMode",
                     "The initialization algorithm. This can be either \"random\" to " +
                     "choose random points as initial cluster centers, or \"k-means||\" " +
                     "to use a parallel variant of k-means++",
                     typeConverter=TypeConverters.toString)
    initSteps = Param(Params._dummy(), "initSteps", "The number of steps for k-means|| " +
                      "initialization mode. Must be > 0.", typeConverter=TypeConverters.toInt)
    @keyword_only
    def __init__(self, featuresCol="features", predictionCol="prediction", k=2,
                 initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None):
        """
        __init__(self, featuresCol="features", predictionCol="prediction", k=2, \
                 initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None)
        """
        super(KMeans, self).__init__()
        # Create the Java-side estimator peer that backs this Python wrapper.
        self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.KMeans", self.uid)
        self._setDefault(k=2, initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20)
        kwargs = self._input_kwargs
        self.setParams(**kwargs)
    def _create_model(self, java_model):
        # Called by JavaEstimator.fit() to wrap the fitted Java model.
        return KMeansModel(java_model)
    @keyword_only
    @since("1.5.0")
    def setParams(self, featuresCol="features", predictionCol="prediction", k=2,
                  initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None):
        """
        setParams(self, featuresCol="features", predictionCol="prediction", k=2, \
                  initMode="k-means||", initSteps=2, tol=1e-4, maxIter=20, seed=None)

        Sets params for KMeans.
        """
        kwargs = self._input_kwargs
        return self._set(**kwargs)
    @since("1.5.0")
    def setK(self, value):
        """
        Sets the value of :py:attr:`k`.
        """
        return self._set(k=value)
    @since("1.5.0")
    def getK(self):
        """
        Gets the value of `k`
        """
        return self.getOrDefault(self.k)
    @since("1.5.0")
    def setInitMode(self, value):
        """
        Sets the value of :py:attr:`initMode`.
        """
        return self._set(initMode=value)
    @since("1.5.0")
    def getInitMode(self):
        """
        Gets the value of `initMode`
        """
        return self.getOrDefault(self.initMode)
    @since("1.5.0")
    def setInitSteps(self, value):
        """
        Sets the value of :py:attr:`initSteps`.
        """
        return self._set(initSteps=value)
    @since("1.5.0")
    def getInitSteps(self):
        """
        Gets the value of `initSteps`
        """
        return self.getOrDefault(self.initSteps)
class BisectingKMeansModel(JavaModel, JavaMLWritable, JavaMLReadable):
    """
    Model fitted by BisectingKMeans.

    .. versionadded:: 2.0.0
    """

    @since("2.0.0")
    def clusterCenters(self):
        """Get the cluster centers, represented as a list of NumPy arrays."""
        return [c.toArray() for c in self._call_java("clusterCenters")]

    @since("2.0.0")
    def computeCost(self, dataset):
        """
        Computes the sum of squared distances between the input points
        and their corresponding cluster centers.
        """
        return self._call_java("computeCost", dataset)

    @property
    @since("2.1.0")
    def hasSummary(self):
        """
        Indicates whether a training summary exists for this model instance.
        """
        return self._call_java("hasSummary")

    @property
    @since("2.1.0")
    def summary(self):
        """
        Gets summary (e.g. cluster assignments, cluster sizes) of the model trained on the
        training set. An exception is thrown if no summary exists.
        """
        if self.hasSummary:
            return BisectingKMeansSummary(self._call_java("summary"))
        else:
            # Only the model instance returned by fit() carries a summary;
            # models loaded from disk do not (see the doctest in BisectingKMeans).
            raise RuntimeError("No training summary available for this %s" %
                               self.__class__.__name__)
@inherit_doc
class BisectingKMeans(JavaEstimator, HasFeaturesCol, HasPredictionCol, HasMaxIter, HasSeed,
                      JavaMLWritable, JavaMLReadable):
    """
    A bisecting k-means algorithm based on the paper "A comparison of document clustering
    techniques" by Steinbach, Karypis, and Kumar, with modification to fit Spark.
    The algorithm starts from a single cluster that contains all points.
    Iteratively it finds divisible clusters on the bottom level and bisects each of them using
    k-means, until there are `k` leaf clusters in total or no leaf clusters are divisible.
    The bisecting steps of clusters on the same level are grouped together to increase parallelism.
    If bisecting all divisible clusters on the bottom level would result more than `k` leaf
    clusters, larger clusters get higher priority.

    >>> from pyspark.ml.linalg import Vectors
    >>> data = [(Vectors.dense([0.0, 0.0]),), (Vectors.dense([1.0, 1.0]),),
    ...         (Vectors.dense([9.0, 8.0]),), (Vectors.dense([8.0, 9.0]),)]
    >>> df = spark.createDataFrame(data, ["features"])
    >>> bkm = BisectingKMeans(k=2, minDivisibleClusterSize=1.0)
    >>> model = bkm.fit(df)
    >>> centers = model.clusterCenters()
    >>> len(centers)
    2
    >>> model.computeCost(df)
    2.000...
    >>> model.hasSummary
    True
    >>> summary = model.summary
    >>> summary.k
    2
    >>> summary.clusterSizes
    [2, 2]
    >>> transformed = model.transform(df).select("features", "prediction")
    >>> rows = transformed.collect()
    >>> rows[0].prediction == rows[1].prediction
    True
    >>> rows[2].prediction == rows[3].prediction
    True
    >>> bkm_path = temp_path + "/bkm"
    >>> bkm.save(bkm_path)
    >>> bkm2 = BisectingKMeans.load(bkm_path)
    >>> bkm2.getK()
    2
    >>> model_path = temp_path + "/bkm_model"
    >>> model.save(model_path)
    >>> model2 = BisectingKMeansModel.load(model_path)
    >>> model2.hasSummary
    False
    >>> model.clusterCenters()[0] == model2.clusterCenters()[0]
    array([ True, True], dtype=bool)
    >>> model.clusterCenters()[1] == model2.clusterCenters()[1]
    array([ True, True], dtype=bool)

    .. versionadded:: 2.0.0
    """

    # Param declarations; doc strings surface in explainParams().
    k = Param(Params._dummy(), "k", "The desired number of leaf clusters. Must be > 1.",
              typeConverter=TypeConverters.toInt)
    minDivisibleClusterSize = Param(Params._dummy(), "minDivisibleClusterSize",
                                    "The minimum number of points (if >= 1.0) or the minimum " +
                                    "proportion of points (if < 1.0) of a divisible cluster.",
                                    typeConverter=TypeConverters.toFloat)

    @keyword_only
    def __init__(self, featuresCol="features", predictionCol="prediction", maxIter=20,
                 seed=None, k=4, minDivisibleClusterSize=1.0):
        """
        __init__(self, featuresCol="features", predictionCol="prediction", maxIter=20, \
                 seed=None, k=4, minDivisibleClusterSize=1.0)
        """
        super(BisectingKMeans, self).__init__()
        # Fitting is delegated to the JVM-side estimator.
        self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.BisectingKMeans",
                                            self.uid)
        self._setDefault(maxIter=20, k=4, minDivisibleClusterSize=1.0)
        kwargs = self._input_kwargs
        self.setParams(**kwargs)

    @keyword_only
    @since("2.0.0")
    def setParams(self, featuresCol="features", predictionCol="prediction", maxIter=20,
                  seed=None, k=4, minDivisibleClusterSize=1.0):
        """
        setParams(self, featuresCol="features", predictionCol="prediction", maxIter=20, \
                  seed=None, k=4, minDivisibleClusterSize=1.0)
        Sets params for BisectingKMeans.
        """
        kwargs = self._input_kwargs
        return self._set(**kwargs)

    @since("2.0.0")
    def setK(self, value):
        """
        Sets the value of :py:attr:`k`.
        """
        return self._set(k=value)

    @since("2.0.0")
    def getK(self):
        """
        Gets the value of `k` or its default value.
        """
        return self.getOrDefault(self.k)

    @since("2.0.0")
    def setMinDivisibleClusterSize(self, value):
        """
        Sets the value of :py:attr:`minDivisibleClusterSize`.
        """
        return self._set(minDivisibleClusterSize=value)

    @since("2.0.0")
    def getMinDivisibleClusterSize(self):
        """
        Gets the value of `minDivisibleClusterSize` or its default value.
        """
        return self.getOrDefault(self.minDivisibleClusterSize)

    def _create_model(self, java_model):
        # Called by JavaEstimator.fit() to wrap the fitted JVM model.
        return BisectingKMeansModel(java_model)
class BisectingKMeansSummary(ClusteringSummary):
    """
    .. note:: Experimental

    Summary of BisectingKMeans clustering for a given model: the cluster
    assignments and related statistics gathered during training. All fields
    are inherited from :py:class:`ClusteringSummary`.

    .. versionadded:: 2.1.0
    """
    pass
@inherit_doc
class LDAModel(JavaModel):
    """
    Latent Dirichlet Allocation (LDA) model.
    This abstraction permits for different underlying representations,
    including local and distributed data structures.

    .. versionadded:: 2.0.0
    """

    @since("2.0.0")
    def isDistributed(self):
        """
        Indicates whether this instance is of type DistributedLDAModel
        """
        return self._call_java("isDistributed")

    @since("2.0.0")
    def vocabSize(self):
        """Vocabulary size (number of terms or words in the vocabulary)"""
        return self._call_java("vocabSize")

    @since("2.0.0")
    def topicsMatrix(self):
        """
        Inferred topics, where each topic is represented by a distribution over terms.
        This is a matrix of size vocabSize x k, where each column is a topic.
        No guarantees are given about the ordering of the topics.

        WARNING: If this model is actually a :py:class:`DistributedLDAModel` instance produced by
        the Expectation-Maximization ("em") `optimizer`, then this method could involve
        collecting a large amount of data to the driver (on the order of vocabSize x k).
        """
        return self._call_java("topicsMatrix")

    @since("2.0.0")
    def logLikelihood(self, dataset):
        """
        Calculates a lower bound on the log likelihood of the entire corpus.
        See Equation (16) in the Online LDA paper (Hoffman et al., 2010).

        WARNING: If this model is an instance of :py:class:`DistributedLDAModel` (produced when
        :py:attr:`optimizer` is set to "em"), this involves collecting a large
        :py:func:`topicsMatrix` to the driver. This implementation may be changed in the future.
        """
        return self._call_java("logLikelihood", dataset)

    @since("2.0.0")
    def logPerplexity(self, dataset):
        """
        Calculate an upper bound on perplexity. (Lower is better.)
        See Equation (16) in the Online LDA paper (Hoffman et al., 2010).

        WARNING: If this model is an instance of :py:class:`DistributedLDAModel` (produced when
        :py:attr:`optimizer` is set to "em"), this involves collecting a large
        :py:func:`topicsMatrix` to the driver. This implementation may be changed in the future.
        """
        return self._call_java("logPerplexity", dataset)

    @since("2.0.0")
    def describeTopics(self, maxTermsPerTopic=10):
        """
        Return the topics described by their top-weighted terms.

        :param maxTermsPerTopic: maximum number of top-weighted terms returned per topic.
        """
        return self._call_java("describeTopics", maxTermsPerTopic)

    @since("2.0.0")
    def estimatedDocConcentration(self):
        """
        Value for :py:attr:`LDA.docConcentration` estimated from data.
        If Online LDA was used and :py:attr:`LDA.optimizeDocConcentration` was set to false,
        then this returns the fixed (given) value for the :py:attr:`LDA.docConcentration` parameter.
        """
        return self._call_java("estimatedDocConcentration")
@inherit_doc
class DistributedLDAModel(LDAModel, JavaMLReadable, JavaMLWritable):
    """
    Distributed model fitted by :py:class:`LDA`.
    This type of model is currently only produced by Expectation-Maximization (EM).

    This model stores the inferred topics, the full training dataset, and the topic distribution
    for each training document.

    .. versionadded:: 2.0.0
    """

    @since("2.0.0")
    def toLocal(self):
        """
        Convert this distributed model to a local representation. This discards info about the
        training dataset.

        WARNING: This involves collecting a large :py:func:`topicsMatrix` to the driver.
        """
        return LocalLDAModel(self._call_java("toLocal"))

    @since("2.0.0")
    def trainingLogLikelihood(self):
        """
        Log likelihood of the observed tokens in the training set,
        given the current parameter estimates:
        log P(docs | topics, topic distributions for docs, Dirichlet hyperparameters)

        Notes:
          - This excludes the prior; for that, use :py:func:`logPrior`.
          - Even with :py:func:`logPrior`, this is NOT the same as the data log likelihood given
            the hyperparameters.
          - This is computed from the topic distributions computed during training. If you call
            :py:func:`logLikelihood` on the same training dataset, the topic distributions
            will be computed again, possibly giving different results.
        """
        return self._call_java("trainingLogLikelihood")

    @since("2.0.0")
    def logPrior(self):
        """
        Log probability of the current parameter estimate:
        log P(topics, topic distributions for docs | alpha, eta)
        """
        return self._call_java("logPrior")

    @since("2.0.0")
    def getCheckpointFiles(self):
        """
        If using checkpointing and :py:attr:`LDA.keepLastCheckpoint` is set to true, then there may
        be saved checkpoint files. This method is provided so that users can manage those files.

        .. note:: Removing the checkpoints can cause failures if a partition is lost and is needed
            by certain :py:class:`DistributedLDAModel` methods. Reference counting will clean up
            the checkpoints when this model and derivative data go out of scope.

        :return List of checkpoint files from training
        """
        return self._call_java("getCheckpointFiles")
@inherit_doc
class LocalLDAModel(LDAModel, JavaMLReadable, JavaMLWritable):
    """
    Local (non-distributed) model fitted by :py:class:`LDA`.

    Only the inferred topics are retained here; no information about the
    training dataset is stored (contrast with :py:class:`DistributedLDAModel`).

    .. versionadded:: 2.0.0
    """
    pass
@inherit_doc
class LDA(JavaEstimator, HasFeaturesCol, HasMaxIter, HasSeed, HasCheckpointInterval,
          JavaMLReadable, JavaMLWritable):
    """
    Latent Dirichlet Allocation (LDA), a topic model designed for text documents.

    Terminology:

    - "term" = "word": an element of the vocabulary
    - "token": instance of a term appearing in a document
    - "topic": multinomial distribution over terms representing some concept
    - "document": one piece of text, corresponding to one row in the input data

    Original LDA paper (journal version):
    Blei, Ng, and Jordan. "Latent Dirichlet Allocation." JMLR, 2003.

    Input data (featuresCol):
    LDA is given a collection of documents as input data, via the featuresCol parameter.
    Each document is specified as a :py:class:`Vector` of length vocabSize, where each entry is the
    count for the corresponding term (word) in the document. Feature transformers such as
    :py:class:`pyspark.ml.feature.Tokenizer` and :py:class:`pyspark.ml.feature.CountVectorizer`
    can be useful for converting text to word count vectors.

    >>> from pyspark.ml.linalg import Vectors, SparseVector
    >>> from pyspark.ml.clustering import LDA
    >>> df = spark.createDataFrame([[1, Vectors.dense([0.0, 1.0])],
    ...      [2, SparseVector(2, {0: 1.0})],], ["id", "features"])
    >>> lda = LDA(k=2, seed=1, optimizer="em")
    >>> model = lda.fit(df)
    >>> model.isDistributed()
    True
    >>> localModel = model.toLocal()
    >>> localModel.isDistributed()
    False
    >>> model.vocabSize()
    2
    >>> model.describeTopics().show()
    +-----+-----------+--------------------+
    |topic|termIndices|         termWeights|
    +-----+-----------+--------------------+
    |    0|     [1, 0]|[0.50401530077160...|
    |    1|     [0, 1]|[0.50401530077160...|
    +-----+-----------+--------------------+
    ...
    >>> model.topicsMatrix()
    DenseMatrix(2, 2, [0.496, 0.504, 0.504, 0.496], 0)
    >>> lda_path = temp_path + "/lda"
    >>> lda.save(lda_path)
    >>> sameLDA = LDA.load(lda_path)
    >>> distributed_model_path = temp_path + "/lda_distributed_model"
    >>> model.save(distributed_model_path)
    >>> sameModel = DistributedLDAModel.load(distributed_model_path)
    >>> local_model_path = temp_path + "/lda_local_model"
    >>> localModel.save(local_model_path)
    >>> sameLocalModel = LocalLDAModel.load(local_model_path)

    .. versionadded:: 2.0.0
    """

    # Param declarations; doc strings surface in explainParams(). Note that
    # every Param passes its converter via the `typeConverter` keyword for
    # consistency (keepLastCheckpoint previously passed it positionally).
    k = Param(Params._dummy(), "k", "The number of topics (clusters) to infer. Must be > 1.",
              typeConverter=TypeConverters.toInt)
    optimizer = Param(Params._dummy(), "optimizer",
                      "Optimizer or inference algorithm used to estimate the LDA model. "
                      "Supported: online, em", typeConverter=TypeConverters.toString)
    learningOffset = Param(Params._dummy(), "learningOffset",
                           "A (positive) learning parameter that downweights early iterations."
                           " Larger values make early iterations count less",
                           typeConverter=TypeConverters.toFloat)
    learningDecay = Param(Params._dummy(), "learningDecay", "Learning rate, set as an"
                          "exponential decay rate. This should be between (0.5, 1.0] to "
                          "guarantee asymptotic convergence.", typeConverter=TypeConverters.toFloat)
    subsamplingRate = Param(Params._dummy(), "subsamplingRate",
                            "Fraction of the corpus to be sampled and used in each iteration "
                            "of mini-batch gradient descent, in range (0, 1].",
                            typeConverter=TypeConverters.toFloat)
    optimizeDocConcentration = Param(Params._dummy(), "optimizeDocConcentration",
                                     "Indicates whether the docConcentration (Dirichlet parameter "
                                     "for document-topic distribution) will be optimized during "
                                     "training.", typeConverter=TypeConverters.toBoolean)
    docConcentration = Param(Params._dummy(), "docConcentration",
                             "Concentration parameter (commonly named \"alpha\") for the "
                             "prior placed on documents' distributions over topics (\"theta\").",
                             typeConverter=TypeConverters.toListFloat)
    topicConcentration = Param(Params._dummy(), "topicConcentration",
                               "Concentration parameter (commonly named \"beta\" or \"eta\") for "
                               "the prior placed on topic' distributions over terms.",
                               typeConverter=TypeConverters.toFloat)
    topicDistributionCol = Param(Params._dummy(), "topicDistributionCol",
                                 "Output column with estimates of the topic mixture distribution "
                                 "for each document (often called \"theta\" in the literature). "
                                 "Returns a vector of zeros for an empty document.",
                                 typeConverter=TypeConverters.toString)
    keepLastCheckpoint = Param(Params._dummy(), "keepLastCheckpoint",
                               "(For EM optimizer) If using checkpointing, this indicates whether"
                               " to keep the last checkpoint. If false, then the checkpoint will be"
                               " deleted. Deleting the checkpoint can cause failures if a data"
                               " partition is lost, so set this bit with care.",
                               typeConverter=TypeConverters.toBoolean)

    @keyword_only
    def __init__(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,
                 k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,
                 subsamplingRate=0.05, optimizeDocConcentration=True,
                 docConcentration=None, topicConcentration=None,
                 topicDistributionCol="topicDistribution", keepLastCheckpoint=True):
        """
        __init__(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,\
                  k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,\
                  subsamplingRate=0.05, optimizeDocConcentration=True,\
                  docConcentration=None, topicConcentration=None,\
                  topicDistributionCol="topicDistribution", keepLastCheckpoint=True)
        """
        super(LDA, self).__init__()
        # Fitting is delegated to the JVM-side estimator.
        self._java_obj = self._new_java_obj("org.apache.spark.ml.clustering.LDA", self.uid)
        self._setDefault(maxIter=20, checkpointInterval=10,
                         k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,
                         subsamplingRate=0.05, optimizeDocConcentration=True,
                         topicDistributionCol="topicDistribution", keepLastCheckpoint=True)
        kwargs = self._input_kwargs
        self.setParams(**kwargs)

    def _create_model(self, java_model):
        # EM produces a distributed model (keeps the training data on the
        # cluster); the online optimizer produces a local model.
        if self.getOptimizer() == "em":
            return DistributedLDAModel(java_model)
        else:
            return LocalLDAModel(java_model)

    @keyword_only
    @since("2.0.0")
    def setParams(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,
                  k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,
                  subsamplingRate=0.05, optimizeDocConcentration=True,
                  docConcentration=None, topicConcentration=None,
                  topicDistributionCol="topicDistribution", keepLastCheckpoint=True):
        """
        setParams(self, featuresCol="features", maxIter=20, seed=None, checkpointInterval=10,\
                  k=10, optimizer="online", learningOffset=1024.0, learningDecay=0.51,\
                  subsamplingRate=0.05, optimizeDocConcentration=True,\
                  docConcentration=None, topicConcentration=None,\
                  topicDistributionCol="topicDistribution", keepLastCheckpoint=True)

        Sets params for LDA.
        """
        kwargs = self._input_kwargs
        return self._set(**kwargs)

    @since("2.0.0")
    def setK(self, value):
        """
        Sets the value of :py:attr:`k`.

        >>> algo = LDA().setK(10)
        >>> algo.getK()
        10
        """
        return self._set(k=value)

    @since("2.0.0")
    def getK(self):
        """
        Gets the value of :py:attr:`k` or its default value.
        """
        return self.getOrDefault(self.k)

    @since("2.0.0")
    def setOptimizer(self, value):
        """
        Sets the value of :py:attr:`optimizer`.
        Currently only support 'em' and 'online'.

        >>> algo = LDA().setOptimizer("em")
        >>> algo.getOptimizer()
        'em'
        """
        return self._set(optimizer=value)

    @since("2.0.0")
    def getOptimizer(self):
        """
        Gets the value of :py:attr:`optimizer` or its default value.
        """
        return self.getOrDefault(self.optimizer)

    @since("2.0.0")
    def setLearningOffset(self, value):
        """
        Sets the value of :py:attr:`learningOffset`.

        >>> algo = LDA().setLearningOffset(100)
        >>> algo.getLearningOffset()
        100.0
        """
        return self._set(learningOffset=value)

    @since("2.0.0")
    def getLearningOffset(self):
        """
        Gets the value of :py:attr:`learningOffset` or its default value.
        """
        return self.getOrDefault(self.learningOffset)

    @since("2.0.0")
    def setLearningDecay(self, value):
        """
        Sets the value of :py:attr:`learningDecay`.

        >>> algo = LDA().setLearningDecay(0.1)
        >>> algo.getLearningDecay()
        0.1...
        """
        return self._set(learningDecay=value)

    @since("2.0.0")
    def getLearningDecay(self):
        """
        Gets the value of :py:attr:`learningDecay` or its default value.
        """
        return self.getOrDefault(self.learningDecay)

    @since("2.0.0")
    def setSubsamplingRate(self, value):
        """
        Sets the value of :py:attr:`subsamplingRate`.

        >>> algo = LDA().setSubsamplingRate(0.1)
        >>> algo.getSubsamplingRate()
        0.1...
        """
        return self._set(subsamplingRate=value)

    @since("2.0.0")
    def getSubsamplingRate(self):
        """
        Gets the value of :py:attr:`subsamplingRate` or its default value.
        """
        return self.getOrDefault(self.subsamplingRate)

    @since("2.0.0")
    def setOptimizeDocConcentration(self, value):
        """
        Sets the value of :py:attr:`optimizeDocConcentration`.

        >>> algo = LDA().setOptimizeDocConcentration(True)
        >>> algo.getOptimizeDocConcentration()
        True
        """
        return self._set(optimizeDocConcentration=value)

    @since("2.0.0")
    def getOptimizeDocConcentration(self):
        """
        Gets the value of :py:attr:`optimizeDocConcentration` or its default value.
        """
        return self.getOrDefault(self.optimizeDocConcentration)

    @since("2.0.0")
    def setDocConcentration(self, value):
        """
        Sets the value of :py:attr:`docConcentration`.

        >>> algo = LDA().setDocConcentration([0.1, 0.2])
        >>> algo.getDocConcentration()
        [0.1..., 0.2...]
        """
        return self._set(docConcentration=value)

    @since("2.0.0")
    def getDocConcentration(self):
        """
        Gets the value of :py:attr:`docConcentration` or its default value.
        """
        return self.getOrDefault(self.docConcentration)

    @since("2.0.0")
    def setTopicConcentration(self, value):
        """
        Sets the value of :py:attr:`topicConcentration`.

        >>> algo = LDA().setTopicConcentration(0.5)
        >>> algo.getTopicConcentration()
        0.5...
        """
        return self._set(topicConcentration=value)

    @since("2.0.0")
    def getTopicConcentration(self):
        """
        Gets the value of :py:attr:`topicConcentration` or its default value.
        """
        return self.getOrDefault(self.topicConcentration)

    @since("2.0.0")
    def setTopicDistributionCol(self, value):
        """
        Sets the value of :py:attr:`topicDistributionCol`.

        >>> algo = LDA().setTopicDistributionCol("topicDistributionCol")
        >>> algo.getTopicDistributionCol()
        'topicDistributionCol'
        """
        return self._set(topicDistributionCol=value)

    @since("2.0.0")
    def getTopicDistributionCol(self):
        """
        Gets the value of :py:attr:`topicDistributionCol` or its default value.
        """
        return self.getOrDefault(self.topicDistributionCol)

    @since("2.0.0")
    def setKeepLastCheckpoint(self, value):
        """
        Sets the value of :py:attr:`keepLastCheckpoint`.

        >>> algo = LDA().setKeepLastCheckpoint(False)
        >>> algo.getKeepLastCheckpoint()
        False
        """
        return self._set(keepLastCheckpoint=value)

    @since("2.0.0")
    def getKeepLastCheckpoint(self):
        """
        Gets the value of :py:attr:`keepLastCheckpoint` or its default value.
        """
        return self.getOrDefault(self.keepLastCheckpoint)
if __name__ == "__main__":
    # Doctest driver: runs every doctest in this module against a throwaway
    # local SparkSession and a temporary directory for save/load round-trips.
    import doctest
    import pyspark.ml.clustering
    from pyspark.sql import SparkSession
    globs = pyspark.ml.clustering.__dict__.copy()
    # The small batch size here ensures that we see multiple batches,
    # even in these small test examples:
    spark = SparkSession.builder\
        .master("local[2]")\
        .appName("ml.clustering tests")\
        .getOrCreate()
    sc = spark.sparkContext
    globs['sc'] = sc
    globs['spark'] = spark
    import tempfile
    temp_path = tempfile.mkdtemp()
    globs['temp_path'] = temp_path
    try:
        # ELLIPSIS lets doctests elide nondeterministic digits with "...".
        (failure_count, test_count) = doctest.testmod(globs=globs, optionflags=doctest.ELLIPSIS)
        spark.stop()
    finally:
        # Always remove the scratch directory, even if the doctests crash.
        from shutil import rmtree
        try:
            rmtree(temp_path)
        except OSError:
            pass
    if failure_count:
        exit(-1)
| apache-2.0 |
DocMarty84/oomusic | controllers/subsonic/media_retrieval.py | 1 | 8511 | # -*- coding: utf-8 -*-
import base64
import imghdr
import logging
import os
from io import BytesIO
from lxml import etree
from werkzeug.wrappers import Response
from werkzeug.wsgi import wrap_file
from odoo import http
from odoo.exceptions import AccessError
from odoo.http import request
from .common import SubsonicREST
_logger = logging.getLogger(__name__)
class MusicSubsonicMediaRetrieval(http.Controller):
    """Subsonic REST API endpoints for media retrieval (streaming, download,
    cover art, lyrics, avatars). Each handler authenticates the request via
    :class:`SubsonicREST` before doing any work."""

    @http.route(
        ["/rest/stream.view", "/rest/stream"],
        type="http",
        auth="public",
        csrf=False,
        methods=["GET", "POST"],
    )
    def stream(self, **kwargs):
        """Stream a track, transcoding on the fly when required.

        Falls back to sending the raw file when transcoding is globally
        disabled, the requested format is 'raw', the source already matches
        the requested format/bitrate, or no suitable transcoder exists.
        """
        rest = SubsonicREST(kwargs)
        success, response = rest.check_login()
        if not success:
            return response
        trackId = kwargs.get("id")
        if trackId:
            track = request.env["oomusic.track"].browse([int(trackId)])
            if not track.exists():
                return rest.make_error(code="70", message="Song not found")
        else:
            return rest.make_error(code="10", message='Required int parameter "id" is not present')
        # Specific case of transcoding disabled globally
        ConfigParam = request.env["ir.config_parameter"].sudo()
        if ConfigParam.get_param("oomusic.trans_disabled"):
            return http.send_file(track.path)
        output_format = kwargs.get("format", rest._get_format())
        maxBitRate = int(kwargs.get("maxBitRate", 0))
        estimateContentLength = kwargs.get("estimateContentLength", False)
        # Only for video
        # timeOffset = kwargs.get('timeOffset')
        # size = kwargs.get('size')
        # converted = kwargs.get('size', False)
        fn_ext = os.path.splitext(track.path)[1]
        # As specified in Subsonic API: if maxBitRate is set to zero, no limit is imposed. We also
        # avoid any upsampling.
        if output_format == "raw" or (
            fn_ext[1:] == output_format and (not maxBitRate or maxBitRate >= track.bitrate)
        ):
            return http.send_file(track.path)
        # Pick a transcoder for the target format, excluding those that
        # blacklist the source format.
        Transcoder = (
            request.env["oomusic.transcoder"]
            .search([("output_format.name", "=", output_format)])
            .filtered(lambda r: fn_ext[1:] not in r.mapped("black_formats.name"))
        )
        Transcoder = Transcoder[0] if Transcoder else False
        if Transcoder:
            generator = Transcoder.transcode(int(trackId), bitrate=maxBitRate).stdout
            mimetype = Transcoder.output_format.mimetype
        else:
            _logger.warning("Could not find converter from '%s' to '%s'", fn_ext[1:], output_format)
            return http.send_file(track.path)
        # Stream the transcoder's stdout directly to the client.
        data = wrap_file(
            request.httprequest.environ, generator, buffer_size=Transcoder.buffer_size * 1024
        )
        return Response(data, mimetype=mimetype, direct_passthrough=True)

    @http.route(
        ["/rest/download.view", "/rest/download"],
        type="http",
        auth="public",
        csrf=False,
        methods=["GET", "POST"],
    )
    def download(self, **kwargs):
        """Send the original track file as an attachment (no transcoding)."""
        rest = SubsonicREST(kwargs)
        success, response = rest.check_login()
        if not success:
            return response
        trackId = kwargs.get("id")
        if trackId:
            track = request.env["oomusic.track"].browse([int(trackId)])
            if not track.exists():
                return rest.make_error(code="70", message="Song not found")
        else:
            return rest.make_error(code="10", message='Required int parameter "id" is not present')
        return http.send_file(track.path, as_attachment=True)

    @http.route(
        ["/rest/hls.view", "/rest/hls"],
        type="http",
        auth="public",
        csrf=False,
        methods=["GET", "POST"],
    )
    def hls(self, **kwargs):
        """HLS streaming is not implemented; always returns error code 30."""
        rest = SubsonicREST(kwargs)
        success, response = rest.check_login()
        if not success:
            return response
        return rest.make_error(code="30", message="Feature not supported by server.")

    @http.route(
        ["/rest/getCaptions.view", "/rest/getCaptions"],
        type="http",
        auth="public",
        csrf=False,
        methods=["GET", "POST"],
    )
    def getCaptions(self, **kwargs):
        """Video captions are not implemented; always returns error code 30."""
        rest = SubsonicREST(kwargs)
        success, response = rest.check_login()
        if not success:
            return response
        return rest.make_error(code="30", message="Feature not supported by server.")

    @http.route(
        ["/rest/getCoverArt.view", "/rest/getCoverArt"],
        type="http",
        auth="public",
        csrf=False,
        methods=["GET", "POST"],
    )
    def getCoverArt(self, **kwargs):
        """Return cover art for an album ('al-<id>'), folder or track id.

        Access rights are checked explicitly since the route is public; a
        1x1 transparent GIF is served when no image is available.
        """
        rest = SubsonicREST(kwargs)
        success, response = rest.check_login()
        if not success:
            return response
        folderId = kwargs.get("id")
        if folderId:
            try:
                found = False
                # Ids prefixed with 'al-' refer to albums; otherwise try a
                # folder first, then fall back to the track's folder.
                if "al-" in folderId:
                    folder = request.env["oomusic.album"].browse([int(folderId.split("-")[-1])])
                else:
                    folder = request.env["oomusic.folder"].browse([int(folderId.split("-")[-1])])
                if folder.exists():
                    try:
                        folder.check_access_rights("read")
                        folder.check_access_rule("read")
                        found = True
                    except AccessError:
                        pass
                if not found:
                    track = request.env["oomusic.track"].browse([int(folderId.split("-")[-1])])
                    if track.exists():
                        try:
                            track.check_access_rights("read")
                            track.check_access_rule("read")
                            folder = track.folder_id
                            found = True
                        except AccessError:
                            pass
                if not found:
                    return rest.make_error(code="70", message="Cover art not found")
            # Was a bare `except:`, which would also swallow SystemExit and
            # KeyboardInterrupt; narrow to Exception.
            except Exception:
                _logger.warning(
                    "An error occurred while searching for folderId %s", folderId, exc_info=True
                )
                folder = request.env["oomusic.folder"]
        else:
            return rest.make_error(code="10", message='Required int parameter "id" is not present')
        # Only retrieve big image if necessary
        image_cache = "image_big_cache"
        image = "image_big"
        if "size" in kwargs and int(kwargs["size"]) < 256:
            image_cache = "image_medium_cache"
            image = "image_medium"
        # Fallback is a base64-encoded 1x1 transparent GIF.
        image = folder[image_cache] or folder[image] or b"R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs="
        image_stream = BytesIO(base64.b64decode(image))
        image_ext = "." + (imghdr.what(image_stream) or "png")
        return http.send_file(image_stream, filename=folderId + image_ext)

    @http.route(
        ["/rest/getLyrics.view", "/rest/getLyrics"],
        type="http",
        auth="public",
        csrf=False,
        methods=["GET", "POST"],
    )
    def getLyrics(self, **kwargs):
        """Return lyrics for the given artist/title as a Subsonic XML response."""
        rest = SubsonicREST(kwargs)
        success, response = rest.check_login()
        if not success:
            return response
        artist = kwargs.get("artist")
        title = kwargs.get("title")
        root = etree.Element("subsonic-response", status="ok", version=rest.version_server)
        xml_lyrics = rest.make_Lyrics(artist, title)
        root.append(xml_lyrics)
        return rest.make_response(root)

    @http.route(
        ["/rest/getAvatar.view", "/rest/getAvatar"],
        type="http",
        auth="public",
        csrf=False,
        methods=["GET", "POST"],
    )
    def getAvatar(self, **kwargs):
        """Return the avatar (partner image) of the user with the given login."""
        rest = SubsonicREST(kwargs)
        success, response = rest.check_login()
        if not success:
            return response
        username = kwargs.get("username")
        if not username:
            return rest.make_error(
                code="10", message='Required str parameter "username" is not present'
            )
        user = request.env["res.users"].search([("login", "=", username)])
        # Fallback is a base64-encoded 1x1 transparent GIF.
        image = user.partner_id.image_512 or b"R0lGODlhAQABAAD/ACwAAAAAAQABAAACADs="
        image_stream = BytesIO(base64.b64decode(image))
        image_ext = "." + (imghdr.what(image_stream) or "png")
        return http.send_file(image_stream, filename=str(user.id) + image_ext)
| mit |
kaiix/depot_tools | tests/git_common_test.py | 10 | 22700 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for git_common.py"""
import binascii
import collections
import os
import signal
import sys
import tempfile
import time
import unittest
DEPOT_TOOLS_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, DEPOT_TOOLS_ROOT)
from testing_support import coverage_utils
from testing_support import git_test_utils
class GitCommonTestBase(unittest.TestCase):
  """Base class exposing the git_common module as cls.gc, in test mode."""

  @classmethod
  def setUpClass(cls):
    import git_common
    super(GitCommonTestBase, cls).setUpClass()
    cls.gc = git_common
    git_common.TEST_MODE = True
class Support(GitCommonTestBase):
  """Tests for git_common's support utilities (memoize_one, once)."""

  def _testMemoizeOneBody(self, threadsafe):
    # Track how many times the wrapped function runs per argument, so the
    # assertions below can distinguish cached from recomputed calls.
    calls = collections.defaultdict(int)
    def double_if_even(val):
      calls[val] += 1
      return val * 2 if val % 2 == 0 else None
    # Use this explicitly as a wrapper fn instead of a decorator. Otherwise
    # pylint crashes (!!)
    double_if_even = self.gc.memoize_one(threadsafe=threadsafe)(double_if_even)

    # Even inputs are computed once and cached; None results (odd inputs)
    # are never cached, so they recompute on every call.
    self.assertEqual(4, double_if_even(2))
    self.assertEqual(4, double_if_even(2))
    self.assertEqual(None, double_if_even(1))
    self.assertEqual(None, double_if_even(1))
    self.assertDictEqual({1: 2, 2: 1}, calls)

    # set() primes the cache without invoking the function at all.
    double_if_even.set(10, 20)
    self.assertEqual(20, double_if_even(10))
    self.assertDictEqual({1: 2, 2: 1}, calls)

    # clear() drops everything, including primed entries, so each value is
    # recomputed (10 now goes through the real function: 10 * 2 == 20).
    double_if_even.clear()
    self.assertEqual(4, double_if_even(2))
    self.assertEqual(4, double_if_even(2))
    self.assertEqual(None, double_if_even(1))
    self.assertEqual(None, double_if_even(1))
    self.assertEqual(20, double_if_even(10))
    self.assertDictEqual({1: 4, 2: 2, 10: 1}, calls)

  def testMemoizeOne(self):
    self._testMemoizeOneBody(threadsafe=False)

  def testMemoizeOneThreadsafe(self):
    self._testMemoizeOneBody(threadsafe=True)

  def testOnce(self):
    testlist = []

    # This works around a bug in pylint
    once = self.gc.once

    @once
    def add_to_list():
      testlist.append('dog')

    add_to_list()
    add_to_list()
    add_to_list()
    add_to_list()

    # assertEquals is a deprecated alias; use assertEqual.
    self.assertEqual(testlist, ['dog'])
def slow_square(i):
  """Helper for ScopedPoolTest.

  Must be global because non top-level functions aren't pickleable.
  """
  return i * i
class ScopedPoolTest(GitCommonTestBase):
  # Signal used to simulate the user pressing Ctrl-C; Windows uses a
  # different event than POSIX SIGINT.
  CTRL_C = signal.CTRL_C_EVENT if sys.platform == 'win32' else signal.SIGINT

  def testThreads(self):
    """A thread-backed pool maps the function over the full input."""
    result = []
    with self.gc.ScopedPool(kind='threads') as pool:
      result = list(pool.imap(slow_square, xrange(10)))
    self.assertEqual([0, 1, 4, 9, 16, 25, 36, 49, 64, 81], result)

  def testThreadsCtrlC(self):
    """Ctrl-C mid-iteration aborts a thread pool, keeping partial results."""
    result = []
    with self.assertRaises(KeyboardInterrupt):
      with self.gc.ScopedPool(kind='threads') as pool:
        # Make sure this pool is interrupted in mid-swing
        for i in pool.imap(slow_square, xrange(20)):
          if i > 32:
            # Self-deliver Ctrl-C once we have consumed a few results.
            os.kill(os.getpid(), self.CTRL_C)
          result.append(i)
    self.assertEqual([0, 1, 4, 9, 16, 25], result)

  def testProcs(self):
    """The default (process-backed) pool maps over the full input."""
    result = []
    with self.gc.ScopedPool() as pool:
      result = list(pool.imap(slow_square, xrange(10)))
    self.assertEqual([0, 1, 4, 9, 16, 25, 36, 49, 64, 81], result)

  def testProcsCtrlC(self):
    """Ctrl-C mid-iteration aborts a process pool, keeping partial results."""
    result = []
    with self.assertRaises(KeyboardInterrupt):
      with self.gc.ScopedPool() as pool:
        # Make sure this pool is interrupted in mid-swing
        for i in pool.imap(slow_square, xrange(20)):
          if i > 32:
            # Self-deliver Ctrl-C once we have consumed a few results.
            os.kill(os.getpid(), self.CTRL_C)
          result.append(i)
    self.assertEqual([0, 1, 4, 9, 16, 25], result)
class ProgressPrinterTest(GitCommonTestBase):
  """Tests for git_common.ProgressPrinter."""

  class FakeStream(object):
    # Captures written lines in a set (order-insensitive, duplicates
    # collapse) and counts flush() calls.
    def __init__(self):
      self.data = set()
      self.count = 0

    def write(self, line):
      self.data.add(line)

    def flush(self):
      self.count += 1

  @unittest.expectedFailure
  def testBasic(self):
    """This test is probably racy, but I don't have a better alternative."""
    fmt = '%(count)d/10'
    stream = self.FakeStream()

    pp = self.gc.ProgressPrinter(fmt, enabled=True, fout=stream, period=0.01)
    with pp as inc:
      for _ in xrange(10):
        # Sleep longer than the printer's period so every count is printed.
        time.sleep(0.02)
        inc()

    filtered = {x.strip() for x in stream.data}
    # Expect one formatted line for each count 0..10 inclusive.
    rslt = {fmt % {'count': i} for i in xrange(11)}
    self.assertSetEqual(filtered, rslt)
    self.assertGreaterEqual(stream.count, 10)
class GitReadOnlyFunctionsTest(git_test_utils.GitRepoReadOnlyTestBase,
                               GitCommonTestBase):
  """Tests for git_common functions that only read repository state.

  The repo fixture is built from REPO_SCHEMA / COMMIT_* by the
  git_test_utils base class.
  """

  REPO_SCHEMA = """
  A B C D
    B E D
  """

  COMMIT_A = {
    'some/files/file1': {'data': 'file1'},
    'some/files/file2': {'data': 'file2'},
    'some/files/file3': {'data': 'file3'},
    'some/other/file': {'data': 'otherfile'},
  }

  COMMIT_C = {
    'some/files/file2': {
      'mode': 0755,
      'data': 'file2 - vanilla'},
  }

  COMMIT_E = {
    'some/files/file2': {'data': 'file2 - merged'},
  }

  COMMIT_D = {
    'some/files/file2': {'data': 'file2 - vanilla\nfile2 - merged'},
  }

  def testHashes(self):
    # hash_multi must resolve a mix of names, relative refs (~, ^2) and
    # tag dereferences (^{}) to full hashes, preserving order.
    ret = self.repo.run(
      self.gc.hash_multi, *[
        'master',
        'master~3',
        self.repo['E']+'~',
        self.repo['D']+'^2',
        'tag_C^{}',
      ]
    )
    self.assertEqual([
      self.repo['D'],
      self.repo['A'],
      self.repo['B'],
      self.repo['E'],
      self.repo['C'],
    ], ret)
    self.assertEquals(
      self.repo.run(self.gc.hash_one, 'branch_D'),
      self.repo['D']
    )
    # short=True returns an abbreviated prefix of the full hash.
    self.assertTrue(self.repo['D'].startswith(
      self.repo.run(self.gc.hash_one, 'branch_D', short=True)))

  def testStream(self):
    # run_stream must yield every commit hash exactly once.
    items = set(self.repo.commit_map.itervalues())

    def testfn():
      for line in self.gc.run_stream('log', '--format=%H').xreadlines():
        line = line.strip()
        self.assertIn(line, items)
        items.remove(line)

    self.repo.run(testfn)

  def testStreamWithRetcode(self):
    items = set(self.repo.commit_map.itervalues())

    def testfn():
      with self.gc.run_stream_with_retcode('log', '--format=%H') as stdout:
        for line in stdout.xreadlines():
          line = line.strip()
          self.assertIn(line, items)
          items.remove(line)

    self.repo.run(testfn)

  def testStreamWithRetcodeException(self):
    # A failing git command must surface as CalledProcessError on exit.
    import subprocess2
    with self.assertRaises(subprocess2.CalledProcessError):
      with self.gc.run_stream_with_retcode('checkout', 'unknown-branch'):
        pass

  def testCurrentBranch(self):
    def cur_branch_out_of_git():
      # Outside any git checkout there is no current branch.
      os.chdir('..')
      return self.gc.current_branch()

    self.assertIsNone(self.repo.run(cur_branch_out_of_git))

    self.repo.git('checkout', 'branch_D')
    self.assertEqual(self.repo.run(self.gc.current_branch), 'branch_D')

  def testBranches(self):
    # This check fails with git 2.4 (see crbug.com/487172)
    self.assertEqual(self.repo.run(set, self.gc.branches()),
                     {'master', 'branch_D', 'root_A'})

  def testDormant(self):
    self.assertFalse(self.repo.run(self.gc.is_dormant, 'master'))
    self.repo.git('config', 'branch.master.dormant', 'true')
    self.assertTrue(self.repo.run(self.gc.is_dormant, 'master'))

  def testParseCommitrefs(self):
    # Like hash_multi, but returns raw binary hashes.
    ret = self.repo.run(
      self.gc.parse_commitrefs, *[
        'master',
        'master~3',
        self.repo['E']+'~',
        self.repo['D']+'^2',
        'tag_C^{}',
      ]
    )
    self.assertEqual(ret, map(binascii.unhexlify, [
      self.repo['D'],
      self.repo['A'],
      self.repo['B'],
      self.repo['E'],
      self.repo['C'],
    ]))

    # Unresolvable refs must raise with a message naming the bad inputs.
    with self.assertRaisesRegexp(Exception, r"one of \('master', 'bananas'\)"):
      self.repo.run(self.gc.parse_commitrefs, 'master', 'bananas')

  def testTags(self):
    self.assertEqual(set(self.repo.run(self.gc.tags)),
                     {'tag_'+l for l in 'ABCDE'})

  def testTree(self):
    # tree() returns {name: (mode, type, hash)} for a single tree level.
    tree = self.repo.run(self.gc.tree, 'master:some/files')
    file1 = self.COMMIT_A['some/files/file1']['data']
    file2 = self.COMMIT_D['some/files/file2']['data']
    file3 = self.COMMIT_A['some/files/file3']['data']
    self.assertEquals(
      tree['file1'],
      ('100644', 'blob', git_test_utils.git_hash_data(file1)))
    # file2 got mode 0755 in COMMIT_C, hence '100755' here.
    self.assertEquals(
      tree['file2'],
      ('100755', 'blob', git_test_utils.git_hash_data(file2)))
    self.assertEquals(
      tree['file3'],
      ('100644', 'blob', git_test_utils.git_hash_data(file3)))

    tree = self.repo.run(self.gc.tree, 'master:some')
    self.assertEquals(len(tree), 2)
    # Don't check the tree hash because we're lazy :)
    self.assertEquals(tree['files'][:2], ('040000', 'tree'))

    # A non-existent path yields None rather than raising.
    tree = self.repo.run(self.gc.tree, 'master:wat')
    self.assertEqual(tree, None)

  def testTreeRecursive(self):
    # With recurse=True, nested blobs are keyed by their full relative path
    # and intermediate tree entries are omitted.
    tree = self.repo.run(self.gc.tree, 'master:some', recurse=True)
    file1 = self.COMMIT_A['some/files/file1']['data']
    file2 = self.COMMIT_D['some/files/file2']['data']
    file3 = self.COMMIT_A['some/files/file3']['data']
    other = self.COMMIT_A['some/other/file']['data']
    self.assertEquals(
      tree['files/file1'],
      ('100644', 'blob', git_test_utils.git_hash_data(file1)))
    self.assertEquals(
      tree['files/file2'],
      ('100755', 'blob', git_test_utils.git_hash_data(file2)))
    self.assertEquals(
      tree['files/file3'],
      ('100644', 'blob', git_test_utils.git_hash_data(file3)))
    self.assertEquals(
      tree['other/file'],
      ('100644', 'blob', git_test_utils.git_hash_data(other)))
class GitMutableFunctionsTest(git_test_utils.GitRepoReadWriteTestBase,
                              GitCommonTestBase):
  """Tests for git_common functions that modify repository state."""

  # Start from an empty repository; each test creates what it needs.
  REPO_SCHEMA = ''

  def _intern_data(self, data):
    # Helper: write |data| to a temp file and intern it as a git blob,
    # returning the blob hash.
    with tempfile.TemporaryFile() as f:
      f.write(data)
      f.seek(0)
      return self.repo.run(self.gc.intern_f, f)

  def testInternF(self):
    data = 'CoolBobcatsBro'
    data_hash = self._intern_data(data)
    self.assertEquals(git_test_utils.git_hash_data(data), data_hash)
    self.assertEquals(data, self.repo.git('cat-file', 'blob', data_hash).stdout)

  def testMkTree(self):
    # mktree from {name: (mode, type, hash)} must be deterministic; the
    # expected hash below is the known tree hash for these three blobs.
    tree = {}
    for i in 1, 2, 3:
      name = 'file%d' % i
      tree[name] = ('100644', 'blob', self._intern_data(name))
    tree_hash = self.repo.run(self.gc.mktree, tree)
    self.assertEquals('37b61866d6e061c4ba478e7eb525be7b5752737d', tree_hash)

  def testConfig(self):
    self.repo.git('config', '--add', 'happy.derpies', 'food')
    self.assertEquals(self.repo.run(self.gc.config_list, 'happy.derpies'),
                      ['food'])
    # Unset keys list as empty, not an error.
    self.assertEquals(self.repo.run(self.gc.config_list, 'sad.derpies'), [])

    self.repo.git('config', '--add', 'happy.derpies', 'cat')
    self.assertEquals(self.repo.run(self.gc.config_list, 'happy.derpies'),
                      ['food', 'cat'])

    # config() returns the supplied default when the key is unset.
    self.assertEquals('cat', self.repo.run(self.gc.config, 'dude.bob', 'cat'))

    self.repo.run(self.gc.set_config, 'dude.bob', 'dog')
    self.assertEquals('dog', self.repo.run(self.gc.config, 'dude.bob', 'cat'))

    self.repo.run(self.gc.del_config, 'dude.bob')

    # This should work without raising an exception
    self.repo.run(self.gc.del_config, 'dude.bob')

    self.assertEquals('cat', self.repo.run(self.gc.config, 'dude.bob', 'cat'))

    # root() defaults to origin/master, overridable via config.
    self.assertEquals('origin/master', self.repo.run(self.gc.root))

    self.repo.git('config', 'depot-tools.upstream', 'catfood')

    self.assertEquals('catfood', self.repo.run(self.gc.root))

  def testUpstream(self):
    self.repo.git('commit', '--allow-empty', '-am', 'foooooo')
    # Neither an unknown branch nor an untracked master has an upstream.
    self.assertEquals(self.repo.run(self.gc.upstream, 'bobly'), None)
    self.assertEquals(self.repo.run(self.gc.upstream, 'master'), None)
    self.repo.git('checkout', '-tb', 'happybranch', 'master')
    self.assertEquals(self.repo.run(self.gc.upstream, 'happybranch'),
                      'master')

  def testNormalizedVersion(self):
    # get_git_version must yield an all-integer (comparable) tuple.
    self.assertTrue(all(
      isinstance(x, int) for x in self.repo.run(self.gc.get_git_version)))

  def testGetBranchesInfo(self):
    self.repo.git('commit', '--allow-empty', '-am', 'foooooo')
    self.repo.git('checkout', '-tb', 'happybranch', 'master')
    self.repo.git('commit', '--allow-empty', '-am', 'foooooo')
    self.repo.git('checkout', '-tb', 'child', 'happybranch')

    # Create a branch whose upstream is then deleted.
    self.repo.git('checkout', '-tb', 'to_delete', 'master')
    self.repo.git('checkout', '-tb', 'parent_gone', 'to_delete')
    self.repo.git('branch', '-D', 'to_delete')

    # Ahead/behind counts are only available on new-enough git.
    supports_track = (
        self.repo.run(self.gc.get_git_version)
        >= self.gc.MIN_UPSTREAM_TRACK_GIT_VERSION)
    actual = self.repo.run(self.gc.get_branches_info, supports_track)

    # Entries are (short_hash, upstream, ahead, behind); deleted branches
    # map to None.
    expected = {
        'happybranch': (
            self.repo.run(self.gc.hash_one, 'happybranch', short=True),
            'master',
            1 if supports_track else None,
            None
        ),
        'child': (
            self.repo.run(self.gc.hash_one, 'child', short=True),
            'happybranch',
            None,
            None
        ),
        'master': (
            self.repo.run(self.gc.hash_one, 'master', short=True),
            '',
            None,
            None
        ),
        '': None,
        'parent_gone': (
            self.repo.run(self.gc.hash_one, 'parent_gone', short=True),
            'to_delete',
            None,
            None
        ),
        'to_delete': None
    }
    self.assertEquals(expected, actual)
class GitMutableStructuredTest(git_test_utils.GitRepoReadWriteTestBase,
                               GitCommonTestBase):
  """Tests for git_common operations over a structured branch topology.

  setUp wires explicit upstream relationships between the schema branches
  so merge-base / branch-tree logic can be exercised.
  """

  REPO_SCHEMA = """
  A B C D E F G
    B H I J K
          J L
  X Y Z

  CAT DOG
  """

  COMMIT_B = {'file': {'data': 'B'}}
  COMMIT_H = {'file': {'data': 'H'}}
  COMMIT_I = {'file': {'data': 'I'}}
  COMMIT_J = {'file': {'data': 'J'}}
  COMMIT_K = {'file': {'data': 'K'}}
  COMMIT_L = {'file': {'data': 'L'}}

  def setUp(self):
    super(GitMutableStructuredTest, self).setUp()
    # Upstream chain: branch_L -> branch_K -> branch_G -> root_A -> root_X,
    # plus branch_Z -> root_X.
    self.repo.git('branch', '--set-upstream-to', 'root_X', 'branch_Z')
    self.repo.git('branch', '--set-upstream-to', 'branch_G', 'branch_K')
    self.repo.git('branch', '--set-upstream-to', 'branch_K', 'branch_L')
    self.repo.git('branch', '--set-upstream-to', 'root_A', 'branch_G')
    self.repo.git('branch', '--set-upstream-to', 'root_X', 'root_A')

  def testTooManyBranches(self):
    # Push the branch count past the default limit of 20.
    for i in xrange(30):
      self.repo.git('branch', 'a'*i)

    _, rslt = self.repo.capture_stdio(list, self.gc.branches())
    self.assertIn('too many branches (39/20)', rslt)

    # A non-numeric limit falls back to the default.
    self.repo.git('config', 'depot-tools.branch-limit', 'cat')

    _, rslt = self.repo.capture_stdio(list, self.gc.branches())
    self.assertIn('too many branches (39/20)', rslt)

    self.repo.git('config', 'depot-tools.branch-limit', '100')

    # should not raise
    # This check fails with git 2.4 (see crbug.com/487172)
    self.assertEqual(38, len(self.repo.run(list, self.gc.branches())))

  def testMergeBase(self):
    self.repo.git('checkout', 'branch_K')

    self.assertEqual(
      self.repo['B'],
      self.repo.run(self.gc.get_or_create_merge_base, 'branch_K', 'branch_G')
    )

    self.assertEqual(
      self.repo['J'],
      self.repo.run(self.gc.get_or_create_merge_base, 'branch_L', 'branch_K')
    )

    # The computed base and the upstream it was computed against are cached
    # in branch config.
    self.assertEqual(
      self.repo['B'], self.repo.run(self.gc.config, 'branch.branch_K.base')
    )
    self.assertEqual(
      'branch_G', self.repo.run(self.gc.config, 'branch.branch_K.base-upstream')
    )

    # deadbeef is a bad hash, so this will result in repo['B']
    self.repo.run(self.gc.manual_merge_base, 'branch_K', 'deadbeef', 'branch_G')

    self.assertEqual(
      self.repo['B'],
      self.repo.run(self.gc.get_or_create_merge_base, 'branch_K', 'branch_G')
    )

    # but if we pick a real ancestor, then it'll work
    self.repo.run(self.gc.manual_merge_base, 'branch_K', self.repo['I'],
                  'branch_G')

    self.assertEqual(
      self.repo['I'],
      self.repo.run(self.gc.get_or_create_merge_base, 'branch_K', 'branch_G')
    )

    self.assertEqual({'branch_K': self.repo['I'], 'branch_L': self.repo['J']},
                     self.repo.run(self.gc.branch_config_map, 'base'))

    self.repo.run(self.gc.remove_merge_base, 'branch_K')
    self.repo.run(self.gc.remove_merge_base, 'branch_L')

    self.assertEqual(None,
                     self.repo.run(self.gc.config, 'branch.branch_K.base'))
    self.assertEqual({}, self.repo.run(self.gc.branch_config_map, 'base'))

    # if it's too old, then it caps at merge-base
    self.repo.run(self.gc.manual_merge_base, 'branch_K', self.repo['A'],
                  'branch_G')

    self.assertEqual(
      self.repo['B'],
      self.repo.run(self.gc.get_or_create_merge_base, 'branch_K', 'branch_G')
    )

    # If the user does --set-upstream-to something else, then we discard the
    # base and recompute it.
    self.repo.run(self.gc.run, 'branch', '-u', 'root_A')

    self.assertEqual(
      self.repo['A'],
      self.repo.run(self.gc.get_or_create_merge_base, 'branch_K')
    )

    # A branch with no configured upstream has no merge base.
    self.assertIsNone(
      self.repo.run(self.gc.get_or_create_merge_base, 'branch_DOG'))

  def testGetBranchTree(self):
    skipped, tree = self.repo.run(self.gc.get_branch_tree)
    # This check fails with git 2.4 (see crbug.com/487172)
    self.assertEqual(skipped, {'master', 'root_X', 'branch_DOG', 'root_CAT'})
    self.assertEqual(tree, {
      'branch_G': 'root_A',
      'root_A': 'root_X',
      'branch_K': 'branch_G',
      'branch_L': 'branch_K',
      'branch_Z': 'root_X'
    })

    # topo_iter yields (branch, parent) pairs in topological order.
    topdown = list(self.gc.topo_iter(tree))
    bottomup = list(self.gc.topo_iter(tree, top_down=False))

    self.assertEqual(topdown, [
      ('branch_Z', 'root_X'),
      ('root_A', 'root_X'),
      ('branch_G', 'root_A'),
      ('branch_K', 'branch_G'),
      ('branch_L', 'branch_K'),
    ])

    self.assertEqual(bottomup, [
      ('branch_L', 'branch_K'),
      ('branch_Z', 'root_X'),
      ('branch_K', 'branch_G'),
      ('branch_G', 'root_A'),
      ('root_A', 'root_X'),
    ])

  def testIsGitTreeDirty(self):
    self.assertEquals(False, self.repo.run(self.gc.is_dirty_git_tree, 'foo'))
    self.repo.open('test.file', 'w').write('test data')
    self.repo.git('add', 'test.file')
    self.assertEquals(True, self.repo.run(self.gc.is_dirty_git_tree, 'foo'))

  def testSquashBranch(self):
    self.repo.git('checkout', 'branch_K')

    self.assertEquals(True, self.repo.run(self.gc.squash_current_branch,
                                          'cool message'))

    # The squash commit message is 'cool message' followed by the
    # hash/subject of each squashed commit (H..K).
    lines = ['cool message', '']
    for l in 'HIJK':
      lines.extend((self.repo[l], l, ''))
    lines.pop()
    msg = '\n'.join(lines)

    self.assertEquals(self.repo.run(self.gc.run, 'log', '-n1', '--format=%B'),
                      msg)

    # The final file content matches the tip of the squashed range.
    self.assertEquals(
      self.repo.git('cat-file', 'blob', 'branch_K:file').stdout,
      'K'
    )

  def testSquashBranchEmpty(self):
    self.repo.git('checkout', 'branch_K')
    self.repo.git('checkout', 'branch_G', '.')
    self.repo.git('commit', '-m', 'revert all changes no branch')
    # Should return False since the quash would result in an empty commit
    stdout = self.repo.capture_stdio(self.gc.squash_current_branch)[0]
    self.assertEquals(stdout, 'Nothing to commit; squashed branch is empty\n')

  def testRebase(self):
    self.assertSchema("""
    A B C D E F G
      B H I J K
            J L
    X Y Z
    CAT DOG
    """)

    rslt = self.repo.run(
      self.gc.rebase, 'branch_G', 'branch_K~4', 'branch_K')
    self.assertTrue(rslt.success)

    self.assertSchema("""
    A B C D E F G H I J K
      B H I J L
    X Y Z
    CAT DOG
    """)

    # Rebasing branch_L over the rewritten branch_K conflicts; with
    # abort=True the rebase is rolled back.
    rslt = self.repo.run(
      self.gc.rebase, 'branch_K', 'branch_L~1', 'branch_L', abort=True)
    self.assertFalse(rslt.success)

    self.assertFalse(self.repo.run(self.gc.in_rebase))

    # With abort=False the repo is left mid-rebase for manual resolution.
    rslt = self.repo.run(
      self.gc.rebase, 'branch_K', 'branch_L~1', 'branch_L', abort=False)
    self.assertFalse(rslt.success)

    self.assertTrue(self.repo.run(self.gc.in_rebase))

    self.assertEqual(self.repo.git('status', '--porcelain').stdout, 'UU file\n')
    self.repo.git('checkout', '--theirs', 'file')
    self.repo.git('add', 'file')
    self.repo.git('rebase', '--continue')

    self.assertSchema("""
    A B C D E F G H I J K L
    X Y Z
    CAT DOG
    """)
class GitFreezeThaw(git_test_utils.GitRepoReadWriteTestBase):
  """Tests for git_common.freeze / git_common.thaw."""

  @classmethod
  def setUpClass(cls):
    super(GitFreezeThaw, cls).setUpClass()
    # Imported lazily, matching GitCommonTestBase above.
    import git_common
    cls.gc = git_common
    cls.gc.TEST_MODE = True

  REPO_SCHEMA = """
  A B C D
    B E D
  """

  COMMIT_A = {
    'some/files/file1': {'data': 'file1'},
    'some/files/file2': {'data': 'file2'},
    'some/files/file3': {'data': 'file3'},
    'some/other/file': {'data': 'otherfile'},
  }

  COMMIT_C = {
    'some/files/file2': {
      'mode': 0755,
      'data': 'file2 - vanilla'},
  }

  COMMIT_E = {
    'some/files/file2': {'data': 'file2 - merged'},
  }

  COMMIT_D = {
    'some/files/file2': {'data': 'file2 - vanilla\nfile2 - merged'},
  }

  def testNothing(self):
    # With a clean tree both operations return a message (non-None).
    self.assertIsNotNone(self.repo.run(self.gc.thaw))  # 'Nothing to thaw'
    self.assertIsNotNone(self.repo.run(self.gc.freeze))  # 'Nothing to freeze'

  def testAll(self):
    def inner():
      # Create a modified file, an untracked directory and a new file.
      with open('some/files/file2', 'a') as f2:
        print >> f2, 'cool appended line'
      os.mkdir('some/other_files')
      with open('some/other_files/subdir_file', 'w') as f3:
        print >> f3, 'new file!'
      with open('some/files/file5', 'w') as f5:
        print >> f5, 'New file!1!one!'
      STATUS_1 = '\n'.join((
        ' M some/files/file2',
        'A  some/files/file5',
        '?? some/other_files/'
      )) + '\n'

      self.repo.git('add', 'some/files/file5')

      # Freeze group 1
      self.assertEquals(self.repo.git('status', '--porcelain').stdout, STATUS_1)
      # freeze() returns None on success, tucking all changes away.
      self.assertIsNone(self.gc.freeze())
      self.assertEquals(self.repo.git('status', '--porcelain').stdout, '')

      # Freeze group 2
      with open('some/files/file2', 'a') as f2:
        print >> f2, 'new! appended line!'
      self.assertEquals(self.repo.git('status', '--porcelain').stdout,
                        ' M some/files/file2\n')
      self.assertIsNone(self.gc.freeze())
      self.assertEquals(self.repo.git('status', '--porcelain').stdout, '')

      # Thaw it out!
      self.assertIsNone(self.gc.thaw())
      self.assertIsNotNone(self.gc.thaw())  # One thaw should thaw everything

      self.assertEquals(self.repo.git('status', '--porcelain').stdout, STATUS_1)

    self.repo.run(inner)
if __name__ == '__main__':
  # Run the suite under coverage.  coverage_utils and DEPOT_TOOLS_ROOT are
  # defined earlier in the file (outside this excerpt).
  sys.exit(coverage_utils.covered_main(
    os.path.join(DEPOT_TOOLS_ROOT, 'git_common.py')))
| bsd-3-clause |
placiano/NBKernel_NK4 | scripts/build-all.py | 24 | 11569 | #! /usr/bin/env python
# Copyright (c) 2009-2014, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Linux Foundation nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Build the kernel for all targets using the Android build environment.
import glob
from optparse import OptionParser
import os
import re
import shutil
import subprocess
import sys
version = 'build-all.py, version 1.99'

# All per-target build output goes under this directory (relative to PWD).
build_dir = '../all-kernels'
# Default make targets; overridden by --oldconfig / --make-target in main().
make_command = ["vmlinux", "modules", "dtbs"]
# Parsed command-line options; replaced with the OptionParser result in main().
all_options = {}
# Optional 64-bit toolchain prefix; required only when building arm64 targets.
compile64 = os.environ.get('CROSS_COMPILE64')
def error(msg):
  """Write an error message to stderr without terminating."""
  message = "error: %s\n" % msg
  sys.stderr.write(message)
def fail(msg):
  """Fail with a user-printed message"""
  error(msg)
  raise SystemExit(1)
# Refuse to run at all when no cross toolchain is configured; every build
# below depends on CROSS_COMPILE.
if not os.environ.get('CROSS_COMPILE'):
  fail("CROSS_COMPILE must be set in the environment")
def check_kernel():
  """Ensure that PWD is a kernel directory"""
  required = ('MAINTAINERS', 'arch/arm/mach-msm/Kconfig')
  if not all(os.path.isfile(marker) for marker in required):
    fail("This doesn't seem to be an MSM kernel dir")
def check_build():
  """Ensure that the build directory is present.

  Creates build_dir when missing; a concurrent creation (EEXIST) is not an
  error, anything else is re-raised.
  """
  if not os.path.isdir(build_dir):
    try:
      os.makedirs(build_dir)
    except OSError as exc:
      # BUG FIX: 'errno' was referenced here without ever being imported,
      # so the EEXIST race-handling path raised NameError instead of
      # succeeding.  Import it locally to keep this fix self-contained.
      import errno
      if exc.errno == errno.EEXIST:
        pass
      else:
        raise
# Targets that failed while --keep-going was in effect; reported at the end
# of build_many().
failed_targets = []
class LogRunner:
  """Runs a subprocess, teeing its combined stdout/stderr to a log file.

  In verbose mode the output is mirrored to stdout; otherwise one dot per
  output line is printed as a progress indicator, wrapped every 64 dots.
  (Python 2 code: uses the 'print' statement.)
  """

  def __init__(self, logname, make_env):
    self.logname = logname
    self.fd = open(logname, 'w')
    self.make_env = make_env

  def run(self, args):
    """Run |args|; returns the subprocess exit status."""
    devnull = open('/dev/null', 'r')
    proc = subprocess.Popen(args, stdin=devnull,
                            env=self.make_env,
                            bufsize=0,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT)
    count = 0
    # for line in proc.stdout:
    # Reads the raw fd in chunks instead of iterating proc.stdout (the
    # commented-out alternative above) -- presumably to avoid the
    # iterator's buffering delaying output.  TODO(review): confirm.
    rawfd = proc.stdout.fileno()
    while True:
      line = os.read(rawfd, 1024)
      if not line:
        break
      self.fd.write(line)
      self.fd.flush()
      if all_options.verbose:
        sys.stdout.write(line)
        sys.stdout.flush()
      else:
        # One dot per newline in this chunk; wrap the dot column at 64.
        for i in range(line.count('\n')):
          count += 1
          if count == 64:
            count = 0
            print
          sys.stdout.write('.')
          sys.stdout.flush()
    print
    result = proc.wait()
    self.fd.flush()
    return result
class Builder():
  """Builds one kernel configuration into its own output directory."""

  def __init__(self, name, defconfig):
    """Set up the build environment for |defconfig|.

    name: short target name, used for the output/log directory names.
    defconfig: path to the defconfig under arch/arm{,64}/configs.
    """
    self.name = name
    self.defconfig = defconfig
    self.confname = self.defconfig.split('/')[-1]

    # Determine if this is a 64-bit target based on the location
    # of the defconfig.
    self.make_env = os.environ.copy()
    if "/arm64/" in defconfig:
      if compile64:
        self.make_env['CROSS_COMPILE'] = compile64
      else:
        fail("Attempting to build 64-bit, without setting CROSS_COMPILE64")
      self.make_env['ARCH'] = 'arm64'
    else:
      self.make_env['ARCH'] = 'arm'
    self.make_env['KCONFIG_NOTIMESTAMP'] = 'true'

  def build(self):
    """Configure and build this target, logging under build_dir."""
    dest_dir = os.path.join(build_dir, self.name)
    log_name = "%s/log-%s.log" % (build_dir, self.name)
    # Single-argument print with parentheses behaves identically under
    # Python 2 and Python 3.
    print('Building %s in %s log %s' % (self.name, dest_dir, log_name))
    if not os.path.isdir(dest_dir):
      os.mkdir(dest_dir)
    defconfig = self.defconfig
    dotconfig = '%s/.config' % dest_dir
    savedefconfig = '%s/defconfig' % dest_dir
    # shutil.copyfile(defconfig, dotconfig) # Not really right.

    staging_dir = 'install_staging'
    modi_dir = '%s' % staging_dir
    hdri_dir = '%s/usr' % staging_dir
    shutil.rmtree(os.path.join(dest_dir, staging_dir), ignore_errors=True)

    # First pass: generate the .config from the defconfig.
    with open('/dev/null', 'r') as devnull:
      subprocess.check_call(['make', 'O=%s' % dest_dir,
                             'SELINUX_DEFCONFIG=selinux_defconfig',
                             'SELINUX_LOG_DEFCONFIG=selinux_log_defconfig',
                             'TIMA_DEFCONFIG=tima_defconfig',
                             self.confname], env=self.make_env,
                            stdin=devnull)

    if not all_options.updateconfigs:
      # Build targets can be dependent upon the completion of
      # previous build targets, so build them one at a time.
      cmd_line = ['make',
                  'INSTALL_HDR_PATH=%s' % hdri_dir,
                  'INSTALL_MOD_PATH=%s' % modi_dir,
                  'O=%s' % dest_dir]
      build_targets = []
      for c in make_command:
        if re.match(r'^-{1,2}\w', c):
          # Pass flags (-j4, --foo) straight through; everything else is a
          # make target built individually below.
          cmd_line.append(c)
        else:
          build_targets.append(c)
      build = LogRunner(log_name, self.make_env)
      for t in build_targets:
        result = build.run(cmd_line + [t])
        if result != 0:
          if all_options.keep_going:
            # BUG FIX: this used to append the undefined name 'target',
            # raising NameError on the first failure under --keep-going.
            # build_many() reports failures via '.name', so record this
            # Builder instance.
            failed_targets.append(self)
            fail_or_error = error
          else:
            fail_or_error = fail
          fail_or_error("Failed to build %s, see %s" %
                        (t, build.logname))

    # Copy the defconfig back.
    if all_options.configs or all_options.updateconfigs:
      with open('/dev/null', 'r') as devnull:
        subprocess.check_call(['make', 'O=%s' % dest_dir,
                               'savedefconfig'], env=self.make_env,
                              stdin=devnull)
      shutil.copyfile(savedefconfig, defconfig)
def update_config(path, setting):
  """Append |setting| as a new line to the defconfig file at |path|.

  Parameters were renamed from 'file'/'str', which shadowed builtins; all
  callers in this script pass them positionally.
  """
  # Single-argument print with parentheses is valid Python 2 and Python 3.
  print('Updating %s with \'%s\'\n' % (path, setting))
  with open(path, 'a') as defconfig:
    defconfig.write(setting + '\n')
def scan_configs():
  """Get the full list of defconfigs appropriate for this tree."""
  arch_pats = (
    r'[fm]sm[0-9]*_defconfig',
    r'apq*_defconfig',
    r'qsd*_defconfig',
    r'mdm*_defconfig',
    r'mpq*_defconfig',
  )
  arch64_pats = (
    r'msm_defconfig',
  )
  builders = []
  # 32-bit targets live under arch/arm/configs.
  for pattern in arch_pats:
    for path in glob.glob('arch/arm/configs/' + pattern):
      # Strip the trailing '_defconfig' to form the target name.
      builders.append(Builder(os.path.basename(path)[:-10], path))
  # 64-bit targets are only considered when a 64-bit toolchain is set.
  if 'CROSS_COMPILE64' in os.environ:
    for pattern in arch64_pats:
      for path in glob.glob('arch/arm64/configs/' + pattern):
        builders.append(Builder(os.path.basename(path)[:-10] + "-64", path))
  return builders
def build_many(targets):
  """Build every Builder in |targets| sequentially.

  With --updateconfigs, each target's defconfig is amended first.  Any
  failures accumulated in failed_targets (under --keep-going) are reported
  at the end via fail().  (Python 2 code: uses the 'print' statement.)
  """
  print "Building %d target(s)" % len(targets)
  for target in targets:
    if all_options.updateconfigs:
      update_config(target.defconfig, all_options.updateconfigs)
    target.build()
  if failed_targets:
    fail("\n ".join(["Failed targets:"] +
                    [target.name for target in failed_targets]))
def main():
  """Parse options and dispatch to build_many() for the selected targets."""
  global make_command

  check_kernel()
  check_build()

  configs = scan_configs()

  usage = ("""
  %prog [options] all -- Build all targets
  %prog [options] target target ... -- List specific targets
  %prog [options] perf -- Build all perf targets
  %prog [options] noperf -- Build all non-perf targets""")
  parser = OptionParser(usage=usage, version=version)
  parser.add_option('--configs', action='store_true',
                    dest='configs',
                    help="Copy configs back into tree")
  parser.add_option('--list', action='store_true',
                    dest='list',
                    help='List available targets')
  parser.add_option('-v', '--verbose', action='store_true',
                    dest='verbose',
                    help='Output to stdout in addition to log file')
  parser.add_option('--oldconfig', action='store_true',
                    dest='oldconfig',
                    help='Only process "make oldconfig"')
  parser.add_option('--updateconfigs',
                    dest='updateconfigs',
                    help="Update defconfigs with provided option setting, "
                         "e.g. --updateconfigs=\'CONFIG_USE_THING=y\'")
  parser.add_option('-j', '--jobs', type='int', dest="jobs",
                    help="Number of simultaneous jobs")
  parser.add_option('-l', '--load-average', type='int',
                    dest='load_average',
                    help="Don't start multiple jobs unless load is below LOAD_AVERAGE")
  parser.add_option('-k', '--keep-going', action='store_true',
                    dest='keep_going', default=False,
                    help="Keep building other targets if a target fails")
  parser.add_option('-m', '--make-target', action='append',
                    help='Build the indicated make target (default: %s)' %
                         ' '.join(make_command))

  (options, args) = parser.parse_args()
  # Publish parsed options for Builder/LogRunner/build_many.
  global all_options
  all_options = options

  if options.list:
    print "Available targets:"
    for target in configs:
      print " %s" % target.name
    sys.exit(0)

  # --oldconfig replaces the make targets entirely; -m appends/overrides.
  if options.oldconfig:
    make_command = ["oldconfig"]
  elif options.make_target:
    make_command = options.make_target

  # -j / -l become make flags, passed through by Builder.build().
  if options.jobs:
    make_command.append("-j%d" % options.jobs)
  if options.load_average:
    make_command.append("-l%d" % options.load_average)

  if args == ['all']:
    build_many(configs)
  elif args == ['perf']:
    targets = []
    for t in configs:
      if "perf" in t.name:
        targets.append(t)
    build_many(targets)
  elif args == ['noperf']:
    targets = []
    for t in configs:
      if "perf" not in t.name:
        targets.append(t)
    build_many(targets)
  elif len(args) > 0:
    all_configs = {}
    for t in configs:
      all_configs[t.name] = t
    targets = []
    for t in args:
      if t not in all_configs:
        parser.error("Target '%s' not one of %s" % (t, all_configs.keys()))
      targets.append(all_configs[t])
    build_many(targets)
  else:
    parser.error("Must specify a target to build, or 'all'")
if __name__ == "__main__":
  main()
| gpl-2.0 |
SIDN/spin | scripts/spin_jrpc_client.py | 1 | 2942 | #!/usr/bin/env python3
#
# Very basic json-rpc client for SPIN WEB API
#
# Usage: spin_jrpc_client.py <command> [parameter]
import argparse
import json
import sys
import random
import requests
# Default SPIN web API endpoint.  NOTE(review): this constant is not
# referenced below -- the argparse --uri default duplicates it (with an
# http:// scheme); confirm which one is authoritative.
SPIN_WEB_URI = "192.168.8.1:8080/spin_api/jsonrpc"
class JsonRPCClient(object):
    """Very basic JSON-RPC 2.0 client for the SPIN web API.

    Builds a single request from a command name and a flat list of
    name/value parameter pairs, POSTs it to the endpoint, and prints the
    result (or the server-reported error).
    """

    def __init__(self, command, params, uri, verbose=False):
        self.command = command
        self.params = params
        self.uri = uri
        self.verbose = verbose

    def vprint(self, msg):
        """Print *msg* only when verbose mode is enabled."""
        if self.verbose:
            print(msg)

    def build_json_command(self):
        """Build the JSON-RPC request dict for this command.

        Parameters come in pairs: <parameter name> <parameter value>.
        Values are parsed as JSON when possible, so numbers, booleans and
        objects keep their types; anything that is not valid JSON is
        passed through as a plain string.
        """
        result = {
            'jsonrpc': '2.0',
            # Random id is sufficient for a single synchronous request.
            'id': random.randint(1, 65535),
            'method': self.command
        }
        if self.params:
            result['params'] = {}
            for i in range(0, len(self.params), 2):
                raw = self.params[i+1]
                try:
                    param_value = json.loads(raw)
                except json.decoder.JSONDecodeError:
                    # BUG FIX: this fallback used to re-quote the value with
                    # '"%s"' % raw and parse it again, which raised an
                    # unhandled JSONDecodeError whenever the value contained
                    # a double quote or backslash.  Using the raw string
                    # directly is equivalent for simple values and robust.
                    param_value = raw
                result['params'][self.params[i]] = param_value
        return result

    def send(self, json_cmd):
        """POST *json_cmd* to the endpoint and return the decoded response."""
        response = requests.post(url = self.uri, json=json_cmd)
        self.vprint("Return code: %d" % response.status_code)
        self.vprint("Raw response content: %s" % response.content.decode("utf-8"))
        return response.json()

    def run(self):
        """Build, send, and pretty-print the outcome of the RPC call."""
        json_cmd = self.build_json_command()
        self.vprint("JSON Command: %s" % json_cmd)
        result = self.send(json_cmd)
        # TODO: check id?
        if 'error' in result:
            print("Error from server!")
            print("Error code: %d" % result['error']['code'])
            print("Error message: %s" % result['error']['message'])
        elif 'result' in result:
            print(json.dumps(result['result'], indent=2))
        else:
            print(json.dumps(result, indent=2))
if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser(prog="spin_jrpc_client.py")
    arg_parser.add_argument('-u', '--uri', default='http://192.168.8.1/spin_api/jsonrpc',
                            help='base URI of the JSON-RPC web api endpoint')
    arg_parser.add_argument('-v', '--verbose', action="store_true", help="be verbose")
    arg_parser.add_argument('command', help='name of the rpc command')
    arg_parser.add_argument('params', nargs='*', help='command parameters; name, value pairs as separate arguments')
    args = arg_parser.parse_args()
    # Parameters must pair up as (name, value); an odd count is a user error.
    if len(args.params) % 2 != 0:
        sys.stderr.write("Error: method parameters must be parameter_name, parameter value pairs, as separate arguments\n")
        sys.exit(1)
    client = JsonRPCClient(args.command, args.params, args.uri, args.verbose)
    client.run()
| gpl-2.0 |
grevutiu-gabriel/thumbor | vows/sentry_error_handler_vows.py | 11 | 3447 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/globocom/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
from pyvows import Vows, expect
from thumbor import __version__
from thumbor.error_handlers.sentry import ErrorHandler
from thumbor.config import Config
from thumbor.context import Context, ServerParameters
class FakeSentry(object):
    """Test double for a Sentry client: records captures instead of sending."""

    def __init__(self, dsn):
        # The DSN is accepted only for interface compatibility; a fake
        # client never connects anywhere.
        self.captured_exceptions = []

    def captureException(self, exception, *args, **kw):
        record = (exception, args, kw)
        self.captured_exceptions.append(record)
class FakeRequest(object):
    """Canned stand-in for a tornado HTTP request object."""

    def __init__(self):
        self.url = "test/"
        self.method = "GET"
        self.arguments = []
        self.body = "body"
        self.query = "a=1&b=2"
        self.remote_ip = "127.0.0.1"
        self.headers = {
            'header1': 'value1',
            'Cookie': 'cookie1=value; cookie2=value2;',
        }

    def full_url(self):
        return "http://test/%s" % self.url
class FakeHandler(object):
    """Minimal stand-in for a tornado request handler: only .request."""

    def __init__(self):
        self.request = FakeRequest()
@Vows.batch
class SentryErrorHandlerVows(Vows.Context):
    """Vows (pyvows) for thumbor's Sentry error handler."""

    class WhenInvalidConfiguration(Vows.Context):
        @Vows.capture_error
        def topic(self):
            # No SENTRY_DSN_URL in the config: constructing the handler
            # should raise.
            cfg = Config()
            ErrorHandler(cfg)

        def should_be_error(self, topic):
            expect(topic).to_be_an_error()
            expect(topic).to_be_an_error_like(RuntimeError)

    class WhenErrorOccurs(Vows.Context):
        def topic(self):
            cfg = Config(SECURITY_KEY='ACME-SEC', SENTRY_DSN_URL="http://sentry-dsn-url")
            server = ServerParameters(8889, 'localhost', 'thumbor.conf', None, 'info', None)
            server.security_key = 'ACME-SEC'
            ctx = Context(server, cfg, None)
            # Inject the fake client so nothing is actually sent to Sentry.
            client_mock = FakeSentry("FAKE DSN")
            handler = ErrorHandler(cfg, client=client_mock)
            http_handler = FakeHandler()

            handler.handle_error(ctx, http_handler, RuntimeError("Test"))
            return client_mock

        def should_have_called_client(self, topic):
            expect(topic.captured_exceptions).not_to_be_empty()
            expect(topic.captured_exceptions).to_length(1)

            exception, args, kw = topic.captured_exceptions[0]
            expect(exception.__class__.__name__).to_equal("RuntimeError")
            expect(kw).to_include('data')
            expect(kw).to_include('extra')

            data, extra = kw['data'], kw['extra']
            expect(extra).to_include('thumbor-version')
            expect(extra['thumbor-version']).to_equal(__version__)
            expect(extra).to_include('Headers')
            expect(extra['Headers']).to_length(2)

            expect(extra['Headers']).to_include('Cookie')
            # The Cookie header is expected parsed into its two cookies.
            expect(extra['Headers']['Cookie']).to_length(2)

            expect(data['modules']).not_to_be_empty()
            # 'modules' content is environment-dependent; drop it before the
            # structural comparison below.
            del data['modules']

            expect(data).to_be_like({
                'sentry.interfaces.Http': {
                    'url': "http://test/test/",
                    'method': "GET",
                    'data': [],
                    'body': "body",
                    'query_string': "a=1&b=2"
                },
                'sentry.interfaces.User': {
                    'ip': "127.0.0.1",
                }
            })
| mit |
chenbaihu/grpc | test/core/end2end/gen_build_json.py | 3 | 5372 | #!/usr/bin/python2.7
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Generates the appropriate build.json data for all the end2end tests."""
import simplejson
END2END_FIXTURES = [
'chttp2_fake_security',
'chttp2_fullstack',
'chttp2_fullstack_uds',
'chttp2_simple_ssl_fullstack',
'chttp2_simple_ssl_with_oauth2_fullstack',
'chttp2_socket_pair',
'chttp2_socket_pair_one_byte_at_a_time',
]
END2END_TESTS = [
'cancel_after_accept',
'cancel_after_accept_and_writes_closed',
'cancel_after_invoke',
'cancel_before_invoke',
'cancel_in_a_vacuum',
'census_simple_request',
'disappearing_server',
'early_server_shutdown_finishes_inflight_calls',
'early_server_shutdown_finishes_tags',
'empty_batch',
'graceful_server_shutdown',
'invoke_large_request',
'max_concurrent_streams',
'no_op',
'ping_pong_streaming',
'request_response_with_binary_metadata_and_payload',
'request_response_with_metadata_and_payload',
'request_response_with_payload',
'request_with_large_metadata',
'request_with_payload',
'simple_delayed_request',
'simple_request',
'thread_stress',
'writes_done_hangs_with_pending_read',
'cancel_after_accept_legacy',
'cancel_after_accept_and_writes_closed_legacy',
'cancel_after_invoke_legacy',
'cancel_before_invoke_legacy',
'cancel_in_a_vacuum_legacy',
'census_simple_request_legacy',
'disappearing_server_legacy',
'early_server_shutdown_finishes_inflight_calls_legacy',
'early_server_shutdown_finishes_tags_legacy',
'graceful_server_shutdown_legacy',
'invoke_large_request_legacy',
'max_concurrent_streams_legacy',
'no_op_legacy',
'ping_pong_streaming_legacy',
'request_response_with_binary_metadata_and_payload_legacy',
'request_response_with_metadata_and_payload_legacy',
'request_response_with_payload_legacy',
'request_response_with_trailing_metadata_and_payload_legacy',
'request_with_large_metadata_legacy',
'request_with_payload_legacy',
'simple_delayed_request_legacy',
'simple_request_legacy',
'thread_stress_legacy',
'writes_done_hangs_with_pending_read_legacy',
]
def main():
  """Assemble and print the build.json data for all end2end tests.

  Produces (as pretty-printed JSON on stdout):
    * one private C library per entry in END2END_FIXTURES,
    * one private C library per entry in END2END_TESTS,
    * a shared 'end2end_certs' library with the SSL test data,
    * one test target for every (fixture, test) combination.
  """
  json = {
      '#': 'generated with test/end2end/gen_build_json.py',
      'libs': [
          {
              'name': 'end2end_fixture_%s' % f,
              'build': 'private',
              'language': 'c',
              # Fixtures may use SSL, so they are built as secure.
              'secure': True,
              'src': ['test/core/end2end/fixtures/%s.c' % f]
          }
          for f in END2END_FIXTURES] + [
          {
              'name': 'end2end_test_%s' % t,
              'build': 'private',
              'language': 'c',
              'secure': False,
              'src': ['test/core/end2end/tests/%s.c' % t],
              'headers': ['test/core/end2end/tests/cancel_test_helpers.h']
          }
          for t in END2END_TESTS] + [
          {
              'name': 'end2end_certs',
              'build': 'private',
              'language': 'c',
              'src': [
                  "test/core/end2end/data/test_root_cert.c",
                  "test/core/end2end/data/server1_cert.c",
                  "test/core/end2end/data/server1_key.c"
              ]
          }
      ],
      # Cartesian product: every fixture is exercised by every test.
      'targets': [
          {
              'name': '%s_%s_test' % (f, t),
              'build': 'test',
              'language': 'c',
              'src': [],
              'deps': [
                  'end2end_fixture_%s' % f,
                  'end2end_test_%s' % t,
                  'end2end_certs',
                  'grpc_test_util',
                  'grpc',
                  'gpr_test_util',
                  'gpr'
              ]
          }
          for f in END2END_FIXTURES
          for t in END2END_TESTS]}
  # Python 2 print statement -- this script targets python2.7 (see shebang).
  print simplejson.dumps(json, sort_keys=True, indent=2 * ' ')
if __name__ == '__main__':
  main()
| bsd-3-clause |
mlavin/django | tests/staticfiles_tests/cases.py | 39 | 4395 | import codecs
import os
import shutil
import tempfile
from django.conf import settings
from django.core.management import call_command
from django.template import Context, Template
from django.test import SimpleTestCase, override_settings
from .settings import TEST_SETTINGS
class BaseStaticFilesMixin:
    """
    Test case with a couple utility assertions.

    Relies on a ``_get_file(filepath)`` method supplied by the concrete
    test case (e.g. ``CollectionTestCase``) to read collected files.
    """
    def assertFileContains(self, filepath, text):
        # Assert that the contents returned by _get_file include ``text``.
        self.assertIn(
            text,
            self._get_file(filepath),
            "'%s' not in '%s'" % (text, filepath),
        )
    def assertFileNotFound(self, filepath):
        # Reading a missing file must raise IOError.
        with self.assertRaises(IOError):
            self._get_file(filepath)
    def render_template(self, template, **kwargs):
        # Accept either a template source string or a Template instance;
        # render with the given context and strip surrounding whitespace.
        if isinstance(template, str):
            template = Template(template)
        return template.render(Context(**kwargs)).strip()
    def static_template_snippet(self, path, asvar=False):
        # Build a minimal template using the {% static %} tag; ``asvar``
        # exercises the "{% static ... as var %}" form.  The doubled %%
        # escapes a literal % for the string-formatting pass below.
        if asvar:
            return "{%% load static from staticfiles %%}{%% static '%s' as var %%}{{ var }}" % path
        return "{%% load static from staticfiles %%}{%% static '%s' %%}" % path
    def assertStaticRenders(self, path, result, asvar=False, **kwargs):
        # Rendering the {% static %} tag for ``path`` must yield ``result``.
        template = self.static_template_snippet(path, asvar)
        self.assertEqual(self.render_template(template, **kwargs), result)
    def assertStaticRaises(self, exc, path, result, asvar=False, **kwargs):
        # Rendering the {% static %} tag for ``path`` must raise ``exc``.
        with self.assertRaises(exc):
            self.assertStaticRenders(path, result, **kwargs)
@override_settings(**TEST_SETTINGS)
class StaticFilesTestCase(BaseStaticFilesMixin, SimpleTestCase):
    # Plain static files test case: the mixin assertions with the shared
    # TEST_SETTINGS applied, but no collectstatic run in setUp.
    pass
@override_settings(**TEST_SETTINGS)
class CollectionTestCase(BaseStaticFilesMixin, SimpleTestCase):
    """
    Tests shared by all file finding features (collectstatic,
    findstatic, and static serve view).
    This relies on the asserts defined in BaseStaticFilesTestCase, but
    is separated because some test cases need those asserts without
    all these tests.
    """
    def setUp(self):
        super().setUp()
        # Collect into a fresh temporary STATIC_ROOT for each test.
        temp_dir = tempfile.mkdtemp()
        # Override the STATIC_ROOT for all tests from setUp to tearDown
        # rather than as a context manager
        self.patched_settings = self.settings(STATIC_ROOT=temp_dir)
        self.patched_settings.enable()
        self.run_collectstatic()
        # Same comment as in runtests.teardown.
        self.addCleanup(shutil.rmtree, temp_dir)
    def tearDown(self):
        self.patched_settings.disable()
        super().tearDown()
    def run_collectstatic(self, *, verbosity=0, **kwargs):
        # *.ignoreme files are excluded so tests can verify the ignore
        # machinery; keyword-only args forward extra collectstatic options.
        call_command('collectstatic', interactive=False, verbosity=verbosity,
                     ignore_patterns=['*.ignoreme'], **kwargs)
    def _get_file(self, filepath):
        # Read a collected file (UTF-8) from below STATIC_ROOT; used by
        # the mixin's assertFileContains/assertFileNotFound helpers.
        assert filepath, 'filepath is empty.'
        filepath = os.path.join(settings.STATIC_ROOT, filepath)
        with codecs.open(filepath, "r", "utf-8") as f:
            return f.read()
class TestDefaults:
    """
    A few standard test cases.

    Mixin only: expects the ``assertFileContains`` helper from
    BaseStaticFilesMixin on the concrete test case it is combined with.
    """
    def test_staticfiles_dirs(self):
        """
        Can find a file in a STATICFILES_DIRS directory.
        """
        self.assertFileContains('test.txt', 'Can we find')
        self.assertFileContains(os.path.join('prefix', 'test.txt'), 'Prefix')
    def test_staticfiles_dirs_subdir(self):
        """
        Can find a file in a subdirectory of a STATICFILES_DIRS
        directory.
        """
        self.assertFileContains('subdir/test.txt', 'Can we find')
    def test_staticfiles_dirs_priority(self):
        """
        File in STATICFILES_DIRS has priority over file in app.
        """
        self.assertFileContains('test/file.txt', 'STATICFILES_DIRS')
    def test_app_files(self):
        """
        Can find a file in an app static/ directory.
        """
        self.assertFileContains('test/file1.txt', 'file1 in the app dir')
    def test_nonascii_filenames(self):
        """
        Can find a file with non-ASCII character in an app static/ directory.
        """
        self.assertFileContains('test/⊗.txt', '⊗ in the app dir')
    def test_camelcase_filenames(self):
        """
        Can find a file with capital letters.
        """
        self.assertFileContains('test/camelCase.txt', 'camelCase')
    def test_filename_with_percent_sign(self):
        # Percent signs in names must not be mangled by URL/format handling.
        self.assertFileContains('test/%2F.txt', '%2F content')
| bsd-3-clause |
Maikflow/django_test | lib/python2.7/site-packages/Django-1.7.1-py2.7.egg/django/contrib/gis/db/backends/mysql/operations.py | 73 | 2319 | from django.db.backends.mysql.base import DatabaseOperations
from django.contrib.gis.db.backends.adapter import WKTAdapter
from django.contrib.gis.db.backends.base import BaseSpatialOperations
class MySQLOperations(DatabaseOperations, BaseSpatialOperations):
    """
    GIS operations for the MySQL backend.  MySQL only supports
    bounding-box (MBR) spatial relations, so every supported lookup
    maps onto an MBR* function.
    """
    compiler_module = 'django.contrib.gis.db.backends.mysql.compiler'
    mysql = True
    name = 'mysql'
    select = 'AsText(%s)'
    from_wkb = 'GeomFromWKB'
    from_text = 'GeomFromText'
    Adapter = WKTAdapter
    Adaptor = Adapter # Backwards-compatibility alias.
    # Mapping of Django lookup names to MySQL spatial (MBR) functions.
    geometry_functions = {
        'bbcontains': 'MBRContains', # For consistency w/PostGIS API
        'bboverlaps': 'MBROverlaps', # .. ..
        'contained': 'MBRWithin', # .. ..
        'contains': 'MBRContains',
        'disjoint': 'MBRDisjoint',
        'equals': 'MBREqual',
        'exact': 'MBREqual',
        'intersects': 'MBRIntersects',
        'overlaps': 'MBROverlaps',
        'same_as': 'MBREqual',
        'touches': 'MBRTouches',
        'within': 'MBRWithin',
    }
    # All valid spatial lookup terms for this backend.
    gis_terms = set(geometry_functions) | set(['isnull'])
    def geo_db_type(self, f):
        # MySQL uses the field's own OGC geometry type name as column type.
        return f.geom_type
    def get_geom_placeholder(self, value, srid):
        """
        The placeholder here has to include MySQL's WKT constructor. Because
        MySQL does not support spatial transformations, there is no need to
        modify the placeholder based on the contents of the given value.
        """
        if hasattr(value, 'expression'):
            placeholder = self.get_expression_column(value)
        else:
            placeholder = '%s(%%s)' % self.from_text
        return placeholder
    def spatial_lookup_sql(self, lvalue, lookup_type, value, field, qn):
        # Build the SQL fragment (and params) for a spatial lookup.
        geo_col, db_type = lvalue
        lookup_info = self.geometry_functions.get(lookup_type, False)
        if lookup_info:
            sql = "%s(%s, %s)" % (lookup_info, geo_col,
                                  self.get_geom_placeholder(value, field.srid))
            return sql, []
        # TODO: Is this really necessary? MySQL can't handle NULL geometries
        # in its spatial indexes anyways.
        if lookup_type == 'isnull':
            return "%s IS %sNULL" % (geo_col, ('' if value else 'NOT ')), []
        raise TypeError("Got invalid lookup_type: %s" % repr(lookup_type))
| gpl-2.0 |
lgarren/spack | var/spack/repos/builtin/packages/mozjs/package.py | 3 | 2737 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Mozjs(AutotoolsPackage):
    """SpiderMonkey is Mozilla's JavaScript engine written in C/C++.
    It is used in various Mozilla products, including Firefox, and is
    available under the MPL2."""
    homepage = "https://developer.mozilla.org/en-US/docs/Mozilla/Projects/SpiderMonkey"
    version('24.2.0', '5db79c10e049a2dc117a6e6a3bc78a8e',
            url="http://ftp.mozilla.org/pub/js/mozjs-24.2.0.tar.bz2")
    version('17.0.0', '20b6f8f1140ef6e47daa3b16965c9202',
            url="http://ftp.mozilla.org/pub/js/mozjs17.0.0.tar.gz")
    version('1.8.5', 'a4574365938222adca0a6bd33329cb32',
            url="http://ftp.mozilla.org/pub/js/js185-1.0.0.tar.gz")
    depends_on('perl@5.6:', type='build')
    depends_on('pkg-config@0.9.0:', type='build')
    depends_on('python@2.7.3:2.8', type='build')
    # nspr is only required for versions up to 27 (per the when clause).
    depends_on('nspr', when='@:27')
    depends_on('libffi@3.0.9:')
    depends_on('readline')
    depends_on('zlib@1.2.3')
    # The JS engine lives in a subdirectory of the Mozilla source tree;
    # build out-of-tree under js/src/spack-build.
    configure_directory = 'js/src'
    build_directory = 'js/src/spack-build'
    patch('perl-bug.patch')
    def configure_args(self):
        # Arguments passed to ./configure: link against the Spack-provided
        # zlib and nspr, and the system-wide (Spack) libffi.
        spec = self.spec
        return [
            '--enable-readline',  # enables readline support in JS shell
            '--enable-threadsafe',  # enables support for multiple threads
            '--enable-system-ffi',
            '--with-system-zlib={0}'.format(spec['zlib'].prefix),
            '--with-system-nspr',
            '--with-nspr-prefix={0}'.format(spec['nspr'].prefix),
        ]
| lgpl-2.1 |
blueboxgroup/cinder | cinder/tests/zonemanager/test_brcd_fc_zone_client_cli.py | 4 | 12363 | # (c) Copyright 2014 Brocade Communications Systems Inc.
# All Rights Reserved.
#
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Unit tests for brcd fc zone client cli."""
import mock
from mock import patch
from oslo_concurrency import processutils
from cinder import exception
from cinder.openstack.common import log as logging
from cinder import test
from cinder.zonemanager.drivers.brocade.brcd_fc_zone_client_cli \
import BrcdFCZoneClientCLI
import cinder.zonemanager.drivers.brocade.fc_zone_constants as ZoneConstant
LOG = logging.getLogger(__name__)
nsshow = '20:1a:00:05:1e:e8:e3:29'
switch_data = [' N 011a00;2,3;20:1a:00:05:1e:e8:e3:29;\
20:1a:00:05:1e:e8:e3:29;na',
' Fabric Port Name: 20:1a:00:05:1e:e8:e3:29']
cfgactvshow = ['Effective configuration:\n',
' cfg:\tOpenStack_Cfg\t\n',
' zone:\topenstack50060b0000c26604201900051ee8e329\t\n',
'\t\t50:06:0b:00:00:c2:66:04\n',
'\t\t20:19:00:05:1e:e8:e3:29\n']
active_zoneset = {
'zones': {
'openstack50060b0000c26604201900051ee8e329':
['50:06:0b:00:00:c2:66:04', '20:19:00:05:1e:e8:e3:29']},
'active_zone_config': 'OpenStack_Cfg'}
active_zoneset_multiple_zones = {
'zones': {
'openstack50060b0000c26604201900051ee8e329':
['50:06:0b:00:00:c2:66:04', '20:19:00:05:1e:e8:e3:29'],
'openstack50060b0000c26602201900051ee8e327':
['50:06:0b:00:00:c2:66:02', '20:19:00:05:1e:e8:e3:27']},
'active_zone_config': 'OpenStack_Cfg'}
new_zone = {'openstack10000012345678902001009876543210':
['10:00:00:12:34:56:78:90', '20:01:00:98:76:54:32:10']}
new_zones = {'openstack10000012345678902001009876543210':
['10:00:00:12:34:56:78:90', '20:01:00:98:76:54:32:10'],
'openstack10000011111111112001001111111111':
['10:00:00:11:11:11:11:11', '20:01:00:11:11:11:11:11']}
zone_names_to_delete = 'openstack50060b0000c26604201900051ee8e329'
supported_firmware = ['Kernel: 2.6', 'Fabric OS: v7.0.1']
unsupported_firmware = ['Fabric OS: v6.2.1']
class TestBrcdFCZoneClientCLI(BrcdFCZoneClientCLI, test.TestCase):
    """Unit tests for BrcdFCZoneClientCLI.

    Inherits from the client class itself so the tests can invoke its
    methods directly on ``self`` while patching out the SSH layer.
    """
    def setUp(self):
        super(TestBrcdFCZoneClientCLI, self).setUp()
    # override some of the functions
    def __init__(self, *args, **kwargs):
        # Bypass BrcdFCZoneClientCLI.__init__ (which would open an SSH
        # connection); only initialize the TestCase machinery.
        test.TestCase.__init__(self, *args, **kwargs)
    @patch.object(BrcdFCZoneClientCLI, '_get_switch_info')
    def test_get_active_zone_set(self, get_switch_info_mock):
        # Parsing of cfgactvshow output into the active zone set dict.
        cmd_list = [ZoneConstant.GET_ACTIVE_ZONE_CFG]
        get_switch_info_mock.return_value = cfgactvshow
        active_zoneset_returned = self.get_active_zone_set()
        get_switch_info_mock.assert_called_once_with(cmd_list)
        self.assertDictMatch(active_zoneset_returned, active_zoneset)
    @patch.object(BrcdFCZoneClientCLI, '_run_ssh')
    def test_get_active_zone_set_ssh_error(self, run_ssh_mock):
        # SSH failures must surface as BrocadeZoningCliException.
        run_ssh_mock.side_effect = processutils.ProcessExecutionError
        self.assertRaises(exception.BrocadeZoningCliException,
                          self.get_active_zone_set)
    @mock.patch.object(BrcdFCZoneClientCLI, 'get_active_zone_set')
    @mock.patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
    @mock.patch.object(BrcdFCZoneClientCLI, '_cfg_save')
    def test_add_zones_new_zone_no_activate(self, cfg_save_mock,
                                            apply_zone_change_mock,
                                            get_active_zs_mock):
        # Without activation, zones are added and the config only saved.
        get_active_zs_mock.return_value = active_zoneset
        self.add_zones(new_zones, False, None)
        get_active_zs_mock.assert_called_once_with()
        self.assertEqual(3, apply_zone_change_mock.call_count)
        cfg_save_mock.assert_called_once_with()
    @mock.patch.object(BrcdFCZoneClientCLI, 'get_active_zone_set')
    @mock.patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
    @mock.patch.object(BrcdFCZoneClientCLI, 'activate_zoneset')
    def test_add_zones_new_zone_activate(self, activate_zoneset_mock,
                                         apply_zone_change_mock,
                                         get_active_zs_mock):
        # With activation, the active zone config is re-activated after add.
        get_active_zs_mock.return_value = active_zoneset
        self.add_zones(new_zone, True, active_zoneset)
        self.assertEqual(2, apply_zone_change_mock.call_count)
        activate_zoneset_mock.assert_called_once_with(
            active_zoneset['active_zone_config'])
    @mock.patch.object(BrcdFCZoneClientCLI, '_ssh_execute')
    def test_activate_zoneset(self, ssh_execute_mock):
        ssh_execute_mock.return_value = True
        return_value = self.activate_zoneset('zoneset1')
        self.assertTrue(return_value)
    @mock.patch.object(BrcdFCZoneClientCLI, '_ssh_execute')
    def test_deactivate_zoneset(self, ssh_execute_mock):
        ssh_execute_mock.return_value = True
        return_value = self.deactivate_zoneset()
        self.assertTrue(return_value)
    @mock.patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
    @mock.patch.object(BrcdFCZoneClientCLI, '_cfg_save')
    def test_delete_zones_activate_false(self, cfg_save_mock,
                                         apply_zone_change_mock):
        # Deleting without activation: zone removed and config saved.
        with mock.patch.object(self, '_zone_delete') as zone_delete_mock:
            self.delete_zones(zone_names_to_delete, False,
                              active_zoneset_multiple_zones)
            self.assertEqual(1, apply_zone_change_mock.call_count)
            zone_delete_mock.assert_called_once_with(zone_names_to_delete)
            cfg_save_mock.assert_called_once_with()
    @patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
    @patch.object(BrcdFCZoneClientCLI, 'activate_zoneset')
    def test_delete_zones_activate_true(self, activate_zs_mock,
                                        apply_zone_change_mock):
        # Deleting with activation: active config re-activated afterwards.
        with mock.patch.object(self, '_zone_delete') \
                as zone_delete_mock:
            self.delete_zones(zone_names_to_delete, True,
                              active_zoneset_multiple_zones)
            self.assertEqual(1, apply_zone_change_mock.call_count)
            zone_delete_mock.assert_called_once_with(zone_names_to_delete)
            activate_zs_mock.assert_called_once_with(
                active_zoneset['active_zone_config'])
    @patch.object(BrcdFCZoneClientCLI, '_get_switch_info')
    def test_get_nameserver_info(self, get_switch_info_mock):
        # nsshow output is parsed into a list of WWPNs.
        ns_info_list = []
        ns_info_list_expected = ['20:1a:00:05:1e:e8:e3:29']
        get_switch_info_mock.return_value = (switch_data)
        ns_info_list = self.get_nameserver_info()
        self.assertEqual(ns_info_list, ns_info_list_expected)
    @patch.object(BrcdFCZoneClientCLI, '_run_ssh')
    def test_get_nameserver_info_ssh_error(self, run_ssh_mock):
        run_ssh_mock.side_effect = processutils.ProcessExecutionError
        self.assertRaises(exception.BrocadeZoningCliException,
                          self.get_nameserver_info)
    @patch.object(BrcdFCZoneClientCLI, '_ssh_execute')
    def test__cfg_save(self, ssh_execute_mock):
        cmd_list = [ZoneConstant.CFG_SAVE]
        self._cfg_save()
        ssh_execute_mock.assert_called_once_with(cmd_list, True, 1)
    @patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
    def test__zone_delete(self, apply_zone_change_mock):
        # Zone name is quoted on the CLI command line.
        zone_name = 'testzone'
        cmd_list = ['zonedelete', '"testzone"']
        self._zone_delete(zone_name)
        apply_zone_change_mock.assert_called_once_with(cmd_list)
    @patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
    def test__cfg_trans_abort(self, apply_zone_change_mock):
        # Abort is only issued when an abortable transaction exists.
        cmd_list = [ZoneConstant.CFG_ZONE_TRANS_ABORT]
        with mock.patch.object(self, '_is_trans_abortable') \
                as is_trans_abortable_mock:
            is_trans_abortable_mock.return_value = True
            self._cfg_trans_abort()
            is_trans_abortable_mock.assert_called_once_with()
            apply_zone_change_mock.assert_called_once_with(cmd_list)
    @patch.object(BrcdFCZoneClientCLI, '_run_ssh')
    def test__is_trans_abortable_true(self, run_ssh_mock):
        cmd_list = [ZoneConstant.CFG_SHOW_TRANS]
        run_ssh_mock.return_value = (Stream(ZoneConstant.TRANS_ABORTABLE),
                                     None)
        data = self._is_trans_abortable()
        self.assertTrue(data)
        run_ssh_mock.assert_called_once_with(cmd_list, True, 1)
    @patch.object(BrcdFCZoneClientCLI, '_run_ssh')
    def test__is_trans_abortable_ssh_error(self, run_ssh_mock):
        # Empty stdout/stderr streams are treated as an SSH error.
        run_ssh_mock.return_value = (Stream(), Stream())
        self.assertRaises(exception.BrocadeZoningCliException,
                          self._is_trans_abortable)
    @patch.object(BrcdFCZoneClientCLI, '_run_ssh')
    def test__is_trans_abortable_false(self, run_ssh_mock):
        cmd_list = [ZoneConstant.CFG_SHOW_TRANS]
        cfgtransshow = 'There is no outstanding zoning transaction'
        run_ssh_mock.return_value = (Stream(cfgtransshow), None)
        data = self._is_trans_abortable()
        self.assertFalse(data)
        run_ssh_mock.assert_called_once_with(cmd_list, True, 1)
    @patch.object(BrcdFCZoneClientCLI, '_run_ssh')
    def test_apply_zone_change(self, run_ssh_mock):
        cmd_list = [ZoneConstant.CFG_SAVE]
        run_ssh_mock.return_value = (None, None)
        self.apply_zone_change(cmd_list)
        run_ssh_mock.assert_called_once_with(cmd_list, True, 1)
    @patch.object(BrcdFCZoneClientCLI, '_run_ssh')
    def test__get_switch_info(self, run_ssh_mock):
        cmd_list = [ZoneConstant.NS_SHOW]
        nsshow_list = [nsshow]
        run_ssh_mock.return_value = (Stream(nsshow), Stream())
        switch_data = self._get_switch_info(cmd_list)
        self.assertEqual(switch_data, nsshow_list)
        run_ssh_mock.assert_called_once_with(cmd_list, True, 1)
    def test__parse_ns_output(self):
        # Lines without the expected field count raise InvalidParameterValue.
        invalid_switch_data = [' N 011a00;20:1a:00:05:1e:e8:e3:29']
        return_wwn_list = []
        expected_wwn_list = ['20:1a:00:05:1e:e8:e3:29']
        return_wwn_list = self._parse_ns_output(switch_data)
        self.assertEqual(return_wwn_list, expected_wwn_list)
        self.assertRaises(exception.InvalidParameterValue,
                          self._parse_ns_output, invalid_switch_data)
    @patch.object(BrcdFCZoneClientCLI, '_execute_shell_cmd')
    def test_is_supported_firmware(self, exec_shell_cmd_mock):
        exec_shell_cmd_mock.return_value = (supported_firmware, None)
        self.assertTrue(self.is_supported_firmware())
    @patch.object(BrcdFCZoneClientCLI, '_execute_shell_cmd')
    def test_is_supported_firmware_invalid(self, exec_shell_cmd_mock):
        exec_shell_cmd_mock.return_value = (unsupported_firmware, None)
        self.assertFalse(self.is_supported_firmware())
    @patch.object(BrcdFCZoneClientCLI, '_execute_shell_cmd')
    def test_is_supported_firmware_no_ssh_response(self, exec_shell_cmd_mock):
        exec_shell_cmd_mock.return_value = (None, Stream())
        self.assertFalse(self.is_supported_firmware())
    @patch.object(BrcdFCZoneClientCLI, '_execute_shell_cmd')
    def test_is_supported_firmware_ssh_error(self, exec_shell_cmd_mock):
        exec_shell_cmd_mock.side_effect = processutils.ProcessExecutionError
        self.assertRaises(exception.BrocadeZoningCliException,
                          self.is_supported_firmware)
class Channel(object):
    """Stub of a paramiko channel whose command always exits cleanly."""

    def recv_exit_status(self):
        # Exit status 0 == the remote command succeeded.
        return 0
class Stream(object):
    """Minimal in-memory file-like object emulating an SSH output stream.

    The whole stream content is kept in ``buffer``; ``channel`` mimics
    paramiko's attribute so callers can query an exit status.
    """

    def __init__(self, buffer=''):
        self.channel = Channel()
        self.buffer = buffer

    def readlines(self):
        # Contract of the stub: hand back the raw buffer string as-is.
        return self.buffer

    def splitlines(self):
        return self.buffer.splitlines()

    def close(self):
        # Nothing to release for an in-memory stub.
        pass

    def flush(self):
        # Flushing discards the buffered content.
        self.buffer = ''
| apache-2.0 |
brettlangdon/dd-agent | tests/core/test_tail.py | 38 | 2099 | import logging
import subprocess
import tempfile
import unittest
class TestTail(unittest.TestCase):
    """Tests for utils.tailfile.TailFile, in particular its handling of
    logrotate's copytruncate rotation (file truncated in place)."""
    def setUp(self):
        # A scratch log file plus a logrotate config that rotates it
        # with 'copytruncate' (same inode is kept after rotation).
        self.log_file = tempfile.NamedTemporaryFile()
        self.logrotate_config = tempfile.NamedTemporaryFile()
        self.logrotate_config.write("""%s {
        copytruncate
        notifempty
        missingok
        rotate 1
        weekly
        }""" % self.log_file.name)
        self.logrotate_config.flush()
        self.logrotate_state_file = tempfile.NamedTemporaryFile()
        # Last line handed to the tail's line parser callback.
        self.last_line = None
    def _trigger_logrotate(self):
        # Run the system logrotate binary against our scratch config.
        subprocess.check_call([
            'logrotate',
            '-v', # Verbose logging
            '-f', # Force the rotation even though the file isn't old
            # Create a state file that you have file permissions for
            '-s', self.logrotate_state_file.name,
            self.logrotate_config.name
        ])
    def test_logrotate_copytruncate(self):
        from utils.tailfile import TailFile
        def line_parser(l):
            self.last_line = l
        tail = TailFile(logging.getLogger(), self.log_file.name, line_parser)
        self.assertEquals(tail._size, 0)
        # Write some data to the log file
        init_string = "hey there, I am a log\n"
        self.log_file.write(init_string)
        self.log_file.flush()
        # Consume from the tail
        gen = tail.tail(line_by_line=False, move_end=True)
        gen.next()
        # Verify that the tail consumed the data I wrote
        self.assertEquals(tail._size, len(init_string))
        try:
            # Trigger a copytruncate logrotation on the log file
            self._trigger_logrotate()
            # Write a new line to the log file
            new_string = "I am shorter\n"
            self.log_file.write(new_string)
            self.log_file.flush()
            # Verify that the tail recognized the logrotation
            gen.next()
            self.assertEquals(self.last_line, new_string[:-1], self.last_line)
        except OSError:
            # logrotate binary not installed on this host; skip the check.
            "logrotate is not present"
| bsd-3-clause |
anomen-s/programming-challenges | projecteuler.net/0099-Largest_exponential/solve.py | 1 | 1442 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#Comparing two numbers written in index form like 211 and 37 is not difficult, as any calculator would confirm that 211 = 2048 < 37 = 2187.
#
#However, confirming that 632382518061 > 519432525806 would be much more difficult, as both numbers contain over three million digits.
#
#Using base_exp.txt (right click and 'Save Link/Target As...'), a 22K text file containing one thousand lines with a base/exponent pair on each line, determine which line number has the greatest numerical value.
#
#NOTE: The first two lines in the file represent the numbers in the example given above.
# Observation:
# logarithm can be used to compare values
# log(a**b) = b * log(a)
import math
def readfile(filename):
    """Parse *filename* into a list of ``[base, exponent]`` integer pairs.

    Each line of the file must contain two comma-separated integers
    (the base/exp format of Project Euler's p099_base_exp.txt).
    """
    # ``with`` guarantees the file is closed even if reading fails,
    # replacing the manual try/finally bookkeeping.
    with open(filename) as f:
        data = f.readlines()
    return [list(map(int, x.split(','))) for x in data]
def log(base, exp):
    """Return log10(base ** exp) without computing the huge power.

    Uses the identity log(a**b) = b * log(a), so million-digit values
    can be compared via small floats.
    """
    return math.log10(base) * exp
def main():
    """Find the line in p099_base_exp.txt whose base**exp is largest.

    Prints the 1-based line number, the log10 of the winning value and
    the winning [base, exp] pair.  Also warns when two candidates are
    within a relative 1e-7 of each other (possible float-precision tie).
    """
    maxNl = 0
    maxLine = 0
    line = 0
    numbers = readfile('p099_base_exp.txt')
    for [nbase, nexp] in numbers:
        line = line + 1
        # Compare via logarithms: log(a**b) = b*log(a) keeps numbers small.
        nl = log(nbase, nexp)
    # print ([line, nbase, nexp, nl])
        if abs(maxNl-nl) < ((10**-7)*nl):
            print('warning close: ', line, maxLine, nl, maxNl)
        if (nl > maxNl):
            maxNl = nl
            maxLine = line
    print([maxLine, maxNl, numbers[line-1]])
if __name__ =='__main__':main()
| gpl-2.0 |
bratsche/Neutron-Drive | google_appengine/lib/django_1_2/django/core/files/storage.py | 44 | 8943 | import os
import errno
import urlparse
import itertools
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.core.files import locks, File
from django.core.files.move import file_move_safe
from django.utils.encoding import force_unicode, filepath_to_uri
from django.utils.functional import LazyObject
from django.utils.importlib import import_module
from django.utils.text import get_valid_filename
from django.utils._os import safe_join
__all__ = ('Storage', 'FileSystemStorage', 'DefaultStorage', 'default_storage')
class Storage(object):
    """
    A base storage class, providing some default behaviors that all other
    storage systems can inherit or override, as necessary.
    """
    # The following methods represent a public interface to private methods.
    # These shouldn't be overridden by subclasses unless absolutely necessary.
    def open(self, name, mode='rb', mixin=None):
        """
        Retrieves the specified file from storage, using the optional mixin
        class to customize what features are available on the File returned.
        """
        file = self._open(name, mode)
        if mixin:
            # Add the mixin as a parent class of the File returned from storage.
            file.__class__ = type(mixin.__name__, (mixin, file.__class__), {})
        return file
    def save(self, name, content):
        """
        Saves new content to the file specified by name. The content should be a
        proper File object, ready to be read from the beginning.
        """
        # Get the proper name for the file, as it will actually be saved.
        if name is None:
            name = content.name
        name = self.get_available_name(name)
        name = self._save(name, content)
        # Store filenames with forward slashes, even on Windows
        return force_unicode(name.replace('\\', '/'))
    # These methods are part of the public API, with default implementations.
    def get_valid_name(self, name):
        """
        Returns a filename, based on the provided filename, that's suitable for
        use in the target storage system.
        """
        return get_valid_filename(name)
    def get_available_name(self, name):
        """
        Returns a filename that's free on the target storage system, and
        available for new content to be written to.
        """
        dir_name, file_name = os.path.split(name)
        file_root, file_ext = os.path.splitext(file_name)
        # If the filename already exists, add an underscore and a number (before
        # the file extension, if one exists) to the filename until the generated
        # filename doesn't exist.
        # count.next() is Python 2 iterator protocol; yields 1, 2, 3, ...
        count = itertools.count(1)
        while self.exists(name):
            # file_ext includes the dot.
            name = os.path.join(dir_name, "%s_%s%s" % (file_root, count.next(), file_ext))
        return name
    def path(self, name):
        """
        Returns a local filesystem path where the file can be retrieved using
        Python's built-in open() function. Storage systems that can't be
        accessed using open() should *not* implement this method.
        """
        raise NotImplementedError("This backend doesn't support absolute paths.")
    # The following methods form the public API for storage systems, but with
    # no default implementations. Subclasses must implement *all* of these.
    def delete(self, name):
        """
        Deletes the specified file from the storage system.
        """
        raise NotImplementedError()
    def exists(self, name):
        """
        Returns True if a file referened by the given name already exists in the
        storage system, or False if the name is available for a new file.
        """
        raise NotImplementedError()
    def listdir(self, path):
        """
        Lists the contents of the specified path, returning a 2-tuple of lists;
        the first item being directories, the second item being files.
        """
        raise NotImplementedError()
    def size(self, name):
        """
        Returns the total size, in bytes, of the file specified by name.
        """
        raise NotImplementedError()
    def url(self, name):
        """
        Returns an absolute URL where the file's contents can be accessed
        directly by a Web browser.
        """
        raise NotImplementedError()
class FileSystemStorage(Storage):
    """
    Standard filesystem storage

    Stores files below ``location`` (default: settings.MEDIA_ROOT) and
    serves them under ``base_url`` (default: settings.MEDIA_URL).
    """
    def __init__(self, location=None, base_url=None):
        if location is None:
            location = settings.MEDIA_ROOT
        if base_url is None:
            base_url = settings.MEDIA_URL
        self.location = os.path.abspath(location)
        self.base_url = base_url
    def _open(self, name, mode='rb'):
        return File(open(self.path(name), mode))
    def _save(self, name, content):
        # Write ``content`` to disk under ``name``; returns the name the
        # file was actually saved as (it may change on collision).
        full_path = self.path(name)
        directory = os.path.dirname(full_path)
        if not os.path.exists(directory):
            os.makedirs(directory)
        elif not os.path.isdir(directory):
            raise IOError("%s exists and is not a directory." % directory)
        # There's a potential race condition between get_available_name and
        # saving the file; it's possible that two threads might return the
        # same name, at which point all sorts of fun happens. So we need to
        # try to create the file, but if it already exists we have to go back
        # to get_available_name() and try again.
        while True:
            try:
                # This file has a file path that we can move.
                if hasattr(content, 'temporary_file_path'):
                    file_move_safe(content.temporary_file_path(), full_path)
                    content.close()
                # This is a normal uploadedfile that we can stream.
                else:
                    # This fun binary flag incantation makes os.open throw an
                    # OSError if the file already exists before we open it.
                    fd = os.open(full_path, os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, 'O_BINARY', 0))
                    try:
                        # Hold an exclusive lock while streaming the chunks.
                        locks.lock(fd, locks.LOCK_EX)
                        for chunk in content.chunks():
                            os.write(fd, chunk)
                    finally:
                        locks.unlock(fd)
                        os.close(fd)
            except OSError, e:
                if e.errno == errno.EEXIST:
                    # Ooops, the file exists. We need a new file name.
                    name = self.get_available_name(name)
                    full_path = self.path(name)
                else:
                    raise
            else:
                # OK, the file save worked. Break out of the loop.
                break
        if settings.FILE_UPLOAD_PERMISSIONS is not None:
            os.chmod(full_path, settings.FILE_UPLOAD_PERMISSIONS)
        return name
    def delete(self, name):
        name = self.path(name)
        # If the file exists, delete it from the filesystem.
        if os.path.exists(name):
            os.remove(name)
    def exists(self, name):
        return os.path.exists(self.path(name))
    def listdir(self, path):
        # Split the directory's entries into (directories, files).
        path = self.path(path)
        directories, files = [], []
        for entry in os.listdir(path):
            if os.path.isdir(os.path.join(path, entry)):
                directories.append(entry)
            else:
                files.append(entry)
        return directories, files
    def path(self, name):
        # safe_join prevents directory-traversal outside self.location.
        try:
            path = safe_join(self.location, name)
        except ValueError:
            raise SuspiciousOperation("Attempted access to '%s' denied." % name)
        return os.path.normpath(path)
    def size(self, name):
        return os.path.getsize(self.path(name))
    def url(self, name):
        if self.base_url is None:
            raise ValueError("This file is not accessible via a URL.")
        return urlparse.urljoin(self.base_url, filepath_to_uri(name))
def get_storage_class(import_path=None):
    """
    Import and return the storage class named by ``import_path`` (a
    dotted "module.ClassName" path), defaulting to
    settings.DEFAULT_FILE_STORAGE.  Raises ImproperlyConfigured when
    the path is malformed or the module/class cannot be loaded.
    """
    if import_path is None:
        import_path = settings.DEFAULT_FILE_STORAGE
    try:
        # rindex raises ValueError when there is no dot at all.
        dot = import_path.rindex('.')
    except ValueError:
        raise ImproperlyConfigured("%s isn't a storage module." % import_path)
    module, classname = import_path[:dot], import_path[dot+1:]
    try:
        mod = import_module(module)
    except ImportError, e:
        raise ImproperlyConfigured('Error importing storage module %s: "%s"' % (module, e))
    try:
        return getattr(mod, classname)
    except AttributeError:
        raise ImproperlyConfigured('Storage module "%s" does not define a "%s" class.' % (module, classname))
class DefaultStorage(LazyObject):
    """Lazy proxy that instantiates the configured default storage on first access."""
    def _setup(self):
        # Resolve and construct the storage backend only when first touched.
        storage_class = get_storage_class()
        self._wrapped = storage_class()
default_storage = DefaultStorage()
| bsd-3-clause |
chen0510566/MissionPlanner | Lib/genericpath.py | 77 | 3120 | """
Path operations common to more than one OS
Do not use directly. The OS specific modules import the appropriate
functions from this module themselves.
"""
import os
import stat
__all__ = ['commonprefix', 'exists', 'getatime', 'getctime', 'getmtime',
'getsize', 'isdir', 'isfile']
# Does a path exist?
# This is false for dangling symbolic links on systems that support them.
def exists(path):
    """Return True if *path* names an existing path.

    Broken symbolic links yield False, because os.stat() follows links.
    """
    try:
        os.stat(path)
    except os.error:
        return False
    else:
        return True
# This follows symbolic links, so both islink() and isdir() can be true
# for the same path on systems that support symlinks
def isfile(path):
    """Return True if *path* refers to a regular file (symlinks are followed)."""
    try:
        mode = os.stat(path).st_mode
    except os.error:
        return False
    return stat.S_ISREG(mode)
# Is a path a directory?
# This follows symbolic links, so both islink() and isdir()
# can be true for the same path on systems that support symlinks
def isdir(s):
    """Return True if the pathname *s* refers to an existing directory."""
    try:
        st_mode = os.stat(s).st_mode
    except os.error:
        return False
    return stat.S_ISDIR(st_mode)
def getsize(filename):
    """Return the size of *filename* in bytes, as reported by os.stat()."""
    return os.stat(filename).st_size
def getmtime(filename):
    """Return the last-modification timestamp of *filename*, via os.stat()."""
    return os.stat(filename).st_mtime
def getatime(filename):
    """Return the last-access timestamp of *filename*, via os.stat()."""
    return os.stat(filename).st_atime
def getctime(filename):
    """Return the metadata-change timestamp of *filename*, via os.stat()."""
    return os.stat(filename).st_ctime
# Return the longest prefix of all list elements.
def commonprefix(m):
    """Return the longest common leading component of the pathnames in *m*.

    Note: this is a character-wise prefix, not a path-component-wise one.
    """
    if not m:
        return ''
    # The lexicographic min and max differ earliest among all elements,
    # so comparing just those two yields the prefix common to the whole list.
    lo, hi = min(m), max(m)
    for idx, ch in enumerate(lo):
        if ch != hi[idx]:
            return lo[:idx]
    return lo
# Split a path in root and extension.
# The extension is everything starting at the last dot in the last
# pathname component; the root is everything before that.
# It is always true that root + ext == p.
# Generic implementation of splitext, to be parametrized with
# the separators
def _splitext(p, sep, altsep, extsep):
"""Split the extension from a pathname.
Extension is everything from the last dot to the end, ignoring
leading dots. Returns "(root, ext)"; ext may be empty."""
sepIndex = p.rfind(sep)
if altsep:
altsepIndex = p.rfind(altsep)
sepIndex = max(sepIndex, altsepIndex)
dotIndex = p.rfind(extsep)
if dotIndex > sepIndex:
# skip all leading dots
filenameIndex = sepIndex + 1
while filenameIndex < dotIndex:
if p[filenameIndex] != extsep:
return p[:dotIndex], p[dotIndex:]
filenameIndex += 1
return p, ''
| gpl-3.0 |
robhudson/django | tests/admin_views/tests.py | 41 | 303541 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
import os
import re
import unittest
from django.contrib.admin import AdminSite, ModelAdmin
from django.contrib.admin.helpers import ACTION_CHECKBOX_NAME
from django.contrib.admin.models import ADDITION, DELETION, LogEntry
from django.contrib.admin.options import TO_FIELD_VAR
from django.contrib.admin.templatetags.admin_static import static
from django.contrib.admin.templatetags.admin_urls import add_preserved_filters
from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase
from django.contrib.admin.utils import quote
from django.contrib.admin.views.main import IS_POPUP_VAR
from django.contrib.auth import REDIRECT_FIELD_NAME, get_permission_codename
from django.contrib.auth.models import Group, Permission, User
from django.contrib.contenttypes.models import ContentType
from django.contrib.staticfiles.storage import staticfiles_storage
from django.core import mail
from django.core.checks import Error
from django.core.files import temp as tempfile
from django.core.urlresolvers import NoReverseMatch, resolve, reverse
from django.forms.utils import ErrorList
from django.template.loader import render_to_string
from django.template.response import TemplateResponse
from django.test import (
SimpleTestCase, TestCase, ignore_warnings, modify_settings,
override_settings, skipUnlessDBFeature,
)
from django.test.utils import override_script_prefix, patch_logger
from django.utils import formats, six, translation
from django.utils._os import upath
from django.utils.cache import get_max_age
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import force_bytes, force_text, iri_to_uri
from django.utils.html import escape
from django.utils.http import urlencode
from django.utils.six.moves.urllib.parse import parse_qsl, urljoin, urlparse
from . import customadmin
from .admin import CityAdmin, site, site2
from .models import (
Actor, AdminOrderedAdminMethod, AdminOrderedCallable, AdminOrderedField,
AdminOrderedModelMethod, Answer, Article, BarAccount, Book, Bookmark,
Category, Chapter, ChapterXtra1, ChapterXtra2, Character, Child, Choice,
City, Collector, Color, Color2, ComplexSortedPerson, CoverLetter,
CustomArticle, CyclicOne, CyclicTwo, DooHickey, Employee, EmptyModel,
ExternalSubscriber, Fabric, FancyDoodad, FieldOverridePost,
FilteredManager, FooAccount, FoodDelivery, FunkyTag, Gallery, Grommet,
Inquisition, Language, MainPrepopulated, ModelWithStringPrimaryKey,
OtherStory, Paper, Parent, ParentWithDependentChildren, Person, Persona,
Picture, Pizza, Plot, PlotDetails, PluggableSearchPerson, Podcast, Post,
PrePopulatedPost, Promo, Question, Recommendation, Recommender,
RelatedPrepopulated, Report, Restaurant, RowLevelChangePermissionModel,
SecretHideout, Section, ShortMessage, Simple, State, Story, Subscriber,
SuperSecretHideout, SuperVillain, Telegram, TitleTranslation, Topping,
UnchangeableObject, UndeletableObject, UnorderedObject, Villain, Vodcast,
Whatsit, Widget, Worker, WorkHour,
)
# Login error shown for bad credentials; implicit string concatenation
# replaces the backslash line-continuation but yields the identical string.
ERROR_MESSAGE = (
    "Please enter the correct username and password "
    "for a staff account. Note that both fields may be case-sensitive."
)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF="admin_views.urls",
    USE_I18N=True, USE_L10N=False, LANGUAGE_CODE='en')
class AdminViewBasicTestCase(TestCase):
    """Shared fixture base for the admin-view tests: creates a set of users
    with different permission levels, sample model instances, and canned
    inline POST data; logs in as the superuser before each test."""
    @classmethod
    def setUpTestData(cls):
        # All users share the same SHA1 hash for the password "secret".
        # password = "secret"
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        # One section with three articles whose dates drive the sorting tests
        # (oldest 2000, middle 2008, newest 2009).
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.color1 = Color.objects.create(value='Red', warm=True)
        cls.color2 = Color.objects.create(value='Orange', warm=True)
        cls.color3 = Color.objects.create(value='Blue', warm=False)
        cls.color4 = Color.objects.create(value='Green', warm=False)
        cls.fab1 = Fabric.objects.create(surface='x')
        cls.fab2 = Fabric.objects.create(surface='y')
        cls.fab3 = Fabric.objects.create(surface='plain')
        cls.b1 = Book.objects.create(name='Book 1')
        cls.b2 = Book.objects.create(name='Book 2')
        cls.pro1 = Promo.objects.create(name='Promo 1', book=cls.b1)
        # NOTE(review): cls.pro1 is rebound here, so 'Promo 1' is no longer
        # reachable via a class attribute — looks unintentional; verify.
        cls.pro1 = Promo.objects.create(name='Promo 2', book=cls.b2)
        cls.chap1 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b1)
        cls.chap2 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b1)
        cls.chap3 = Chapter.objects.create(title='Chapter 1', content='[ insert contents here ]', book=cls.b2)
        cls.chap4 = Chapter.objects.create(title='Chapter 2', content='[ insert contents here ]', book=cls.b2)
        cls.cx1 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='ChapterXtra1 1')
        cls.cx2 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='ChapterXtra1 2')
        # Post data for edit inline
        cls.inline_post_data = {
            "name": "Test section",
            # inline data
            "article_set-TOTAL_FORMS": "6",
            "article_set-INITIAL_FORMS": "3",
            "article_set-MAX_NUM_FORMS": "0",
            "article_set-0-id": cls.a1.pk,
            # there is no title in database, give one here or formset will fail.
            "article_set-0-title": "Norske bostaver æøå skaper problemer",
            "article_set-0-content": "<p>Middle content</p>",
            "article_set-0-date_0": "2008-03-18",
            "article_set-0-date_1": "11:54:58",
            "article_set-0-section": cls.s1.pk,
            "article_set-1-id": cls.a2.pk,
            "article_set-1-title": "Need a title.",
            "article_set-1-content": "<p>Oldest content</p>",
            "article_set-1-date_0": "2000-03-18",
            "article_set-1-date_1": "11:54:58",
            "article_set-2-id": cls.a3.pk,
            "article_set-2-title": "Need a title.",
            "article_set-2-content": "<p>Newest content</p>",
            "article_set-2-date_0": "2009-03-18",
            "article_set-2-date_1": "11:54:58",
            "article_set-3-id": "",
            "article_set-3-title": "",
            "article_set-3-content": "",
            "article_set-3-date_0": "",
            "article_set-3-date_1": "",
            "article_set-4-id": "",
            "article_set-4-title": "",
            "article_set-4-content": "",
            "article_set-4-date_0": "",
            "article_set-4-date_1": "",
            "article_set-5-id": "",
            "article_set-5-title": "",
            "article_set-5-content": "",
            "article_set-5-date_0": "",
            "article_set-5-date_1": "",
        }
    def setUp(self):
        # Every test starts authenticated as the superuser fixture.
        self.client.login(username='super', password='secret')
    def tearDown(self):
        # Tests may override L10N formats; reset the cache between tests.
        formats.reset_format_cache()
    def assertContentBefore(self, response, text1, text2, failing_msg=None):
        """
        Testing utility asserting that text1 appears before text2 in response
        content.
        """
        self.assertEqual(response.status_code, 200)
        self.assertLess(response.content.index(force_bytes(text1)), response.content.index(force_bytes(text2)),
            failing_msg)
class AdminViewBasicTest(AdminViewBasicTestCase):
    def test_trailing_slash_required(self):
        """
        If you leave off the trailing slash, app should redirect and add it.
        """
        add_url = reverse('admin:admin_views_article_add')
        # GET the URL without its trailing slash; expect a permanent redirect.
        response = self.client.get(add_url[:-1])
        self.assertRedirects(response, add_url, status_code=301)
    def test_admin_static_template_tag(self):
        """
        Test that admin_static.static is pointing to the collectstatic version
        (as django.contrib.collectstatic is in installed apps).
        """
        old_url = staticfiles_storage.base_url
        staticfiles_storage.base_url = '/test/'
        # Restore the original base_url even if the assertion fails.
        try:
            self.assertEqual(static('path'), '/test/path')
        finally:
            staticfiles_storage.base_url = old_url
    def test_basic_add_GET(self):
        """
        A smoke test to ensure GET on the add_view works.
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        # The add view should render successfully as a TemplateResponse.
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def test_add_with_GET_args(self):
        """GET parameters should prepopulate the matching add-form fields."""
        response = self.client.get(reverse('admin:admin_views_section_add'), {'name': 'My Section'})
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'value="My Section"',
            msg_prefix="Couldn't find an input with the right value in the response")
    def test_basic_edit_GET(self):
        """
        A smoke test to ensure GET on the change_view works.
        """
        # Uses the Section fixture created in setUpTestData.
        response = self.client.get(reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
    def test_basic_edit_GET_string_PK(self):
        """
        Ensure GET on the change_view works (returns an HTTP 404 error, see
        #11191) when passing a string as the PK argument for a model with an
        integer PK field.
        """
        # 'abc' cannot be coerced to the integer PK, so a 404 is expected.
        response = self.client.get(reverse('admin:admin_views_section_change', args=('abc',)))
        self.assertEqual(response.status_code, 404)
    def test_basic_edit_GET_old_url_redirect(self):
        """
        The change URL changed in Django 1.9, but the old one still redirects.
        """
        # Strip the 'change/' suffix to simulate the pre-1.9 URL form.
        response = self.client.get(
            reverse('admin:admin_views_section_change', args=(self.s1.pk,)).replace('change/', '')
        )
        self.assertRedirects(response, reverse('admin:admin_views_section_change', args=(self.s1.pk,)))
    def test_basic_inheritance_GET_string_PK(self):
        """
        Ensure GET on the change_view works on inherited models (returns an
        HTTP 404 error, see #19951) when passing a string as the PK argument
        for a model with an integer PK field.
        """
        # SuperVillain inherits its integer PK; a string PK must still 404.
        response = self.client.get(reverse('admin:admin_views_supervillain_change', args=('abc',)))
        self.assertEqual(response.status_code, 404)
    def test_basic_add_POST(self):
        """
        A smoke test to ensure POST on add_view works.
        """
        post_data = {
            "name": "Another Section",
            # inline data
            "article_set-TOTAL_FORMS": "3",
            "article_set-INITIAL_FORMS": "0",
            "article_set-MAX_NUM_FORMS": "0",
        }
        response = self.client.post(reverse('admin:admin_views_section_add'), post_data)
        # A successful save redirects (exact target is not under test here).
        self.assertEqual(response.status_code, 302)  # redirect somewhere
    def test_popup_add_POST(self):
        """
        Ensure http response from a popup is properly escaped.
        """
        post_data = {
            '_popup': '1',
            'title': 'title with a new\nline',
            'content': 'some content',
            'date_0': '2010-09-10',
            'date_1': '14:55:39',
        }
        response = self.client.post(reverse('admin:admin_views_article_add'), post_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissAddRelatedObjectPopup')
        # The embedded newline must be JS-escaped in the popup response.
        self.assertContains(response, 'title with a new\\u000Aline')
    def test_basic_edit_POST(self):
        """
        A smoke test to ensure POST on edit_view works.
        """
        url = reverse('admin:admin_views_section_change', args=(self.s1.pk,))
        # inline_post_data comes from the fixture base class.
        response = self.client.post(url, self.inline_post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
    def test_edit_save_as(self):
        """
        Test "save as".
        """
        post_data = self.inline_post_data.copy()
        # '_saveasnew' makes the change view create a fresh object instead.
        post_data.update({
            '_saveasnew': 'Save+as+new',
            "article_set-1-section": "1",
            "article_set-2-section": "1",
            "article_set-3-section": "1",
            "article_set-4-section": "1",
            "article_set-5-section": "1",
        })
        response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
    def test_edit_save_as_delete_inline(self):
        """
        Should be able to "Save as new" while also deleting an inline.
        """
        post_data = self.inline_post_data.copy()
        post_data.update({
            '_saveasnew': 'Save+as+new',
            "article_set-1-section": "1",
            "article_set-2-section": "1",
            # Mark the third inline article for deletion during the save-as.
            "article_set-2-DELETE": "1",
            "article_set-3-section": "1",
        })
        response = self.client.post(reverse('admin:admin_views_section_change', args=(self.s1.pk,)), post_data)
        self.assertEqual(response.status_code, 302)
        # started with 3 articles, one was deleted.
        self.assertEqual(Section.objects.latest('id').article_set.count(), 2)
    def test_change_list_sorting_callable(self):
        """
        Ensure we can sort on a list_display field that is a callable
        (column 2 is callable_year in ArticleAdmin)
        """
        # 'o': 2 sorts ascending on the callable column → oldest first.
        response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': 2})
        self.assertContentBefore(response, 'Oldest content', 'Middle content',
            "Results of sorting on callable are out of order.")
        self.assertContentBefore(response, 'Middle content', 'Newest content',
            "Results of sorting on callable are out of order.")
    def test_change_list_sorting_model(self):
        """
        Ensure we can sort on a list_display field that is a Model method
        (column 3 is 'model_year' in ArticleAdmin)
        """
        # '-3' requests descending order on the model-method column.
        response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-3'})
        self.assertContentBefore(response, 'Newest content', 'Middle content',
            "Results of sorting on Model method are out of order.")
        self.assertContentBefore(response, 'Middle content', 'Oldest content',
            "Results of sorting on Model method are out of order.")
    def test_change_list_sorting_model_admin(self):
        """
        Ensure we can sort on a list_display field that is a ModelAdmin method
        (column 4 is 'modeladmin_year' in ArticleAdmin)
        """
        # Ascending sort on the ModelAdmin-method column → oldest first.
        response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '4'})
        self.assertContentBefore(response, 'Oldest content', 'Middle content',
            "Results of sorting on ModelAdmin method are out of order.")
        self.assertContentBefore(response, 'Middle content', 'Newest content',
            "Results of sorting on ModelAdmin method are out of order.")
    def test_change_list_sorting_model_admin_reverse(self):
        """
        Ensure we can sort on a list_display field that is a ModelAdmin
        method in reverse order (i.e. admin_order_field uses the '-' prefix)
        (column 6 is 'model_year_reverse' in ArticleAdmin)
        """
        # Ascending on a reverse-prefixed field means newest year first.
        response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '6'})
        self.assertContentBefore(response, '2009', '2008',
            "Results of sorting on ModelAdmin method are out of order.")
        self.assertContentBefore(response, '2008', '2000',
            "Results of sorting on ModelAdmin method are out of order.")
        # Let's make sure the ordering is right and that we don't get a
        # FieldError when we change to descending order
        response = self.client.get(reverse('admin:admin_views_article_changelist'), {'o': '-6'})
        self.assertContentBefore(response, '2000', '2008',
            "Results of sorting on ModelAdmin method are out of order.")
        self.assertContentBefore(response, '2008', '2009',
            "Results of sorting on ModelAdmin method are out of order.")
    def test_change_list_sorting_multiple(self):
        """Sorting by multiple columns ('o' as dotted indexes) is honored."""
        p1 = Person.objects.create(name="Chris", gender=1, alive=True)
        p2 = Person.objects.create(name="Chris", gender=2, alive=True)
        p3 = Person.objects.create(name="Bob", gender=1, alive=True)
        link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
        link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
        link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
        # Sort by name, gender
        response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '1.2'})
        self.assertContentBefore(response, link3, link1)
        self.assertContentBefore(response, link1, link2)
        # Sort by gender descending, name
        response = self.client.get(reverse('admin:admin_views_person_changelist'), {'o': '-2.1'})
        self.assertContentBefore(response, link2, link3)
        self.assertContentBefore(response, link3, link1)
    def test_change_list_sorting_preserve_queryset_ordering(self):
        """
        If no ordering is defined in `ModelAdmin.ordering` or in the query
        string, then the underlying order of the queryset should not be
        changed, even if it is defined in `Modeladmin.get_queryset()`.
        Refs #11868, #7309.
        """
        p1 = Person.objects.create(name="Amy", gender=1, alive=True, age=80)
        p2 = Person.objects.create(name="Bob", gender=1, alive=True, age=70)
        p3 = Person.objects.create(name="Chris", gender=2, alive=False, age=60)
        link1 = reverse('admin:admin_views_person_change', args=(p1.pk,))
        link2 = reverse('admin:admin_views_person_change', args=(p2.pk,))
        link3 = reverse('admin:admin_views_person_change', args=(p3.pk,))
        # No 'o' parameter: the get_queryset() ordering must survive intact.
        response = self.client.get(reverse('admin:admin_views_person_changelist'), {})
        self.assertContentBefore(response, link3, link2)
        self.assertContentBefore(response, link2, link1)
    def test_change_list_sorting_model_meta(self):
        """Model Meta.ordering drives the default changelist order."""
        # Test ordering on Model Meta is respected
        l1 = Language.objects.create(iso='ur', name='Urdu')
        l2 = Language.objects.create(iso='ar', name='Arabic')
        link1 = reverse('admin:admin_views_language_change', args=(quote(l1.pk),))
        link2 = reverse('admin:admin_views_language_change', args=(quote(l2.pk),))
        response = self.client.get(reverse('admin:admin_views_language_changelist'), {})
        self.assertContentBefore(response, link2, link1)
        # Test we can override with query string
        response = self.client.get(reverse('admin:admin_views_language_changelist'), {'o': '-1'})
        self.assertContentBefore(response, link1, link2)
    def test_change_list_sorting_override_model_admin(self):
        """ModelAdmin.ordering takes precedence over Model Meta.ordering."""
        # Test ordering on Model Admin is respected, and overrides Model Meta
        dt = datetime.datetime.now()
        p1 = Podcast.objects.create(name="A", release_date=dt)
        p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
        link1 = reverse('admin:admin_views_podcast_change', args=(p1.pk,))
        link2 = reverse('admin:admin_views_podcast_change', args=(p2.pk,))
        response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
        self.assertContentBefore(response, link1, link2)
    def test_multiple_sort_same_field(self):
        """Two list_display columns backed by one ordering field both render."""
        # Check that we get the columns we expect if we have two columns
        # that correspond to the same ordering field
        dt = datetime.datetime.now()
        p1 = Podcast.objects.create(name="A", release_date=dt)
        p2 = Podcast.objects.create(name="B", release_date=dt - datetime.timedelta(10))
        link1 = reverse('admin:admin_views_podcast_change', args=(quote(p1.pk),))
        link2 = reverse('admin:admin_views_podcast_change', args=(quote(p2.pk),))
        response = self.client.get(reverse('admin:admin_views_podcast_changelist'), {})
        self.assertContentBefore(response, link1, link2)
        p1 = ComplexSortedPerson.objects.create(name="Bob", age=10)
        p2 = ComplexSortedPerson.objects.create(name="Amy", age=20)
        link1 = reverse('admin:admin_views_complexsortedperson_change', args=(p1.pk,))
        link2 = reverse('admin:admin_views_complexsortedperson_change', args=(p2.pk,))
        response = self.client.get(reverse('admin:admin_views_complexsortedperson_changelist'), {})
        # Should have 5 columns (including action checkbox col)
        self.assertContains(response, '<th scope="col"', count=5)
        self.assertContains(response, 'Name')
        self.assertContains(response, 'Colored name')
        # Check order
        self.assertContentBefore(response, 'Name', 'Colored name')
        # Check sorting - should be by name
        self.assertContentBefore(response, link2, link1)
    def test_sort_indicators_admin_order(self):
        """
        Ensures that the admin shows default sort indicators for all
        kinds of 'ordering' fields: field names, method on the model
        admin and model itself, and other callables. See #17252.
        """
        # (model class, url slug) pairs covering each ordering-field flavor.
        models = [(AdminOrderedField, 'adminorderedfield'),
                  (AdminOrderedModelMethod, 'adminorderedmodelmethod'),
                  (AdminOrderedAdminMethod, 'adminorderedadminmethod'),
                  (AdminOrderedCallable, 'adminorderedcallable')]
        for model, url in models:
            model.objects.create(stuff='The Last Item', order=3)
            model.objects.create(stuff='The First Item', order=1)
            model.objects.create(stuff='The Middle Item', order=2)
            response = self.client.get(reverse('admin:admin_views_%s_changelist' % url), {})
            self.assertEqual(response.status_code, 200)
            # Should have 3 columns including action checkbox col.
            self.assertContains(response, '<th scope="col"', count=3, msg_prefix=url)
            # Check if the correct column was selected. 2 is the index of the
            # 'order' column in the model admin's 'list_display' with 0 being
            # the implicit 'action_checkbox' and 1 being the column 'stuff'.
            self.assertEqual(response.context['cl'].get_ordering_field_columns(), {2: 'asc'})
            # Check order of records.
            self.assertContentBefore(response, 'The First Item', 'The Middle Item')
            self.assertContentBefore(response, 'The Middle Item', 'The Last Item')
    def test_limited_filter(self):
        """Ensure admin changelist filters do not contain objects excluded via limit_choices_to.
        This also tests relation-spanning filters (e.g. 'color__value').
        """
        response = self.client.get(reverse('admin:admin_views_thing_changelist'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<div id="changelist-filter">',
            msg_prefix="Expected filter not found in changelist view")
        # 'Blue' (color3) is excluded by limit_choices_to and must not appear.
        self.assertNotContains(response, '<a href="?color__id__exact=3">Blue</a>',
            msg_prefix="Changelist filter not correctly limited by limit_choices_to")
    def test_relation_spanning_filters(self):
        """Filters that span relations render links and restrict results correctly."""
        changelist_url = reverse('admin:admin_views_chapterxtra1_changelist')
        response = self.client.get(changelist_url)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<div id="changelist-filter">')
        # Map each filter lookup to the candidate values and a predicate that
        # every row of the filtered changelist must satisfy.
        filters = {
            'chap__id__exact': dict(
                values=[c.id for c in Chapter.objects.all()],
                test=lambda obj, value: obj.chap.id == value),
            'chap__title': dict(
                values=[c.title for c in Chapter.objects.all()],
                test=lambda obj, value: obj.chap.title == value),
            'chap__book__id__exact': dict(
                values=[b.id for b in Book.objects.all()],
                test=lambda obj, value: obj.chap.book.id == value),
            'chap__book__name': dict(
                values=[b.name for b in Book.objects.all()],
                test=lambda obj, value: obj.chap.book.name == value),
            'chap__book__promo__id__exact': dict(
                values=[p.id for p in Promo.objects.all()],
                test=lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists()),
            'chap__book__promo__name': dict(
                values=[p.name for p in Promo.objects.all()],
                test=lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists()),
        }
        for filter_path, params in filters.items():
            for value in params['values']:
                query_string = urlencode({filter_path: value})
                # ensure filter link exists
                self.assertContains(response, '<a href="?%s">' % query_string)
                # ensure link works
                filtered_response = self.client.get('%s?%s' % (changelist_url, query_string))
                self.assertEqual(filtered_response.status_code, 200)
                # ensure changelist contains only valid objects
                for obj in filtered_response.context['cl'].queryset.all():
                    self.assertTrue(params['test'](obj, value))
    def test_incorrect_lookup_parameters(self):
        """Ensure incorrect lookup parameters are handled gracefully."""
        changelist_url = reverse('admin:admin_views_thing_changelist')
        # Invalid lookups redirect back to the changelist with ?e=1 set.
        response = self.client.get(changelist_url, {'notarealfield': '5'})
        self.assertRedirects(response, '%s?e=1' % changelist_url)
        # Spanning relationships through a nonexistent related object (Refs #16716)
        response = self.client.get(changelist_url, {'notarealfield__whatever': '5'})
        self.assertRedirects(response, '%s?e=1' % changelist_url)
        response = self.client.get(changelist_url, {'color__id__exact': 'StringNotInteger!'})
        self.assertRedirects(response, '%s?e=1' % changelist_url)
        # Regression test for #18530
        response = self.client.get(changelist_url, {'pub_date__gte': 'foo'})
        self.assertRedirects(response, '%s?e=1' % changelist_url)
    def test_isnull_lookups(self):
        """Ensure is_null is handled correctly."""
        # Add a fourth article with no section, so one row has a NULL FK.
        Article.objects.create(title="I Could Go Anywhere", content="Versatile", date=datetime.datetime.now())
        changelist_url = reverse('admin:admin_views_article_changelist')
        response = self.client.get(changelist_url)
        self.assertContains(response, '4 articles')
        response = self.client.get(changelist_url, {'section__isnull': 'false'})
        self.assertContains(response, '3 articles')
        response = self.client.get(changelist_url, {'section__isnull': '0'})
        self.assertContains(response, '3 articles')
        response = self.client.get(changelist_url, {'section__isnull': 'true'})
        self.assertContains(response, '1 article')
        response = self.client.get(changelist_url, {'section__isnull': '1'})
        self.assertContains(response, '1 article')
    def test_logout_and_password_change_URLs(self):
        """The changelist page links to the logout and password-change views."""
        response = self.client.get(reverse('admin:admin_views_article_changelist'))
        self.assertContains(response, '<a href="%s">' % reverse('admin:logout'))
        self.assertContains(response, '<a href="%s">' % reverse('admin:password_change'))
    def test_named_group_field_choices_change_list(self):
        """
        Ensures the admin changelist shows correct values in the relevant column
        for rows corresponding to instances of a model in which a named group
        has been used in the choices option of a field.
        """
        # fab1/fab2 were created with surfaces 'x' and 'y' in the fixtures.
        link1 = reverse('admin:admin_views_fabric_change', args=(self.fab1.pk,))
        link2 = reverse('admin:admin_views_fabric_change', args=(self.fab2.pk,))
        response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
        fail_msg = (
            "Changelist table isn't showing the right human-readable values "
            "set by a model field 'choices' option named group."
        )
        self.assertContains(response, '<a href="%s">Horizontal</a>' % link1, msg_prefix=fail_msg, html=True)
        self.assertContains(response, '<a href="%s">Vertical</a>' % link2, msg_prefix=fail_msg, html=True)
    def test_named_group_field_choices_filter(self):
        """
        Ensures the filter UI shows correctly when at least one named group has
        been used in the choices option of a model field.
        """
        response = self.client.get(reverse('admin:admin_views_fabric_changelist'))
        fail_msg = (
            "Changelist filter isn't showing options contained inside a model "
            "field 'choices' option named group."
        )
        self.assertContains(response, '<div id="changelist-filter">')
        # Human-readable labels from the named choice groups must appear.
        self.assertContains(response,
            '<a href="?surface__exact=x">Horizontal</a>', msg_prefix=fail_msg, html=True)
        self.assertContains(response,
            '<a href="?surface__exact=y">Vertical</a>', msg_prefix=fail_msg, html=True)
    def test_change_list_null_boolean_display(self):
        """A NullBooleanField with value None renders the 'unknown' icon."""
        Post.objects.create(public=None)
        response = self.client.get(reverse('admin:admin_views_post_changelist'))
        self.assertContains(response, 'icon-unknown.svg')
    def test_i18n_language_non_english_default(self):
        """
        Check if the JavaScript i18n view returns an empty language catalog
        if the default language is non-English but the selected language
        is English. See #13388 and #3594 for more details.
        """
        # With English active, no French catalog strings should leak through.
        with self.settings(LANGUAGE_CODE='fr'), translation.override('en-us'):
            response = self.client.get(reverse('admin:jsi18n'))
            self.assertNotContains(response, 'Choisir une heure')
    def test_i18n_language_non_english_fallback(self):
        """
        Makes sure that the fallback language is still working properly
        in cases where the selected language cannot be found.
        """
        # 'none' is not a real language, so LANGUAGE_CODE='fr' must apply.
        with self.settings(LANGUAGE_CODE='fr'), translation.override('none'):
            response = self.client.get(reverse('admin:jsi18n'))
            self.assertContains(response, 'Choisir une heure')
    def test_L10N_deactivated(self):
        """
        Check if L10N is deactivated, the JavaScript i18n view doesn't
        return localized date/time formats. Refs #14824.
        """
        with self.settings(LANGUAGE_CODE='ru', USE_L10N=False), translation.override('none'):
            response = self.client.get(reverse('admin:jsi18n'))
            # Russian-localized format must be absent; ISO format present.
            self.assertNotContains(response, '%d.%m.%Y %H:%M:%S')
            self.assertContains(response, '%Y-%m-%d %H:%M:%S')
    def test_disallowed_filtering(self):
        """
        Changelist lookups that aren't allowed for the ModelAdmin are rejected
        with a 400 response and logged as DisallowedModelAdminLookup, while
        legitimate lookups keep returning 200.
        """
        with patch_logger('django.security.DisallowedModelAdminLookup', 'error') as calls:
            response = self.client.get(
                "%s?owner__email__startswith=fuzzy" % reverse('admin:admin_views_album_changelist')
            )
            self.assertEqual(response.status_code, 400)
            # Exactly one security-log record for the rejected lookup.
            self.assertEqual(len(calls), 1)
        # Filters are allowed if explicitly included in list_filter
        response = self.client.get("%s?color__value__startswith=red" % reverse('admin:admin_views_thing_changelist'))
        self.assertEqual(response.status_code, 200)
        response = self.client.get("%s?color__value=red" % reverse('admin:admin_views_thing_changelist'))
        self.assertEqual(response.status_code, 200)
        # Filters should be allowed if they involve a local field without the
        # need to whitelist them in list_filter or date_hierarchy.
        response = self.client.get("%s?age__gt=30" % reverse('admin:admin_views_person_changelist'))
        self.assertEqual(response.status_code, 200)
        # The relation-spanning lookup the changelist itself renders
        # (employee__person_ptr__exact) must also be accepted — presumably
        # allowed via the WorkHour admin's list_filter; TODO confirm.
        e1 = Employee.objects.create(name='Anonymous', gender=1, age=22, alive=True, code='123')
        e2 = Employee.objects.create(name='Visitor', gender=2, age=19, alive=True, code='124')
        WorkHour.objects.create(datum=datetime.datetime.now(), employee=e1)
        WorkHour.objects.create(datum=datetime.datetime.now(), employee=e2)
        response = self.client.get(reverse('admin:admin_views_workhour_changelist'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'employee__person_ptr__exact')
        response = self.client.get("%s?employee__person_ptr__exact=%d" % (
            reverse('admin:admin_views_workhour_changelist'), e1.pk)
        )
        self.assertEqual(response.status_code, 200)
    def test_disallowed_to_field(self):
        """
        TO_FIELD_VAR values not referenced by a related model are rejected
        with a 400 and logged as DisallowedModelAdminToField, across the
        changelist, add, change and delete views; legitimately referenced
        fields (and the primary key) remain allowed.
        """
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            url = reverse('admin:admin_views_section_changelist')
            response = self.client.get(url, {TO_FIELD_VAR: 'missing_field'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        # Specifying a field that is not referred by any other model registered
        # to this admin site should raise an exception.
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            response = self.client.get(reverse('admin:admin_views_section_changelist'), {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        # #23839 - Primary key should always be allowed, even if the referenced model isn't registered.
        response = self.client.get(reverse('admin:admin_views_notreferenced_changelist'), {TO_FIELD_VAR: 'id'})
        self.assertEqual(response.status_code, 200)
        # #23915 - Specifying a field referenced by another model though a m2m should be allowed.
        response = self.client.get(reverse('admin:admin_views_recipe_changelist'), {TO_FIELD_VAR: 'rname'})
        self.assertEqual(response.status_code, 200)
        # #23604, #23915 - Specifying a field referenced through a reverse m2m relationship should be allowed.
        response = self.client.get(reverse('admin:admin_views_ingredient_changelist'), {TO_FIELD_VAR: 'iname'})
        self.assertEqual(response.status_code, 200)
        # #23329 - Specifying a field that is not referred by any other model directly registered
        # to this admin site but registered through inheritance should be allowed.
        response = self.client.get(reverse('admin:admin_views_referencedbyparent_changelist'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 200)
        # #23431 - Specifying a field that is only referred to by a inline of a registered
        # model should be allowed.
        response = self.client.get(reverse('admin:admin_views_referencedbyinline_changelist'), {TO_FIELD_VAR: 'name'})
        self.assertEqual(response.status_code, 200)
        # We also want to prevent the add, change, and delete views from
        # leaking a disallowed field value.
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            response = self.client.post(reverse('admin:admin_views_section_add'), {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        section = Section.objects.create()
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            url = reverse('admin:admin_views_section_change', args=(section.pk,))
            response = self.client.post(url, {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
        with patch_logger('django.security.DisallowedModelAdminToField', 'error') as calls:
            url = reverse('admin:admin_views_section_delete', args=(section.pk,))
            response = self.client.post(url, {TO_FIELD_VAR: 'name'})
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(calls), 1)
def test_allowed_filtering_15103(self):
"""
Regressions test for ticket 15103 - filtering on fields defined in a
ForeignKey 'limit_choices_to' should be allowed, otherwise raw_id_fields
can break.
"""
# Filters should be allowed if they are defined on a ForeignKey pointing to this model
url = "%s?leader__name=Palin&leader__age=27" % reverse('admin:admin_views_inquisition_changelist')
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
def test_popup_dismiss_related(self):
"""
Regression test for ticket 20664 - ensure the pk is properly quoted.
"""
actor = Actor.objects.create(name="Palin", age=27)
response = self.client.get("%s?%s" % (reverse('admin:admin_views_actor_changelist'), IS_POPUP_VAR))
self.assertContains(response, "opener.dismissRelatedLookupPopup(window, '%s')" % actor.pk)
    def test_hide_change_password(self):
        """
        Tests if the "change password" link in the admin is hidden if the User
        does not have a usable password set.
        (against 9bea85795705d015cdadc82c68b99196a8554f5c)
        """
        user = User.objects.get(username='super')
        user.set_unusable_password()
        user.save()
        # Re-authenticate with force_login, which bypasses the password check
        # now that the account has no usable password.
        self.client.force_login(user)
        response = self.client.get(reverse('admin:index'))
        self.assertNotContains(response, reverse('admin:password_change'),
            msg_prefix='The "change password" link should not be displayed if a user does not have a usable password.')
def test_change_view_with_show_delete_extra_context(self):
"""
Ensured that the 'show_delete' context variable in the admin's change
view actually controls the display of the delete button.
Refs #10057.
"""
instance = UndeletableObject.objects.create(name='foo')
response = self.client.get(reverse('admin:admin_views_undeletableobject_change', args=(instance.pk,)))
self.assertNotContains(response, 'deletelink')
    def test_allows_attributeerror_to_bubble_up(self):
        """
        Ensure that AttributeErrors are allowed to bubble when raised inside
        a change list view.
        Requires a model to be created so there's something to be displayed
        Refs: #16655, #18593, and #18747
        """
        # A row must exist so the changelist touches whatever raises the
        # AttributeError (presumably configured on the Simple ModelAdmin
        # elsewhere in this app — TODO confirm).
        Simple.objects.create()
        with self.assertRaises(AttributeError):
            self.client.get(reverse('admin:admin_views_simple_changelist'))
    def test_changelist_with_no_change_url(self):
        """
        ModelAdmin.changelist_view shouldn't result in a NoReverseMatch if url
        for change_view is removed from get_urls
        Regression test for #20934
        """
        # One row is needed so the changelist actually renders an object row.
        UnchangeableObject.objects.create()
        response = self.client.get(reverse('admin:admin_views_unchangeableobject_changelist'))
        self.assertEqual(response.status_code, 200)
        # Check the format of the shown object -- shouldn't contain a change link
        self.assertContains(response, '<th class="field-__str__">UnchangeableObject object</th>', html=True)
def test_invalid_appindex_url(self):
"""
#21056 -- URL reversing shouldn't work for nonexistent apps.
"""
good_url = '/test_admin/admin/admin_views/'
confirm_good_url = reverse('admin:app_list',
kwargs={'app_label': 'admin_views'})
self.assertEqual(good_url, confirm_good_url)
with self.assertRaises(NoReverseMatch):
reverse('admin:app_list', kwargs={'app_label': 'this_should_fail'})
with self.assertRaises(NoReverseMatch):
reverse('admin:app_list', args=('admin_views2',))
    def test_resolve_admin_views(self):
        """Resolved admin view callables expose their site and ModelAdmin."""
        index_match = resolve('/test_admin/admin4/')
        list_match = resolve('/test_admin/admin4/auth/user/')
        # The view functions carry references back to the AdminSite and the
        # ModelAdmin instance that produced them.
        self.assertIs(index_match.func.admin_site, customadmin.simple_site)
        self.assertIsInstance(list_match.func.model_admin, customadmin.CustomPwdTemplateUserAdmin)
    def test_proxy_model_content_type_is_used_for_log_entries(self):
        """
        Log entries for proxy models should have the proxy model's content
        type.
        Regression test for #21084.
        """
        # for_concrete_model=False yields the proxy's own content type rather
        # than its concrete parent's.
        color2_content_type = ContentType.objects.get_for_model(Color2, for_concrete_model=False)
        # add
        color2_add_url = reverse('admin:admin_views_color2_add')
        self.client.post(color2_add_url, {'value': 'orange'})
        # LogEntry.objects.all()[0] is taken to be the entry for the action
        # just performed (presumably newest-first ordering — TODO confirm).
        color2_addition_log = LogEntry.objects.all()[0]
        self.assertEqual(color2_content_type, color2_addition_log.content_type)
        # change
        color_id = color2_addition_log.object_id
        color2_change_url = reverse('admin:admin_views_color2_change', args=(color_id,))
        self.client.post(color2_change_url, {'value': 'blue'})
        color2_change_log = LogEntry.objects.all()[0]
        self.assertEqual(color2_content_type, color2_change_log.content_type)
        # delete
        color2_delete_url = reverse('admin:admin_views_color2_delete', args=(color_id,))
        self.client.post(color2_delete_url)
        color2_delete_log = LogEntry.objects.all()[0]
        self.assertEqual(color2_content_type, color2_delete_log.content_type)
def test_adminsite_display_site_url(self):
"""
#13749 - Admin should display link to front-end site 'View site'
"""
url = reverse('admin:index')
response = self.client.get(url)
self.assertEqual(response.context['site_url'], '/my-site-url/')
self.assertContains(response, '<a href="/my-site-url/">View site</a>')
@override_settings(TEMPLATES=[{
    'BACKEND': 'django.template.backends.django.DjangoTemplates',
    # Put this app's and the shared tests templates dirs in DIRS to take precedence
    # over the admin's templates dir.
    'DIRS': [
        os.path.join(os.path.dirname(upath(__file__)), 'templates'),
        os.path.join(os.path.dirname(os.path.dirname(upath(__file__))), 'templates'),
    ],
    'APP_DIRS': True,
    'OPTIONS': {
        'context_processors': [
            'django.template.context_processors.debug',
            'django.template.context_processors.request',
            'django.contrib.auth.context_processors.auth',
            'django.contrib.messages.context_processors.messages',
        ],
    },
}])
class AdminCustomTemplateTests(AdminViewBasicTestCase):
    """Admin views rendered with the template overrides shipped by this app."""
    def test_custom_model_admin_templates(self):
        """All CustomArticle admin views use the custom_admin/* templates."""
        # Test custom change list template with custom extra context
        response = self.client.get(reverse('admin:admin_views_customarticle_changelist'))
        self.assertContains(response, "var hello = 'Hello!';")
        self.assertTemplateUsed(response, 'custom_admin/change_list.html')
        # Test custom add form template
        response = self.client.get(reverse('admin:admin_views_customarticle_add'))
        self.assertTemplateUsed(response, 'custom_admin/add_form.html')
        # Add an article so we can test delete, change, and history views
        post = self.client.post(reverse('admin:admin_views_customarticle_add'), {
            'content': '<p>great article</p>',
            'date_0': '2008-03-18',
            'date_1': '10:54:39'
        })
        self.assertRedirects(post, reverse('admin:admin_views_customarticle_changelist'))
        self.assertEqual(CustomArticle.objects.all().count(), 1)
        article_pk = CustomArticle.objects.all()[0].pk
        # Test custom delete, change, and object history templates
        # Test custom change form template
        response = self.client.get(reverse('admin:admin_views_customarticle_change', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/change_form.html')
        response = self.client.get(reverse('admin:admin_views_customarticle_delete', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html')
        response = self.client.post(reverse('admin:admin_views_customarticle_changelist'), data={
            'index': 0,
            'action': ['delete_selected'],
            '_selected_action': ['1'],
        })
        self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
        response = self.client.get(reverse('admin:admin_views_customarticle_history', args=(article_pk,)))
        self.assertTemplateUsed(response, 'custom_admin/object_history.html')
    def test_extended_bodyclass_template_change_form(self):
        """
        Ensure that the admin/change_form.html template uses block.super in the
        bodyclass block.
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_template_change_password(self):
        """
        Ensure that the auth/user/change_password.html template uses block
        super in the bodyclass block.
        """
        user = User.objects.get(username='super')
        response = self.client.get(reverse('admin:auth_user_password_change', args=(user.id,)))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_template_index(self):
        """
        Ensure that the admin/index.html template uses block.super in the
        bodyclass block.
        """
        response = self.client.get(reverse('admin:index'))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_change_list(self):
        """
        Ensure that the admin/change_list.html' template uses block.super
        in the bodyclass block.
        """
        response = self.client.get(reverse('admin:admin_views_article_changelist'))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_template_login(self):
        """
        Ensure that the admin/login.html template uses block.super in the
        bodyclass block.
        """
        self.client.logout()
        response = self.client.get(reverse('admin:login'))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_template_delete_confirmation(self):
        """
        Ensure that the admin/delete_confirmation.html template uses
        block.super in the bodyclass block.
        """
        group = Group.objects.create(name="foogroup")
        response = self.client.get(reverse('admin:auth_group_delete', args=(group.id,)))
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_extended_bodyclass_template_delete_selected_confirmation(self):
        """
        Ensure that the admin/delete_selected_confirmation.html template uses
        block.super in bodyclass block.
        """
        group = Group.objects.create(name="foogroup")
        post_data = {
            'action': 'delete_selected',
            'selected_across': '0',
            'index': '0',
            '_selected_action': group.id
        }
        response = self.client.post(reverse('admin:auth_group_changelist'), post_data)
        self.assertEqual(response.context['site_header'], 'Django administration')
        self.assertContains(response, 'bodyclass_consistency_check ')
    def test_filter_with_custom_template(self):
        """
        Ensure that one can use a custom template to render an admin filter.
        Refs #17515.
        """
        response = self.client.get(reverse('admin:admin_views_color2_changelist'))
        self.assertTemplateUsed(response, 'custom_filter_template.html')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminViewFormUrlTest(TestCase):
    """form_url and initial-data handling on the "admin3" admin site."""
    # Admin site namespace passed as current_app when reversing URLs below.
    current_app = "admin3"
    @classmethod
    def setUpTestData(cls):
        """Create the shared users, section, articles and post fixtures."""
        # password = "secret"
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
    def setUp(self):
        self.client.login(username='super', password='secret')
    def test_change_form_URL_has_correct_value(self):
        """
        Tests whether change_view has form_url in response.context
        """
        response = self.client.get(
            reverse('admin:admin_views_section_change', args=(self.s1.pk,), current_app=self.current_app)
        )
        self.assertIn('form_url', response.context, msg='form_url not present in response.context')
        # The custom form_url 'pony' is presumably injected by the admin3
        # site's Section ModelAdmin — TODO confirm.
        self.assertEqual(response.context['form_url'], 'pony')
    def test_initial_data_can_be_overridden(self):
        """
        Tests that the behavior for setting initial
        form data can be overridden in the ModelAdmin class.
        Usually, the initial value is set via the GET params.
        """
        response = self.client.get(
            reverse('admin:admin_views_restaurant_add', current_app=self.current_app),
            {'name': 'test_value'}
        )
        # this would be the usual behaviour
        self.assertNotContains(response, 'value="test_value"')
        # this is the overridden behaviour
        self.assertContains(response, 'value="overridden_value"')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminJavaScriptTest(TestCase):
    """Checks which JavaScript assets admin pages reference."""
    @classmethod
    def setUpTestData(cls):
        # Superuser; the stored hash corresponds to password "secret".
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
    def setUp(self):
        self.client.login(username='super', password='secret')
    def test_js_minified_only_if_debug_is_false(self):
        """
        Ensure that the minified versions of the JS files are only used when
        DEBUG is False.
        Refs #17521.
        """
        with override_settings(DEBUG=False):
            response = self.client.get(reverse('admin:admin_views_section_add'))
            self.assertNotContains(response, 'vendor/jquery/jquery.js')
            self.assertContains(response, 'vendor/jquery/jquery.min.js')
            self.assertNotContains(response, 'prepopulate.js')
            self.assertContains(response, 'prepopulate.min.js')
            self.assertNotContains(response, 'actions.js')
            self.assertContains(response, 'actions.min.js')
            self.assertNotContains(response, 'collapse.js')
            self.assertContains(response, 'collapse.min.js')
            self.assertNotContains(response, 'inlines.js')
            self.assertContains(response, 'inlines.min.js')
        with override_settings(DEBUG=True):
            # With DEBUG on, the unminified sources must be served instead.
            response = self.client.get(reverse('admin:admin_views_section_add'))
            self.assertContains(response, 'vendor/jquery/jquery.js')
            self.assertNotContains(response, 'vendor/jquery/jquery.min.js')
            self.assertContains(response, 'prepopulate.js')
            self.assertNotContains(response, 'prepopulate.min.js')
            self.assertContains(response, 'actions.js')
            self.assertNotContains(response, 'actions.min.js')
            self.assertContains(response, 'collapse.js')
            self.assertNotContains(response, 'collapse.min.js')
            self.assertContains(response, 'inlines.js')
            self.assertNotContains(response, 'inlines.min.js')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class SaveAsTests(TestCase):
    """Tests for the change view's "save as new" (_saveasnew) handling."""
    @classmethod
    def setUpTestData(cls):
        # Superuser (hash corresponds to password "secret") plus the Person
        # that the tests duplicate via "save as new".
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
    def setUp(self):
        self.client.login(username='super', password='secret')
    def test_save_as_duplication(self):
        """Ensure save as actually creates a new person"""
        post_data = {'_saveasnew': '', 'name': 'John M', 'gender': 1, 'age': 42}
        self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), post_data)
        # Count rows with QuerySet.count() (SQL COUNT) instead of len(),
        # which would fetch every matching row just to measure the length.
        self.assertEqual(Person.objects.filter(name='John M').count(), 1)
        # The original record must still exist alongside the copy.
        self.assertEqual(Person.objects.filter(id=self.per1.pk).count(), 1)
    def test_save_as_new_with_validation_errors(self):
        """
        Ensure that when you click "Save as new" and have a validation error,
        you only see the "Save as new" button and not the other save buttons,
        and that only the "Save as" button is visible.
        """
        response = self.client.post(reverse('admin:admin_views_person_change', args=(self.per1.pk,)), {
            '_saveasnew': '',
            'gender': 'invalid',
            '_addanother': 'fail',
        })
        self.assertContains(response, 'Please correct the errors below.')
        # Only the "Save as new" button may be offered after the failure.
        self.assertFalse(response.context['show_save_and_add_another'])
        self.assertFalse(response.context['show_save_and_continue'])
        self.assertTrue(response.context['show_save_as_new'])
    def test_save_as_new_with_validation_errors_with_inlines(self):
        """A parent validation error keeps only "Save as new" visible."""
        parent = Parent.objects.create(name='Father')
        child = Child.objects.create(parent=parent, name='Child')
        response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
            '_saveasnew': 'Save as new',
            'child_set-0-parent': parent.pk,
            'child_set-0-id': child.pk,
            'child_set-0-name': 'Child',
            'child_set-INITIAL_FORMS': 1,
            'child_set-MAX_NUM_FORMS': 1000,
            'child_set-MIN_NUM_FORMS': 0,
            'child_set-TOTAL_FORMS': 4,
            'name': '_invalid',
        })
        self.assertContains(response, 'Please correct the error below.')
        self.assertFalse(response.context['show_save_and_add_another'])
        self.assertFalse(response.context['show_save_and_continue'])
        self.assertTrue(response.context['show_save_as_new'])
    def test_save_as_new_with_inlines_with_validation_errors(self):
        """An inline validation error keeps only "Save as new" visible."""
        parent = Parent.objects.create(name='Father')
        child = Child.objects.create(parent=parent, name='Child')
        response = self.client.post(reverse('admin:admin_views_parent_change', args=(parent.pk,)), {
            '_saveasnew': 'Save as new',
            'child_set-0-parent': parent.pk,
            'child_set-0-id': child.pk,
            'child_set-0-name': '_invalid',
            'child_set-INITIAL_FORMS': 1,
            'child_set-MAX_NUM_FORMS': 1000,
            'child_set-MIN_NUM_FORMS': 0,
            'child_set-TOTAL_FORMS': 4,
            'name': 'Father',
        })
        self.assertContains(response, 'Please correct the error below.')
        self.assertFalse(response.context['show_save_and_add_another'])
        self.assertFalse(response.context['show_save_and_continue'])
        self.assertTrue(response.context['show_save_as_new'])
@override_settings(ROOT_URLCONF="admin_views.urls")
class CustomModelAdminTest(AdminViewBasicTestCase):
    """Views, templates and login behaviour of the customised admin sites
    reversed under the "admin2" (and "admin4") namespaces."""
    def test_custom_admin_site_login_form(self):
        self.client.logout()
        response = self.client.get(reverse('admin2:index'), follow=True)
        self.assertIsInstance(response, TemplateResponse)
        self.assertEqual(response.status_code, 200)
        login = self.client.post(reverse('admin2:login'), {
            REDIRECT_FIELD_NAME: reverse('admin2:index'),
            'username': 'customform',
            'password': 'secret',
        }, follow=True)
        self.assertIsInstance(login, TemplateResponse)
        self.assertEqual(login.status_code, 200)
        # The custom login form's own error text and media must be rendered.
        self.assertContains(login, 'custom form error')
        self.assertContains(login, 'path/to/media.css')
    def test_custom_admin_site_login_template(self):
        self.client.logout()
        response = self.client.get(reverse('admin2:index'), follow=True)
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/login.html')
        self.assertContains(response, 'Hello from a custom login template')
    def test_custom_admin_site_logout_template(self):
        response = self.client.get(reverse('admin2:logout'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/logout.html')
        self.assertContains(response, 'Hello from a custom logout template')
    def test_custom_admin_site_index_view_and_template(self):
        # A TypeError here would mean index_template rejected a list value.
        try:
            response = self.client.get(reverse('admin2:index'))
        except TypeError:
            self.fail('AdminSite.index_template should accept a list of template paths')
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/index.html')
        self.assertContains(response, 'Hello from a custom index template *bar*')
    def test_custom_admin_site_app_index_view_and_template(self):
        response = self.client.get(reverse('admin2:app_list', args=('admin_views',)))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/app_index.html')
        self.assertContains(response, 'Hello from a custom app_index template')
    def test_custom_admin_site_password_change_template(self):
        response = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
        self.assertContains(response, 'Hello from a custom password change form template')
    def test_custom_admin_site_password_change_with_extra_context(self):
        response = self.client.get(reverse('admin2:password_change'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_form.html')
        # 'eggs' is presumably supplied via extra_context by the custom
        # site's password_change view — TODO confirm.
        self.assertContains(response, 'eggs')
    def test_custom_admin_site_password_change_done_template(self):
        response = self.client.get(reverse('admin2:password_change_done'))
        self.assertIsInstance(response, TemplateResponse)
        self.assertTemplateUsed(response, 'custom_admin/password_change_done.html')
        self.assertContains(response, 'Hello from a custom password change done template')
    def test_custom_admin_site_view(self):
        self.client.login(username='super', password='secret')
        response = self.client.get(reverse('admin2:my_view'))
        self.assertEqual(response.content, b"Django is a magical pony!")
    def test_pwd_change_custom_template(self):
        self.client.login(username='super', password='secret')
        su = User.objects.get(username='super')
        # A TypeError here would mean change_user_password_template rejected
        # a list value.
        try:
            response = self.client.get(
                reverse('admin4:auth_user_password_change', args=(su.pk,))
            )
        except TypeError:
            self.fail('ModelAdmin.change_user_password_template should accept a list of template paths')
        self.assertEqual(response.status_code, 200)
def get_perm(Model, perm):
    """Return the Permission with codename ``perm`` for the given model."""
    model_content_type = ContentType.objects.get_for_model(Model)
    return Permission.objects.get(codename=perm, content_type=model_content_type)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminViewPermissionsTest(TestCase):
"""Tests for Admin Views Permissions."""
    @classmethod
    def setUpTestData(cls):
        """Create test users/articles, grant per-action Article permissions,
        and prepare the login POST payloads used by the permission tests."""
        super(AdminViewPermissionsTest, cls).setUpTestData()
        # Every account's password is "secret" (matching the login dicts
        # built at the bottom of this method).
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # Setup permissions, for our users who can add, change, and delete.
        opts = Article._meta
        # User who can add Articles
        cls.u2.user_permissions.add(get_perm(Article, get_permission_codename('add', opts)))
        # User who can change Articles
        cls.u3.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
        cls.u6.user_permissions.add(get_perm(Article, get_permission_codename('change', opts)))
        # User who can delete Articles
        cls.u4.user_permissions.add(get_perm(Article, get_permission_codename('delete', opts)))
        cls.u4.user_permissions.add(get_perm(Section, get_permission_codename('delete', Section._meta)))
        # login POST dicts
        cls.index_url = reverse('admin:index')
        cls.super_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'super',
            'password': 'secret',
        }
        cls.super_email_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'super@example.com',
            'password': 'secret',
        }
        cls.super_email_bad_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'super@example.com',
            'password': 'notsecret',
        }
        cls.adduser_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'adduser',
            'password': 'secret',
        }
        cls.changeuser_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'changeuser',
            'password': 'secret',
        }
        cls.deleteuser_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'deleteuser',
            'password': 'secret',
        }
        cls.nostaff_login = {
            REDIRECT_FIELD_NAME: reverse('has_permission_admin:index'),
            'username': 'nostaff',
            'password': 'secret',
        }
        cls.joepublic_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'username': 'joepublic',
            'password': 'secret',
        }
        # Deliberately omits 'username' to exercise the empty-username case.
        cls.no_username_login = {
            REDIRECT_FIELD_NAME: cls.index_url,
            'password': 'secret',
        }
def test_login(self):
    """
    Make sure only staff members can log in.

    Successful posts to the login page will redirect to the original url.
    Unsuccessful attempts will continue to render the login page with
    a 200 status code.
    """
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    # Super User
    response = self.client.get(self.index_url)
    self.assertRedirects(response, login_url)
    login = self.client.post(login_url, self.super_login)
    self.assertRedirects(login, self.index_url)
    # A successful login redirects without rendering a template, so the
    # response carries no template context.
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))
    # Test if user enters email address
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.super_email_login)
    self.assertContains(login, ERROR_MESSAGE)
    # only correct passwords get a username hint
    login = self.client.post(login_url, self.super_email_bad_login)
    self.assertContains(login, ERROR_MESSAGE)
    # Create a second user with the same email address.
    new_user = User(username='jondoe', password='secret', email='super@example.com')
    new_user.save()
    # check to ensure if there are multiple email addresses a user doesn't get a 500
    login = self.client.post(login_url, self.super_email_login)
    self.assertContains(login, ERROR_MESSAGE)
    # Add User
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.adduser_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))
    # Change User
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.changeuser_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))
    # Delete User
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.deleteuser_login)
    self.assertRedirects(login, self.index_url)
    self.assertFalse(login.context)
    self.client.get(reverse('admin:logout'))
    # Regular User should not be able to login.
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.joepublic_login)
    self.assertEqual(login.status_code, 200)
    self.assertContains(login, ERROR_MESSAGE)
    # Requests without username should not return 500 errors.
    response = self.client.get(self.index_url)
    self.assertEqual(response.status_code, 302)
    login = self.client.post(login_url, self.no_username_login)
    self.assertEqual(login.status_code, 200)
    # The re-rendered login form reports the missing username field.
    form = login.context[0].get('form')
    self.assertEqual(form.errors['username'][0], 'This field is required.')
def test_login_redirect_for_direct_get(self):
    """
    Going straight to /admin/login/ should default the post-login
    redirect target to the admin index page.
    """
    login_page = self.client.get(reverse('admin:login'))
    self.assertEqual(login_page.status_code, 200)
    expected_target = reverse('admin:index')
    self.assertEqual(login_page.context[REDIRECT_FIELD_NAME], expected_target)
def test_login_has_permission(self):
    """Logging in honors the custom AdminSite's permission check."""
    index = reverse('has_permission_admin:index')
    login_url = reverse('has_permission_admin:login')
    # Regular User should not be able to login.
    self.assertEqual(self.client.get(index).status_code, 302)
    rejected = self.client.post(login_url, self.joepublic_login)
    self.assertEqual(rejected.status_code, 200)
    self.assertContains(rejected, 'permission denied')
    # User with permissions should be able to login.
    self.assertEqual(self.client.get(index).status_code, 302)
    accepted = self.client.post(login_url, self.nostaff_login)
    self.assertRedirects(accepted, index)
    self.assertFalse(accepted.context)
    self.client.get(reverse('has_permission_admin:logout'))
    # Staff should be able to login.
    self.assertEqual(self.client.get(index).status_code, 302)
    staff_attempt = self.client.post(login_url, {
        REDIRECT_FIELD_NAME: index,
        'username': 'deleteuser',
        'password': 'secret',
    })
    self.assertRedirects(staff_attempt, index)
    self.assertFalse(staff_attempt.context)
    self.client.get(reverse('has_permission_admin:logout'))
def test_login_successfully_redirects_to_original_URL(self):
    """After a successful login the user lands on the originally requested URL."""
    self.assertEqual(self.client.get(self.index_url).status_code, 302)
    redirect_url = '%s?%s' % (self.index_url, 'the-answer=42')
    # Credentials without an explicit "next" field; the target comes from
    # the query string of the login URL instead.
    credentials = self.super_login.copy()
    del credentials[REDIRECT_FIELD_NAME]
    login_target = '%s?%s' % (
        reverse('admin:login'),
        urlencode({REDIRECT_FIELD_NAME: redirect_url}),
    )
    login = self.client.post(login_target, credentials)
    self.assertRedirects(login, redirect_url)
def test_double_login_is_not_allowed(self):
    """Regression test for #19327"""
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    self.assertEqual(self.client.get(self.index_url).status_code, 302)

    def attempt(credentials):
        # Small helper: every attempt posts to the same login URL.
        return self.client.post(login_url, credentials)

    # Establish a valid admin session.
    first = attempt(self.super_login)
    self.assertRedirects(first, self.index_url)
    self.assertFalse(first.context)
    # A failed login by a non-admin just re-renders the login page...
    failed = attempt(self.joepublic_login)
    self.assertEqual(failed.status_code, 200)
    self.assertContains(failed, ERROR_MESSAGE)
    # ...so the admin can authenticate again...
    second = attempt(self.super_login)
    self.assertRedirects(second, self.index_url)
    self.assertFalse(second.context)
    # ...even while already logged in.
    third = attempt(self.super_login)
    self.assertRedirects(third, self.index_url)
    self.assertFalse(third.context)
    self.client.get(reverse('admin:logout'))
def test_login_page_notice_for_non_staff_users(self):
    """
    A logged-in non-staff user trying to access the admin index should be
    presented with the login page and a hint indicating that the current
    user doesn't have access to it.
    """
    hint_template = 'You are authenticated as {}'
    # No hint for anonymous visitors.
    anonymous = self.client.get(self.index_url, follow=True)
    self.assertContains(anonymous, 'login-form')
    self.assertNotContains(anonymous, hint_template.format(''), status_code=200)
    # The hint appears once a non-staff user is authenticated.
    self.client.login(**self.nostaff_login)
    authenticated = self.client.get(self.index_url, follow=True)
    self.assertContains(authenticated, 'login-form')
    self.assertContains(authenticated, hint_template.format(self.u6.username), status_code=200)
def test_add_view(self):
    """Test add view restricts access and actually adds items."""
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    # Valid form payload for creating an Article (split date widget).
    add_dict = {'title': 'Døm ikke',
                'content': '<p>great article</p>',
                'date_0': '2008-03-18', 'date_1': '10:54:39',
                'section': self.s1.pk}
    # Change User should not have access to add articles
    self.client.get(self.index_url)
    self.client.post(login_url, self.changeuser_login)
    # make sure the view removes test cookie
    self.assertEqual(self.client.session.test_cookie_worked(), False)
    response = self.client.get(reverse('admin:admin_views_article_add'))
    self.assertEqual(response.status_code, 403)
    # Try POST just to make sure
    post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
    self.assertEqual(post.status_code, 403)
    # Only the three fixture articles exist -- nothing was added.
    self.assertEqual(Article.objects.count(), 3)
    self.client.get(reverse('admin:logout'))
    # Add user may login and POST to add view, then redirect to admin root
    self.client.get(self.index_url)
    self.client.post(login_url, self.adduser_login)
    addpage = self.client.get(reverse('admin:admin_views_article_add'))
    change_list_link = '› <a href="%s">Articles</a>' % reverse('admin:admin_views_article_changelist')
    self.assertNotContains(addpage, change_list_link,
                           msg_prefix='User restricted to add permission is given link to change list view in breadcrumbs.')
    post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
    self.assertRedirects(post, self.index_url)
    self.assertEqual(Article.objects.count(), 4)
    # Creating an Article triggers a notification email -- presumably from
    # a save hook/signal on the model; verify against the model definition.
    self.assertEqual(len(mail.outbox), 1)
    self.assertEqual(mail.outbox[0].subject, 'Greetings from a created object')
    self.client.get(reverse('admin:logout'))
    # Check that the addition was logged correctly
    addition_log = LogEntry.objects.all()[0]
    new_article = Article.objects.last()
    article_ct = ContentType.objects.get_for_model(Article)
    # The log entry records who added what (u2 is the add-only user).
    self.assertEqual(addition_log.user_id, self.u2.pk)
    self.assertEqual(addition_log.content_type_id, article_ct.pk)
    self.assertEqual(addition_log.object_id, str(new_article.pk))
    self.assertEqual(addition_log.object_repr, "Døm ikke")
    self.assertEqual(addition_log.action_flag, ADDITION)
    self.assertEqual(addition_log.change_message, "Added.")
    # Super can add too, but is redirected to the change list view
    self.client.get(self.index_url)
    self.client.post(login_url, self.super_login)
    addpage = self.client.get(reverse('admin:admin_views_article_add'))
    self.assertContains(addpage, change_list_link,
                        msg_prefix='Unrestricted user is not given link to change list view in breadcrumbs.')
    post = self.client.post(reverse('admin:admin_views_article_add'), add_dict)
    self.assertRedirects(post, reverse('admin:admin_views_article_changelist'))
    self.assertEqual(Article.objects.count(), 5)
    self.client.get(reverse('admin:logout'))
    # 8509 - if a normal user is already logged in, it is possible
    # to change user into the superuser without error
    self.client.login(username='joepublic', password='secret')
    # Check and make sure that if user expires, data still persists
    self.client.get(self.index_url)
    self.client.post(login_url, self.super_login)
    # make sure the view removes test cookie
    self.assertEqual(self.client.session.test_cookie_worked(), False)
def test_change_view(self):
    """Change view should restrict access and allow users to edit items."""
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    # Valid form payload for editing an Article (split date widget).
    change_dict = {'title': 'Ikke fordømt',
                   'content': '<p>edited article</p>',
                   'date_0': '2008-03-18', 'date_1': '10:54:39',
                   'section': self.s1.pk}
    article_change_url = reverse('admin:admin_views_article_change', args=(self.a1.pk,))
    article_changelist_url = reverse('admin:admin_views_article_changelist')
    # add user should not be able to view the list of article or change any of them
    self.client.get(self.index_url)
    self.client.post(login_url, self.adduser_login)
    response = self.client.get(article_changelist_url)
    self.assertEqual(response.status_code, 403)
    response = self.client.get(article_change_url)
    self.assertEqual(response.status_code, 403)
    post = self.client.post(article_change_url, change_dict)
    self.assertEqual(post.status_code, 403)
    self.client.get(reverse('admin:logout'))
    # change user can view all items and edit them
    self.client.get(self.index_url)
    self.client.post(login_url, self.changeuser_login)
    response = self.client.get(article_changelist_url)
    self.assertEqual(response.status_code, 200)
    response = self.client.get(article_change_url)
    self.assertEqual(response.status_code, 200)
    post = self.client.post(article_change_url, change_dict)
    self.assertRedirects(post, article_changelist_url)
    self.assertEqual(Article.objects.get(pk=self.a1.pk).content, '<p>edited article</p>')
    # one error in form should produce singular error message, multiple errors plural
    change_dict['title'] = ''
    post = self.client.post(article_change_url, change_dict)
    self.assertContains(post, 'Please correct the error below.',
                        msg_prefix='Singular error message not found in response to post with one error')
    change_dict['content'] = ''
    post = self.client.post(article_change_url, change_dict)
    self.assertContains(post, 'Please correct the errors below.',
                        msg_prefix='Plural error message not found in response to post with multiple errors')
    self.client.get(reverse('admin:logout'))
    # Test redirection when using row-level change permissions. Refs #11513.
    # The model's ModelAdmin grants change permission per object -- from
    # the assertions below, only even-id rows are editable; odd-id rows 403.
    r1 = RowLevelChangePermissionModel.objects.create(id=1, name="odd id")
    r2 = RowLevelChangePermissionModel.objects.create(id=2, name="even id")
    change_url_1 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r1.pk,))
    change_url_2 = reverse('admin:admin_views_rowlevelchangepermissionmodel_change', args=(r2.pk,))
    # Authenticated staff users: denied on the odd-id row, allowed on the even-id row.
    for login_dict in [self.super_login, self.changeuser_login, self.adduser_login, self.deleteuser_login]:
        self.client.post(login_url, login_dict)
        response = self.client.get(change_url_1)
        self.assertEqual(response.status_code, 403)
        response = self.client.post(change_url_1, {'name': 'changed'})
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
        self.assertEqual(response.status_code, 403)
        response = self.client.get(change_url_2)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(change_url_2, {'name': 'changed'})
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
        self.assertRedirects(response, self.index_url)
        self.client.get(reverse('admin:logout'))
    # Unauthenticated / anonymous attempts are sent to the login page instead,
    # and no data is modified either way.
    for login_dict in [self.joepublic_login, self.no_username_login]:
        self.client.post(login_url, login_dict)
        response = self.client.get(change_url_1, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        response = self.client.post(change_url_1, {'name': 'changed'}, follow=True)
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=1).name, 'odd id')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        response = self.client.get(change_url_2, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        response = self.client.post(change_url_2, {'name': 'changed again'}, follow=True)
        self.assertEqual(RowLevelChangePermissionModel.objects.get(id=2).name, 'changed')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        self.client.get(reverse('admin:logout'))
def test_delete_view(self):
    """Delete view should restrict access and actually delete items."""
    # POSTing {'post': 'yes'} confirms the deletion on the confirmation page.
    delete_dict = {'post': 'yes'}
    delete_url = reverse('admin:admin_views_article_delete', args=(self.a1.pk,))
    # add user should not be able to delete articles
    self.client.login(**self.adduser_login)
    response = self.client.get(delete_url)
    self.assertEqual(response.status_code, 403)
    post = self.client.post(delete_url, delete_dict)
    self.assertEqual(post.status_code, 403)
    # All three fixture articles are still present.
    self.assertEqual(Article.objects.count(), 3)
    self.client.logout()
    # Delete user can delete
    self.client.login(**self.deleteuser_login)
    # Deleting the whole section would cascade to all 3 of its articles.
    response = self.client.get(reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
    self.assertContains(response, "<h2>Summary</h2>")
    self.assertContains(response, "<li>Articles: 3</li>")
    # test response contains link to related Article
    self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
    # Deleting a single article only affects that one article.
    response = self.client.get(delete_url)
    self.assertContains(response, "admin_views/article/%s/" % self.a1.pk)
    self.assertContains(response, "<h2>Summary</h2>")
    self.assertContains(response, "<li>Articles: 1</li>")
    self.assertEqual(response.status_code, 200)
    post = self.client.post(delete_url, delete_dict)
    self.assertRedirects(post, self.index_url)
    self.assertEqual(Article.objects.count(), 2)
    # Deleting an Article triggers a notification email -- presumably from
    # a delete hook/signal on the model; verify against the model definition.
    self.assertEqual(len(mail.outbox), 1)
    self.assertEqual(mail.outbox[0].subject, 'Greetings from a deleted object')
    # The deletion is recorded in the admin log.
    article_ct = ContentType.objects.get_for_model(Article)
    logged = LogEntry.objects.get(content_type=article_ct, action_flag=DELETION)
    self.assertEqual(logged.object_id, str(self.a1.pk))
def test_history_view(self):
    """History view should restrict access."""
    login_url = '%s?next=%s' % (reverse('admin:login'), reverse('admin:index'))
    # add user should not be able to view the list of article or change any of them
    self.client.get(self.index_url)
    self.client.post(login_url, self.adduser_login)
    response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
    self.assertEqual(response.status_code, 403)
    self.client.get(reverse('admin:logout'))
    # change user can view all items and edit them
    self.client.get(self.index_url)
    self.client.post(login_url, self.changeuser_login)
    response = self.client.get(reverse('admin:admin_views_article_history', args=(self.a1.pk,)))
    self.assertEqual(response.status_code, 200)
    # Test redirection when using row-level change permissions. Refs #11513.
    # From the assertions below: history of the first (odd-id) row is
    # forbidden, history of the second (even-id) row is viewable.
    rl1 = RowLevelChangePermissionModel.objects.create(name="odd id")
    rl2 = RowLevelChangePermissionModel.objects.create(name="even id")
    for login_dict in [self.super_login, self.changeuser_login, self.adduser_login, self.deleteuser_login]:
        self.client.post(login_url, login_dict)
        url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 403)
        url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.client.get(reverse('admin:logout'))
    # Anonymous / failed logins are redirected to the login page for both rows.
    for login_dict in [self.joepublic_login, self.no_username_login]:
        self.client.post(login_url, login_dict)
        url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl1.pk,))
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        url = reverse('admin:admin_views_rowlevelchangepermissionmodel_history', args=(rl2.pk,))
        response = self.client.get(url, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'login-form')
        self.client.get(reverse('admin:logout'))
def test_history_view_bad_url(self):
    """A non-integer object id in the history URL should 404, not crash."""
    self.client.post(reverse('admin:login'), self.changeuser_login)
    bad_url = reverse('admin:admin_views_article_history', args=('foo',))
    self.assertEqual(self.client.get(bad_url).status_code, 404)
def test_conditionally_show_add_section_link(self):
    """
    The foreign key widget should only show the "add related" button if the
    user has permission to add that related item.
    """
    self.client.login(**self.adduser_login)
    url = reverse('admin:admin_views_article_add')
    add_link_text = 'add_id_section'
    # Without the add-Section permission there is no "add section" link.
    self.assertNotContains(self.client.get(url), add_link_text)
    # Grant the permission; the link appears.
    account = User.objects.get(username='adduser')
    account.user_permissions.add(
        get_perm(Section, get_permission_codename('add', Section._meta)))
    self.assertContains(self.client.get(url), add_link_text)
def test_conditionally_show_change_section_link(self):
    """
    The foreign key widget should only show the "change related" button if
    the user has permission to change that related item.
    """
    def widget_allows_change(response):
        # The widget's flag mirrors whether the link is rendered.
        widget = response.context['adminform'].form.fields['section'].widget
        return widget.can_change_related

    self.client.login(**self.adduser_login)
    url = reverse('admin:admin_views_article_add')
    change_link_text = 'change_id_section'
    # Without the change-Section permission there is no "change section" link.
    response = self.client.get(url)
    self.assertFalse(widget_allows_change(response))
    self.assertNotContains(response, change_link_text)
    # Grant the permission; the link appears.
    account = User.objects.get(username='adduser')
    account.user_permissions.add(
        get_perm(Section, get_permission_codename('change', Section._meta)))
    response = self.client.get(url)
    self.assertTrue(widget_allows_change(response))
    self.assertContains(response, change_link_text)
def test_conditionally_show_delete_section_link(self):
    """
    The foreign key widget should only show the "delete related" button if
    the user has permission to delete that related item.
    """
    def widget_allows_delete(response):
        # The widget's flag mirrors whether the link is rendered.
        widget = response.context['adminform'].form.fields['sub_section'].widget
        return widget.can_delete_related

    self.client.login(**self.adduser_login)
    url = reverse('admin:admin_views_article_add')
    delete_link_text = 'delete_id_sub_section'
    # Without the delete-Section permission there is no "delete section" link.
    response = self.client.get(url)
    self.assertFalse(widget_allows_delete(response))
    self.assertNotContains(response, delete_link_text)
    # Grant the permission; the link appears.
    account = User.objects.get(username='adduser')
    account.user_permissions.add(
        get_perm(Section, get_permission_codename('delete', Section._meta)))
    response = self.client.get(url)
    self.assertTrue(widget_allows_delete(response))
    self.assertContains(response, delete_link_text)
def test_disabled_permissions_when_logged_in(self):
    """A user deactivated mid-session is shown the login form, not the admin."""
    self.client.login(username='super', password='secret')
    account = User.objects.get(username='super')
    account.is_active = False
    account.save()
    index_response = self.client.get(self.index_url, follow=True)
    self.assertContains(index_response, 'id="login-form"')
    self.assertNotContains(index_response, 'Log out')
    secure_response = self.client.get(reverse('secure_view'), follow=True)
    self.assertContains(secure_response, 'id="login-form"')
def test_disabled_staff_permissions_when_logged_in(self):
    """A user stripped of staff status mid-session is locked out of the admin."""
    self.client.login(username='super', password='secret')
    account = User.objects.get(username='super')
    account.is_staff = False
    account.save()
    index_response = self.client.get(self.index_url, follow=True)
    self.assertContains(index_response, 'id="login-form"')
    self.assertNotContains(index_response, 'Log out')
    secure_response = self.client.get(reverse('secure_view'), follow=True)
    self.assertContains(secure_response, 'id="login-form"')
def test_app_index_fail_early(self):
    """
    If a user has no module perms, avoid iterating over all the modeladmins
    in the registry.
    """
    change_user = User.objects.get(username='changeuser')
    permission = get_perm(Article, get_permission_codename('change', Article._meta))
    self.client.login(**self.changeuser_login)
    app_index_url = reverse('admin:app_list', args=('admin_views',))
    # Strip the user's only permission: the app index is forbidden.
    change_user.user_permissions.remove(permission)
    self.assertEqual(self.client.get(app_index_url).status_code, 403)
    # Restore it: the app index renders again.
    change_user.user_permissions.add(permission)
    self.assertEqual(self.client.get(app_index_url).status_code, 200)
def test_shortcut_view_only_available_to_staff(self):
    """
    Only admin users should be able to use the admin shortcut view.
    """
    ctype = ContentType.objects.get_for_model(ModelWithStringPrimaryKey)
    instance = ModelWithStringPrimaryKey.objects.create(string_pk='foo')
    shortcut_url = reverse('admin:view_on_site', args=(ctype.pk, instance.pk))
    # Anonymous visitors are shown the login page.
    anonymous = self.client.get(shortcut_url, follow=True)
    self.assertTemplateUsed(anonymous, 'admin/login.html')
    # An authenticated admin is redirected to the object's URL.
    self.client.login(username='super', password='secret')
    redirect = self.client.get(shortcut_url, follow=False)
    # Can't use self.assertRedirects() because User.get_absolute_url() is silly.
    self.assertEqual(redirect.status_code, 302)
    # Domain may depend on contrib.sites tests also run
    six.assertRegex(self, redirect.url, 'http://(testserver|example.com)/dummy/foo/')
def test_has_module_permission(self):
    """
    Ensure that has_module_permission() returns True for all users who
    have any permission for that module (add, change, or delete), so that
    the module is displayed on the admin index page.
    """
    # The four stanzas of the original were identical copy-paste; drive
    # them from data instead. Superuser plus each single-permission user
    # must all see the app and the model on the index page.
    logins = (
        self.super_login,
        self.adduser_login,
        self.changeuser_login,
        self.deleteuser_login,
    )
    for login_dict in logins:
        self.client.login(**login_dict)
        response = self.client.get(self.index_url)
        self.assertContains(response, 'admin_views')
        self.assertContains(response, 'Articles')
        self.client.logout()
def test_overriding_has_module_permission(self):
    """
    Ensure that overriding has_module_permission() has the desired effect.
    In this case, it always returns False, so the module should not be
    displayed on the admin index page for any users.
    """
    index_url = reverse('admin7:index')
    # The four stanzas of the original were identical copy-paste; drive
    # them from data instead. No user -- not even the superuser -- should
    # see the app or the model on this site's index page.
    logins = (
        self.super_login,
        self.adduser_login,
        self.changeuser_login,
        self.deleteuser_login,
    )
    for login_dict in logins:
        self.client.login(**login_dict)
        response = self.client.get(index_url)
        self.assertNotContains(response, 'admin_views')
        self.assertNotContains(response, 'Articles')
        self.client.logout()
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminViewsNoUrlTest(TestCase):
    """Regression test for #17333"""

    @classmethod
    def setUpTestData(cls):
        # A staff user whose only permission is changing Reports.
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        change_report_perm = get_perm(Report, get_permission_codename('change', Report._meta))
        cls.u3.user_permissions.add(change_report_perm)
        # POST payload for the admin login form.
        cls.changeuser_login = {
            REDIRECT_FIELD_NAME: reverse('admin:index'),
            'username': 'changeuser',
            'password': 'secret',
        }

    def test_no_standard_modeladmin_urls(self):
        """Admin index views don't break when user's ModelAdmin removes standard urls"""
        self.client.get(reverse('admin:index'))
        self.client.post(reverse('admin:login'), self.changeuser_login)
        response = self.client.get(reverse('admin:index'))
        # we shouldn't get a 500 error caused by a NoReverseMatch
        self.assertEqual(response.status_code, 200)
        self.client.get(reverse('admin:logout'))
@skipUnlessDBFeature('can_defer_constraint_checks')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminViewDeletedObjectsTest(TestCase):
@classmethod
def setUpTestData(cls):
    # --- Users covering each permission level -------------------------
    # Superuser; the per-permission staff users (add/change/delete); and
    # two non-staff accounts (joepublic, nostaff). All share the same
    # SHA1 hash of the password 'secret'.
    cls.u1 = User.objects.create(
        id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
        first_name='Super', last_name='User', email='super@example.com',
        is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    cls.u2 = User.objects.create(
        id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
        first_name='Add', last_name='User', email='auser@example.com',
        is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    cls.u3 = User.objects.create(
        id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
        first_name='Change', last_name='User', email='cuser@example.com',
        is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    cls.u4 = User.objects.create(
        id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
        first_name='Delete', last_name='User', email='duser@example.com',
        is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    cls.u5 = User.objects.create(
        id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
        first_name='Joe', last_name='Public', email='joepublic@example.com',
        is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    cls.u6 = User.objects.create(
        id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
        first_name='No', last_name='Staff', email='nostaff@example.com',
        is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    # --- Basic content fixtures ---------------------------------------
    cls.s1 = Section.objects.create(name='Test section')
    cls.a1 = Article.objects.create(
        content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
    )
    cls.a2 = Article.objects.create(
        content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
    )
    cls.a3 = Article.objects.create(
        content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
    )
    cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
    # --- Relationship graph exercised by the deletion tests -----------
    # Villains lead / are contacts for Plots; Plots have details; hideouts
    # hang off villains. These drive the nested "objects to be deleted"
    # listings on the delete-confirmation pages.
    cls.v1 = Villain.objects.create(name='Adam')
    cls.v2 = Villain.objects.create(name='Sue')
    cls.sv1 = SuperVillain.objects.create(name='Bob')
    cls.pl1 = Plot.objects.create(name='World Domination', team_leader=cls.v1, contact=cls.v2)
    cls.pl2 = Plot.objects.create(name='World Peace', team_leader=cls.v2, contact=cls.v2)
    cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)
    cls.pd1 = PlotDetails.objects.create(details='almost finished', plot=cls.pl1)
    cls.sh1 = SecretHideout.objects.create(location='underground bunker', villain=cls.v1)
    cls.sh2 = SecretHideout.objects.create(location='floating castle', villain=cls.sv1)
    cls.ssh1 = SuperSecretHideout.objects.create(location='super floating castle!', supervillain=cls.sv1)
    # Mutually-referencing rows created by raw id; relies on deferred
    # constraint checks (see the skipUnlessDBFeature on this class).
    cls.cy1 = CyclicOne.objects.create(name='I am recursive', two_id=1)
    cls.cy2 = CyclicTwo.objects.create(name='I am recursive too', one_id=1)
def setUp(self):
    # Every test in this case runs as the superuser created in setUpTestData.
    self.client.login(username='super', password='secret')
def test_nesting(self):
    """
    Objects should be nested to display the relationships that
    cause them to be scheduled for deletion.
    """
    plot_url = reverse('admin:admin_views_plot_change', args=(self.pl1.pk,))
    details_url = reverse('admin:admin_views_plotdetails_change', args=(self.pd1.pk,))
    # The plot's details must appear nested inside the plot's <ul>.
    expected = re.compile(force_bytes(
        r'<li>Plot: <a href="%s">World Domination</a>\s*<ul>\s*'
        r'<li>Plot details: <a href="%s">almost finished</a>' % (plot_url, details_url)
    ))
    response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
    six.assertRegex(self, response.content, expected)
def test_cyclic(self):
    """
    Cyclic relationships should still cause each object to only be
    listed once.
    """
    expected_once = [
        '<li>Cyclic one: <a href="%s">I am recursive</a>' % (
            reverse('admin:admin_views_cyclicone_change', args=(self.cy1.pk,)),
        ),
        '<li>Cyclic two: <a href="%s">I am recursive too</a>' % (
            reverse('admin:admin_views_cyclictwo_change', args=(self.cy2.pk,)),
        ),
    ]
    response = self.client.get(reverse('admin:admin_views_cyclicone_delete', args=(self.cy1.pk,)))
    # Each side of the cycle is listed exactly once.
    for fragment in expected_once:
        self.assertContains(response, fragment, 1)
def test_perms_needed(self):
    """The confirmation page lists related object types the user may not delete."""
    self.client.logout()
    deleter = User.objects.get(username='deleteuser')
    # Grant delete on Plot only -- not on PlotDetails.
    deleter.user_permissions.add(
        get_perm(Plot, get_permission_codename('delete', Plot._meta)))
    self.assertTrue(self.client.login(username='deleteuser', password='secret'))
    response = self.client.get(
        reverse('admin:admin_views_plot_delete', args=(self.pl1.pk,)))
    self.assertContains(response, "your account doesn't have permission to delete the following types of objects")
    self.assertContains(response, "<li>plot details</li>")
def test_protected(self):
q = Question.objects.create(question="Why?")
a1 = Answer.objects.create(question=q, answer="Because.")
a2 = Answer.objects.create(question=q, answer="Yes.")
response = self.client.get(reverse('admin:admin_views_question_delete', args=(q.pk,)))
self.assertContains(response, "would require deleting the following protected related objects")
self.assertContains(
response,
'<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,))
)
self.assertContains(
response,
'<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,))
)
def test_not_registered(self):
should_contain = """<li>Secret hideout: underground bunker"""
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
self.assertContains(response, should_contain, 1)
def test_multiple_fkeys_to_same_model(self):
"""
If a deleted object has two relationships from another model,
both of those should be followed in looking for related
objects to delete.
"""
should_contain = '<li>Plot: <a href="%s">World Domination</a>' % reverse(
'admin:admin_views_plot_change', args=(self.pl1.pk,)
)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v1.pk,)))
self.assertContains(response, should_contain)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
self.assertContains(response, should_contain)
def test_multiple_fkeys_to_same_instance(self):
"""
If a deleted object has two relationships pointing to it from
another object, the other object should still only be listed
once.
"""
should_contain = '<li>Plot: <a href="%s">World Peace</a></li>' % reverse(
'admin:admin_views_plot_change', args=(self.pl2.pk,)
)
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.v2.pk,)))
self.assertContains(response, should_contain, 1)
def test_inheritance(self):
"""
In the case of an inherited model, if either the child or
parent-model instance is deleted, both instances are listed
for deletion, as well as any relationships they have.
"""
should_contain = [
'<li>Villain: <a href="%s">Bob</a>' % reverse('admin:admin_views_villain_change', args=(self.sv1.pk,)),
'<li>Super villain: <a href="%s">Bob</a>' % reverse(
'admin:admin_views_supervillain_change', args=(self.sv1.pk,)
),
'<li>Secret hideout: floating castle',
'<li>Super secret hideout: super floating castle!',
]
response = self.client.get(reverse('admin:admin_views_villain_delete', args=(self.sv1.pk,)))
for should in should_contain:
self.assertContains(response, should, 1)
response = self.client.get(reverse('admin:admin_views_supervillain_delete', args=(self.sv1.pk,)))
for should in should_contain:
self.assertContains(response, should, 1)
def test_generic_relations(self):
"""
If a deleted object has GenericForeignKeys pointing to it,
those objects should be listed for deletion.
"""
plot = self.pl3
tag = FunkyTag.objects.create(content_object=plot, name='hott')
should_contain = '<li>Funky tag: <a href="%s">hott' % reverse(
'admin:admin_views_funkytag_change', args=(tag.id,))
response = self.client.get(reverse('admin:admin_views_plot_delete', args=(plot.pk,)))
self.assertContains(response, should_contain)
def test_generic_relations_with_related_query_name(self):
"""
If a deleted object has GenericForeignKey with
GenericRelation(related_query_name='...') pointing to it, those objects
should be listed for deletion.
"""
bookmark = Bookmark.objects.create(name='djangoproject')
tag = FunkyTag.objects.create(content_object=bookmark, name='django')
tag_url = reverse('admin:admin_views_funkytag_change', args=(tag.id,))
should_contain = '<li>Funky tag: <a href="%s">django' % tag_url
response = self.client.get(reverse('admin:admin_views_bookmark_delete', args=(bookmark.pk,)))
self.assertContains(response, should_contain)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class TestGenericRelations(TestCase):
    """Rendering of generic relations (GenericForeignKey) in admin changelists."""

    @classmethod
    def setUpTestData(cls):
        # Superuser fixture; the SHA1 hash corresponds to the password 'secret'.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.v1 = Villain.objects.create(name='Adam')
        cls.pl3 = Plot.objects.create(name='Corn Conspiracy', team_leader=cls.v1, contact=cls.v1)

    def setUp(self):
        """Log in as the superuser before each test."""
        self.client.login(username='super', password='secret')

    def test_generic_content_object_in_list_display(self):
        # A FunkyTag's content_object (the plot) is rendered in the changelist
        # cell when listed in list_display.
        FunkyTag.objects.create(content_object=self.pl3, name='hott')
        response = self.client.get(reverse('admin:admin_views_funkytag_changelist'))
        self.assertContains(response, "%s</td>" % self.pl3)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminViewStringPrimaryKeyTest(TestCase):
    """
    Admin views for ModelWithStringPrimaryKey, whose primary key contains
    characters that require URL quoting/escaping (slashes, quotes, spaces...).
    Exercises change/history/delete URLs, LogEntry links and redirects.
    """

    @classmethod
    def setUpTestData(cls):
        # User fixtures; each SHA1 hash corresponds to the password 'secret'.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # A pk exercising every class of character that needs quoting/escaping
        # in URLs and HTML output.
        cls.pk = (
            "abcdefghijklmnopqrstuvwxyz ABCDEFGHIJKLMNOPQRSTUVWXYZ 1234567890 "
            """-_.!~*'() ;/?:@&=+$, <>#%" {}|\^[]`"""
        )
        cls.m1 = ModelWithStringPrimaryKey.objects.create(string_pk=cls.pk)
        content_type_pk = ContentType.objects.get_for_model(ModelWithStringPrimaryKey).pk
        # Action flag 2 = CHANGE; creates the LogEntry used by the
        # recent-actions and LogEntry tests below.
        LogEntry.objects.log_action(100, content_type_pk, cls.pk, cls.pk, 2, change_message='Changed something')

    def setUp(self):
        """Log in as the superuser before each test."""
        self.client.login(username='super', password='secret')

    def test_get_history_view(self):
        """
        Retrieving the history for an object using urlencoded form of primary
        key should work.
        Refs #12349, #18550.
        """
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_history', args=(self.pk,)))
        self.assertContains(response, escape(self.pk))
        self.assertContains(response, 'Changed something')
        self.assertEqual(response.status_code, 200)

    def test_get_change_view(self):
        "Retrieving the object using urlencoded form of primary key should work"
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_change', args=(self.pk,)))
        self.assertContains(response, escape(self.pk))
        self.assertEqual(response.status_code, 200)

    def test_changelist_to_changeform_link(self):
        "Link to the changeform of the object in changelist should use reverse() and be quoted -- #18072"
        response = self.client.get(reverse('admin:admin_views_modelwithstringprimarykey_changelist'))
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        pk_final_url = escape(iri_to_uri(quote(self.pk)))
        change_url = reverse(
            'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
        ).replace('__fk__', pk_final_url)
        should_contain = '<th class="field-__str__"><a href="%s">%s</a></th>' % (change_url, escape(self.pk))
        self.assertContains(response, should_contain)

    def test_recentactions_link(self):
        "The link from the recent actions list referring to the changeform of the object should be quoted"
        response = self.client.get(reverse('admin:index'))
        link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
        should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
        self.assertContains(response, should_contain)

    def test_recentactions_without_content_type(self):
        "If a LogEntry is missing content_type it will not display it in span tag under the hyperlink."
        response = self.client.get(reverse('admin:index'))
        link = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
        should_contain = """<a href="%s">%s</a>""" % (escape(link), escape(self.pk))
        self.assertContains(response, should_contain)
        should_contain = "Model with string primary key"  # capitalized in Recent Actions
        self.assertContains(response, should_contain)
        logentry = LogEntry.objects.get(content_type__model__iexact='modelwithstringprimarykey')
        # http://code.djangoproject.com/ticket/10275
        # if the log entry doesn't have a content type it should still be
        # possible to view the Recent Actions part
        logentry.content_type = None
        logentry.save()
        # The model name appears once less when the entry has no content type.
        counted_presence_before = response.content.count(force_bytes(should_contain))
        response = self.client.get(reverse('admin:index'))
        counted_presence_after = response.content.count(force_bytes(should_contain))
        self.assertEqual(counted_presence_before - 1,
                         counted_presence_after)

    def test_logentry_get_admin_url(self):
        """
        LogEntry.get_admin_url returns a URL to edit the entry's object or
        None for non-existent (possibly deleted) models.
        """
        log_entry_model = "modelwithstringprimarykey"
        logentry = LogEntry.objects.get(content_type__model__iexact=log_entry_model)
        desired_admin_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(self.pk),))
        self.assertEqual(logentry.get_admin_url(), desired_admin_url)
        self.assertIn(iri_to_uri(quote(self.pk)), logentry.get_admin_url())
        # A model name with no registered admin yields no URL.
        logentry.content_type.model = "non-existent"
        self.assertEqual(logentry.get_admin_url(), None)

    def test_logentry_get_edited_object(self):
        "LogEntry.get_edited_object returns the edited object of a given LogEntry object"
        logentry = LogEntry.objects.get(content_type__model__iexact="modelwithstringprimarykey")
        edited_obj = logentry.get_edited_object()
        self.assertEqual(logentry.object_id, str(edited_obj.pk))

    def test_logentry_save(self):
        """
        LogEntry.action_time is a timestamp of the date when the entry was
        created. It shouldn't be updated on a subsequent save().
        """
        logentry = LogEntry.objects.get(content_type__model__iexact="modelwithstringprimarykey")
        action_time = logentry.action_time
        logentry.save()
        self.assertEqual(logentry.action_time, action_time)

    def test_deleteconfirmation_link(self):
        "The link from the delete confirmation page referring back to the changeform of the object should be quoted"
        url = reverse('admin:admin_views_modelwithstringprimarykey_delete', args=(quote(self.pk),))
        response = self.client.get(url)
        # this URL now comes through reverse(), thus url quoting and iri_to_uri encoding
        change_url = reverse(
            'admin:admin_views_modelwithstringprimarykey_change', args=('__fk__',)
        ).replace('__fk__', escape(iri_to_uri(quote(self.pk))))
        should_contain = '<a href="%s">%s</a>' % (change_url, escape(self.pk))
        self.assertContains(response, should_contain)

    def test_url_conflicts_with_add(self):
        "A model with a primary key that ends with add or is `add` should be visible"
        add_model = ModelWithStringPrimaryKey.objects.create(pk="i have something to add")
        add_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)
        # pk exactly "add" must still reverse to the change view, not the add view.
        add_model2 = ModelWithStringPrimaryKey.objects.create(pk="add")
        add_url = reverse('admin:admin_views_modelwithstringprimarykey_add')
        change_url = reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(add_model2.pk),))
        self.assertNotEqual(add_url, change_url)

    def test_url_conflicts_with_delete(self):
        "A model with a primary key that ends with delete should be visible"
        delete_model = ModelWithStringPrimaryKey(pk="delete")
        delete_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(delete_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)

    def test_url_conflicts_with_history(self):
        "A model with a primary key that ends with history should be visible"
        history_model = ModelWithStringPrimaryKey(pk="history")
        history_model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(history_model.pk),))
        )
        should_contain = """<h1>Change model with string primary key</h1>"""
        self.assertContains(response, should_contain)

    def test_shortcut_view_with_escaping(self):
        "'View on site should' work properly with char fields"
        model = ModelWithStringPrimaryKey(pk='abc_123')
        model.save()
        response = self.client.get(
            reverse('admin:admin_views_modelwithstringprimarykey_change', args=(quote(model.pk),))
        )
        should_contain = '/%s/" class="viewsitelink">' % model.pk
        self.assertContains(response, should_contain)

    def test_change_view_history_link(self):
        """Object history button link should work and contain the pk value quoted."""
        url = reverse('admin:%s_modelwithstringprimarykey_change' %
                      ModelWithStringPrimaryKey._meta.app_label,
                      args=(quote(self.pk),))
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        expected_link = reverse('admin:%s_modelwithstringprimarykey_history' %
                                ModelWithStringPrimaryKey._meta.app_label,
                                args=(quote(self.pk),))
        self.assertContains(response, '<a href="%s" class="historylink"' % escape(expected_link))

    def test_redirect_on_add_view_continue_button(self):
        """As soon as an object is added using "Save and continue editing"
        button, the user should be redirected to the object's change_view.
        In case primary key is a string containing some special characters
        like slash or underscore, these characters must be escaped (see #22266)
        """
        response = self.client.post(
            reverse('admin:admin_views_modelwithstringprimarykey_add'),
            {
                'string_pk': '123/history',
                "_continue": "1",  # Save and continue editing
            }
        )
        self.assertEqual(response.status_code, 302)  # temporary redirect
        self.assertIn('/123_2Fhistory/', response['location'])  # PK is quoted
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class SecureViewTests(TestCase):
    """
    Test behavior of a view protected by the staff_member_required decorator.
    """

    @classmethod
    def setUpTestData(cls):
        # Superuser fixture; the SHA1 hash corresponds to the password 'secret'.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def test_secure_view_shows_login_if_not_logged_in(self):
        """
        Ensure that we see the admin login form.
        """
        secure_url = reverse('secure_view')
        response = self.client.get(secure_url)
        # Anonymous request redirects to the admin login with ?next= back-link.
        self.assertRedirects(response, '%s?next=%s' % (reverse('admin:login'), secure_url))
        response = self.client.get(secure_url, follow=True)
        self.assertTemplateUsed(response, 'admin/login.html')
        self.assertEqual(response.context[REDIRECT_FIELD_NAME], secure_url)

    def test_staff_member_required_decorator_works_with_argument(self):
        """
        Ensure that staff_member_required decorator works with an argument
        (redirect_field_name).
        """
        secure_url = '/test_admin/admin/secure-view2/'
        response = self.client.get(secure_url)
        # The custom redirect_field_name 'myfield' replaces the default 'next'.
        self.assertRedirects(response, '%s?myfield=%s' % (reverse('admin:login'), secure_url))
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminViewUnicodeTest(TestCase):
    """Admin edit/delete views with non-ASCII (Norwegian) model data."""

    @classmethod
    def setUpTestData(cls):
        # Superuser fixture; the SHA1 hash corresponds to the password 'secret'.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        # Book with chapters containing non-ASCII titles/content.
        cls.b1 = Book.objects.create(name='Lærdommer')
        cls.p1 = Promo.objects.create(name='<Promo for Lærdommer>', book=cls.b1)
        cls.chap1 = Chapter.objects.create(
            title='Norske bostaver æøå skaper problemer', content='<p>Svært frustrerende med UnicodeDecodeErro</p>',
            book=cls.b1
        )
        cls.chap2 = Chapter.objects.create(
            title='Kjærlighet', content='<p>La kjærligheten til de lidende seire.</p>', book=cls.b1)
        cls.chap3 = Chapter.objects.create(title='Kjærlighet', content='<p>Noe innhold</p>', book=cls.b1)
        cls.chap4 = ChapterXtra1.objects.create(chap=cls.chap1, xtra='<Xtra(1) Norske bostaver æøå skaper problemer>')
        cls.chap5 = ChapterXtra1.objects.create(chap=cls.chap2, xtra='<Xtra(1) Kjærlighet>')
        cls.chap6 = ChapterXtra1.objects.create(chap=cls.chap3, xtra='<Xtra(1) Kjærlighet>')
        cls.chap7 = ChapterXtra2.objects.create(chap=cls.chap1, xtra='<Xtra(2) Norske bostaver æøå skaper problemer>')
        cls.chap8 = ChapterXtra2.objects.create(chap=cls.chap2, xtra='<Xtra(2) Kjærlighet>')
        cls.chap9 = ChapterXtra2.objects.create(chap=cls.chap3, xtra='<Xtra(2) Kjærlighet>')

    def setUp(self):
        """Log in as the superuser before each test."""
        self.client.login(username='super', password='secret')

    def test_unicode_edit(self):
        """
        A test to ensure that POST on edit_view handles non-ASCII characters.
        """
        # Inline formset payload: 3 existing chapters plus 3 empty extra forms.
        post_data = {
            "name": "Test lærdommer",
            # inline data
            "chapter_set-TOTAL_FORMS": "6",
            "chapter_set-INITIAL_FORMS": "3",
            "chapter_set-MAX_NUM_FORMS": "0",
            "chapter_set-0-id": self.chap1.pk,
            "chapter_set-0-title": "Norske bostaver æøå skaper problemer",
            "chapter_set-0-content": "<p>Svært frustrerende med UnicodeDecodeError</p>",
            "chapter_set-1-id": self.chap2.id,
            "chapter_set-1-title": "Kjærlighet.",
            "chapter_set-1-content": "<p>La kjærligheten til de lidende seire.</p>",
            "chapter_set-2-id": self.chap3.id,
            "chapter_set-2-title": "Need a title.",
            "chapter_set-2-content": "<p>Newest content</p>",
            "chapter_set-3-id": "",
            "chapter_set-3-title": "",
            "chapter_set-3-content": "",
            "chapter_set-4-id": "",
            "chapter_set-4-title": "",
            "chapter_set-4-content": "",
            "chapter_set-5-id": "",
            "chapter_set-5-title": "",
            "chapter_set-5-content": "",
        }
        response = self.client.post(reverse('admin:admin_views_book_change', args=(self.b1.pk,)), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere

    def test_unicode_delete(self):
        """
        Ensure that the delete_view handles non-ASCII characters
        """
        delete_dict = {'post': 'yes'}
        delete_url = reverse('admin:admin_views_book_delete', args=(self.b1.pk,))
        response = self.client.get(delete_url)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(delete_url, delete_dict)
        self.assertRedirects(response, reverse('admin:admin_views_book_changelist'))
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminViewListEditable(TestCase):
    @classmethod
    def setUpTestData(cls):
        """Create user, section/article, and person fixtures shared by the
        list_editable tests. Each SHA1 hash corresponds to the password 'secret'."""
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        # Persons edited by the list_editable POST tests below.
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
    def setUp(self):
        """Log in as the superuser created in setUpTestData before each test."""
        self.client.login(username='super', password='secret')
def test_inheritance(self):
Podcast.objects.create(name="This Week in Django",
release_date=datetime.date.today())
response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_inheritance_2(self):
Vodcast.objects.create(name="This Week in Django", released=True)
response = self.client.get(reverse('admin:admin_views_vodcast_changelist'))
self.assertEqual(response.status_code, 200)
def test_custom_pk(self):
Language.objects.create(iso='en', name='English', english_name='English')
response = self.client.get(reverse('admin:admin_views_language_changelist'))
self.assertEqual(response.status_code, 200)
    def test_changelist_input_html(self):
        """Count the form inputs rendered by a list_editable changelist."""
        response = self.client.get(reverse('admin:admin_views_person_changelist'))
        # 2 inputs per object(the field and the hidden id field) = 6
        # 4 management hidden fields = 4
        # 4 action inputs (3 regular checkboxes, 1 checkbox to select all)
        # main form submit button = 1
        # search field and search submit button = 2
        # CSRF field = 1
        # field to track 'select all' across paginated views = 1
        # 6 + 4 + 4 + 1 + 2 + 1 + 1 = 19 inputs
        self.assertContains(response, "<input", count=19)
        # 1 select per object (3) + the action-chooser select = 4 selects
        self.assertContains(response, "<select", count=4)
    def test_post_messages(self):
        """Saving via list_editable produces exactly one success message."""
        # Ticket 12707: Saving inline editable should not show admin
        # action warnings
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-gender": "1",
            "form-0-id": "%s" % self.per1.pk,
            "form-1-gender": "2",
            "form-1-id": "%s" % self.per2.pk,
            "form-2-alive": "checked",
            "form-2-gender": "1",
            "form-2-id": "%s" % self.per3.pk,
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_person_changelist'),
                                    data, follow=True)
        self.assertEqual(len(response.context['messages']), 1)
    def test_post_submission(self):
        """POSTing a list_editable formset updates rows, including on filtered
        and searched changelist pages."""
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-gender": "1",
            "form-0-id": "%s" % self.per1.pk,
            "form-1-gender": "2",
            "form-1-id": "%s" % self.per2.pk,
            "form-2-alive": "checked",
            "form-2-gender": "1",
            "form-2-id": "%s" % self.per3.pk,
            "_save": "Save",
        }
        self.client.post(reverse('admin:admin_views_person_changelist'), data)
        # 'alive' is unchecked for per1 (absent from the payload) and per2's
        # gender was changed to 2.
        self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
        self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
        # test a filtered page
        data = {
            "form-TOTAL_FORMS": "2",
            "form-INITIAL_FORMS": "2",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": "%s" % self.per1.pk,
            "form-0-gender": "1",
            "form-0-alive": "checked",
            "form-1-id": "%s" % self.per3.pk,
            "form-1-gender": "1",
            "form-1-alive": "checked",
            "_save": "Save",
        }
        self.client.post(reverse('admin:admin_views_person_changelist') + '?gender__exact=1', data)
        self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
        # test a searched page
        data = {
            "form-TOTAL_FORMS": "1",
            "form-INITIAL_FORMS": "1",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": "%s" % self.per1.pk,
            "form-0-gender": "1",
            "_save": "Save",
        }
        self.client.post(reverse('admin:admin_views_person_changelist') + '?q=john', data)
        self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
    def test_non_field_errors(self):
        ''' Ensure that non field errors are displayed for each of the
            forms in the changelist's formset. Refs #13126.
        '''
        fd1 = FoodDelivery.objects.create(reference='123', driver='bill', restaurant='thai')
        fd2 = FoodDelivery.objects.create(reference='456', driver='bill', restaurant='india')
        fd3 = FoodDelivery.objects.create(reference='789', driver='bill', restaurant='pizza')
        # One duplicate (driver, restaurant) pair -> one non-field error row.
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": str(fd1.id),
            "form-0-reference": "123",
            "form-0-driver": "bill",
            "form-0-restaurant": "thai",
            # Same data as above: Forbidden because of unique_together!
            "form-1-id": str(fd2.id),
            "form-1-reference": "456",
            "form-1-driver": "bill",
            "form-1-restaurant": "thai",
            "form-2-id": str(fd3.id),
            "form-2-reference": "789",
            "form-2-driver": "bill",
            "form-2-restaurant": "pizza",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
        self.assertContains(
            response,
            '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
            'with this Driver and Restaurant already exists.</li></ul></td></tr>',
            1,
            html=True
        )
        # Two duplicate pairs -> the error row is rendered twice.
        data = {
            "form-TOTAL_FORMS": "3",
            "form-INITIAL_FORMS": "3",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": str(fd1.id),
            "form-0-reference": "123",
            "form-0-driver": "bill",
            "form-0-restaurant": "thai",
            # Same data as above: Forbidden because of unique_together!
            "form-1-id": str(fd2.id),
            "form-1-reference": "456",
            "form-1-driver": "bill",
            "form-1-restaurant": "thai",
            # Same data also.
            "form-2-id": str(fd3.id),
            "form-2-reference": "789",
            "form-2-driver": "bill",
            "form-2-restaurant": "thai",
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_fooddelivery_changelist'), data)
        self.assertContains(
            response,
            '<tr><td colspan="4"><ul class="errorlist nonfield"><li>Food delivery '
            'with this Driver and Restaurant already exists.</li></ul></td></tr>',
            2,
            html=True
        )
    def test_non_form_errors(self):
        """Formset-level (non-form) errors are rendered on the changelist."""
        # test if non-form errors are handled; ticket #12716
        data = {
            "form-TOTAL_FORMS": "1",
            "form-INITIAL_FORMS": "1",
            "form-MAX_NUM_FORMS": "0",
            "form-0-id": "%s" % self.per2.pk,
            "form-0-alive": "1",
            "form-0-gender": "2",
            # Ensure that the form processing understands this as a list_editable "Save"
            # and not an action "Go".
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
        self.assertContains(response, "Grace is not a Zombie")
def test_non_form_errors_is_errorlist(self):
# test if non-form errors are correctly handled; ticket #12878
data = {
"form-TOTAL_FORMS": "1",
"form-INITIAL_FORMS": "1",
"form-MAX_NUM_FORMS": "0",
"form-0-id": "%s" % self.per2.pk,
"form-0-alive": "1",
"form-0-gender": "2",
"_save": "Save",
}
response = self.client.post(reverse('admin:admin_views_person_changelist'), data)
non_form_errors = response.context['cl'].formset.non_form_errors()
self.assertIsInstance(non_form_errors, ErrorList)
self.assertEqual(str(non_form_errors), str(ErrorList(["Grace is not a Zombie"])))
    def test_list_editable_ordering(self):
        """Edited 'order' values are applied to the correct rows even when the
        changelist reorders as a result."""
        collector = Collector.objects.create(id=1, name="Frederick Clegg")
        Category.objects.create(id=1, order=1, collector=collector)
        Category.objects.create(id=2, order=2, collector=collector)
        Category.objects.create(id=3, order=0, collector=collector)
        Category.objects.create(id=4, order=0, collector=collector)
        # NB: The order values must be changed so that the items are reordered.
        data = {
            "form-TOTAL_FORMS": "4",
            "form-INITIAL_FORMS": "4",
            "form-MAX_NUM_FORMS": "0",
            "form-0-order": "14",
            "form-0-id": "1",
            "form-0-collector": "1",
            "form-1-order": "13",
            "form-1-id": "2",
            "form-1-collector": "1",
            "form-2-order": "1",
            "form-2-id": "3",
            "form-2-collector": "1",
            "form-3-order": "0",
            "form-3-id": "4",
            "form-3-collector": "1",
            # Ensure that the form processing understands this as a list_editable "Save"
            # and not an action "Go".
            "_save": "Save",
        }
        response = self.client.post(reverse('admin:admin_views_category_changelist'), data)
        # Successful post will redirect
        self.assertEqual(response.status_code, 302)
        # Check that the order values have been applied to the right objects
        self.assertEqual(Category.objects.get(id=1).order, 14)
        self.assertEqual(Category.objects.get(id=2).order, 13)
        self.assertEqual(Category.objects.get(id=3).order, 1)
        self.assertEqual(Category.objects.get(id=4).order, 0)
def test_list_editable_pagination(self):
"""
Ensure that pagination works for list_editable items.
Refs #16819.
"""
UnorderedObject.objects.create(id=1, name='Unordered object #1')
UnorderedObject.objects.create(id=2, name='Unordered object #2')
UnorderedObject.objects.create(id=3, name='Unordered object #3')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist'))
self.assertContains(response, 'Unordered object #3')
self.assertContains(response, 'Unordered object #2')
self.assertNotContains(response, 'Unordered object #1')
response = self.client.get(reverse('admin:admin_views_unorderedobject_changelist') + '?p=1')
self.assertNotContains(response, 'Unordered object #3')
self.assertNotContains(response, 'Unordered object #2')
self.assertContains(response, 'Unordered object #1')
def test_list_editable_action_submit(self):
    """
    List editable changes must not be executed when the form is submitted
    with the action "Go" button instead of the list_editable "Save" button.
    """
    post_data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "1",
        "form-1-gender": "2",
        "form-1-id": "2",
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "3",
        # "index" (and no "_save") marks this as an action "Go" submit.
        "index": "0",
        "_selected_action": ['3'],
        "action": ['', 'delete_selected'],
    }
    self.client.post(reverse('admin:admin_views_person_changelist'), post_data)
    # Neither the alive flag nor the gender edits were applied.
    self.assertEqual(Person.objects.get(name="John Mauchly").alive, True)
    self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 1)
def test_list_editable_action_choices(self):
    """
    List editable changes are executed when the "Save" button submits the
    form; any selected action choices must be ignored.
    """
    post_data = {
        "form-TOTAL_FORMS": "3",
        "form-INITIAL_FORMS": "3",
        "form-MAX_NUM_FORMS": "0",
        "form-0-gender": "1",
        "form-0-id": "%s" % self.per1.pk,
        "form-1-gender": "2",
        "form-1-id": "%s" % self.per2.pk,
        "form-2-alive": "checked",
        "form-2-gender": "1",
        "form-2-id": "%s" % self.per3.pk,
        # "_save" marks this as a list_editable submit, overriding the action.
        "_save": "Save",
        "_selected_action": ['1'],
        "action": ['', 'delete_selected'],
    }
    self.client.post(reverse('admin:admin_views_person_changelist'), post_data)
    # The list_editable edits were applied; nothing was deleted.
    self.assertEqual(Person.objects.get(name="John Mauchly").alive, False)
    self.assertEqual(Person.objects.get(name="Grace Hopper").gender, 2)
def test_list_editable_popup(self):
    """
    Fields should not be list-editable in popups.
    """
    changelist_url = reverse('admin:admin_views_person_changelist')
    # Regular changelist: list_editable is active.
    regular_response = self.client.get(changelist_url)
    self.assertNotEqual(regular_response.context['cl'].list_editable, ())
    # Popup changelist: list_editable is disabled.
    popup_response = self.client.get(changelist_url + '?%s' % IS_POPUP_VAR)
    self.assertEqual(popup_response.context['cl'].list_editable, ())
def test_pk_hidden_fields(self):
    """
    Hidden pk fields aren't displayed in the table body; their
    corresponding human-readable value is displayed instead. The hidden
    pk fields are in fact displayed, but separately (not in the table)
    and only once. Refs #12475.
    """
    first_story = Story.objects.create(
        title='The adventures of Guido',
        content='Once upon a time in Djangoland...',
    )
    second_story = Story.objects.create(
        title='Crouching Tiger, Hidden Python',
        content='The Python was sneaking into...',
    )
    response = self.client.get(reverse('admin:admin_views_story_changelist'))
    # Each hidden pk field appears exactly once, outside the table.
    self.assertContains(response, 'id="id_form-0-id"', 1)
    self.assertContains(response, 'id="id_form-1-id"', 1)
    self.assertContains(
        response,
        '<div class="hiddenfields">\n'
        '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" />'
        '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>'
        % (second_story.id, first_story.id),
        html=True
    )
    # The human-readable pk value appears once per row in the table body.
    self.assertContains(response, '<td class="field-id">%d</td>' % first_story.id, 1)
    self.assertContains(response, '<td class="field-id">%d</td>' % second_story.id, 1)
def test_pk_hidden_fields_with_list_display_links(self):
    """
    Same as test_pk_hidden_fields, but when the hidden pk fields are
    referenced in list_display_links. Refs #12475.
    """
    first_story = OtherStory.objects.create(
        title='The adventures of Guido',
        content='Once upon a time in Djangoland...',
    )
    second_story = OtherStory.objects.create(
        title='Crouching Tiger, Hidden Python',
        content='The Python was sneaking into...',
    )
    first_link = reverse('admin:admin_views_otherstory_change', args=(first_story.pk,))
    second_link = reverse('admin:admin_views_otherstory_change', args=(second_story.pk,))
    response = self.client.get(reverse('admin:admin_views_otherstory_changelist'))
    # Each hidden pk field appears exactly once, outside the table.
    self.assertContains(response, 'id="id_form-0-id"', 1)
    self.assertContains(response, 'id="id_form-1-id"', 1)
    self.assertContains(
        response,
        '<div class="hiddenfields">\n'
        '<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" />'
        '<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" />\n</div>'
        % (second_story.id, first_story.id),
        html=True
    )
    # The pk is rendered as a change link in the table header cell.
    self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (first_link, first_story.id), 1)
    self.assertContains(response, '<th class="field-id"><a href="%s">%d</a></th>' % (second_link, second_story.id), 1)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminSearchTest(TestCase):
    """Tests for the search box on admin changelist pages."""

    @classmethod
    def setUpTestData(cls):
        # One superuser ('super'), used to log in, plus five other users
        # that serve as searchable fixture rows for the auth_user changelist.
        # Hash presumably corresponds to the 'secret' password used in
        # setUp() — TODO confirm against the shared fixture.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        # Section/Article/Person fixtures shared with the person search tests.
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
        # Recommenders/recommendations plus title translations; the
        # Recommendation changelist searches are run against these texts.
        cls.t1 = Recommender.objects.create()
        cls.t2 = Recommendation.objects.create(recommender=cls.t1)
        cls.t3 = Recommender.objects.create()
        cls.t4 = Recommendation.objects.create(recommender=cls.t3)
        cls.tt1 = TitleTranslation.objects.create(title=cls.t1, text='Bar')
        cls.tt2 = TitleTranslation.objects.create(title=cls.t2, text='Foo')
        cls.tt3 = TitleTranslation.objects.create(title=cls.t3, text='Few')
        cls.tt4 = TitleTranslation.objects.create(title=cls.t4, text='Bas')

    def setUp(self):
        # Every test runs as the superuser created in setUpTestData().
        self.client.login(username='super', password='secret')

    def test_search_on_sibling_models(self):
        "Check that a search that mentions sibling models returns the expected result."
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned 1 object
        self.assertContains(response, "\n1 recommendation\n")

    def test_with_fk_to_field(self):
        """
        Ensure that the to_field GET parameter is preserved when a search
        is performed. Refs #10918.
        """
        response = self.client.get(reverse('admin:auth_user_changelist') + '?q=joe&%s=id' % TO_FIELD_VAR)
        self.assertContains(response, "\n1 user\n")
        # The to_field value must survive as a hidden input in the search form.
        self.assertContains(response, '<input type="hidden" name="%s" value="id"/>' % TO_FIELD_VAR, html=True)

    def test_exact_matches(self):
        # 'bar' matches the 'Bar' translation text exactly (case-insensitively).
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        # confirm the search returned one object
        self.assertContains(response, "\n1 recommendation\n")
        # A prefix of the text ('ba') does not match: the search is exact.
        response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=ba')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 recommendations\n")

    def test_beginning_matches(self):
        # 'Gui' matches 'Guido van Rossum' at the beginning of the name.
        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        # confirm the search returned one object
        self.assertContains(response, "\n1 person\n")
        self.assertContains(response, "Guido")
        # A mid-name substring ('uido') does not match: prefix search only.
        response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=uido')
        # confirm the search returned zero objects
        self.assertContains(response, "\n0 persons\n")
        self.assertNotContains(response, "Guido")

    def test_pluggable_search(self):
        # Pluggable search is presumably implemented in the model's
        # ModelAdmin (see admin.py); it matches on name and on age.
        PluggableSearchPerson.objects.create(name="Bob", age=10)
        PluggableSearchPerson.objects.create(name="Amy", age=20)
        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=Bob')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Bob")
        response = self.client.get(reverse('admin:admin_views_pluggablesearchperson_changelist') + '?q=20')
        # confirm the search returned one object
        self.assertContains(response, "\n1 pluggable search person\n")
        self.assertContains(response, "Amy")

    def test_reset_link(self):
        """
        Test presence of reset link in search bar ("1 result (_x total_)").
        """
        # 1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        # + 1 for total count
        with self.assertNumQueries(5):
            response = self.client.get(reverse('admin:admin_views_person_changelist') + '?q=Gui')
        self.assertContains(response,
            """<span class="small quiet">1 result (<a href="?">3 total</a>)</span>""",
            html=True)

    def test_no_total_count(self):
        """
        #8408 -- "Show all" should be displayed instead of the total count if
        ModelAdmin.show_full_result_count is False.
        """
        # 1 query for session + 1 for fetching user
        # + 1 for filtered result + 1 for filtered count
        # (no total-count query, hence 4 instead of 5)
        with self.assertNumQueries(4):
            response = self.client.get(reverse('admin:admin_views_recommendation_changelist') + '?q=bar')
        self.assertContains(response,
            """<span class="small quiet">1 result (<a href="?">Show all</a>)</span>""",
            html=True)
        self.assertTrue(response.context['cl'].show_admin_actions)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminInheritedInlinesTest(TestCase):
    """Admin handling of inline models that inherit from a common parent."""

    @classmethod
    def setUpTestData(cls):
        # Superuser used to log in; hash presumably corresponds to the
        # 'secret' password used in setUp() — TODO confirm.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_inline(self):
        "Ensure that inline models which inherit from a common parent are correctly handled by admin."
        foo_user = "foo username"
        bar_user = "bar username"
        # Collects every rendered form-field name from a response body.
        name_re = re.compile(b'name="(.*?)"')
        # test the add case
        response = self.client.get(reverse('admin:admin_views_persona_add'))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))
        # post to the add view: one FooAccount inline and one BarAccount inline
        post_data = {
            "name": "Test Name",
            # inline data
            "accounts-TOTAL_FORMS": "1",
            "accounts-INITIAL_FORMS": "0",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": foo_user,
            "accounts-2-TOTAL_FORMS": "1",
            "accounts-2-INITIAL_FORMS": "0",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": bar_user,
        }
        response = self.client.post(reverse('admin:admin_views_persona_add'), post_data)
        self.assertEqual(response.status_code, 302)  # redirect somewhere
        # One object of each type, wired to the same Persona.
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
        persona_id = Persona.objects.all()[0].id
        foo_id = FooAccount.objects.all()[0].id
        bar_id = BarAccount.objects.all()[0].id
        # test the edit case
        response = self.client.get(reverse('admin:admin_views_persona_change', args=(persona_id,)))
        names = name_re.findall(response.content)
        # make sure we have no duplicate HTML names
        self.assertEqual(len(names), len(set(names)))
        post_data = {
            "name": "Test Name",
            "accounts-TOTAL_FORMS": "2",
            "accounts-INITIAL_FORMS": "1",
            "accounts-MAX_NUM_FORMS": "0",
            "accounts-0-username": "%s-1" % foo_user,
            "accounts-0-account_ptr": str(foo_id),
            "accounts-0-persona": str(persona_id),
            "accounts-2-TOTAL_FORMS": "2",
            "accounts-2-INITIAL_FORMS": "1",
            "accounts-2-MAX_NUM_FORMS": "0",
            "accounts-2-0-username": "%s-1" % bar_user,
            "accounts-2-0-account_ptr": str(bar_id),
            "accounts-2-0-persona": str(persona_id),
        }
        response = self.client.post(reverse('admin:admin_views_persona_change', args=(persona_id,)), post_data)
        self.assertEqual(response.status_code, 302)
        # Still one object of each type: the edit renamed, it did not add.
        self.assertEqual(Persona.objects.count(), 1)
        self.assertEqual(FooAccount.objects.count(), 1)
        self.assertEqual(BarAccount.objects.count(), 1)
        self.assertEqual(FooAccount.objects.all()[0].username, "%s-1" % foo_user)
        self.assertEqual(BarAccount.objects.all()[0].username, "%s-1" % bar_user)
        self.assertEqual(Persona.objects.all()[0].accounts.count(), 2)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminActionsTest(TestCase):
    """
    Tests for admin changelist actions: the built-in delete_selected
    action, custom ModelAdmin/function actions, and the action form UI.
    """

    @classmethod
    def setUpTestData(cls):
        # Superuser used to log in, plus one external and one plain
        # subscriber that the delete/mail actions operate on.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = ExternalSubscriber.objects.create(name='John Doe', email='john@example.org')
        cls.s2 = Subscriber.objects.create(name='Max Mustermann', email='max@example.org')

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_model_admin_custom_action(self):
        "Tests a custom action defined in a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'mail_admin',
            'index': 0,
        }
        self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a ModelAdmin action')

    def test_model_admin_default_delete_action(self):
        "Tests the default delete action defined as a ModelAdmin method"
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'index': 0,
        }
        # Second POST with post=yes confirms the deletion.
        delete_confirmation_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': 'delete_selected',
            'post': 'yes',
        }
        confirmation = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertIsInstance(confirmation, TemplateResponse)
        self.assertContains(confirmation, "Are you sure you want to delete the selected subscribers?")
        self.assertContains(confirmation, "<h2>Summary</h2>")
        self.assertContains(confirmation, "<li>Subscribers: 3</li>")
        self.assertContains(confirmation, "<li>External subscribers: 1</li>")
        self.assertContains(confirmation, ACTION_CHECKBOX_NAME, count=2)
        self.client.post(reverse('admin:admin_views_subscriber_changelist'), delete_confirmation_data)
        self.assertEqual(Subscriber.objects.count(), 0)

    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_non_localized_pk(self):
        """If USE_THOUSAND_SEPARATOR is set, make sure that the ids for
        the objects selected for deletion are rendered without separators.
        Refs #14895.
        """
        subscriber = Subscriber.objects.get(id=1)
        subscriber.id = 9999
        subscriber.save()
        action_data = {
            ACTION_CHECKBOX_NAME: [9999, 2],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        self.assertTemplateUsed(response, 'admin/delete_selected_confirmation.html')
        self.assertContains(response, 'value="9999"')  # Instead of 9,999
        self.assertContains(response, 'value="2"')

    def test_model_admin_default_delete_action_protected(self):
        """
        Tests the default delete action defined as a ModelAdmin method in the
        case where some related objects are protected from deletion.
        """
        q1 = Question.objects.create(question="Why?")
        a1 = Answer.objects.create(question=q1, answer="Because.")
        a2 = Answer.objects.create(question=q1, answer="Yes.")
        q2 = Question.objects.create(question="Wherefore?")
        action_data = {
            ACTION_CHECKBOX_NAME: [q1.pk, q2.pk],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_question_changelist'), action_data)
        # The protected answers are listed, each linking to its change page.
        self.assertContains(response, "would require deleting the following protected related objects")
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Because.</a></li>' % reverse('admin:admin_views_answer_change', args=(a1.pk,)),
            html=True
        )
        self.assertContains(
            response,
            '<li>Answer: <a href="%s">Yes.</a></li>' % reverse('admin:admin_views_answer_change', args=(a2.pk,)),
            html=True
        )

    def test_model_admin_default_delete_action_no_change_url(self):
        """
        Default delete action shouldn't break if a user's ModelAdmin removes the url for change_view.
        Regression test for #20640
        """
        obj = UnchangeableObject.objects.create()
        action_data = {
            ACTION_CHECKBOX_NAME: obj.pk,
            "action": "delete_selected",
            "index": "0",
        }
        response = self.client.post(reverse('admin:admin_views_unchangeableobject_changelist'), action_data)
        # No 500 caused by NoReverseMatch
        self.assertEqual(response.status_code, 200)
        # The page shouldn't display a link to the nonexistent change page
        self.assertContains(response, "<li>Unchangeable object: UnchangeableObject object</li>", 1, html=True)

    def test_custom_function_mail_action(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')

    def test_custom_function_action_with_redirect(self):
        "Tests a custom action defined in a function"
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'redirect_to',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(response.status_code, 302)

    def test_default_redirect(self):
        """
        Test that actions which don't return an HttpResponse are redirected to
        the same page, retaining the querystring (which may contain changelist
        information).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'external_mail',
            'index': 0,
        }
        url = reverse('admin:admin_views_externalsubscriber_changelist') + '?o=1'
        response = self.client.post(url, action_data)
        self.assertRedirects(response, url)

    def test_custom_function_action_streaming_response(self):
        """Tests a custom action that returns a StreamingHttpResponse."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'download',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        content = b''.join(response.streaming_content)
        self.assertEqual(content, b'This is the content of the file')
        self.assertEqual(response.status_code, 200)

    def test_custom_function_action_no_perm_response(self):
        """Tests a custom action that returns an HttpResponse with 403 code."""
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'no_perm',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        self.assertEqual(response.status_code, 403)
        self.assertEqual(response.content, b'No permission to perform this action')

    def test_actions_ordering(self):
        """
        Ensure that actions are ordered as expected.
        Refs #15964.
        """
        response = self.client.get(reverse('admin:admin_views_externalsubscriber_changelist'))
        self.assertContains(response, '''<label>Action: <select name="action">
<option value="" selected="selected">---------</option>
<option value="delete_selected">Delete selected external
subscribers</option>
<option value="redirect_to">Redirect to (Awesome action)</option>
<option value="external_mail">External mail (Another awesome
action)</option>
<option value="download">Download subscription</option>
<option value="no_perm">No permission to run</option>
</select>''', html=True)

    def test_model_without_action(self):
        "Tests a ModelAdmin without any action"
        response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist'))
        self.assertEqual(response.context["action_form"], None)
        # Fix: the original repeated this assertion verbatim on the next
        # line; the duplicate added nothing and has been removed. The
        # msg_prefix typo ("checkboxbox") is also corrected.
        self.assertNotContains(response, '<input type="checkbox" class="action-select"',
            msg_prefix="Found an unexpected action toggle checkbox in response")

    def test_model_without_action_still_has_jquery(self):
        "Tests that a ModelAdmin without any actions still gets jQuery included in page"
        response = self.client.get(reverse('admin:admin_views_oldsubscriber_changelist'))
        self.assertEqual(response.context["action_form"], None)
        self.assertContains(response, 'jquery.min.js',
            msg_prefix="jQuery missing from admin pages for model with no admin actions")

    def test_action_column_class(self):
        "Tests that the checkbox column class is present in the response"
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertNotEqual(response.context["action_form"], None)
        self.assertContains(response, 'action-checkbox-column')

    def test_multiple_actions_form(self):
        """
        Test that actions come from the form whose submit button was pressed (#10618).
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            # Two different actions selected on the two forms...
            'action': ['external_mail', 'delete_selected'],
            # ...but we clicked "go" on the top form.
            'index': 0
        }
        self.client.post(reverse('admin:admin_views_externalsubscriber_changelist'), action_data)
        # Send mail, don't delete.
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual(mail.outbox[0].subject, 'Greetings from a function action')

    def test_user_message_on_none_selected(self):
        """
        User should see a warning when 'Go' is pressed and no items are selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        msg = """Items must be selected in order to perform actions on them. No items have been changed."""
        self.assertContains(response, msg)
        self.assertEqual(Subscriber.objects.count(), 2)

    def test_user_message_on_no_action(self):
        """
        User should see a warning when 'Go' is pressed and no action is selected.
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1, 2],
            'action': '',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_subscriber_changelist'), action_data)
        msg = """No action selected."""
        self.assertContains(response, msg)
        self.assertEqual(Subscriber.objects.count(), 2)

    def test_selection_counter(self):
        """
        Check if the selection counter is there.
        """
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertContains(response, '0 of 2 selected')

    def test_popup_actions(self):
        """ Actions should not be shown in popups. """
        response = self.client.get(reverse('admin:admin_views_subscriber_changelist'))
        self.assertNotEqual(response.context["action_form"], None)
        response = self.client.get(
            reverse('admin:admin_views_subscriber_changelist') + '?%s' % IS_POPUP_VAR)
        self.assertEqual(response.context["action_form"], None)

    def test_popup_template_response(self):
        """
        Success on popups shall be rendered from template in order to allow
        easy customization.
        """
        response = self.client.post(
            reverse('admin:admin_views_actor_add') + '?%s=1' % IS_POPUP_VAR,
            {'name': 'Troy McClure', 'age': '55', IS_POPUP_VAR: '1'})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.template_name, 'admin/popup_response.html')

    def test_popup_template_escaping(self):
        # Trailing backslashes in the context values must be JS-escaped
        # (\u005C) in the rendered dismiss* calls.
        context = {
            'new_value': 'new_value\\',
            'obj': 'obj\\',
            'value': 'value\\',
        }
        output = render_to_string('admin/popup_response.html', context)
        self.assertIn(
            'opener.dismissAddRelatedObjectPopup(window, "value\\u005C", "obj\\u005C");', output
        )
        context['action'] = 'change'
        output = render_to_string('admin/popup_response.html', context)
        self.assertIn(
            'opener.dismissChangeRelatedObjectPopup(window, '
            '"value\\u005C", "obj\\u005C", "new_value\\u005C");', output
        )
        context['action'] = 'delete'
        output = render_to_string('admin/popup_response.html', context)
        self.assertIn(
            'opener.dismissDeleteRelatedObjectPopup(window, "value\\u005C");', output
        )
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class TestCustomChangeList(TestCase):
    """A custom ChangeList class can be used on a ModelAdmin (#9749)."""

    @classmethod
    def setUpTestData(cls):
        # The superuser every test authenticates as.
        cls.u1 = User.objects.create(
            username='super', id=100, is_superuser=True, is_staff=True, is_active=True,
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            first_name='Super', last_name='User', email='super@example.com',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        login_succeeded = self.client.login(username='super', password='secret')
        self.assertEqual(login_succeeded, True)

    def test_custom_changelist(self):
        """
        Validate that a custom ChangeList class can be used (#9749)
        """
        add_url = reverse('admin:admin_views_gadget_add')
        changelist_url = reverse('admin:admin_views_gadget_changelist')
        # Insert some data via the admin add view.
        response = self.client.post(add_url, {"name": "First Gadget"})
        self.assertEqual(response.status_code, 302)  # redirect somewhere
        # Hit the page once to get messages out of the queue message list.
        self.client.get(changelist_url)
        # Ensure that data is still not visible on the page.
        response = self.client.get(changelist_url)
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(response, 'First Gadget')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class TestInlineNotEditable(TestCase):
    """Smoke test: the parent add view renders despite its inlines."""

    @classmethod
    def setUpTestData(cls):
        # The superuser every test authenticates as.
        cls.u1 = User.objects.create(
            username='super', id=100, is_superuser=True, is_staff=True, is_active=True,
            password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            first_name='Super', last_name='User', email='super@example.com',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
            date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        login_succeeded = self.client.login(username='super', password='secret')
        self.assertEqual(login_succeeded, True)

    def test_GET_parent_add(self):
        """
        GET on the parent add view succeeds (InlineModelAdmin not broken).
        """
        response = self.client.get(reverse('admin:admin_views_parent_add'))
        self.assertEqual(response.status_code, 200)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminCustomQuerysetTest(TestCase):
@classmethod
def setUpTestData(cls):
    """Create the superuser that every test logs in as."""
    cls.u1 = User.objects.create(
        username='super', id=100, is_superuser=True, is_staff=True, is_active=True,
        password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        first_name='Super', last_name='User', email='super@example.com',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10),
        date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
def setUp(self):
    self.client.login(username='super', password='secret')
    # Three EmptyModel rows; remember their primary keys for the tests.
    self.pks = [EmptyModel.objects.create().id for _ in range(3)]
    # Credentials payload reused by login-related requests.
    self.super_login = {
        REDIRECT_FIELD_NAME: reverse('admin:index'),
        'username': 'super',
        'password': 'secret',
    }
def test_changelist_view(self):
    """Only EmptyModel rows with pk > 1 appear on the changelist."""
    response = self.client.get(reverse('admin:admin_views_emptymodel_changelist'))
    for pk in self.pks:
        # Rows with pk <= 1 must be absent from the page.
        check = self.assertContains if pk > 1 else self.assertNotContains
        check(response, 'Primary key = %s' % pk)
def test_changelist_view_count_queries(self):
    """The changelist issues a fixed, small number of SQL queries."""
    # create 2 Person objects
    Person.objects.create(name='person1', gender=1)
    Person.objects.create(name='person2', gender=2)
    changelist_url = reverse('admin:admin_views_person_changelist')
    # 4 queries are expected: 1 for the session, 1 for the user,
    # 1 for the count and 1 for the objects on the page
    with self.assertNumQueries(4):
        response = self.client.get(changelist_url)
        self.assertEqual(response.context['selection_note'], '0 of 2 selected')
        self.assertEqual(response.context['selection_note_all'], 'All 2 selected')
    # Each filtered request below runs one more count(*) query.
    with self.assertNumQueries(5):
        response = self.client.get(changelist_url, {'q': 'not_in_name'})
        self.assertEqual(response.context['selection_note'], '0 of 0 selected')
        self.assertEqual(response.context['selection_note_all'], 'All 0 selected')
    with self.assertNumQueries(5):
        response = self.client.get(changelist_url, {'q': 'person'})
        self.assertEqual(response.context['selection_note'], '0 of 2 selected')
        self.assertEqual(response.context['selection_note_all'], 'All 2 selected')
    with self.assertNumQueries(5):
        response = self.client.get(changelist_url, {'gender__exact': '1'})
        self.assertEqual(response.context['selection_note'], '0 of 1 selected')
        self.assertEqual(response.context['selection_note_all'], '1 selected')
def test_change_view(self):
    """The change view 404s for pk <= 1 and renders (200) otherwise."""
    for pk in self.pks:
        response = self.client.get(reverse('admin:admin_views_emptymodel_change', args=(pk,)))
        self.assertEqual(response.status_code, 200 if pk > 1 else 404)
def test_add_model_modeladmin_defer_qs(self):
    """
    Test for #14529: defer() is used in ModelAdmin.get_queryset().
    The success message shows the non-ugly verbose name both for a model
    with a __unicode__ method and for one without.
    """
    # model has __unicode__ method
    self.assertEqual(CoverLetter.objects.count(), 0)
    # Emulate model instance creation via the admin
    cover_letter_data = {
        "author": "Candidate, Best",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_coverletter_add'),
                                cover_letter_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(CoverLetter.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    self.assertContains(
        response,
        '<li class="success">The cover letter "Candidate, Best" was added successfully.</li>',
        html=True
    )
    # model has no __unicode__ method
    self.assertEqual(ShortMessage.objects.count(), 0)
    # Emulate model instance creation via the admin
    short_message_data = {
        "content": "What's this SMS thing?",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_shortmessage_add'),
                                short_message_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(ShortMessage.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    self.assertContains(
        response,
        '<li class="success">The short message "ShortMessage object" was added successfully.</li>',
        html=True
    )
def test_add_model_modeladmin_only_qs(self):
    """
    Adding through the admin works when ModelAdmin.get_queryset() uses
    only(), both for a model with __unicode__ and one without (#14529).
    """
    # Test for #14529. only() is used in ModelAdmin.get_queryset()
    # model has __unicode__ method
    self.assertEqual(Telegram.objects.count(), 0)
    # Emulate model instance creation via the admin
    post_data = {
        "title": "Urgent telegram",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_telegram_add'),
                                post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(Telegram.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    self.assertContains(
        response,
        '<li class="success">The telegram "Urgent telegram" was added successfully.</li>',
        html=True
    )
    # model has no __unicode__ method
    self.assertEqual(Paper.objects.count(), 0)
    # Emulate model instance creation via the admin
    post_data = {
        "title": "My Modified Paper Title",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_paper_add'),
                                post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(Paper.objects.count(), 1)
    # Message should contain non-ugly model verbose name
    self.assertContains(
        response,
        '<li class="success">The paper "Paper object" was added successfully.</li>',
        html=True
    )
def test_edit_model_modeladmin_defer_qs(self):
    """
    Editing through the admin works when ModelAdmin.get_queryset() uses
    defer(), both for a model with __unicode__ and one without (#14529).
    """
    # Test for #14529. defer() is used in ModelAdmin.get_queryset()
    # model has __unicode__ method
    cl = CoverLetter.objects.create(author="John Doe")
    self.assertEqual(CoverLetter.objects.count(), 1)
    response = self.client.get(reverse('admin:admin_views_coverletter_change', args=(cl.pk,)))
    self.assertEqual(response.status_code, 200)
    # Emulate model instance edit via the admin
    post_data = {
        "author": "John Doe II",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_coverletter_change', args=(cl.pk,)),
                                post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(CoverLetter.objects.count(), 1)
    # Message should contain non-ugly model verbose name. Instance
    # representation is set by model's __unicode__()
    self.assertContains(
        response,
        '<li class="success">The cover letter "John Doe II" was changed successfully.</li>',
        html=True
    )
    # model has no __unicode__ method
    sm = ShortMessage.objects.create(content="This is expensive")
    self.assertEqual(ShortMessage.objects.count(), 1)
    response = self.client.get(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)))
    self.assertEqual(response.status_code, 200)
    # Emulate model instance edit via the admin
    post_data = {
        "content": "Too expensive",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_shortmessage_change', args=(sm.pk,)),
                                post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(ShortMessage.objects.count(), 1)
    # Message should contain non-ugly model verbose name. The ugly(!)
    # instance representation is set by six.text_type()
    self.assertContains(
        response, (
            '<li class="success">The short message '
            '"ShortMessage_Deferred_timestamp object" was '
            'changed successfully.</li>'
        ), html=True
    )
def test_edit_model_modeladmin_only_qs(self):
    """
    Editing through the admin works when ModelAdmin.get_queryset() uses
    only(), both for a model with __unicode__ and one without (#14529).
    """
    # Test for #14529. only() is used in ModelAdmin.get_queryset()
    # model has __unicode__ method
    t = Telegram.objects.create(title="Frist Telegram")
    self.assertEqual(Telegram.objects.count(), 1)
    response = self.client.get(reverse('admin:admin_views_telegram_change', args=(t.pk,)))
    self.assertEqual(response.status_code, 200)
    # Emulate model instance edit via the admin
    post_data = {
        "title": "Telegram without typo",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_telegram_change', args=(t.pk,)),
                                post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(Telegram.objects.count(), 1)
    # Message should contain non-ugly model verbose name. The instance
    # representation is set by model's __unicode__()
    self.assertContains(
        response,
        '<li class="success">The telegram "Telegram without typo" was changed successfully.</li>',
        html=True
    )
    # model has no __unicode__ method
    p = Paper.objects.create(title="My Paper Title")
    self.assertEqual(Paper.objects.count(), 1)
    response = self.client.get(reverse('admin:admin_views_paper_change', args=(p.pk,)))
    self.assertEqual(response.status_code, 200)
    # Emulate model instance edit via the admin
    post_data = {
        "title": "My Modified Paper Title",
        "_save": "Save",
    }
    response = self.client.post(reverse('admin:admin_views_paper_change', args=(p.pk,)),
                                post_data, follow=True)
    self.assertEqual(response.status_code, 200)
    self.assertEqual(Paper.objects.count(), 1)
    # Message should contain non-ugly model verbose name. The ugly(!)
    # instance representation is set by six.text_type()
    self.assertContains(
        response,
        '<li class="success">The paper "Paper_Deferred_author object" was changed successfully.</li>',
        html=True
    )
def test_history_view_custom_qs(self):
    """
    Custom querysets are taken into account for the admin history view.
    Refs #21013.
    """
    self.client.post(reverse('admin:login'), self.super_login)
    for pk in (1, 2):
        FilteredManager.objects.create(pk=pk)
    changelist = self.client.get(reverse('admin:admin_views_filteredmanager_changelist'))
    self.assertContains(changelist, "PK=1")
    self.assertContains(changelist, "PK=2")
    # The history view must be reachable for every object the changelist shows.
    for pk in (1, 2):
        history_url = reverse('admin:admin_views_filteredmanager_history', args=(pk,))
        self.assertEqual(self.client.get(history_url).status_code, 200)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminInlineFileUploadTest(TestCase):
    """Regression tests for file fields posted through inline formsets."""

    @classmethod
    def setUpTestData(cls):
        # Superuser that setUp() logs in as.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')
        # Set up test Picture and Gallery.
        # These must be set up here instead of in fixtures in order to allow Picture
        # to use a NamedTemporaryFile.
        file1 = tempfile.NamedTemporaryFile(suffix=".file1")
        file1.write(b'a' * (2 ** 21))  # 2 MiB of filler data
        filename = file1.name
        # NOTE(review): close() removes the temp file (delete defaults to
        # True); only the path string is stored on the Picture below —
        # confirm no test actually reads the file contents.
        file1.close()
        self.gallery = Gallery(name="Test Gallery")
        self.gallery.save()
        self.picture = Picture(name="Test Picture", image=filename, gallery=self.gallery)
        self.picture.save()

    def test_inline_file_upload_edit_validation_error_post(self):
        """
        Test that inline file uploads correctly display prior data (#10002).
        """
        # Repost the gallery form with its existing picture inline left
        # unchanged and one blank extra form.
        post_data = {
            "name": "Test Gallery",
            "pictures-TOTAL_FORMS": "2",
            "pictures-INITIAL_FORMS": "1",
            "pictures-MAX_NUM_FORMS": "0",
            "pictures-0-id": six.text_type(self.picture.id),
            "pictures-0-gallery": six.text_type(self.gallery.id),
            "pictures-0-name": "Test Picture",
            "pictures-0-image": "",
            "pictures-1-id": "",
            "pictures-1-gallery": str(self.gallery.id),
            "pictures-1-name": "Test Picture 2",
            "pictures-1-image": "",
        }
        response = self.client.post(
            reverse('admin:admin_views_gallery_change', args=(self.gallery.id,)), post_data
        )
        # The widget for the existing file still shows its "Currently" link.
        self.assertContains(response, b"Currently")
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class AdminInlineTests(TestCase):
    """Saving inline formsets from the Collector change view, covering models
    with auto, explicit-auto, character, integer, and inherited primary keys,
    plus editable ordering fields."""

    @classmethod
    def setUpTestData(cls):
        # Superuser that setUp() logs in as.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        # Baseline POST payload: every inline formset on the Collector page
        # with three blank extra forms each. Individual tests fill in the
        # fields relevant to them before posting.
        self.post_data = {
            "name": "Test Name",
            # Widget inline (auto PK)
            "widget_set-TOTAL_FORMS": "3",
            "widget_set-INITIAL_FORMS": "0",
            "widget_set-MAX_NUM_FORMS": "0",
            "widget_set-0-id": "",
            "widget_set-0-owner": "1",
            "widget_set-0-name": "",
            "widget_set-1-id": "",
            "widget_set-1-owner": "1",
            "widget_set-1-name": "",
            "widget_set-2-id": "",
            "widget_set-2-owner": "1",
            "widget_set-2-name": "",
            # DooHickey inline (character PK)
            "doohickey_set-TOTAL_FORMS": "3",
            "doohickey_set-INITIAL_FORMS": "0",
            "doohickey_set-MAX_NUM_FORMS": "0",
            "doohickey_set-0-owner": "1",
            "doohickey_set-0-code": "",
            "doohickey_set-0-name": "",
            "doohickey_set-1-owner": "1",
            "doohickey_set-1-code": "",
            "doohickey_set-1-name": "",
            "doohickey_set-2-owner": "1",
            "doohickey_set-2-code": "",
            "doohickey_set-2-name": "",
            # Grommet inline (explicit AutoField PK)
            "grommet_set-TOTAL_FORMS": "3",
            "grommet_set-INITIAL_FORMS": "0",
            "grommet_set-MAX_NUM_FORMS": "0",
            "grommet_set-0-code": "",
            "grommet_set-0-owner": "1",
            "grommet_set-0-name": "",
            "grommet_set-1-code": "",
            "grommet_set-1-owner": "1",
            "grommet_set-1-name": "",
            "grommet_set-2-code": "",
            "grommet_set-2-owner": "1",
            "grommet_set-2-name": "",
            # Whatsit inline (integer PK)
            "whatsit_set-TOTAL_FORMS": "3",
            "whatsit_set-INITIAL_FORMS": "0",
            "whatsit_set-MAX_NUM_FORMS": "0",
            "whatsit_set-0-owner": "1",
            "whatsit_set-0-index": "",
            "whatsit_set-0-name": "",
            "whatsit_set-1-owner": "1",
            "whatsit_set-1-index": "",
            "whatsit_set-1-name": "",
            "whatsit_set-2-owner": "1",
            "whatsit_set-2-index": "",
            "whatsit_set-2-name": "",
            # FancyDoodad inline (inherited model, parent-link PK)
            "fancydoodad_set-TOTAL_FORMS": "3",
            "fancydoodad_set-INITIAL_FORMS": "0",
            "fancydoodad_set-MAX_NUM_FORMS": "0",
            "fancydoodad_set-0-doodad_ptr": "",
            "fancydoodad_set-0-owner": "1",
            "fancydoodad_set-0-name": "",
            "fancydoodad_set-0-expensive": "on",
            "fancydoodad_set-1-doodad_ptr": "",
            "fancydoodad_set-1-owner": "1",
            "fancydoodad_set-1-name": "",
            "fancydoodad_set-1-expensive": "on",
            "fancydoodad_set-2-doodad_ptr": "",
            "fancydoodad_set-2-owner": "1",
            "fancydoodad_set-2-name": "",
            "fancydoodad_set-2-expensive": "on",
            # Category inline (editable ordering field)
            "category_set-TOTAL_FORMS": "3",
            "category_set-INITIAL_FORMS": "0",
            "category_set-MAX_NUM_FORMS": "0",
            "category_set-0-order": "",
            "category_set-0-id": "",
            "category_set-0-collector": "1",
            "category_set-1-order": "",
            "category_set-1-id": "",
            "category_set-1-collector": "1",
            "category_set-2-order": "",
            "category_set-2-id": "",
            "category_set-2-collector": "1",
        }
        result = self.client.login(username='super', password='secret')
        self.assertEqual(result, True)
        # Parent object every inline is attached to (pk=1 matches the
        # hard-coded "1" owner/collector values above).
        self.collector = Collector(pk=1, name='John Fowles')
        self.collector.save()

    def test_simple_inline(self):
        "A simple model can be saved as inlines"
        # First add a new inline
        self.post_data['widget_set-0-name'] = "Widget 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
        widget_id = Widget.objects.all()[0].id
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="widget_set-0-id"')
        # Now resave that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1")
        # Now modify that inline
        self.post_data['widget_set-INITIAL_FORMS'] = "1"
        self.post_data['widget_set-0-id'] = str(widget_id)
        self.post_data['widget_set-0-name'] = "Widget 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Widget.objects.count(), 1)
        self.assertEqual(Widget.objects.all()[0].name, "Widget 1 Updated")

    def test_explicit_autofield_inline(self):
        "A model with an explicit autofield primary key can be saved as inlines. Regression for #8093"
        # First add a new inline
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="grommet_set-0-code"')
        # Now resave that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1")
        # Now modify that inline
        self.post_data['grommet_set-INITIAL_FORMS'] = "1"
        self.post_data['grommet_set-0-code'] = str(Grommet.objects.all()[0].code)
        self.post_data['grommet_set-0-name'] = "Grommet 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Grommet.objects.count(), 1)
        self.assertEqual(Grommet.objects.all()[0].name, "Grommet 1 Updated")

    def test_char_pk_inline(self):
        "A model with a character PK can be saved as inlines. Regression for #10992"
        # First add a new inline
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="doohickey_set-0-code"')
        # Now resave that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1")
        # Now modify that inline
        self.post_data['doohickey_set-INITIAL_FORMS'] = "1"
        self.post_data['doohickey_set-0-code'] = "DH1"
        self.post_data['doohickey_set-0-name'] = "Doohickey 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(DooHickey.objects.count(), 1)
        self.assertEqual(DooHickey.objects.all()[0].name, "Doohickey 1 Updated")

    def test_integer_pk_inline(self):
        "A model with an integer PK can be saved as inlines. Regression for #10992"
        # First add a new inline
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="whatsit_set-0-index"')
        # Now resave that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1")
        # Now modify that inline
        self.post_data['whatsit_set-INITIAL_FORMS'] = "1"
        self.post_data['whatsit_set-0-index'] = "42"
        self.post_data['whatsit_set-0-name'] = "Whatsit 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Whatsit.objects.count(), 1)
        self.assertEqual(Whatsit.objects.all()[0].name, "Whatsit 1 Updated")

    def test_inherited_inline(self):
        "An inherited model can be saved as inlines. Regression for #11042"
        # First add a new inline
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
        doodad_pk = FancyDoodad.objects.all()[0].pk
        # Check that the PK link exists on the rendered form
        response = self.client.get(collector_url)
        self.assertContains(response, 'name="fancydoodad_set-0-doodad_ptr"')
        # Now resave that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1")
        # Now modify that inline
        self.post_data['fancydoodad_set-INITIAL_FORMS'] = "1"
        self.post_data['fancydoodad_set-0-doodad_ptr'] = str(doodad_pk)
        self.post_data['fancydoodad_set-0-name'] = "Fancy Doodad 1 Updated"
        response = self.client.post(collector_url, self.post_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(FancyDoodad.objects.count(), 1)
        self.assertEqual(FancyDoodad.objects.all()[0].name, "Fancy Doodad 1 Updated")

    def test_ordered_inline(self):
        """Check that an inline with an editable ordering fields is
        updated correctly. Regression for #10922"""
        # Create some objects with an initial ordering
        Category.objects.create(id=1, order=1, collector=self.collector)
        Category.objects.create(id=2, order=2, collector=self.collector)
        Category.objects.create(id=3, order=0, collector=self.collector)
        Category.objects.create(id=4, order=0, collector=self.collector)
        # NB: The order values must be changed so that the items are reordered.
        self.post_data.update({
            "name": "Frederick Clegg",
            "category_set-TOTAL_FORMS": "7",
            "category_set-INITIAL_FORMS": "4",
            "category_set-MAX_NUM_FORMS": "0",
            "category_set-0-order": "14",
            "category_set-0-id": "1",
            "category_set-0-collector": "1",
            "category_set-1-order": "13",
            "category_set-1-id": "2",
            "category_set-1-collector": "1",
            "category_set-2-order": "1",
            "category_set-2-id": "3",
            "category_set-2-collector": "1",
            "category_set-3-order": "0",
            "category_set-3-id": "4",
            "category_set-3-collector": "1",
            "category_set-4-order": "",
            "category_set-4-id": "",
            "category_set-4-collector": "1",
            "category_set-5-order": "",
            "category_set-5-id": "",
            "category_set-5-collector": "1",
            "category_set-6-order": "",
            "category_set-6-id": "",
            "category_set-6-collector": "1",
        })
        collector_url = reverse('admin:admin_views_collector_change', args=(self.collector.pk,))
        response = self.client.post(collector_url, self.post_data)
        # Successful post will redirect
        self.assertEqual(response.status_code, 302)
        # Check that the order values have been applied to the right objects
        self.assertEqual(self.collector.category_set.count(), 4)
        self.assertEqual(Category.objects.get(id=1).order, 14)
        self.assertEqual(Category.objects.get(id=2).order, 13)
        self.assertEqual(Category.objects.get(id=3).order, 1)
        self.assertEqual(Category.objects.get(id=4).order, 0)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class NeverCacheTests(TestCase):
    """Cache-control checks for admin views: never-cached pages report a
    max-age of 0, while the remaining views send no max-age at all."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = Section.objects.create(name='Test section')

    def setUp(self):
        self.client.login(username='super', password='secret')

    def _get_max_age(self, url):
        # Fetch ``url`` with the test client and return the response's max-age.
        return get_max_age(self.client.get(url))

    def test_admin_index(self):
        "Check the never-cache status of the main index"
        self.assertEqual(self._get_max_age(reverse('admin:index')), 0)

    def test_app_index(self):
        "Check the never-cache status of an application index"
        self.assertEqual(self._get_max_age(reverse('admin:app_list', args=('admin_views',))), 0)

    def test_model_index(self):
        "Check the never-cache status of a model index"
        self.assertEqual(self._get_max_age(reverse('admin:admin_views_fabric_changelist')), 0)

    def test_model_add(self):
        "Check the never-cache status of a model add page"
        self.assertEqual(self._get_max_age(reverse('admin:admin_views_fabric_add')), 0)

    def test_model_view(self):
        "Check the never-cache status of a model edit page"
        self.assertEqual(self._get_max_age(reverse('admin:admin_views_section_change', args=(self.s1.pk,))), 0)

    def test_model_history(self):
        "Check the never-cache status of a model history page"
        self.assertEqual(self._get_max_age(reverse('admin:admin_views_section_history', args=(self.s1.pk,))), 0)

    def test_model_delete(self):
        "Check the never-cache status of a model delete page"
        self.assertEqual(self._get_max_age(reverse('admin:admin_views_section_delete', args=(self.s1.pk,))), 0)

    def test_login(self):
        "Check the never-cache status of login views"
        self.client.logout()
        self.assertEqual(self._get_max_age(reverse('admin:index')), 0)

    def test_logout(self):
        "Check the never-cache status of logout view"
        self.assertEqual(self._get_max_age(reverse('admin:logout')), 0)

    def test_password_change(self):
        "Check the never-cache status of the password change view"
        self.client.logout()
        self.assertEqual(self._get_max_age(reverse('admin:password_change')), None)

    def test_password_change_done(self):
        "Check the never-cache status of the password change done view"
        self.assertEqual(self._get_max_age(reverse('admin:password_change_done')), None)

    def test_JS_i18n(self):
        "Check the never-cache status of the JavaScript i18n view"
        self.assertEqual(self._get_max_age(reverse('admin:jsi18n')), None)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class PrePopulatedTest(TestCase):
    """Checks for the prepopulated_fields JavaScript rendered on admin
    add/change pages."""

    @classmethod
    def setUpTestData(cls):
        # Superuser that setUp() logs in as.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        # Existing post used by the change-page test.
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_prepopulated_on(self):
        # The add page wires the slug field(s) to their source fields.
        response = self.client.get(reverse('admin:admin_views_prepopulatedpost_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "id: '#id_slug',")
        self.assertContains(response, "field['dependency_ids'].push('#id_title');")
        self.assertContains(response, "id: '#id_prepopulatedsubpost_set-0-subslug',")

    def test_prepopulated_off(self):
        # The change page for this existing post emits no prepopulation JS.
        response = self.client.get(reverse('admin:admin_views_prepopulatedpost_change', args=(self.p1.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "A Long Title")
        self.assertNotContains(response, "id: '#id_slug'")
        self.assertNotContains(response, "field['dependency_ids'].push('#id_title');")
        self.assertNotContains(response, "id: '#id_prepopulatedsubpost_set-0-subslug',")

    @override_settings(USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
    def test_prepopulated_maxlength_localized(self):
        """
        Regression test for #15938: if USE_THOUSAND_SEPARATOR is set, make sure
        that maxLength (in the JavaScript) is rendered without separators.
        """
        response = self.client.get(reverse('admin:admin_views_prepopulatedpostlargeslug_add'))
        self.assertContains(response, "maxLength: 1000")  # instead of 1,000
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class SeleniumAdminViewsFirefoxTests(AdminSeleniumWebDriverTestCase):
available_apps = ['admin_views'] + AdminSeleniumWebDriverTestCase.available_apps
webdriver_class = 'selenium.webdriver.firefox.webdriver.WebDriver'
def setUp(self):
    # Superuser for admin_login() plus one existing prepopulated post
    # reused by the change-page scenarios.
    self.u1 = User.objects.create(
        id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
        last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
        first_name='Super', last_name='User', email='super@example.com',
        is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
    )
    self.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
def test_prepopulated_fields(self):
    """
    Ensure that the JavaScript-automated prepopulated fields work with the
    main form and with stacked and tabular inlines.
    Refs #13068, #9264, #9983, #9784.
    """
    self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
    self.selenium.get('%s%s' % (self.live_server_url,
                                reverse('admin:admin_views_mainprepopulated_add')))
    # Main form ----------------------------------------------------------
    self.selenium.find_element_by_css_selector('#id_pubdate').send_keys('2012-02-18')
    self.get_select_option('#id_status', 'option two').click()
    self.selenium.find_element_by_css_selector('#id_name').send_keys(' this is the mAin nÀMë and it\'s awεšomeııı')
    slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
    slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
    slug3 = self.selenium.find_element_by_css_selector('#id_slug3').get_attribute('value')
    self.assertEqual(slug1, 'main-name-and-its-awesomeiii-2012-02-18')
    self.assertEqual(slug2, 'option-two-main-name-and-its-awesomeiii')
    self.assertEqual(slug3, 'main-n\xe0m\xeb-and-its-aw\u03b5\u0161ome\u0131\u0131\u0131')
    # Stacked inlines ----------------------------------------------------
    # Initial inline
    self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-0-pubdate').send_keys('2011-12-17')
    self.get_select_option('#id_relatedprepopulated_set-0-status', 'option one').click()
    self.selenium.find_element_by_css_selector(
        '#id_relatedprepopulated_set-0-name'
    ).send_keys(' here is a sŤāÇkeð inline !  ')
    slug1 = self.selenium.find_element_by_css_selector(
        '#id_relatedprepopulated_set-0-slug1'
    ).get_attribute('value')
    slug2 = self.selenium.find_element_by_css_selector(
        '#id_relatedprepopulated_set-0-slug2'
    ).get_attribute('value')
    self.assertEqual(slug1, 'here-stacked-inline-2011-12-17')
    self.assertEqual(slug2, 'option-one-here-stacked-inline')
    # Add an inline
    self.selenium.find_elements_by_link_text('Add another Related prepopulated')[0].click()
    self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-pubdate').send_keys('1999-01-25')
    self.get_select_option('#id_relatedprepopulated_set-1-status', 'option two').click()
    self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-1-name').send_keys(
        ' now you haVe anöther sŤāÇkeð  inline with a very ... '
        'loooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooog text... '
    )
    slug1 = self.selenium.find_element_by_css_selector(
        '#id_relatedprepopulated_set-1-slug1'
    ).get_attribute('value')
    slug2 = self.selenium.find_element_by_css_selector(
        '#id_relatedprepopulated_set-1-slug2'
    ).get_attribute('value')
    # 50 characters maximum for slug1 field
    self.assertEqual(slug1, 'now-you-have-another-stacked-inline-very-loooooooo')
    # 60 characters maximum for slug2 field
    self.assertEqual(slug2, 'option-two-now-you-have-another-stacked-inline-very-looooooo')
    # Tabular inlines ----------------------------------------------------
    # Initial inline
    self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-0-pubdate').send_keys('1234-12-07')
    self.get_select_option('#id_relatedprepopulated_set-2-0-status', 'option two').click()
    self.selenium.find_element_by_css_selector(
        '#id_relatedprepopulated_set-2-0-name'
    ).send_keys('And now, with a tÃbűlaŘ inline !!!')
    slug1 = self.selenium.find_element_by_css_selector(
        '#id_relatedprepopulated_set-2-0-slug1'
    ).get_attribute('value')
    slug2 = self.selenium.find_element_by_css_selector(
        '#id_relatedprepopulated_set-2-0-slug2'
    ).get_attribute('value')
    self.assertEqual(slug1, 'and-now-tabular-inline-1234-12-07')
    self.assertEqual(slug2, 'option-two-and-now-tabular-inline')
    # Add an inline
    self.selenium.find_elements_by_link_text('Add another Related prepopulated')[1].click()
    self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-pubdate').send_keys('1981-08-22')
    self.get_select_option('#id_relatedprepopulated_set-2-1-status', 'option one').click()
    self.selenium.find_element_by_css_selector('#id_relatedprepopulated_set-2-1-name').send_keys(
        'a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters'
    )
    slug1 = self.selenium.find_element_by_css_selector(
        '#id_relatedprepopulated_set-2-1-slug1'
    ).get_attribute('value')
    slug2 = self.selenium.find_element_by_css_selector(
        '#id_relatedprepopulated_set-2-1-slug2'
    ).get_attribute('value')
    self.assertEqual(slug1, 'tabular-inline-ignored-characters-1981-08-22')
    self.assertEqual(slug2, 'option-one-tabular-inline-ignored-characters')
    # Save and check that everything is properly stored in the database
    self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
    self.wait_page_loaded()
    self.assertEqual(MainPrepopulated.objects.all().count(), 1)
    # Each .get() below raises if no row matches the expected slugs.
    MainPrepopulated.objects.get(
        name=' this is the mAin nÀMë and it\'s awεšomeııı',
        pubdate='2012-02-18',
        status='option two',
        slug1='main-name-and-its-awesomeiii-2012-02-18',
        slug2='option-two-main-name-and-its-awesomeiii',
    )
    self.assertEqual(RelatedPrepopulated.objects.all().count(), 4)
    RelatedPrepopulated.objects.get(
        name=' here is a sŤāÇkeð inline !  ',
        pubdate='2011-12-17',
        status='option one',
        slug1='here-stacked-inline-2011-12-17',
        slug2='option-one-here-stacked-inline',
    )
    RelatedPrepopulated.objects.get(
        # 75 characters in name field
        name=' now you haVe anöther sŤāÇkeð  inline with a very ... loooooooooooooooooo',
        pubdate='1999-01-25',
        status='option two',
        slug1='now-you-have-another-stacked-inline-very-loooooooo',
        slug2='option-two-now-you-have-another-stacked-inline-very-looooooo',
    )
    RelatedPrepopulated.objects.get(
        name='And now, with a tÃbűlaŘ inline !!!',
        pubdate='1234-12-07',
        status='option two',
        slug1='and-now-tabular-inline-1234-12-07',
        slug2='option-two-and-now-tabular-inline',
    )
    RelatedPrepopulated.objects.get(
        name='a tÃbűlaŘ inline with ignored ;"&*^\%$#@-/`~ characters',
        pubdate='1981-08-22',
        status='option one',
        slug1='tabular-inline-ignored-characters-1981-08-22',
        slug2='option-one-tabular-inline-ignored-characters',
    )
def test_populate_existing_object(self):
    """
    Ensure that the prepopulation works for existing objects too, as long
    as the original field is empty.
    Refs #19082.
    """
    # Slugs are empty to start with.
    item = MainPrepopulated.objects.create(
        name=' this is the mAin nÀMë',
        pubdate='2012-02-18',
        status='option two',
        slug1='',
        slug2='',
    )
    self.admin_login(username='super',
                     password='secret',
                     login_url=reverse('admin:index'))
    object_url = '%s%s' % (
        self.live_server_url,
        reverse('admin:admin_views_mainprepopulated_change', args=(item.id,)))
    self.selenium.get(object_url)
    self.selenium.find_element_by_css_selector('#id_name').send_keys(' the best')
    # The slugs got prepopulated since they were originally empty
    slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
    slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
    self.assertEqual(slug1, 'main-name-best-2012-02-18')
    self.assertEqual(slug2, 'option-two-main-name-best')
    # Save the object
    self.selenium.find_element_by_xpath('//input[@value="Save"]').click()
    self.wait_page_loaded()
    self.selenium.get(object_url)
    self.selenium.find_element_by_css_selector('#id_name').send_keys(' hello')
    # The slugs didn't change this time since they were no longer empty
    # after the first save.
    slug1 = self.selenium.find_element_by_css_selector('#id_slug1').get_attribute('value')
    slug2 = self.selenium.find_element_by_css_selector('#id_slug2').get_attribute('value')
    self.assertEqual(slug1, 'main-name-best-2012-02-18')
    self.assertEqual(slug2, 'option-two-main-name-best')
def test_collapsible_fieldset(self):
"""
Test that the 'collapse' class in fieldsets definition allows to
show/hide the appropriate field section.
"""
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_views_article_add')))
self.assertFalse(self.selenium.find_element_by_id('id_title').is_displayed())
self.selenium.find_elements_by_link_text('Show')[0].click()
self.assertTrue(self.selenium.find_element_by_id('id_title').is_displayed())
self.assertEqual(
self.selenium.find_element_by_id('fieldsetcollapser0').text,
"Hide"
)
def test_first_field_focus(self):
"""JavaScript-assisted auto-focus on first usable form field."""
# First form field has a single widget
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_views_picture_add')))
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element_by_id('id_name')
)
# First form field has a MultiWidget
self.selenium.get('%s%s' % (self.live_server_url,
reverse('admin:admin_views_reservation_add')))
self.assertEqual(
self.selenium.switch_to.active_element,
self.selenium.find_element_by_id('id_start_date_0')
)
def test_cancel_delete_confirmation(self):
"Cancelling the deletion of an object takes the user back one page."
pizza = Pizza.objects.create(name="Double Cheese")
url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
full_url = '%s%s' % (self.live_server_url, url)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(full_url)
self.selenium.find_element_by_class_name('deletelink').click()
# Wait until we're on the delete page.
self.wait_for('.cancel-link')
self.selenium.find_element_by_class_name('cancel-link').click()
# Wait until we're back on the change page.
self.wait_for_text('#content h1', 'Change pizza')
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
def test_cancel_delete_related_confirmation(self):
"""
Cancelling the deletion of an object with relations takes the user back
one page.
"""
pizza = Pizza.objects.create(name="Double Cheese")
topping1 = Topping.objects.create(name="Cheddar")
topping2 = Topping.objects.create(name="Mozzarella")
pizza.toppings.add(topping1, topping2)
url = reverse('admin:admin_views_pizza_change', args=(pizza.id,))
full_url = '%s%s' % (self.live_server_url, url)
self.admin_login(username='super', password='secret', login_url=reverse('admin:index'))
self.selenium.get(full_url)
self.selenium.find_element_by_class_name('deletelink').click()
# Wait until we're on the delete page.
self.wait_for('.cancel-link')
self.selenium.find_element_by_class_name('cancel-link').click()
# Wait until we're back on the change page.
self.wait_for_text('#content h1', 'Change pizza')
self.assertEqual(self.selenium.current_url, full_url)
self.assertEqual(Pizza.objects.count(), 1)
self.assertEqual(Topping.objects.count(), 2)
class SeleniumAdminViewsChromeTests(SeleniumAdminViewsFirefoxTests):
    """Re-run the full Firefox admin Selenium suite against Chrome."""
    webdriver_class = 'selenium.webdriver.chrome.webdriver.WebDriver'
class SeleniumAdminViewsIETests(SeleniumAdminViewsFirefoxTests):
    """Re-run the full Firefox admin Selenium suite against Internet Explorer."""
    webdriver_class = 'selenium.webdriver.ie.webdriver.WebDriver'
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class ReadonlyTest(TestCase):
    """Rendering and submission behavior of readonly fields in the admin."""

    @classmethod
    def setUpTestData(cls):
        # Single superuser fixture shared by all tests in this class.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    @ignore_warnings(category=RemovedInDjango20Warning)  # for allow_tags deprecation
    def test_readonly_get(self):
        """Readonly fields render as text, not as form inputs."""
        response = self.client.get(reverse('admin:admin_views_post_add'))
        self.assertEqual(response.status_code, 200)
        # The readonly 'posted' field must not render an input widget.
        self.assertNotContains(response, 'name="posted"')
        # 3 fields + 2 submit buttons + 5 inline management form fields, + 2
        # hidden fields for inlines + 1 field for the inline + 2 empty form
        self.assertContains(response, "<input", count=15)
        self.assertContains(response, formats.localize(datetime.date.today()))
        self.assertContains(response,
                            "<label>Awesomeness level:</label>")
        self.assertContains(response, "Very awesome.")
        self.assertContains(response, "Unknown coolness.")
        self.assertContains(response, "foo")
        # Checks that multiline text in a readonly field gets <br /> tags
        self.assertContains(response, "Multiline<br />test<br />string")
        self.assertContains(response, "<p>Multiline<br />html<br />content</p>", html=True)
        self.assertContains(response, "InlineMultiline<br />test<br />string")
        # Remove only this last line when the deprecation completes.
        self.assertContains(response, "<p>Multiline<br />html<br />content<br />with allow tags</p>", html=True)
        self.assertContains(response,
                            formats.localize(datetime.date.today() - datetime.timedelta(days=7)))
        self.assertContains(response, '<div class="form-row field-coolness">')
        self.assertContains(response, '<div class="form-row field-awesomeness_level">')
        self.assertContains(response, '<div class="form-row field-posted">')
        self.assertContains(response, '<div class="form-row field-value">')
        self.assertContains(response, '<div class="form-row">')
        self.assertContains(response, '<p class="help">', 3)
        self.assertContains(
            response,
            '<p class="help">Some help text for the title (with unicode ŠĐĆŽćžšđ)</p>',
            html=True
        )
        self.assertContains(
            response,
            '<p class="help">Some help text for the content (with unicode ŠĐĆŽćžšđ)</p>',
            html=True
        )
        self.assertContains(
            response,
            '<p class="help">Some help text for the date (with unicode ŠĐĆŽćžšđ)</p>',
            html=True
        )
        p = Post.objects.create(title="I worked on readonly_fields", content="Its good stuff")
        response = self.client.get(reverse('admin:admin_views_post_change', args=(p.pk,)))
        self.assertContains(response, "%d amount of cool" % p.pk)

    def test_readonly_post(self):
        """POSTed values for readonly fields are ignored on save."""
        data = {
            "title": "Django Got Readonly Fields",
            "content": "This is an incredible development.",
            "link_set-TOTAL_FORMS": "1",
            "link_set-INITIAL_FORMS": "0",
            "link_set-MAX_NUM_FORMS": "0",
        }
        response = self.client.post(reverse('admin:admin_views_post_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 1)
        p = Post.objects.get()
        self.assertEqual(p.posted, datetime.date.today())
        # Attempting to override the readonly field through POST data must
        # not change the stored value.
        data["posted"] = "10-8-1990"  # some date that's not today
        response = self.client.post(reverse('admin:admin_views_post_add'), data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Post.objects.count(), 2)
        p = Post.objects.order_by('-id')[0]
        self.assertEqual(p.posted, datetime.date.today())

    def test_readonly_manytomany(self):
        "Regression test for #13004"
        response = self.client.get(reverse('admin:admin_views_pizza_add'))
        self.assertEqual(response.status_code, 200)

    def test_user_password_change_limited_queryset(self):
        """A password-change view 404s for users outside the admin's queryset."""
        su = User.objects.filter(is_superuser=True)[0]
        response = self.client.get(reverse('admin2:auth_user_password_change', args=(su.pk,)))
        self.assertEqual(response.status_code, 404)

    def test_change_form_renders_correct_null_choice_value(self):
        """
        Regression test for #17911.
        """
        choice = Choice.objects.create(choice=None)
        response = self.client.get(reverse('admin:admin_views_choice_change', args=(choice.pk,)))
        self.assertContains(response, '<p>No opinion</p>', html=True)
        self.assertNotContains(response, '<p>(None)</p>')

    def test_readonly_backwards_ref(self):
        """
        Regression test for #16433 - backwards references for related objects
        broke if the related field is read-only due to the help_text attribute
        """
        topping = Topping.objects.create(name='Salami')
        pizza = Pizza.objects.create(name='Americano')
        pizza.toppings.add(topping)
        response = self.client.get(reverse('admin:admin_views_topping_add'))
        self.assertEqual(response.status_code, 200)

    @ignore_warnings(category=RemovedInDjango20Warning)  # for allow_tags deprecation
    def test_readonly_field_overrides(self):
        """
        Regression test for #22087 - ModelForm Meta overrides are ignored by
        AdminReadonlyField
        """
        p = FieldOverridePost.objects.create(title="Test Post", content="Test Content")
        response = self.client.get(reverse('admin:admin_views_fieldoverridepost_change', args=(p.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<p class="help">Overridden help text for the date</p>')
        self.assertContains(response, '<label for="id_public">Overridden public label:</label>', html=True)
        self.assertNotContains(response, "Some help text for the date (with unicode ŠĐĆŽćžšđ)")

    def test_correct_autoescaping(self):
        """
        Make sure that non-field readonly elements are properly autoescaped (#24461)
        """
        section = Section.objects.create(name='<a>evil</a>')
        response = self.client.get(reverse('admin:admin_views_section_change', args=(section.pk,)))
        # The raw markup must never reach the page verbatim...
        self.assertNotContains(response, "<a>evil</a>", status_code=200)
        # ...only its HTML-escaped rendering may appear. (Previously this
        # asserted assertContains with the same unescaped literal as the
        # assertNotContains above, which could never both pass.)
        self.assertContains(response, "&lt;a&gt;evil&lt;/a&gt;", status_code=200)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class LimitChoicesToInAdminTest(TestCase):
    """Callable limit_choices_to is honoured when building admin form choices."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_limit_choices_to_as_callable(self):
        """Test for ticket 2445 changes to admin."""
        one_day = datetime.timedelta(days=1)
        # Recently active: should be offered as a choice.
        allowed = Character.objects.create(
            username='threepwood',
            last_action=datetime.datetime.today() + one_day,
        )
        # Last active too long ago: filtered out by limit_choices_to.
        excluded = Character.objects.create(
            username='marley',
            last_action=datetime.datetime.today() - one_day,
        )
        response = self.client.get(reverse('admin:admin_views_stumpjoke_add'))
        # The allowed option should appear twice; the limited option should not appear.
        self.assertContains(response, allowed.username, count=2)
        self.assertNotContains(response, excluded.username)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class RawIdFieldsTest(TestCase):
    """raw_id_fields lookup popups honour the fields' limit_choices_to filters."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def _extract_popup_url(self, response, lookup_id):
        """Return the (unescaped, absolute) lookup popup URL for *lookup_id*.

        The href is HTML-escaped in the rendered page, so '&amp;' must be
        unescaped back to '&' before the URL can be requested. (The previous
        inline code called .replace("&", "&"), a no-op, in all three tests.)
        """
        pattern = br'<a href="([^"]*)"[^>]* id="' + lookup_id.encode() + br'"'
        m = re.search(pattern, response.content)
        self.assertTrue(m)  # Got a match
        popup_url = m.groups()[0].decode().replace("&amp;", "&")
        # Handle relative links
        return urljoin(response.request['PATH_INFO'], popup_url)

    def test_limit_choices_to(self):
        """Regression test for 14880"""
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True,
                                   leader=actor,
                                   country="England")
        Inquisition.objects.create(expected=False,
                                   leader=actor,
                                   country="Spain")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        popup_url = self._extract_popup_url(response, "lookup_id_inquisition")
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step also tests integers, strings and booleans in the
        # lookup query string; in model we define inquisition field to have a
        # limit_choices_to option that includes a filter on a string field
        # (inquisition__actor__name), a filter on an integer field
        # (inquisition__actor__age), and a filter on a boolean field
        # (inquisition__expected).
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Spain")
        self.assertNotContains(response2, "England")

    def test_limit_choices_to_isnull_false(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        popup_url = self._extract_popup_url(response, "lookup_id_defendant0")
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=0 gets parsed correctly from the
        # lookup query string; in model we define defendant0 field to have a
        # limit_choices_to option that includes "actor__title__isnull=False".
        response2 = self.client.get(popup_url)
        self.assertContains(response2, "Kilbraken")
        self.assertNotContains(response2, "Palin")

    def test_limit_choices_to_isnull_true(self):
        """Regression test for 20182"""
        Actor.objects.create(name="Palin", age=27)
        Actor.objects.create(name="Kilbraken", age=50, title="Judge")
        response = self.client.get(reverse('admin:admin_views_sketch_add'))
        popup_url = self._extract_popup_url(response, "lookup_id_defendant1")
        # Get the popup and verify the correct objects show up in the resulting
        # page. This step tests field__isnull=1 gets parsed correctly from the
        # lookup query string; in model we define defendant1 field to have a
        # limit_choices_to option that includes "actor__title__isnull=True".
        response2 = self.client.get(popup_url)
        self.assertNotContains(response2, "Kilbraken")
        self.assertContains(response2, "Palin")

    def test_list_display_method_same_name_as_reverse_accessor(self):
        """
        Should be able to use a ModelAdmin method in list_display that has the
        same name as a reverse model field ("sketch" in this case).
        """
        actor = Actor.objects.create(name="Palin", age=27)
        Inquisition.objects.create(expected=True, leader=actor, country="England")
        response = self.client.get(reverse('admin:admin_views_inquisition_changelist'))
        self.assertContains(response, 'list-display-sketch')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class UserAdminTest(TestCase):
    """
    Tests user CRUD functionality.
    """
    @classmethod
    def setUpTestData(cls):
        # Six users spanning the superuser / staff / non-staff combinations,
        # plus content fixtures used by other assertions in this class.
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u2 = User.objects.create(
            id=101, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='adduser',
            first_name='Add', last_name='User', email='auser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u3 = User.objects.create(
            id=102, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='changeuser',
            first_name='Change', last_name='User', email='cuser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u4 = User.objects.create(
            id=103, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='deleteuser',
            first_name='Delete', last_name='User', email='duser@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u5 = User.objects.create(
            id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
            first_name='Joe', last_name='Public', email='joepublic@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.u6 = User.objects.create(
            id=106, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='nostaff',
            first_name='No', last_name='Staff', email='nostaff@example.com',
            is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
        cls.per1 = Person.objects.create(name='John Mauchly', gender=1, alive=True)
        cls.per2 = Person.objects.create(name='Grace Hopper', gender=1, alive=False)
        cls.per3 = Person.objects.create(name='Guido van Rossum', gender=1, alive=True)
    def setUp(self):
        self.client.login(username='super', password='secret')
    def test_save_button(self):
        # Plain save redirects to the new user's change page.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
        })
        new_user = User.objects.get(username='newuser')
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())
    def test_save_continue_editing_button(self):
        # '_continue' also lands on the new user's change page.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_continue': '1',
        })
        new_user = User.objects.get(username='newuser')
        self.assertRedirects(response, reverse('admin:auth_user_change', args=(new_user.pk,)))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())
    def test_password_mismatch(self):
        # Mismatched passwords re-render the form; the error is attached to
        # password2, not to a field named 'password'.
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'mismatch',
        })
        self.assertEqual(response.status_code, 200)
        adminform = response.context['adminform']
        self.assertNotIn('password', adminform.form.errors)
        self.assertEqual(adminform.form.errors['password2'],
                         ["The two password fields didn't match."])
    def test_user_fk_add_popup(self):
        """User addition through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, reverse('admin:auth_user_add'))
        self.assertContains(response, 'class="related-widget-wrapper-link add-related" id="add_id_owner"')
        response = self.client.get(reverse('admin:auth_user_add') + '?_popup=1')
        self.assertEqual(response.status_code, 200)
        # Popups hide the extra submit buttons.
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(reverse('admin:auth_user_add') + '?_popup=1', data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissAddRelatedObjectPopup')
    def test_user_fk_change_popup(self):
        """User change through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, reverse('admin:auth_user_change', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_change', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        # Popups hide the extra submit buttons.
        self.assertNotContains(response, 'name="_continue"')
        self.assertNotContains(response, 'name="_addanother"')
        data = {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            'last_login_0': '2007-05-30',
            'last_login_1': '13:20:10',
            'date_joined_0': '2007-05-30',
            'date_joined_1': '13:20:10',
            '_popup': '1',
            '_save': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissChangeRelatedObjectPopup')
    def test_user_fk_delete_popup(self):
        """User deletion through a FK popup should return the appropriate JavaScript response."""
        response = self.client.get(reverse('admin:admin_views_album_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, reverse('admin:auth_user_delete', args=('__fk__',)))
        self.assertContains(response, 'class="related-widget-wrapper-link change-related" id="change_id_owner"')
        user = User.objects.get(username='changeuser')
        url = reverse('admin:auth_user_delete', args=(user.pk,)) + '?_popup=1'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = {
            'post': 'yes',
            '_popup': '1',
        }
        response = self.client.post(url, data, follow=True)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, 'dismissDeleteRelatedObjectPopup')
    def test_save_add_another_button(self):
        # '_addanother' redirects back to the add form.
        user_count = User.objects.count()
        response = self.client.post(reverse('admin:auth_user_add'), {
            'username': 'newuser',
            'password1': 'newpassword',
            'password2': 'newpassword',
            '_addanother': '1',
        })
        new_user = User.objects.order_by('-id')[0]
        self.assertRedirects(response, reverse('admin:auth_user_add'))
        self.assertEqual(User.objects.count(), user_count + 1)
        self.assertTrue(new_user.has_usable_password())
    def test_user_permission_performance(self):
        u = User.objects.all()[0]
        # Don't depend on a warm cache, see #17377.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(10):
            response = self.client.get(reverse('admin:auth_user_change', args=(u.pk,)))
            self.assertEqual(response.status_code, 200)
    def test_form_url_present_in_context(self):
        # 'admin3' configures a custom form_url ('pony') for this view.
        u = User.objects.all()[0]
        response = self.client.get(reverse('admin3:auth_user_password_change', args=(u.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['form_url'], 'pony')
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class GroupAdminTest(TestCase):
    """
    Tests group CRUD functionality.
    """
    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_save_button(self):
        """Saving a new group redirects to the changelist and persists it."""
        group_count = Group.objects.count()
        response = self.client.post(reverse('admin:auth_group_add'), {
            'name': 'newgroup',
        })
        # The original fetched the newest group but discarded the result
        # (a dangling expression statement); assert on it so the lookup
        # actually verifies the group that was created.
        new_group = Group.objects.order_by('-id')[0]
        self.assertEqual(new_group.name, 'newgroup')
        self.assertRedirects(response, reverse('admin:auth_group_changelist'))
        self.assertEqual(Group.objects.count(), group_count + 1)

    def test_group_permission_performance(self):
        g = Group.objects.create(name="test_group")
        # Ensure no queries are skipped due to cached content type for Group.
        ContentType.objects.clear_cache()
        with self.assertNumQueries(8):
            response = self.client.get(reverse('admin:auth_group_change', args=(g.pk,)))
            self.assertEqual(response.status_code, 200)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
class CSSTest(TestCase):
    """Tests for the CSS class names emitted by admin templates."""
    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
        cls.s1 = Section.objects.create(name='Test section')
        cls.a1 = Article.objects.create(
            content='<p>Middle content</p>', date=datetime.datetime(2008, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a2 = Article.objects.create(
            content='<p>Oldest content</p>', date=datetime.datetime(2000, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.a3 = Article.objects.create(
            content='<p>Newest content</p>', date=datetime.datetime(2009, 3, 18, 11, 54, 58), section=cls.s1
        )
        cls.p1 = PrePopulatedPost.objects.create(title='A Long Title', published=True, slug='a-long-title')
    def setUp(self):
        self.client.login(username='super', password='secret')
    @ignore_warnings(category=RemovedInDjango20Warning)  # for allow_tags deprecation
    def test_field_prefix_css_classes(self):
        """
        Ensure that fields have a CSS class name with a 'field-' prefix.
        Refs #16371.
        """
        response = self.client.get(reverse('admin:admin_views_post_add'))
        # The main form
        self.assertContains(response, 'class="form-row field-title"')
        self.assertContains(response, 'class="form-row field-content"')
        self.assertContains(response, 'class="form-row field-public"')
        self.assertContains(response, 'class="form-row field-awesomeness_level"')
        self.assertContains(response, 'class="form-row field-coolness"')
        self.assertContains(response, 'class="form-row field-value"')
        self.assertContains(response, 'class="form-row"')  # The lambda function
        # The tabular inline
        self.assertContains(response, '<td class="field-url">')
        self.assertContains(response, '<td class="field-posted">')
    def test_index_css_classes(self):
        """
        Ensure that CSS class names are used for each app and model on the
        admin index pages.
        Refs #17050.
        """
        # General index page
        response = self.client.get(reverse('admin:index'))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')
        # App index page
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertContains(response, '<div class="app-admin_views module">')
        self.assertContains(response, '<tr class="model-actor">')
        self.assertContains(response, '<tr class="model-album">')
    def test_app_model_in_form_body_class(self):
        """
        Ensure app and model tag are correctly read by change_form template
        """
        response = self.client.get(reverse('admin:admin_views_section_add'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
                            '<body class=" app-admin_views model-section ')
    def test_app_model_in_list_body_class(self):
        """
        Ensure app and model tag are correctly read by change_list template
        """
        response = self.client.get(reverse('admin:admin_views_section_changelist'))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
                            '<body class=" app-admin_views model-section ')
    def test_app_model_in_delete_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by delete_confirmation
        template
        """
        response = self.client.get(
            reverse('admin:admin_views_section_delete', args=(self.s1.pk,)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
                            '<body class=" app-admin_views model-section ')
    def test_app_model_in_app_index_body_class(self):
        """
        Ensure app and model tag are correctly read by app_index template
        """
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, '<body class=" dashboard app-admin_views')
    def test_app_model_in_delete_selected_confirmation_body_class(self):
        """
        Ensure app and model tag are correctly read by
        delete_selected_confirmation template
        """
        action_data = {
            ACTION_CHECKBOX_NAME: [1],
            'action': 'delete_selected',
            'index': 0,
        }
        response = self.client.post(reverse('admin:admin_views_section_changelist'),
                                    action_data)
        self.assertEqual(response.status_code, 200)
        self.assertContains(response,
                            '<body class=" app-admin_views model-section ')
    def test_changelist_field_classes(self):
        """
        Cells of the change list table should contain the field name in their class attribute
        Refs #11195.
        """
        Podcast.objects.create(name="Django Dose",
                               release_date=datetime.date.today())
        response = self.client.get(reverse('admin:admin_views_podcast_changelist'))
        self.assertContains(
            response, '<th class="field-name">')
        self.assertContains(
            response, '<td class="field-release_date nowrap">')
        self.assertContains(
            response, '<td class="action-checkbox">')
# docutils is an optional dependency of django.contrib.admindocs; when it
# isn't installed, bind the name to None so the skipUnless decorator below
# can skip the admindocs tests instead of failing at import time.
try:
    import docutils
except ImportError:
    docutils = None
@unittest.skipUnless(docutils, "no docutils installed.")
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
                   ROOT_URLCONF="admin_views.urls")
@modify_settings(INSTALLED_APPS={'append': ['django.contrib.admindocs', 'django.contrib.flatpages']})
class AdminDocsTest(TestCase):
    """Smoke tests for the admindocs tag and filter reference pages."""
    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )
    def setUp(self):
        self.client.login(username='super', password='secret')
    def test_tags(self):
        # Each entry is expected twice: once in the index, once in the detail.
        response = self.client.get(reverse('django-admindocs-tags'))
        # The builtin tag group exists
        self.assertContains(response, "<h2>Built-in tags</h2>", count=2, html=True)
        # A builtin tag exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-autoescape">autoescape</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-autoescape">autoescape</a></li>', html=True)
        # An app tag exists in both the index and detail
        self.assertContains(response, '<h3 id="flatpages-get_flatpages">get_flatpages</h3>', html=True)
        self.assertContains(response, '<li><a href="#flatpages-get_flatpages">get_flatpages</a></li>', html=True)
        # The admin list tag group exists
        self.assertContains(response, "<h2>admin_list</h2>", count=2, html=True)
        # An admin list tag exists in both the index and detail
        self.assertContains(response, '<h3 id="admin_list-admin_actions">admin_actions</h3>', html=True)
        self.assertContains(response, '<li><a href="#admin_list-admin_actions">admin_actions</a></li>', html=True)
    def test_filters(self):
        response = self.client.get(reverse('django-admindocs-filters'))
        # The builtin filter group exists
        self.assertContains(response, "<h2>Built-in filters</h2>", count=2, html=True)
        # A builtin filter exists in both the index and detail
        self.assertContains(response, '<h3 id="built_in-add">add</h3>', html=True)
        self.assertContains(response, '<li><a href="#built_in-add">add</a></li>', html=True)
@override_settings(
    PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
    ROOT_URLCONF="admin_views.urls",
    TEMPLATES=[{
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    }],
    USE_I18N=False,
)
class ValidXHTMLTests(TestCase):
    """With USE_I18N disabled, admin pages must not emit empty lang attributes."""

    @classmethod
    def setUpTestData(cls):
        cls.u1 = User.objects.create(
            id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
            last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
            first_name='Super', last_name='User', email='super@example.com',
            is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
        )

    def setUp(self):
        self.client.login(username='super', password='secret')

    def test_lang_name_present(self):
        response = self.client.get(reverse('admin:app_list', args=('admin_views',)))
        # Neither the HTML nor the XML language attribute may appear empty.
        for empty_attr in (' lang=""', ' xml:lang=""'):
            self.assertNotContains(response, empty_attr)
# Exercises the changelist date_hierarchy navigation: which year/month/day
# drill-down links appear depending on how the objects' dates are spread.
# USE_THOUSAND_SEPARATOR is enabled on purpose so the year regression
# (refs #15234) can be asserted via assert_non_localized_year.
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls",
USE_THOUSAND_SEPARATOR=True, USE_L10N=True)
class DateHierarchyTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
def setUp(self):
self.client.login(username='super', password='secret')
def tearDown(self):
# The L10N overrides poison the format cache; reset it between tests.
formats.reset_format_cache()
def assert_non_localized_year(self, response, year):
"""Ensure that the year is not localized with
USE_THOUSAND_SEPARATOR. Refs #15234.
"""
self.assertNotContains(response, formats.number_format(year))
# The assert_contains_*_link helpers look for the querystring fragments
# the date_hierarchy template renders for each drill-down level.
def assert_contains_year_link(self, response, date):
self.assertContains(response, '?release_date__year=%d"' % (date.year,))
def assert_contains_month_link(self, response, date):
self.assertContains(
response, '?release_date__month=%d&release_date__year=%d"' % (
date.month, date.year))
def assert_contains_day_link(self, response, date):
self.assertContains(
response, '?release_date__day=%d&'
'release_date__month=%d&release_date__year=%d"' % (
date.day, date.month, date.year))
def test_empty(self):
"""
Ensure that no date hierarchy links display with empty changelist.
"""
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
self.assertNotContains(response, 'release_date__year=')
self.assertNotContains(response, 'release_date__month=')
self.assertNotContains(response, 'release_date__day=')
def test_single(self):
"""
Ensure that single day-level date hierarchy appears for single object.
"""
DATE = datetime.date(2000, 6, 30)
Podcast.objects.create(release_date=DATE)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
self.assert_contains_day_link(response, DATE)
self.assert_non_localized_year(response, 2000)
def test_within_month(self):
"""
Ensure that day-level links appear for changelist within single month.
"""
DATES = (datetime.date(2000, 6, 30),
datetime.date(2000, 6, 15),
datetime.date(2000, 6, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
for date in DATES:
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_within_year(self):
"""
Ensure that month-level links appear for changelist within single year.
"""
DATES = (datetime.date(2000, 1, 30),
datetime.date(2000, 3, 15),
datetime.date(2000, 5, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
url = reverse('admin:admin_views_podcast_changelist')
response = self.client.get(url)
# no day-level links
self.assertNotContains(response, 'release_date__day=')
for date in DATES:
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
def test_multiple_years(self):
"""
Ensure that year-level links appear for year-spanning changelist.
"""
DATES = (datetime.date(2001, 1, 30),
datetime.date(2003, 3, 15),
datetime.date(2005, 5, 3))
for date in DATES:
Podcast.objects.create(release_date=date)
response = self.client.get(
reverse('admin:admin_views_podcast_changelist'))
# no day/month-level links
self.assertNotContains(response, 'release_date__day=')
self.assertNotContains(response, 'release_date__month=')
for date in DATES:
self.assert_contains_year_link(response, date)
# and make sure GET parameters still behave correctly
for date in DATES:
url = '%s?release_date__year=%d' % (
reverse('admin:admin_views_podcast_changelist'),
date.year)
response = self.client.get(url)
self.assert_contains_month_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
url = '%s?release_date__year=%d&release_date__month=%d' % (
reverse('admin:admin_views_podcast_changelist'),
date.year, date.month)
response = self.client.get(url)
self.assert_contains_day_link(response, date)
self.assert_non_localized_year(response, 2000)
self.assert_non_localized_year(response, 2003)
self.assert_non_localized_year(response, 2005)
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminCustomSaveRelatedTests(TestCase):
"""
Ensure that one can easily customize the way related objects are saved.
Refs #16115.
"""
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
def setUp(self):
self.client.login(username='super', password='secret')
# NOTE(review): the 'Catherine Stone'/'Paul Stone' expectations imply the
# test project's Parent ModelAdmin overrides save_related() to copy the
# parent's surname onto the children — confirm against admin_views.admin.
def test_should_be_able_to_edit_related_objects_on_add_view(self):
post = {
'child_set-TOTAL_FORMS': '3',
'child_set-INITIAL_FORMS': '0',
'name': 'Josh Stone',
'child_set-0-name': 'Paul',
'child_set-1-name': 'Catherine',
}
self.client.post(reverse('admin:admin_views_parent_add'), post)
self.assertEqual(1, Parent.objects.count())
self.assertEqual(2, Child.objects.count())
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
def test_should_be_able_to_edit_related_objects_on_change_view(self):
parent = Parent.objects.create(name='Josh Stone')
paul = Child.objects.create(parent=parent, name='Paul')
catherine = Child.objects.create(parent=parent, name='Catherine')
post = {
'child_set-TOTAL_FORMS': '5',
'child_set-INITIAL_FORMS': '2',
'name': 'Josh Stone',
'child_set-0-name': 'Paul',
'child_set-0-id': paul.id,
'child_set-1-name': 'Catherine',
'child_set-1-id': catherine.id,
}
self.client.post(reverse('admin:admin_views_parent_change', args=(parent.id,)), post)
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
def test_should_be_able_to_edit_related_objects_on_changelist_view(self):
parent = Parent.objects.create(name='Josh Rock')
Child.objects.create(parent=parent, name='Paul')
Child.objects.create(parent=parent, name='Catherine')
post = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '1',
'form-MAX_NUM_FORMS': '0',
'form-0-id': parent.id,
'form-0-name': 'Josh Stone',
'_save': 'Save'
}
self.client.post(reverse('admin:admin_views_parent_changelist'), post)
children_names = list(Child.objects.order_by('name').values_list('name', flat=True))
self.assertEqual('Josh Stone', Parent.objects.latest('id').name)
self.assertEqual(['Catherine Stone', 'Paul Stone'], children_names)
# Verifies the admin logout view: first GET logs the user out (200 with the
# logged-out template), a second GET redirects an anonymous user to the
# login page carrying a 'next' parameter back to the admin index.
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminViewLogoutTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
def setUp(self):
self.client.login(username='super', password='secret')
def test_client_logout_url_can_be_used_to_login(self):
response = self.client.get(reverse('admin:logout'))
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'registration/logged_out.html')
self.assertEqual(response.request['PATH_INFO'], reverse('admin:logout'))
# we are now logged out
response = self.client.get(reverse('admin:logout'))
self.assertEqual(response.status_code, 302)  # we should be redirected to the login page.
# follow the redirect and test results.
response = self.client.get(reverse('admin:logout'), follow=True)
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, 'admin/login.html')
self.assertEqual(response.request['PATH_INFO'], reverse('admin:login'))
self.assertContains(response, '<input type="hidden" name="next" value="%s" />' % reverse('admin:index'))
# Verifies that messages emitted by admin actions surface in the changelist
# response at every message level, including custom extra_tags.
# NOTE(review): relies on message_<level> / message_extra_tags actions that
# are presumably registered on the UserMessenger ModelAdmin — confirm there.
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminUserMessageTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
def setUp(self):
self.client.login(username='super', password='secret')
def send_message(self, level):
"""
Helper that sends a post to the dummy test methods and asserts that a
message with the level has appeared in the response.
"""
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'message_%s' % level,
'index': 0,
}
response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
action_data, follow=True)
self.assertContains(response,
'<li class="%s">Test %s</li>' % (level, level),
html=True)
@override_settings(MESSAGE_LEVEL=10)  # Set to DEBUG for this request
def test_message_debug(self):
self.send_message('debug')
def test_message_info(self):
self.send_message('info')
def test_message_success(self):
self.send_message('success')
def test_message_warning(self):
self.send_message('warning')
def test_message_error(self):
self.send_message('error')
def test_message_extra_tags(self):
action_data = {
ACTION_CHECKBOX_NAME: [1],
'action': 'message_extra_tags',
'index': 0,
}
response = self.client.post(reverse('admin:admin_views_usermessenger_changelist'),
action_data, follow=True)
self.assertContains(response,
'<li class="extra_tag info">Test tags</li>',
html=True)
# Verifies that changelist filters are preserved (via the
# _changelist_filters querystring parameter) across the add, change,
# history and delete views, and across the redirects that follow
# Save / Save-and-continue / Save-and-add-another / Delete.
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminKeepChangeListFiltersTests(TestCase):
# Subclasses override this to run the same tests on another AdminSite.
admin_site = site
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
cls.u5 = User.objects.create(
id=104, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=False, username='joepublic',
first_name='Joe', last_name='Public', email='joepublic@example.com',
is_staff=False, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
def setUp(self):
self.client.login(username='super', password='secret')
def assertURLEqual(self, url1, url2):
"""
Assert that two URLs are equal despite the ordering
of their querystring. Refs #22360.
"""
parsed_url1 = urlparse(url1)
path1 = parsed_url1.path
parsed_qs1 = dict(parse_qsl(parsed_url1.query))
parsed_url2 = urlparse(url2)
path2 = parsed_url2.path
parsed_qs2 = dict(parse_qsl(parsed_url2.query))
# The nested _changelist_filters value is itself a querystring; parse
# it too so its internal parameter ordering is also ignored.
for parsed_qs in [parsed_qs1, parsed_qs2]:
if '_changelist_filters' in parsed_qs:
changelist_filters = parsed_qs['_changelist_filters']
parsed_filters = dict(parse_qsl(changelist_filters))
parsed_qs['_changelist_filters'] = parsed_filters
self.assertEqual(path1, path2)
self.assertEqual(parsed_qs1, parsed_qs2)
def test_assert_url_equal(self):
# Test equality.
change_user_url = reverse('admin:auth_user_change', args=(self.u5.pk,))
self.assertURLEqual(
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
),
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
)
)
# Test inequality.
with self.assertRaises(AssertionError):
self.assertURLEqual(
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
),
'http://testserver{}?_changelist_filters=is_staff__exact%3D1%26is_superuser__exact%3D1'.format(
change_user_url
)
)
# Ignore scheme and host.
self.assertURLEqual(
'http://testserver{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(
change_user_url
),
'{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url)
)
# Ignore ordering of querystring.
self.assertURLEqual(
'{}?is_staff__exact=0&is_superuser__exact=0'.format(reverse('admin:auth_user_changelist')),
'{}?is_superuser__exact=0&is_staff__exact=0'.format(reverse('admin:auth_user_changelist'))
)
# Ignore ordering of _changelist_filters.
self.assertURLEqual(
'{}?_changelist_filters=is_staff__exact%3D0%26is_superuser__exact%3D0'.format(change_user_url),
'{}?_changelist_filters=is_superuser__exact%3D0%26is_staff__exact%3D0'.format(change_user_url)
)
# The get_*_url helpers below build the admin URLs under test, always
# carrying the preserved-filters querystring.
def get_changelist_filters(self):
return {
'is_superuser__exact': 0,
'is_staff__exact': 0,
}
def get_changelist_filters_querystring(self):
return urlencode(self.get_changelist_filters())
def get_preserved_filters_querystring(self):
return urlencode({
'_changelist_filters': self.get_changelist_filters_querystring()
})
def get_sample_user_id(self):
return self.u5.pk
def get_changelist_url(self):
return '%s?%s' % (
reverse('admin:auth_user_changelist',
current_app=self.admin_site.name),
self.get_changelist_filters_querystring(),
)
def get_add_url(self):
return '%s?%s' % (
reverse('admin:auth_user_add',
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def get_change_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse('admin:auth_user_change', args=(user_id,),
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def get_history_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse('admin:auth_user_history', args=(user_id,),
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def get_delete_url(self, user_id=None):
if user_id is None:
user_id = self.get_sample_user_id()
return "%s?%s" % (
reverse('admin:auth_user_delete', args=(user_id,),
current_app=self.admin_site.name),
self.get_preserved_filters_querystring(),
)
def test_changelist_view(self):
response = self.client.get(self.get_changelist_url())
self.assertEqual(response.status_code, 200)
# Check the `change_view` link has the correct querystring.
detail_link = re.search(
'<a href="(.*?)">{}</a>'.format(self.u5.username),
force_text(response.content)
)
self.assertURLEqual(detail_link.group(1), self.get_change_url())
def test_change_view(self):
# Get the `change_view`.
response = self.client.get(self.get_change_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
force_text(response.content)
)
self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
# Check the history link.
history_link = re.search(
'<a href="(.*?)" class="historylink">History</a>',
force_text(response.content)
)
self.assertURLEqual(history_link.group(1), self.get_history_url())
# Check the delete link.
delete_link = re.search(
'<a href="(.*?)" class="deletelink">Delete</a>',
force_text(response.content)
)
self.assertURLEqual(delete_link.group(1), self.get_delete_url())
# Test redirect on "Save".
post_data = {
'username': 'joepublic',
'last_login_0': '2007-05-30',
'last_login_1': '13:20:10',
'date_joined_0': '2007-05-30',
'date_joined_1': '13:20:10',
}
post_data['_save'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_changelist_url()
)
post_data.pop('_save')
# Test redirect on "Save and continue".
post_data['_continue'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_change_url()
)
post_data.pop('_continue')
# Test redirect on "Save and add new".
post_data['_addanother'] = 1
response = self.client.post(self.get_change_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_add_url()
)
post_data.pop('_addanother')
def test_add_view(self):
# Get the `add_view`.
response = self.client.get(self.get_add_url())
self.assertEqual(response.status_code, 200)
# Check the form action.
form_action = re.search(
'<form enctype="multipart/form-data" action="(.*?)" method="post" id="user_form".*?>',
force_text(response.content)
)
self.assertURLEqual(form_action.group(1), '?%s' % self.get_preserved_filters_querystring())
post_data = {
'username': 'dummy',
'password1': 'test',
'password2': 'test',
}
# Test redirect on "Save".
post_data['_save'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_change_url(User.objects.get(username='dummy').pk)
)
post_data.pop('_save')
# Test redirect on "Save and continue".
post_data['username'] = 'dummy2'
post_data['_continue'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_change_url(User.objects.get(username='dummy2').pk)
)
post_data.pop('_continue')
# Test redirect on "Save and add new".
post_data['username'] = 'dummy3'
post_data['_addanother'] = 1
response = self.client.post(self.get_add_url(), data=post_data)
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_add_url()
)
post_data.pop('_addanother')
def test_delete_view(self):
# Test redirect on "Delete".
response = self.client.post(self.get_delete_url(), {'post': 'yes'})
self.assertEqual(response.status_code, 302)
self.assertURLEqual(
response.url,
self.get_changelist_url()
)
def test_url_prefix(self):
# add_preserved_filters() must keep working when the URL is reversed
# under a non-empty script prefix.
context = {
'preserved_filters': self.get_preserved_filters_querystring(),
'opts': User._meta,
}
url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
self.assertURLEqual(
self.get_changelist_url(),
add_preserved_filters(context, url),
)
with override_script_prefix('/prefix/'):
url = reverse('admin:auth_user_changelist', current_app=self.admin_site.name)
self.assertURLEqual(
self.get_changelist_url(),
add_preserved_filters(context, url),
)
# Re-runs the entire AdminKeepChangeListFiltersTests suite against the
# namespaced admin site (site2) instead of the default site.
class NamespacedAdminKeepChangeListFiltersTests(AdminKeepChangeListFiltersTests):
admin_site = site2
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class TestLabelVisibility(TestCase):
""" #11277 -Labels of hidden fields in admin were not hidden. """
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
def setUp(self):
self.client.login(username='super', password='secret')
def test_all_fields_visible(self):
response = self.client.get(reverse('admin:admin_views_emptymodelvisible_add'))
self.assert_fieldline_visible(response)
self.assert_field_visible(response, 'first')
self.assert_field_visible(response, 'second')
def test_all_fields_hidden(self):
response = self.client.get(reverse('admin:admin_views_emptymodelhidden_add'))
self.assert_fieldline_hidden(response)
self.assert_field_hidden(response, 'first')
self.assert_field_hidden(response, 'second')
def test_mixin(self):
# One hidden and one visible field on the same field line: the line
# itself stays visible while the hidden field's box gets the class.
response = self.client.get(reverse('admin:admin_views_emptymodelmixin_add'))
self.assert_fieldline_visible(response)
self.assert_field_hidden(response, 'first')
self.assert_field_visible(response, 'second')
# Helpers matching the CSS classes rendered by the admin fieldset template.
def assert_field_visible(self, response, field_name):
self.assertContains(response, '<div class="field-box field-%s">' % field_name)
def assert_field_hidden(self, response, field_name):
self.assertContains(response, '<div class="field-box field-%s hidden">' % field_name)
def assert_fieldline_visible(self, response):
self.assertContains(response, '<div class="form-row field-first field-second">')
def assert_fieldline_hidden(self, response):
self.assertContains(response, '<div class="form-row hidden')
# Tests the ModelAdmin.view_on_site option on the change view: boolean
# values toggle the "View on site" button, a callable supplies the URL,
# and invalid values are reported by the checks framework (admin.E025).
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class AdminViewOnSiteTests(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
# State -> City -> Restaurant -> Worker object graph shared by the tests.
cls.s1 = State.objects.create(name='New York')
cls.s2 = State.objects.create(name='Illinois')
cls.s3 = State.objects.create(name='California')
cls.c1 = City.objects.create(state=cls.s1, name='New York')
cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')
def setUp(self):
self.client.login(username='super', password='secret')
def test_add_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data
"""
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {"family_name": "Test1",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": "",
"dependentchild_set-0-family_name": "Test2"}
response = self.client.post(reverse('admin:admin_views_parentwithdependentchildren_add'),
post_data)
# just verifying the parent form failed validation, as expected --
# this isn't the regression test
self.assertIn('some_required_info', response.context['adminform'].form.errors)
# actual regression test
for error_set in response.context['inline_admin_formset'].formset.errors:
self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
error_set.get('__all__'))
def test_change_view_form_and_formsets_run_validation(self):
"""
Issue #20522
Verifying that if the parent form fails validation, the inlines also
run validation even if validation is contingent on parent form data
"""
pwdc = ParentWithDependentChildren.objects.create(some_required_info=6,
family_name="Test1")
# The form validation should fail because 'some_required_info' is
# not included on the parent form, and the family_name of the parent
# does not match that of the child
post_data = {"family_name": "Test2",
"dependentchild_set-TOTAL_FORMS": "1",
"dependentchild_set-INITIAL_FORMS": "0",
"dependentchild_set-MAX_NUM_FORMS": "1",
"dependentchild_set-0-id": "",
"dependentchild_set-0-parent": str(pwdc.id),
"dependentchild_set-0-family_name": "Test1"}
response = self.client.post(
reverse('admin:admin_views_parentwithdependentchildren_change', args=(pwdc.id,)), post_data
)
# just verifying the parent form failed validation, as expected --
# this isn't the regression test
self.assertIn('some_required_info', response.context['adminform'].form.errors)
# actual regression test
for error_set in response.context['inline_admin_formset'].formset.errors:
self.assertEqual(['Children must share a family name with their parents in this contrived test case'],
error_set.get('__all__'))
def test_check(self):
"Ensure that the view_on_site value is either a boolean or a callable"
try:
admin = CityAdmin(City, AdminSite())
CityAdmin.view_on_site = True
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = False
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = lambda obj: obj.get_absolute_url()
self.assertEqual(admin.check(), [])
CityAdmin.view_on_site = []
self.assertEqual(admin.check(), [
Error(
"The value of 'view_on_site' must be a callable or a boolean value.",
hint=None,
obj=CityAdmin,
id='admin.E025',
),
])
finally:
# Restore the original values for the benefit of other tests.
CityAdmin.view_on_site = True
def test_false(self):
"Ensure that the 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, 1)))
def test_true(self):
"Ensure that the default behavior is followed if view_on_site is True"
response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))
def test_callable(self):
"Ensure that the right link is displayed if view_on_site is a callable"
response = self.client.get(reverse('admin:admin_views_worker_change', args=(self.w1.pk,)))
self.assertContains(response, '"/worker/%s/%s/"' % (self.w1.surname, self.w1.name))
def test_missing_get_absolute_url(self):
"Ensure None is returned if model doesn't have get_absolute_url"
model_admin = ModelAdmin(Worker, None)
self.assertIsNone(model_admin.get_view_on_site_url(Worker()))
# Same view_on_site behaviours as AdminViewOnSiteTests, but for inline
# (related-object) rows rendered on a parent's change view.
@override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls")
class InlineAdminViewOnSiteTest(TestCase):
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
# State -> City -> Restaurant -> Worker object graph shared by the tests.
cls.s1 = State.objects.create(name='New York')
cls.s2 = State.objects.create(name='Illinois')
cls.s3 = State.objects.create(name='California')
cls.c1 = City.objects.create(state=cls.s1, name='New York')
cls.c2 = City.objects.create(state=cls.s2, name='Chicago')
cls.c3 = City.objects.create(state=cls.s3, name='San Francisco')
cls.r1 = Restaurant.objects.create(city=cls.c1, name='Italian Pizza')
cls.r2 = Restaurant.objects.create(city=cls.c1, name='Boulevard')
cls.r3 = Restaurant.objects.create(city=cls.c2, name='Chinese Dinner')
cls.r4 = Restaurant.objects.create(city=cls.c2, name='Angels')
cls.r5 = Restaurant.objects.create(city=cls.c2, name='Take Away')
cls.r6 = Restaurant.objects.create(city=cls.c3, name='The Unknown Restaurant')
cls.w1 = Worker.objects.create(work_at=cls.r1, name='Mario', surname='Rossi')
cls.w2 = Worker.objects.create(work_at=cls.r1, name='Antonio', surname='Bianchi')
cls.w3 = Worker.objects.create(work_at=cls.r1, name='John', surname='Doe')
def setUp(self):
self.client.login(username='super', password='secret')
def test_false(self):
"Ensure that the 'View on site' button is not displayed if view_on_site is False"
response = self.client.get(reverse('admin:admin_views_state_change', args=(self.s1.pk,)))
content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertNotContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.c1.pk)))
def test_true(self):
"Ensure that the 'View on site' button is displayed if view_on_site is True"
response = self.client.get(reverse('admin:admin_views_city_change', args=(self.c1.pk,)))
content_type_pk = ContentType.objects.get_for_model(Restaurant).pk
self.assertContains(response, reverse('admin:view_on_site', args=(content_type_pk, self.r1.pk)))
def test_callable(self):
"Ensure that the right link is displayed if view_on_site is a callable"
response = self.client.get(reverse('admin:admin_views_restaurant_change', args=(self.r1.pk,)))
self.assertContains(response, '"/worker_inline/%s/%s/"' % (self.w1.surname, self.w1.name))
@override_settings(ROOT_URLCONF="admin_views.urls")
class TestEtagWithAdminView(SimpleTestCase):
    """ETag headers on admin responses follow the USE_ETAGS setting.

    See https://code.djangoproject.com/ticket/16003.
    """

    def test_admin(self):
        # An anonymous hit on the admin index redirects to login either
        # way; only the presence of the ETag header should track USE_ETAGS.
        for use_etags in (False, True):
            with self.settings(USE_ETAGS=use_etags):
                resp = self.client.get(reverse('admin:index'))
                self.assertEqual(resp.status_code, 302)
                self.assertEqual(resp.has_header('ETag'), use_etags)
@override_settings(
PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
ROOT_URLCONF="admin_views.urls",
)
class GetFormsetsWithInlinesArgumentTest(TestCase):
"""
#23934 - When adding a new model instance in the admin, the 'obj' argument
of get_formsets_with_inlines() should be None. When changing, it should be
equal to the existing model instance.
The GetFormsetsArgumentCheckingAdmin ModelAdmin throws an exception
if obj is not None during add_view or obj is None during change_view.
"""
@classmethod
def setUpTestData(cls):
cls.u1 = User.objects.create(
id=100, password='sha1$995a3$6011485ea3834267d719b4c801409b8b1ddd0158',
last_login=datetime.datetime(2007, 5, 30, 13, 20, 10), is_superuser=True, username='super',
first_name='Super', last_name='User', email='super@example.com',
is_staff=True, is_active=True, date_joined=datetime.datetime(2007, 5, 30, 13, 20, 10)
)
def setUp(self):
self.client.login(username='super', password='secret')
# A 302 on both posts means neither view raised the checking admin's
# exception, i.e. 'obj' had the expected value in each case.
def test_explicitly_provided_pk(self):
post_data = {'name': '1'}
response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_add'), post_data)
self.assertEqual(response.status_code, 302)
post_data = {'name': '2'}
response = self.client.post(reverse('admin:admin_views_explicitlyprovidedpk_change', args=(1,)), post_data)
self.assertEqual(response.status_code, 302)
def test_implicitly_generated_pk(self):
post_data = {'name': '1'}
response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_add'), post_data)
self.assertEqual(response.status_code, 302)
post_data = {'name': '2'}
response = self.client.post(reverse('admin:admin_views_implicitlygeneratedpk_change', args=(1,)), post_data)
self.assertEqual(response.status_code, 302)
| bsd-3-clause |
michaelWagner/oppia | extensions/rich_text_components/Image/Image.py | 9 | 1770 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from extensions.rich_text_components import base
class Image(base.BaseRichTextComponent):
    """A rich-text component representing an inline image."""
    # Display name shown in the rich-text editor's component picker.
    name = 'Image'
    # Picker category the component is grouped under.
    category = 'Basic Input'
    description = 'An image.'
    # Identifier used by the frontend to look up the widget.
    frontend_name = 'image'
    tooltip = 'Insert image'
    # The component needs filesystem access (to load the uploaded image).
    requires_fs = True
    # Rendered as a block-level element rather than inline text.
    is_block_element = True
    # Customization arguments presented to the exploration author, in the
    # order they appear in the editor form.
    _customization_arg_specs = [{
        'name': 'filepath',
        'description': (
            'The name of the image file. (Allowed extensions: gif, jpeg, jpg, '
            'png.)'),
        'schema': {
            'type': 'custom',
            'obj_type': 'Filepath',
        },
        'default_value': '',
    }, {
        'name': 'caption',
        'description': ('Caption for image (optional)'),
        'schema': {
            'type': 'unicode',
        },
        'default_value': '',
    }, {
        'name': 'alt',
        'description': 'Alternative text (for screen readers)',
        'schema': {
            'type': 'unicode',
        },
        'default_value': '',
    }]
    @property
    def preview_url_template(self):
        # URL used by the editor to preview the image; <[...]> placeholders
        # are substituted by the frontend.
        return '/imagehandler/<[explorationId]>/<[filepath]>'
| apache-2.0 |
createwindow/pjsip-blf | pjsip-apps/src/confbot/confbot.py | 32 | 16451 | # $Id: confbot.py 2912 2009-08-24 11:56:13Z bennylp $
#
# SIP Conference Bot
#
# Copyright (C) 2008-2009 Teluu Inc. (http://www.teluu.com)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import pjsua as pj
import string
import sys
CFG_FILE = "config"  # name of the Python config module imported by Bot.main()
INFO = 1             # DEBUG level: important, user-visible events
TRACE = 2            # DEBUG level: verbose tracing (default for Bot.DEBUG)
# Call callback. This would just forward the event to the Member class
class CallCb(pj.CallCallback):
    """Thin pjsua call-callback adapter.

    Every call event is delegated to the owning Member instance so that all
    per-participant state and logic lives in one place (Member)."""
    def __init__(self, member, call=None):
        pj.CallCallback.__init__(self, call)
        self.member = member
    def on_state(self):
        self.member.on_call_state(self.call)
    def on_media_state(self):
        self.member.on_call_media_state(self.call)
    def on_dtmf_digit(self, digits):
        self.member.on_call_dtmf_digit(self.call, digits)
    def on_transfer_request(self, dst, code):
        return self.member.on_call_transfer_request(self.call, dst, code)
    def on_transfer_status(self, code, reason, final, cont):
        return self.member.on_call_transfer_status(self.call, code, reason, final, cont)
    def on_replace_request(self, code, reason):
        return self.member.on_call_replace_request(self.call, code, reason)
    def on_replaced(self, new_call):
        self.member.on_call_replaced(self.call, new_call)
    def on_typing(self, is_typing):
        self.member.on_typing(is_typing, call=self.call)
    def on_pager(self, mime_type, body):
        self.member.on_pager(mime_type, body, call=self.call)
    def on_pager_status(self, body, im_id, code, reason):
        self.member.on_pager_status(body, im_id, code, reason, call=self.call)
# Buddy callback. This would just forward the event to Member class
class BuddyCb(pj.BuddyCallback):
    """Thin pjsua buddy-callback adapter.

    Presence/IM events are delegated to the owning Member instance, mirroring
    the CallCb delegation pattern."""
    def __init__(self, member, buddy=None):
        pj.BuddyCallback.__init__(self, buddy)
        self.member = member
    def on_pager(self, mime_type, body):
        self.member.on_pager(mime_type, body, buddy=self.buddy)
    def on_pager_status(self, body, im_id, code, reason):
        self.member.on_pager_status(body, im_id, code, reason, buddy=self.buddy)
    def on_state(self):
        self.member.on_pres_state(self.buddy)
    def on_typing(self, is_typing):
        self.member.on_typing(is_typing, buddy=self.buddy)
##############################################################################
#
#
# This class represents individual room member (either/both chat and voice conf)
#
#
class Member:
    """One room participant.

    Tracks both chat-room membership (presence subscription + IM) and the
    member's voice-conference call, and forwards notable events to the Bot
    so they can be announced to the whole room."""
    def __init__(self, bot, uri):
        self.uri = uri
        self.bot = bot
        self.call = None        # active pj.Call while in the voice conference
        self.buddy = None       # pj.Buddy while subscribed to the chatroom
        self.bi = pj.BuddyInfo()
        self.in_chat = False
        self.in_voice = False
        self.im_error = False   # True after an IM to this member failed
        self.html = False       # member prefers HTML-escaped messages
    def __str__(self):
        # One-line status summary used by Bot.listmembers().
        # (local renamed from `str`, which shadowed the builtin)
        s = string.ljust(self.uri, 30) + " -- "
        if self.buddy:
            bi = self.buddy.info()
            s = s + bi.online_text
        else:
            s = s + "Offline"
        s = s + " ["
        if (self.in_voice):
            s = s + " voice"
        if (self.in_chat):
            s = s + " chat"
        if (self.html):
            s = s + " html"
        else:
            s = s + " plain"
        if (self.im_error):
            s = s + " im_error"
        s = s + "]"
        return s
    def join_call(self, call):
        """Adopt `call` as this member's conference leg, dropping any old one."""
        if self.call:
            self.call.hangup(603, "You have been disconnected for making another call")
        self.call = call
        call.set_callback(CallCb(self, call))
        msg = "%(uri)s is attempting to join the voice conference" % \
              {'uri': self.uri}
        self.bot.DEBUG(msg + "\n", INFO)
        self.bot.broadcast_pager(None, msg)
    def join_chat(self):
        """Subscribe (or re-subscribe) this member's URI to the chatroom."""
        if not self.buddy:
            self.bot.DEBUG(self.uri + " joining chatroom...\n", INFO)
            self.buddy = self.bot.acc.add_buddy(self.uri)
            self.buddy.set_callback(BuddyCb(self, self.buddy))
            self.buddy.subscribe()
        else:
            self.bot.DEBUG(self.uri + " already in chatroom, resubscribing..\n", INFO)
            self.buddy.subscribe()
    def send_pager(self, body, mime="text/plain"):
        """Send an IM to this member, HTML-escaping the body if requested."""
        self.bot.DEBUG("send_pager() to " + self.uri)
        if self.in_chat and not self.im_error and self.buddy:
            if self.html:
                #This will make us receive html!
                #mime = "text/html"
                # BUGFIX: the escaping below had lost its HTML entities (the
                # old lines replaced characters with themselves and contained
                # a broken string literal). Escape '&' first so the entities
                # produced afterwards are not double-escaped.
                body = body.replace("&", "&amp;")
                body = body.replace("<", "&lt;")
                body = body.replace(">", "&gt;")
                body = body.replace('"', "&quot;")
                body = body.replace("\n", "<BR>\n")
            self.buddy.send_pager(body, content_type=mime)
            self.bot.DEBUG("..sent\n")
        else:
            self.bot.DEBUG("..not sent!\n")
    def on_call_state(self, call):
        """Track voice-conference join/leave from call state changes."""
        ci = call.info()
        if ci.state==pj.CallState.DISCONNECTED:
            if self.in_voice:
                msg = "%(uri)s has left the voice conference (%(1)d/%(2)s)" % \
                      {'uri': self.uri, '1': ci.last_code, '2': ci.last_reason}
                self.bot.DEBUG(msg + "\n", INFO)
                self.bot.broadcast_pager(None, msg)
            self.in_voice = False
            self.call = None
            self.bot.on_member_left(self)
        elif ci.state==pj.CallState.CONFIRMED:
            msg = "%(uri)s has joined the voice conference" % \
                  {'uri': self.uri}
            self.bot.DEBUG(msg + "\n", INFO)
            self.bot.broadcast_pager(None, msg)
    def on_call_media_state(self, call):
        """Wire this member into the bridge when call media becomes active."""
        self.bot.DEBUG("Member.on_call_media_state\n")
        ci = call.info()
        if ci.conf_slot!=-1:
            if not self.in_voice:
                msg = self.uri + " call media is active"
                self.bot.broadcast_pager(None, msg)
            self.in_voice = True
            self.bot.add_to_voice_conf(self)
        else:
            if self.in_voice:
                msg = self.uri + " call media is inactive"
                self.bot.broadcast_pager(None, msg)
            self.in_voice = False
    def on_call_dtmf_digit(self, call, digits):
        """Announce received DTMF digits to the room."""
        msg = "%(uri)s sent DTMF digits %(dig)s" % \
              {'uri': self.uri, 'dig': digits}
        self.bot.broadcast_pager(None, msg)
    def on_call_transfer_request(self, call, dst, code):
        """Announce and accept (202) an incoming call-transfer request."""
        msg = "%(uri)s is transfering the call to %(dst)s" % \
              {'uri': self.uri, 'dst': dst}
        self.bot.broadcast_pager(None, msg)
        return 202
    def on_call_transfer_status(self, call, code, reason, final, cont):
        """Announce transfer progress; returning True keeps notifications on."""
        msg = "%(uri)s call transfer status is %(code)d/%(res)s" % \
              {'uri': self.uri, 'code': code, 'res': reason}
        self.bot.broadcast_pager(None, msg)
        return True
    def on_call_replace_request(self, call, code, reason):
        """Announce a call-replace request; echoing (code, reason) accepts it."""
        msg = "%(uri)s is requesting call replace" % \
              {'uri': self.uri}
        self.bot.broadcast_pager(None, msg)
        return (code, reason)
    def on_call_replaced(self, call, new_call):
        """Announce that this member's call has been replaced."""
        msg = "%(uri)s call is replaced" % \
              {'uri': self.uri}
        self.bot.broadcast_pager(None, msg)
    def on_pres_state(self, buddy):
        """React to presence changes: announce them and track chat membership."""
        old_bi = self.bi
        self.bi = buddy.info()
        msg = "%(uri)s status is %(st)s" % \
              {'uri': self.uri, 'st': self.bi.online_text}
        self.bot.DEBUG(msg + "\n", INFO)
        self.bot.broadcast_pager(self, msg)
        if self.bi.sub_state==pj.SubscriptionState.ACTIVE:
            if not self.in_chat:
                self.in_chat = True
                buddy.send_pager("Welcome to chatroom")
                self.bot.broadcast_pager(self, self.uri + " has joined the chat room")
            else:
                self.in_chat = True
        elif self.bi.sub_state==pj.SubscriptionState.NULL or \
             self.bi.sub_state==pj.SubscriptionState.TERMINATED or \
             self.bi.sub_state==pj.SubscriptionState.UNKNOWN:
            # Subscription is gone: drop the buddy and leave the chatroom.
            self.buddy.delete()
            self.buddy = None
            if self.in_chat:
                self.in_chat = False
                self.bot.broadcast_pager(self, self.uri + " has left the chat room")
            else:
                self.in_chat = False
            self.bot.on_member_left(self)
    def on_typing(self, is_typing, call=None, buddy=None):
        """Relay typing indications to the rest of the room."""
        if is_typing:
            msg = self.uri + " is typing..."
        else:
            msg = self.uri + " has stopped typing"
        self.bot.broadcast_pager(self, msg)
    def on_pager(self, mime_type, body, call=None, buddy=None):
        """Handle an incoming IM: run it as a command, else broadcast it."""
        if not self.bot.handle_cmd(self, None, body):
            msg = self.uri + ": " + body
            self.bot.broadcast_pager(self, msg, mime_type)
    def on_pager_status(self, body, im_id, code, reason, call=None, buddy=None):
        # py2 integer division: any 2xx status yields code/100 == 2.
        self.im_error = (code/100 != 2)
##############################################################################
#
#
# The Bot instance (singleton)
#
#
class Bot(pj.AccountCallback):
def __init__(self):
pj.AccountCallback.__init__(self, None)
self.lib = pj.Lib()
self.acc = None
self.calls = []
self.members = {}
self.cfg = None
def DEBUG(self, msg, level=TRACE):
print msg,
def helpstring(self):
return """
--h[elp] Display this help screen
--j[oin] Join the chat room
--html on|off Set to receive HTML or plain text
Participant commands:
--s[how] Show confbot settings
--leave Leave the chatroom
--l[ist] List all members
Admin commands:
--a[dmin] <CMD> Where <CMD> are:
list List the admins
add <URI> Add URI as admin
del <URI> Remove URI as admin
rr Reregister account to server
call <URI> Make call to the URI and add to voice conf
dc <URI> Disconnect call to URI
hold <URI> Hold call with that URI
update <URI> Send UPDATE to call with that URI
reinvite <URI> Send re-INVITE to call with that URI
"""
def listmembers(self):
msg = ""
for uri, m in self.members.iteritems():
msg = msg + str(m) + "\n"
return msg
def showsettings(self):
ai = self.acc.info()
msg = """
ConfBot status and settings:
URI: %(uri)s
Status: %(pres)s
Reg Status: %(reg_st)d
Reg Reason: %(reg_res)s
""" % {'uri': ai.uri, 'pres': ai.online_text, \
'reg_st': ai.reg_status, 'reg_res': ai.reg_reason}
return msg
def main(self, cfg_file):
try:
cfg = self.cfg = __import__(cfg_file)
self.lib.init(ua_cfg=cfg.ua_cfg, log_cfg=cfg.log_cfg, media_cfg=cfg.media_cfg)
self.lib.set_null_snd_dev()
transport = None
if cfg.udp_cfg:
transport = self.lib.create_transport(pj.TransportType.UDP, cfg.udp_cfg)
if cfg.tcp_cfg:
t = self.lib.create_transport(pj.TransportType.TCP, cfg.tcp_cfg)
if not transport:
transport = t
self.lib.start()
if cfg.acc_cfg:
self.DEBUG("Creating account %(uri)s..\n" % {'uri': cfg.acc_cfg.id}, INFO)
self.acc = self.lib.create_account(cfg.acc_cfg, cb=self)
else:
self.DEBUG("Creating account for %(t)s..\n" % \
{'t': transport.info().description}, INFO)
self.acc = self.lib.create_account_for_transport(transport, cb=self)
self.acc.set_basic_status(True)
# Wait for ENTER before quitting
print "Press q to quit or --help/--h for help"
while True:
input = sys.stdin.readline().strip(" \t\r\n")
if not self.handle_cmd(None, None, input):
if input=="q":
break
self.lib.destroy()
self.lib = None
except pj.Error, e:
print "Exception: " + str(e)
if self.lib:
self.lib.destroy()
self.lib = None
def broadcast_pager(self, exclude_member, body, mime_type="text/plain"):
self.DEBUG("Broadcast: " + body + "\n")
for uri, m in self.members.iteritems():
if m != exclude_member:
m.send_pager(body, mime_type)
def add_to_voice_conf(self, member):
if not member.call:
return
src_ci = member.call.info()
self.DEBUG("bot.add_to_voice_conf\n")
for uri, m in self.members.iteritems():
if m==member:
continue
if not m.call:
continue
dst_ci = m.call.info()
if dst_ci.media_state==pj.MediaState.ACTIVE and dst_ci.conf_slot!=-1:
self.lib.conf_connect(src_ci.conf_slot, dst_ci.conf_slot)
self.lib.conf_connect(dst_ci.conf_slot, src_ci.conf_slot)
def on_member_left(self, member):
if not member.call and not member.buddy:
del self.members[member.uri]
del member
def handle_admin_cmd(self, member, body):
if member and self.cfg.admins and not member.uri in self.cfg.admins:
member.send_pager("You are not admin")
return
args = body.split()
msg = ""
if len(args)==1:
args.append(" ")
if args[1]=="list":
if not self.cfg.admins:
msg = "Everyone is admin!"
else:
msg = str(self.cfg.admins)
elif args[1]=="add":
if len(args)!=3:
msg = "Usage: add <URI>"
else:
self.cfg.admins.append(args[2])
msg = args[2] + " added as admin"
elif args[1]=="del":
if len(args)!=3:
msg = "Usage: del <URI>"
elif args[2] not in self.cfg.admins:
msg = args[2] + " is not admin"
else:
self.cfg.admins.remove(args[2])
msg = args[2] + " has been removed from admins"
elif args[1]=="rr":
msg = "Reregistering.."
self.acc.set_registration(True)
elif args[1]=="call":
if len(args)!=3:
msg = "Usage: call <URI>"
else:
uri = args[2]
try:
call = self.acc.make_call(uri)
except pj.Error, e:
msg = "Error: " + str(e)
call = None
if call:
if not uri in self.members:
m = Member(self, uri)
self.members[m.uri] = m
else:
m = self.members[uri]
msg = "Adding " + m.uri + " to voice conference.."
m.join_call(call)
elif args[1]=="dc" or args[1]=="hold" or args[1]=="update" or args[1]=="reinvite":
if len(args)!=3:
msg = "Usage: " + args[1] + " <URI>"
else:
uri = args[2]
if not uri in self.members:
msg = "Member not found/URI doesn't match (note: case matters!)"
else:
m = self.members[uri]
if m.call:
if args[1]=="dc":
msg = "Disconnecting.."
m.call.hangup(603, "You're disconnected by admin")
elif args[1]=="hold":
msg = "Holding the call"
m.call.hold()
elif args[1]=="update":
msg = "Sending UPDATE"
m.call.update()
elif args[1]=="reinvite":
msg = "Sending re-INVITE"
m.call.reinvite()
else:
msg = "He is not in call"
else:
msg = "Unknown admin command " + body
#print "msg is '%(msg)s'" % {'msg': msg}
if True:
if member:
member.send_pager(msg)
else:
print msg
def handle_cmd(self, member, from_uri, body):
body = body.strip(" \t\r\n")
msg = ""
handled = True
if body=="--l" or body=="--list":
msg = self.listmembers()
if msg=="":
msg = "Nobody is here"
elif body[0:3]=="--s":
msg = self.showsettings()
elif body[0:6]=="--html" and member:
if body[8:11]=="off":
member.html = False
else:
member.html = True
elif body=="--h" or body=="--help":
msg = self.helpstring()
elif body=="--leave":
if not member or not member.buddy:
msg = "You are not in chatroom"
else:
member.buddy.unsubscribe()
elif body[0:3]=="--j":
if not from_uri in self.members:
m = Member(self, from_uri)
self.members[m.uri] = m
self.DEBUG("Adding " + m.uri + " to chatroom\n")
m.join_chat()
else:
m = self.members[from_uri]
self.DEBUG("Adding " + m.uri + " to chatroom\n")
m.join_chat()
elif body[0:3]=="--a":
self.handle_admin_cmd(member, body)
handled = True
else:
handled = False
if msg:
if member:
member.send_pager(msg)
elif from_uri:
self.acc.send_pager(from_uri, msg);
else:
print msg
return handled
def on_incoming_call(self, call):
self.DEBUG("on_incoming_call from %(uri)s\n" % {'uri': call.info().remote_uri}, INFO)
ci = call.info()
if not ci.remote_uri in self.members:
m = Member(self, ci.remote_uri)
self.members[m.uri] = m
m.join_call(call)
else:
m = self.members[ci.remote_uri]
m.join_call(call)
call.answer(200)
def on_incoming_subscribe(self, buddy, from_uri, contact_uri, pres_obj):
self.DEBUG("on_incoming_subscribe from %(uri)s\n" % from_uri, INFO)
return (200, 'OK')
def on_reg_state(self):
ai = self.acc.info()
self.DEBUG("Registration state: %(code)d/%(reason)s\n" % \
{'code': ai.reg_status, 'reason': ai.reg_reason}, INFO)
if ai.reg_status/100==2 and ai.reg_expires > 0:
self.acc.set_basic_status(True)
def on_pager(self, from_uri, contact, mime_type, body):
body = body.strip(" \t\r\n")
if not self.handle_cmd(None, from_uri, body):
self.acc.send_pager(from_uri, "You have not joined the chat room. Type '--join' to join or '--help' for the help")
def on_pager_status(self, to_uri, body, im_id, code, reason):
pass
def on_typing(self, from_uri, contact, is_typing):
pass
##############################################################################
#
#
# main()
#
#
if __name__ == "__main__":
    # Create the bot singleton and run it; settings come from the module
    # named by CFG_FILE (./config.py).
    bot = Bot()
    bot.main(CFG_FILE)
| gpl-2.0 |
kholia/pyrpm | pyrpm/database/lists.py | 2 | 8788 | #
# Copyright (C) 2004, 2005 Red Hat, Inc.
# Author: Thomas Woerner <twoerner@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Library General Public License as published by
# the Free Software Foundation; version 2 only
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
import re, fnmatch
import pyrpm.functions as functions
from pyrpm.base import RPMSENSE_EQUAL
def genBasenames2(oldfilenames):
    """Split each path in oldfilenames and return the parallel lists
    (basenames, dirnames), preserving input order."""
    pairs = [functions.pathsplit2(filename) for filename in oldfilenames]
    basenames = [basename for (dirname, basename) in pairs]
    dirnames = [dirname for (dirname, basename) in pairs]
    return (basenames, dirnames)
class FilenamesList:
    """A mapping from filenames to RpmPackages."""
    def __init__(self):
        self.clear()
    def clear(self):
        """Clear the mapping."""
        self.path = { } # dirname => { basename => RpmPackage }
    def addPkg(self, pkg):
        """Add all files from RpmPackage pkg to self."""
        path = self.path
        # Modern rpm headers store files as (dirnames, dirindexes, basenames);
        # older packages carry a flat "oldfilenames" list instead.
        basenames = pkg["basenames"]
        if basenames != None:
            dirindexes = pkg["dirindexes"]
            dirnames = pkg["dirnames"]
            for dirname in dirnames:
                path.setdefault(dirname, {})
            # Expand the per-file directory index into a parallel list.
            dirnames = [ dirnames[di] for di in dirindexes ]
        else:
            if pkg["oldfilenames"] == None:
                return
            (basenames, dirnames) = genBasenames2(pkg["oldfilenames"])
            for dirname in dirnames:
                path.setdefault(dirname, {})
        for i in xrange(len(basenames)):
            path[dirnames[i]].setdefault(basenames[i], []).append(pkg)
    def removePkg(self, pkg):
        """Remove all files from RpmPackage pkg from self."""
        basenames = pkg["basenames"]
        if basenames != None:
            dirindexes = pkg["dirindexes"]
            dirnames = pkg["dirnames"]
            dirnames = [ dirnames[di] for di in dirindexes ]
        else:
            if pkg["oldfilenames"] == None:
                return
            (basenames, dirnames) = genBasenames2(pkg["oldfilenames"])
        # NOTE: empty per-basename lists (and dir entries) are intentionally
        # left behind; search() still returns [] for them.
        for i in xrange(len(basenames)):
            self.path[dirnames[i]][basenames[i]].remove(pkg)
    def numDuplicates(self, filename):
        # Number of packages providing `filename` (0 if unknown).
        (dirname, basename) = functions.pathsplit2(filename)
        return len(self.path.get(dirname, {}).get(basename, {}))
    def duplicates(self):
        # Map of full filename => list of packages, for files owned by
        # more than one package.
        dups = { }
        for dirname in self.path.keys():
            for filename in self.path[dirname].keys():
                if len(self.path[dirname][filename]) > 1:
                    dups[dirname + filename] = self.path[dirname][filename]
        return dups
    def search(self, name):
        """Return list of packages providing file with name.
        The list may point to internal structures of FilenamesList and may be
        changed by calls to addPkg() and removePkg()."""
        (dirname, basename) = functions.pathsplit2(name)
        return self.path.get(dirname, {}).get(basename, [])
class ProvidesList:
    """A database of Provides:
    Files are represented as (filename, 0, "")."""
    TAG = "provides"
    # TODO: add key, __getitem__, ..
    def __init__(self):
        self.hash = { }
        ProvidesList.clear(self)
        # Expose a dict-like read interface by borrowing bound methods.
        self.__len__ = self.hash.__len__
        self.__getitem__ = self.hash.__getitem__
        self.has_key = self.hash.has_key
        self.keys = self.hash.keys
    def clear(self):
        """Discard all stored data"""
        # %name => [(flag, EVR string, providing RpmPackage)]
        self.hash.clear()
    def addPkg(self, rpm):
        """Add Provides: by RpmPackage rpm. If no self provide is done it will
        be added automatically."""
        for (name, flag, version) in rpm[self.TAG]:
            self.hash.setdefault(name, [ ]).append((flag, version, rpm))
        sver = rpm.getEVR()
        if (rpm["name"], RPMSENSE_EQUAL, sver) not in rpm[self.TAG]:
            self.hash.setdefault(rpm["name"], [ ]).append((RPMSENSE_EQUAL, sver, rpm))
    def removePkg(self, rpm):
        """Remove Provides: by RpmPackage rpm"""
        for (name, flag, version) in rpm[self.TAG]:
            list = self.hash[name]
            list.remove( (flag, version, rpm) )
            if len(list) == 0:
                del self.hash[name]
        # Also drop the implicit self-provide added by addPkg().
        # BUGFIX: this block previously used `name`, the stale loop variable
        # (unbound when rpm[self.TAG] is empty), instead of the package name.
        sname = rpm["name"]
        if not self.hash.has_key(sname):
            return
        list = self.hash[sname]
        sver = rpm.getEVR()
        if (RPMSENSE_EQUAL, sver, rpm) in list:
            list.remove( (RPMSENSE_EQUAL, sver, rpm) )
        if len(list) == 0:
            del self.hash[sname]
    def search(self, name, flag, version):
        """Return a list of RpmPackage's matching the Requires:
        (name, RPMSENSE_* flag, EVR string)."""
        if not self.hash.has_key(name):
            return { }
        evr = functions.evrSplit(version)
        ret = { }
        for (f, v, rpm) in self.hash[name]:
            if rpm in ret:
                continue
            if version == "":
                # Unversioned query matches every provide of this name.
                ret.setdefault(rpm, [ ]).append((name, f, v))
                continue
            if functions.rangeCompare(flag, evr, f, functions.evrSplit(v)):
                ret.setdefault(rpm, [ ]).append((name, f, v))
                continue
            if v == "":
                # Unversioned provide matches any versioned query.
                ret.setdefault(rpm, [ ]).append((name, f, v))
        return ret
    def __iter__(self):
        # Yields flat (name, flag, version, rpm) tuples.
        for name, l in self.hash.iteritems():
            for entry in l:
                yield (name, ) + entry
class ConflictsList(ProvidesList):
    """A database of Conflicts:"""
    TAG = "conflicts"
    def addPkg(self, rpm):
        """Add Conflicts: entries of RpmPackage rpm.

        Unlike ProvidesList.addPkg, no implicit self entry is added, and
        entries may carry extra fields beyond (flag, version)."""
        for entry in rpm[self.TAG]:
            name = entry[0]
            self.hash.setdefault(name, [ ]).append( entry[1:] + (rpm,) )
    def removePkg(self, rpm):
        """Remove Conflicts: entries of RpmPackage rpm."""
        for entry in rpm[self.TAG]:
            name = entry[0]
            list = self.hash[name]
            list.remove( entry[1:] + (rpm,) )
            if len(list) == 0:
                del self.hash[name]
    def search(self, name, flag, version):
        # s/Conflicts/Obsoletes/ in ObsoletesList
        """Return a list of RpmPackage's with Conflicts: matching
        (name, RPMSENSE_* flag, EVR string)."""
        if not self.hash.has_key(name):
            return { }
        evr = functions.evrSplit(version)
        ret = { }
        for entry in self.hash[name]:
            f, v = entry[:2]
            rpm = entry[-1]
            if rpm in ret:
                continue
            if version == "":
                ret.setdefault(rpm, [ ]).append( (name,) + entry[:-1] )
                continue
            if functions.rangeCompare(flag, evr, f, functions.evrSplit(v)):
                ret.setdefault(rpm, [ ]).append( (name,) + entry[:-1] )
                continue
            # NOTE(review): unlike ProvidesList.search, an entry with an
            # empty version does NOT match a versioned query here -- confirm
            # this asymmetry is intended.
        return ret
class RequiresList(ConflictsList):
    """A database of Requires:"""
    # Same entry layout and matching logic as ConflictsList; only the
    # package header tag differs.
    TAG = "requires"
class ObsoletesList(ConflictsList):
    """A database of Obsoletes:"""
    # Same entry layout and matching logic as ConflictsList; only the
    # package header tag differs.
    TAG = "obsoletes"
class TriggersList(ConflictsList):
    """A database of Triggers:"""
    # Same entry layout and matching logic as ConflictsList; only the
    # package header tag differs.
    TAG = "triggers"
class NevraList:
    """Index from every NEVRA-style name variant of a package to the
    packages carrying it, with glob-pattern search support."""
    def __init__(self):
        self.hash = { }   # name string => [RpmPackage]
    def clear(self):
        self.hash.clear()
    def addPkg(self, pkg):
        """Index pkg under all of its name variants."""
        for name in pkg.getAllNames():
            self.hash.setdefault(name, []).append(pkg)
    def removePkg(self, pkg):
        """Drop pkg from every name bucket, pruning empty buckets."""
        for name in pkg.getAllNames():
            self.hash[name].remove(pkg)
            if not self.hash[name]:
                del self.hash[name]
    # Matches any string containing fnmatch/glob metacharacters.
    _fnmatchre = re.compile(".*[\*\[\]\{\}\?].*")
    def search(self, pkgnames):
        """Return packages matching any of pkgnames (exact name or glob
        pattern), deduplicated while preserving order."""
        result = []
        hash = self.hash  # NOTE: local alias shadows the builtin `hash`
        for pkgname in pkgnames:
            if hash.has_key(pkgname):
                result.extend(hash[pkgname])
            if self._fnmatchre.match(pkgname):
                # Treat the name as a glob and scan all indexed names.
                restring = fnmatch.translate(pkgname)
                regex = re.compile(restring)
                for item in hash.keys():
                    if regex.match(item):
                        result.extend(hash[item])
        functions.normalizeList(result)
        return result
# vim:ts=4:sw=4:showmatch:expandtab
| gpl-2.0 |
fullfanta/mxnet | tools/accnn/acc_fc.py | 52 | 2665 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import numpy as np
from scipy import linalg as LA
import mxnet as mx
import argparse
import utils
import pdb
def fc_decomposition(model, args):
    """Replace fully-connected layer `args.layer` with a rank-`args.K`
    factorization.

    Truncated SVD gives W (m x n) ~= P (m x K) . Q (K x n), so the original
    layer becomes two stacked FC layers: '<layer>_red' with weights Q (no
    bias) followed by '<layer>_rec' with weights P and the original bias.
    Returns the rewritten model (the input model is not modified).
    """
    W = model.arg_params[args.layer+'_weight'].asnumpy()
    b = model.arg_params[args.layer+'_bias'].asnumpy()
    W = W.reshape((W.shape[0],-1))
    b = b.reshape((b.shape[0],-1))
    u, s, v = LA.svd(W, full_matrices=False)
    s = np.diag(s)
    # (removed: an unused full reconstruction u.dot(s.dot(v)) was computed
    # here for no effect)
    rk = args.K
    P = u[:,:rk]
    Q = s[:rk,:rk].dot(v[:rk,:])
    name1 = args.layer + '_red'
    name2 = args.layer + '_rec'
    def sym_handle(data, node):
        # Build the replacement subgraph: reduce (Q), then reconstruct (P).
        W1, W2 = Q, P
        sym1 = mx.symbol.FullyConnected(data=data, num_hidden=W1.shape[0], no_bias=True, name=name1)
        sym2 = mx.symbol.FullyConnected(data=sym1, num_hidden=W2.shape[0], no_bias=False, name=name2)
        return sym2
    def arg_handle(arg_shape_dic, arg_params):
        # Populate the new layers' parameters with the SVD factors.
        W1, W2 = Q, P
        W1 = W1.reshape(arg_shape_dic[name1+'_weight'])
        weight1 = mx.ndarray.array(W1)
        W2 = W2.reshape(arg_shape_dic[name2+'_weight'])
        b2 = b.reshape(arg_shape_dic[name2+'_bias'])
        weight2 = mx.ndarray.array(W2)
        bias2 = mx.ndarray.array(b2)
        arg_params[name1 + '_weight'] = weight1
        arg_params[name2 + '_weight'] = weight2
        arg_params[name2 + '_bias'] = bias2
    new_model = utils.replace_conv_layer(args.layer, model, sym_handle, arg_handle)
    return new_model
def main():
    """Load the model named by the module-level CLI `args`, factorize the
    requested FC layer, and save the result under args.save_model."""
    model = utils.load_model(args)
    new_model = fc_decomposition(model, args)
    new_model.save(args.save_model)
if __name__ == '__main__':
    parser=argparse.ArgumentParser()
    parser.add_argument('-m', '--model', help='the model to speed up')
    parser.add_argument('-g', '--gpus', default='0', help='the gpus to be used in ctx')
    parser.add_argument('--load-epoch',type=int,default=1)
    parser.add_argument('--layer')        # name of the FC layer to factorize
    parser.add_argument('--K', type=int)  # rank kept by the truncated SVD
    parser.add_argument('--save-model')   # output checkpoint prefix
    # NOTE: `args` is read as a module-level global by main() and
    # fc_decomposition(), so it must be assigned before main() runs.
    args = parser.parse_args()
    main()
| apache-2.0 |
EthanHeilman/bitcoin | test/util/bitcoin-util-test.py | 59 | 6594 | #!/usr/bin/env python3
# Copyright 2014 BitPay Inc.
# Copyright 2016-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test framework for bitcoin utils.
Runs automatically during `make check`.
Can also be run manually."""
import argparse
import binascii
import configparser
import difflib
import json
import logging
import os
import pprint
import subprocess
import sys
def main():
    """Parse CLI flags, configure logging, and run the test suite described
    by test/util/data/bitcoin-util-test.json."""
    config = configparser.ConfigParser()
    config.optionxform = str
    # BUGFIX: the config file handle was opened inline and never closed
    # (leaked, triggering ResourceWarning); use a context manager instead.
    with open(os.path.join(os.path.dirname(__file__), "../config.ini"), encoding="utf8") as config_file:
        config.read_file(config_file)
    env_conf = dict(config.items('environment'))

    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-v', '--verbose', action='store_true')
    args = parser.parse_args()
    verbose = args.verbose

    if verbose:
        level = logging.DEBUG
    else:
        level = logging.ERROR
    formatter = '%(asctime)s - %(levelname)s - %(message)s'
    # Add the format/level to the logger
    logging.basicConfig(format=formatter, level=level)

    bctester(os.path.join(env_conf["SRCDIR"], "test", "util", "data"), "bitcoin-util-test.json", env_conf)
def bctester(testDir, input_basename, buildenv):
    """Loads and parses the input file, runs all tests and reports results.

    Exits the process with status 1 if any test case failed, 0 otherwise.
    """
    input_filename = os.path.join(testDir, input_basename)
    # BUGFIX: close the input file instead of leaking the handle.
    with open(input_filename, encoding="utf8") as f:
        raw_data = f.read()
    input_data = json.loads(raw_data)

    failed_testcases = []

    for testObj in input_data:
        try:
            bctest(testDir, testObj, buildenv)
            logging.info("PASSED: " + testObj["description"])
        except Exception:
            # BUGFIX: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; those now propagate.
            logging.info("FAILED: " + testObj["description"])
            failed_testcases.append(testObj["description"])

    if failed_testcases:
        error_message = "FAILED_TESTCASES:\n"
        error_message += pprint.pformat(failed_testcases, width=400)
        logging.error(error_message)
        sys.exit(1)
    else:
        sys.exit(0)
def bctest(testDir, testObj, buildenv):
    """Runs a single test, comparing output and RC to expected output and RC.

    Raises an error if input can't be read, executable fails, or output/RC
    are not as expected. Error is caught by bctester() and reported.
    """
    # Get the exec names and arguments
    execprog = os.path.join(buildenv["BUILDDIR"], "src", testObj["exec"] + buildenv["EXEEXT"])
    execargs = testObj['args']
    execrun = [execprog] + execargs

    # Read the input data (if there is any)
    stdinCfg = None
    inputData = None
    if "input" in testObj:
        filename = os.path.join(testDir, testObj["input"])
        # BUGFIX: close the input file instead of leaking the handle.
        with open(filename, encoding="utf8") as f:
            inputData = f.read()
        stdinCfg = subprocess.PIPE

    # Read the expected output data (if there is any)
    outputFn = None
    outputData = None
    outputType = None
    if "output_cmp" in testObj:
        outputFn = testObj['output_cmp']
        outputType = os.path.splitext(outputFn)[1][1:]  # output type from file extension (determines how to compare)
        try:
            # BUGFIX: close the expected-output file instead of leaking it.
            with open(os.path.join(testDir, outputFn), encoding="utf8") as f:
                outputData = f.read()
        except Exception:
            # Was a bare `except:`; narrowed so Ctrl-C still interrupts.
            logging.error("Output file " + outputFn + " can not be opened")
            raise
        if not outputData:
            logging.error("Output data missing for " + outputFn)
            raise Exception
        if not outputType:
            logging.error("Output file %s does not have a file extension" % outputFn)
            raise Exception

    # Run the test
    proc = subprocess.Popen(execrun, stdin=stdinCfg, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True)
    try:
        outs = proc.communicate(input=inputData)
    except OSError:
        logging.error("OSError, Failed to execute " + execprog)
        raise

    if outputData:
        data_mismatch, formatting_mismatch = False, False
        # Parse command output and expected output
        try:
            a_parsed = parse_output(outs[0], outputType)
        except Exception as e:
            logging.error('Error parsing command output as %s: %s' % (outputType, e))
            raise
        try:
            b_parsed = parse_output(outputData, outputType)
        except Exception as e:
            logging.error('Error parsing expected output %s as %s: %s' % (outputFn, outputType, e))
            raise
        # Compare data
        if a_parsed != b_parsed:
            logging.error("Output data mismatch for " + outputFn + " (format " + outputType + ")")
            data_mismatch = True
        # Compare formatting
        if outs[0] != outputData:
            error_message = "Output formatting mismatch for " + outputFn + ":\n"
            error_message += "".join(difflib.context_diff(outputData.splitlines(True),
                                                          outs[0].splitlines(True),
                                                          fromfile=outputFn,
                                                          tofile="returned"))
            logging.error(error_message)
            formatting_mismatch = True

        assert not data_mismatch and not formatting_mismatch

    # Compare the return code to the expected return code
    wantRC = 0
    if "return_code" in testObj:
        wantRC = testObj['return_code']
    if proc.returncode != wantRC:
        logging.error("Return code mismatch for " + outputFn)
        raise Exception

    if "error_txt" in testObj:
        want_error = testObj["error_txt"]
        # Compare error text
        # TODO: ideally, we'd compare the strings exactly and also assert
        # That stderr is empty if no errors are expected. However, bitcoin-tx
        # emits DISPLAY errors when running as a windows application on
        # linux through wine. Just assert that the expected error text appears
        # somewhere in stderr.
        if want_error not in outs[1]:
            logging.error("Error mismatch:\n" + "Expected: " + want_error + "\nReceived: " + outs[1].rstrip())
            raise Exception
def parse_output(a, fmt):
    """Convert raw command output into a canonical, comparable value.

    'json' output is decoded into Python objects (structural comparison);
    'hex' output is decoded into raw bytes (binary comparison).
    Any other format raises NotImplementedError.
    """
    decoders = {
        'json': json.loads,
        'hex': lambda text: binascii.a2b_hex(text.strip()),
    }
    try:
        decode = decoders[fmt]
    except KeyError:
        raise NotImplementedError("Don't know how to compare %s" % fmt)
    return decode(a)
# Script entry point: run the full test driver when executed directly.
if __name__ == '__main__':
    main()
| mit |
nbproject/nbproject | apps/base/auth.py | 1 | 11960 | """
utils_auth.py - Authentication and per-user rights-check routines
License
Copyright (c) 2010-2012 Massachusetts Institute of Technology.
MIT License (cf. MIT-LICENSE.txt or http://www.opensource.org/licenses/mit-license.php)
$ Id: $
"""
from . import models as M
import random, string
def confirmInvite(id):
    """Accept the invite with key *id*, creating its membership if needed.

    Returns the Invite record, or None when no invite matches the key.
    """
    matches = M.Invite.objects.filter(key=id)
    if not len(matches):
        return None
    invite = matches[0]
    existing = M.Membership.objects.filter(
        user=invite.user_id, ensemble=invite.ensemble_id, deleted=False)
    if not len(existing):
        # First confirmation of this invite: materialize the membership.
        membership = M.Membership()
        membership.user = invite.user
        membership.ensemble = invite.ensemble
        membership.admin = invite.admin
        membership.section = invite.section
        membership.save()
    return invite

def invite2uid(id):
    """Return the id of the user an invite key belongs to, or None."""
    matches = M.Invite.objects.filter(key=id)
    return matches[0].user.id if len(matches) else None
def canReadFile(uid, id_source, req=None):
    """True if *uid* may read source *id_source* (member or allowed guest)."""
    try:
        id_source = int(id_source)
    except ValueError:
        return False
    ownerships = M.Ownership.objects.filter(source__id=id_source, deleted=False)
    memberships = M.Membership.objects.filter(
        ensemble__in=M.Ensemble.objects.filter(ownership__in=ownerships)
    ).filter(user__id=uid, deleted=False, guest=False)
    # Guest check runs second: it may create a guest membership as a side effect.
    return len(memberships) > 0 or canGuestReadFile(uid, id_source, req)

def canDownloadFileComments(uid, id_source):
    """True if *uid* administers the ensemble that contains *id_source*."""
    try:
        id_source = int(id_source)
    except ValueError:
        return False
    ensembles = M.Ensemble.objects.filter(
        ownership__in=M.Ownership.objects.filter(source__id=id_source))
    memberships = M.Membership.objects.filter(
        ensemble__in=ensembles).filter(user__id=uid, deleted=False)
    return len(memberships) > 0 and memberships[0].admin

def canDownloadPDF(uid, id_source):
    """True if *uid* may download the PDF: admin, member of a
    download-enabled ensemble, or guest when guests are allowed."""
    try:
        id_source = int(id_source)
    except ValueError:
        return False
    ensembles = M.Ensemble.objects.filter(
        ownership__in=M.Ownership.objects.filter(source__id=id_source))
    memberships = M.Membership.objects.filter(
        ensemble__in=ensembles).filter(user__id=uid, deleted=False)
    return (len(memberships) > 0
            and (memberships[0].admin or memberships[0].ensemble.allow_download)
            ) or canGuestDownloadPDF(id_source)
def canGuestReadFile(uid, id_source, req=None):
    """Return whether guests may read *id_source*; as a side effect,
    register *uid* as a guest member of the owning ensemble if allowed
    and not already a member.

    req is the current HTTP request (optional); its "pgid" cookie, when
    present, is used to put the guest back into a previously assigned section.
    """
    o = M.Ownership.objects.get(source__id=id_source)
    e = M.Ensemble.objects.get(pk=o.ensemble_id)
    if o.ensemble.allow_guest and len(M.Membership.objects.filter(user__id=uid, ensemble=e, deleted=False))==0:
        # Add a membership for the guest user:
        m = M.Membership()
        m.user_id = uid
        m.ensemble_id = e.id
        m.guest = True
        if e.section_assignment == M.Ensemble.SECTION_ASSGT_RAND:
            # Assign the guest to a random section if there are sections,
            # unless a "pgid" cookie points at an existing section assignment.
            sections = M.Section.objects.filter(ensemble=e)
            if sections:
                if req is not None and "pgid" in req.COOKIES:
                    # Reuse the section a previous guest id was assigned to.
                    prev_sections = M.Section.objects.filter(membership__user__id=int(req.COOKIES.get("pgid")), membership__ensemble__id=e.id)
                    if len(prev_sections):
                        m.section = prev_sections[0]
                if m.section is None:
                    m.section = random.choice(sections)
        m.save()
    return o.ensemble.allow_guest
def canGuestDownloadPDF(id_source):
    """True when the owning ensemble allows both guest access and downloads."""
    ensemble = M.Ownership.objects.get(source__id=id_source).ensemble
    return ensemble.allow_guest and ensemble.allow_download

def getGuest(ckey=None):
    """Return the user for confirmation key *ckey*, or a fresh guest."""
    if ckey is None:
        return createGuest()
    try:
        return M.User.objects.get(confkey=ckey)
    except M.User.DoesNotExist:
        return createGuest()

def getCkeyInfo(ckey):
    """Look up a user by confirmation key; returns None when unknown.

    A first successful lookup of a non-guest account marks it valid.
    """
    if ckey is None:
        return None
    try:
        user = M.User.objects.get(confkey=ckey)
    except M.User.DoesNotExist:
        return None
    if user.valid is False and user.guest is False:
        # First login of a regular (non-guest) account: flag it as valid.
        user.valid = True
        user.save()
    return user
def canAnnotate(uid, eid):
    """Members may annotate; in guest-enabled ensembles any registered
    (non-guest) user may as well."""
    if len(M.Membership.objects.filter(ensemble__id=eid, user__id=uid, deleted=False)) > 0:
        return True
    ensemble = M.Ensemble.objects.get(pk=eid)
    if not ensemble.allow_guest:
        return False
    # Guest-enabled ensemble: allow any registered (non-guest) user.
    return not M.User.objects.get(pk=uid).guest

def canImportAnnotation(uid, from_id_source, to_id_source):
    """Admin of the FROM ensemble AND annotator in the TO ensemble."""
    from_ensemble = M.Ownership.objects.get(source__id=from_id_source).ensemble
    to_ensemble = M.Ownership.objects.get(source__id=to_id_source).ensemble
    admin_memberships = M.Membership.objects.filter(
        ensemble__id=from_ensemble.pk, user__id=uid, deleted=False, admin=True)
    if not len(admin_memberships):
        return False
    return canAnnotate(uid, to_ensemble.pk)

def canAdministrateLocation(uid, id_location):
    """True if *uid* administers the ensemble containing *id_location*."""
    ensemble = M.Location.objects.get(pk=id_location).ensemble
    admins = M.Membership.objects.filter(
        ensemble__id=ensemble.pk, user__id=uid, deleted=False, admin=True)
    return (len(admins) != 0)
def addUser(email, password, conf, valid=0, guest=0):
    """Create and persist a User; guest accounts also get a GuestHistory row.

    Returns the saved User record.
    """
    user = M.User()
    user.email = email
    user.password = None  # cleared before hashing via set_password below
    user.set_password(password)
    user.confkey = conf
    user.valid = valid
    user.guest = guest
    user.save()
    if user.guest:
        M.GuestHistory(user=user).save()
    return user

def addInvite(key, id_user, id_ensemble, id_section, admin):
    """Persist an Invite; the literal string 'None' means no section."""
    kwargs = dict(key=key, user_id=id_user, ensemble_id=id_ensemble, admin=admin)
    if id_section != 'None':
        kwargs['section_id'] = id_section
    M.Invite(**kwargs).save()
# Cryptographically secure generator for guest credentials; the module-level
# random functions are predictable and unsuitable for keys/passwords.
_sysrand = random.SystemRandom()

def createGuest():
    """Create a throwaway guest account with a random confkey and password.

    Returns the newly saved User record (valid=0, guest=1).
    """
    alphabet = string.ascii_letters + string.digits
    key = "".join(_sysrand.choice(alphabet) for _ in range(20))
    email = "guest_%s@nb.test" % (key,)
    passwd = "".join(_sysrand.choice(alphabet) for _ in range(4))
    return addUser(email, passwd, key, 0, 1)

def getGuestCkey():
    """Convenience wrapper: create a guest and return its confirmation key."""
    return createGuest().confkey

def user_from_email(email):
    """Return the unique User with this email, or None if absent/ambiguous."""
    users = M.User.objects.filter(email=email)
    return users[0] if len(users) == 1 else None
def checkUser(email, password):
    """Authenticate by email/password; returns the User or None.

    Lookup is case-sensitive first, falling back to a case-insensitive
    match when the exact address does not resolve to a single account.
    """
    normalized = email.strip()
    candidates = M.User.objects.filter(email=normalized, valid=1, guest=0)
    if len(candidates) != 1:
        # Backup plan: case-insensitive match on the address.
        candidates = M.User.objects.filter(email__iexact=normalized, valid=1, guest=0)
        if len(candidates) != 1:
            return None
    user = candidates[0]
    return user if user.authenticate(password) else None
def canAddFolder(uid, id_ensemble, id_parent=None):
    """Folder creation follows the same rules as file insertion."""
    return canInsertFile(uid, id_ensemble, id_parent)

def canInsertFile(uid, eid, id_folder=None):
    """Admins may insert; a target folder must belong to the same ensemble."""
    membership = M.Membership.objects.get(ensemble__id=eid, user__id=uid, deleted=False)
    if id_folder is None:
        return membership.admin
    folder = M.Folder.objects.get(pk=id_folder)
    return folder.ensemble_id == int(eid) and membership.admin

def canRenameFile(uid, id):
    """True if *uid* administers the ensemble that owns file *id*."""
    ensembles = M.Ensemble.objects.filter(
        ownership__in=M.Ownership.objects.filter(source__id=id))
    memberships = M.Membership.objects.filter(
        user__id=uid, ensemble__in=ensembles, deleted=False)
    return memberships.count() > 0 and memberships[0].admin

def canRenameFolder(uid, id):
    """True if *uid* administers the ensemble that owns folder *id*."""
    ensemble = M.Folder.objects.get(pk=id).ensemble
    memberships = M.Membership.objects.filter(user__id=uid, ensemble=ensemble, deleted=False)
    return memberships.count() > 0 and memberships[0].admin

def canEditAssignment(uid, id):
    """Same rule as renaming the underlying file."""
    return canRenameFile(uid, id)

def canDeleteFile(uid, id):
    """Same rule as renaming the file."""
    return canRenameFile(uid, id)

def canDeleteFolder(uid, id):
    """Admin of the owning ensemble, and the folder must be empty.

    Empty means: no non-deleted files and no child folders.
    """
    ensemble = M.Folder.objects.get(pk=id).ensemble
    memberships = M.Membership.objects.filter(user__id=uid, ensemble=ensemble, deleted=False)
    live_files = M.Ownership.objects.filter(deleted=False, folder__id=id)
    children = M.Folder.objects.filter(parent__id=id)
    return (memberships.count() > 0 and memberships[0].admin
            and live_files.count() == 0 and children.count() == 0)
def canMoveFile(uid, id, id_dest=None):
    """Moving a file needs the same rights as renaming it."""
    return canRenameFile(uid, id)

def __isDirOrParent(id_a, id_b):
    """True when folder a equals folder b or is one of b's ancestors."""
    node = M.Folder.objects.get(pk=id_b)
    while node.parent_id is not None:
        if node.id == id_a:
            return True
        node = node.parent
    return id_a == node.id

def canMoveFolder(uid, id, id_dest):
    """Admin of the owning ensemble; destination must not be *id* itself
    or a subfolder of it."""
    ensemble = M.Folder.objects.get(pk=id).ensemble
    memberships = M.Membership.objects.filter(user__id=uid, ensemble=ensemble, deleted=False)
    return (memberships.count() > 0 and memberships[0].admin
            and not __isDirOrParent(id_dest, id))

def canUpdateFile(uid, id):
    """Same rule as renaming the file."""
    return canRenameFile(uid, id)

def canSendInvite(uid, eid):
    """True if *uid* is an admin member of ensemble *eid*."""
    memberships = M.Membership.objects.filter(user__id=uid, ensemble__id=eid, deleted=False)
    return memberships.count() > 0 and memberships[0].admin

def canEditEnsemble(uid, eid):
    """Editing an ensemble requires the same admin rights as inviting."""
    return canSendInvite(uid, eid)

def canSeeGrades(uid, eid):
    """Grades are visible to ensemble admins only."""
    return canSendInvite(uid, eid)

def canGetSectionsInfo(uid, eid):
    """Section information is restricted to ensemble admins."""
    return canSendInvite(uid, eid)

def canGetMembers(uid, eid):
    """Member listings are open to everyone."""
    return True
def canGrade(uid, id_source, id_student):
    """Grader must administer the ensemble owning the file, and the
    student must be a member of that ensemble."""
    ensembles = M.Ensemble.objects.filter(
        ownership__in=M.Ownership.objects.filter(source__id=id_source))
    grader = M.Membership.objects.filter(user__id=uid, ensemble__in=ensembles, deleted=False)
    student = M.Membership.objects.filter(user__id=id_student, ensemble__in=ensembles, deleted=False)
    return grader.count() > 0 and grader[0].admin and student.count() > 0

def isMember(user_id, ensemble_id):
    """True if the user has a live (non-deleted) membership in the ensemble."""
    return M.Membership.objects.filter(
        user__id=user_id, ensemble__id=ensemble_id, deleted=False).count() != 0

def canEdit(uid, id_ann):
    """Author may edit a comment only while no live replies depend on it."""
    comment = M.Comment.objects.get(pk=id_ann)
    return (comment.author_id == uid
            and M.Comment.objects.filter(parent=comment, deleted=False).count() == 0)

def canDelete(uid, id_ann):
    """Deletable by the author (when still editable) or an ensemble admin."""
    return canEdit(uid, id_ann) or canLabelComment(uid, id_ann)

def canDeleteThread(uid, id_location):
    """True for admins of the ensemble containing the location."""
    admins = M.Membership.objects.filter(
        ensemble__location__id=id_location, user__id=uid, deleted=False, admin=True)
    return admins.count() > 0

def canLabelComment(uid, cid):
    """True for admins of the ensemble containing comment *cid*."""
    admins = M.Membership.objects.filter(
        ensemble__location__comment__id=cid, user__id=uid, deleted=False, admin=True)
    return admins.count() > 0

def canPauseComment(uid, id_source):
    """Same rule as downloading the file's comments (ensemble admin)."""
    return canDownloadFileComments(uid, id_source)

def canMarkThread(uid, id_location):
    """User must be able to read the thread's root comment at this location."""
    location = M.Location.objects.get(pk=id_location)
    root = M.Comment.objects.get(parent=None, location=location)
    if root.author_id == uid:
        return True
    memberships = M.Membership.objects.filter(
        ensemble=location.ensemble, user__id=uid, deleted=False)
    if not memberships.count():
        return False
    # Non-authors: plain members need root.type > 2; admins need only > 1.
    return root.type > 2 or (memberships[0].admin and root.type > 1)
def log_guest_login(ckey, id_user):
    """Best-effort audit record linking guest account *ckey* to the real
    user *id_user* who logged in.

    Failures (unknown guest key, database errors) are deliberately ignored:
    login must never be blocked by audit logging.
    """
    try:
        guest = M.User.objects.get(confkey=ckey)
        glh = M.GuestLoginHistory(user_id=id_user, guest=guest)
        glh.save()
    except Exception:
        # Narrowed from a bare 'except:', which would also swallow
        # SystemExit/KeyboardInterrupt; behavior is otherwise unchanged.
        pass
| mit |
davedoesdev/dxf | test/module_example.py | 1 | 1080 | #!/usr/bin/env python
# Requires DOCKER_REG_USERNAME, DOCKER_REG_PASSWORD and DOCKER_REG_REPO env vars
# Defaults to using the Docker Hub unless you specify DOCKER_REG_HOST env var
# If using the Docker Hub, create $DOCKER_REG_REPO first
# pylint: disable=wrong-import-position,superfluous-parens
# pylint: disable=redefined-outer-name
import os
from os import path
import sys
sys.path.append(path.abspath(path.join(path.dirname(__file__), '..')))
os.chdir('/tmp')
from dxf import DXF
def auth(dxf, response):
    """Authentication callback: log in with credentials from the environment."""
    username = os.environ['DOCKER_REG_USERNAME']
    password = os.environ['DOCKER_REG_PASSWORD']
    dxf.authenticate(username, password, response=response)
# Connect to the registry (Docker Hub by default) using the auth callback above.
dxf = DXF(os.environ.get('DOCKER_REG_HOST', 'registry-1.docker.io'),
          os.environ['DOCKER_REG_REPO'],
          auth)
# Round-trip a small blob: push it, alias it, then pull it back and verify.
with open('logger.dat', 'wb') as f:
    f.write(b'2015-05 11\n')
dgst = dxf.push_blob('logger.dat')
dxf.set_alias('may15-readings', dgst)
assert dxf.get_alias('may15-readings') == [dgst]
s = b''
for chunk in dxf.pull_blob(dgst):
    s += chunk
assert s == b'2015-05 11\n'
print(s)
| mit |
ChrisPappalardo/mysql-connector-python | tests/test_mysql_datatypes.py | 9 | 13839 | # MySQL Connector/Python - MySQL driver written in Python.
# Copyright (c) 2009, 2014, Oracle and/or its affiliates. All rights reserved.
# MySQL Connector/Python is licensed under the terms of the GPLv2
# <http://www.gnu.org/licenses/old-licenses/gpl-2.0.html>, like most
# MySQL Connectors. There are special exceptions to the terms and
# conditions of the GPLv2 as it is applied to this software, see the
# FOSS License Exception
# <http://www.mysql.com/about/legal/licensing/foss-exception.html>.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""Unittests for MySQL data types
"""
from decimal import Decimal
import time
import datetime
from mysql.connector import connection, errors
import tests
from tests import foreach_cnx, cnx_config
try:
from mysql.connector.connection_cext import CMySQLConnection
except ImportError:
# Test without C Extension
CMySQLConnection = None
def _get_insert_stmt(tbl, cols):
insert = "INSERT INTO {table} ({columns}) values ({values})".format(
table=tbl,
columns=','.join(cols),
values=','.join(['%s'] * len(cols))
)
return insert
def _get_select_stmt(tbl, cols):
select = "SELECT {columns} FROM {table} ORDER BY id".format(
columns=','.join(cols),
table=tbl
)
return select
class TestsDataTypes(tests.MySQLConnectorTests):
    """Shared fixtures for the data-type tests: table names and helpers."""

    # Logical type name -> table used by the corresponding test case.
    tables = {
        'bit': 'myconnpy_mysql_bit',
        'int': 'myconnpy_mysql_int',
        'bool': 'myconnpy_mysql_bool',
        'float': 'myconnpy_mysql_float',
        'decimal': 'myconnpy_mysql_decimal',
        'temporal': 'myconnpy_mysql_temporal',
        'temporal_year': 'myconnpy_mysql_temporal_year',
        'set': 'myconnpy_mysql_set',
    }
    def compare(self, name, val1, val2):
        # Equality assertion with the column name embedded in the failure message.
        self.assertEqual(val1, val2, "%s %s != %s" % (name, val1, val2))
    def drop_tables(self, cnx):
        # Drop every helper table in one statement; IF EXISTS keeps it idempotent.
        cur = cnx.cursor()
        table_names = self.tables.values()
        cur.execute("DROP TABLE IF EXISTS {tables}".format(
            tables=','.join(table_names))
        )
        cur.close()
class TestsCursor(TestsDataTypes):
    """Round-trip tests for MySQL data types.

    Each test creates its own table, inserts boundary values through a
    cursor, reads them back, and compares with the original Python values.
    """

    def setUp(self):
        # Each test drops and recreates its own table up front, so there is
        # no shared fixture to prepare.
        pass

    def tearDown(self):
        pass

    @foreach_cnx()
    def test_numeric_int(self):
        """Boundary values for all signed/unsigned integer column types."""
        tbl = self.tables['int']
        self.cnx.cmd_query("DROP TABLE IF EXISTS {0}".format(tbl))
        cur = self.cnx.cursor()
        columns = [
            'tinyint_signed',
            'tinyint_unsigned',
            'bool_signed',
            'smallint_signed',
            'smallint_unsigned',
            'mediumint_signed',
            'mediumint_unsigned',
            'int_signed',
            'int_unsigned',
            'bigint_signed',
            'bigint_unsigned',
        ]
        cur.execute((
            "CREATE TABLE {table} ("
            "`id` TINYINT UNSIGNED NOT NULL AUTO_INCREMENT,"
            "`tinyint_signed` TINYINT SIGNED,"
            "`tinyint_unsigned` TINYINT UNSIGNED,"
            "`bool_signed` BOOL,"
            "`smallint_signed` SMALLINT SIGNED,"
            "`smallint_unsigned` SMALLINT UNSIGNED,"
            "`mediumint_signed` MEDIUMINT SIGNED,"
            "`mediumint_unsigned` MEDIUMINT UNSIGNED,"
            "`int_signed` INT SIGNED,"
            "`int_unsigned` INT UNSIGNED,"
            "`bigint_signed` BIGINT SIGNED,"
            "`bigint_unsigned` BIGINT UNSIGNED,"
            "PRIMARY KEY (id))"
        ).format(table=tbl)
        )
        # Row 1 holds every type's minimum, row 2 every type's maximum.
        data = [
            (
                -128,                  # tinyint signed
                0,                     # tinyint unsigned
                0,                     # boolean
                -32768,                # smallint signed
                0,                     # smallint unsigned
                -8388608,              # mediumint signed
                0,                     # mediumint unsigned
                -2147483648,           # int signed
                0,                     # int unsigned
                -9223372036854775808,  # big signed
                0,                     # big unsigned
            ),
            (
                127,                   # tinyint signed
                255,                   # tinyint unsigned
                127,                   # boolean
                32767,                 # smallint signed
                65535,                 # smallint unsigned
                8388607,               # mediumint signed
                16777215,              # mediumint unsigned
                2147483647,            # int signed
                4294967295,            # int unsigned
                9223372036854775807,   # big signed
                18446744073709551615,  # big unsigned
            )
        ]
        insert = _get_insert_stmt(tbl, columns)
        select = _get_select_stmt(tbl, columns)
        cur.executemany(insert, data)
        cur.execute(select)
        rows = cur.fetchall()
        for i, col in enumerate(columns):
            self.compare(col, data[0][i], rows[0][i])
            self.compare(col, data[1][i], rows[1][i])
        cur.close()

    @foreach_cnx()
    def test_numeric_bit(self):
        """BIT(n) columns for widths 8..64 with all-zero and all-one patterns."""
        tbl = self.tables['bit']
        self.cnx.cmd_query("DROP TABLE IF EXISTS {0}".format(tbl))
        cur = self.cnx.cursor()
        columns = [
            'c8', 'c16', 'c24', 'c32',
            'c40', 'c48', 'c56', 'c63',
            'c64']
        cur.execute((
            "CREATE TABLE {table} ("
            "`id` int NOT NULL AUTO_INCREMENT,"
            "`c8` bit(8) DEFAULT NULL,"
            "`c16` bit(16) DEFAULT NULL,"
            "`c24` bit(24) DEFAULT NULL,"
            "`c32` bit(32) DEFAULT NULL,"
            "`c40` bit(40) DEFAULT NULL,"
            "`c48` bit(48) DEFAULT NULL,"
            "`c56` bit(56) DEFAULT NULL,"
            "`c63` bit(63) DEFAULT NULL,"
            "`c64` bit(64) DEFAULT NULL,"
            "PRIMARY KEY (id))"
        ).format(table=tbl)
        )
        insert = _get_insert_stmt(tbl, columns)
        select = _get_select_stmt(tbl, columns)
        data = list()
        data.append(tuple([0] * len(columns)))
        # Two identical all-ones rows; the bit width is encoded in the
        # column name ('c8' -> 8 bits, ...).
        values = list()
        for col in columns:
            values.append((1 << int(col.replace('c', ''))) - 1)
        data.append(tuple(values))
        values = list()
        for col in columns:
            bits = int(col.replace('c', ''))
            values.append((1 << bits) - 1)
        data.append(tuple(values))
        cur.executemany(insert, data)
        cur.execute(select)
        rows = cur.fetchall()
        self.assertEqual(rows, data)
        cur.close()

    @foreach_cnx()
    def test_numeric_float(self):
        """FLOAT and DOUBLE round-trips, compared at column precision."""
        tbl = self.tables['float']
        self.cnx.cmd_query("DROP TABLE IF EXISTS {0}".format(tbl))
        cur = self.cnx.cursor()
        columns = [
            'float_signed',
            'float_unsigned',
            'double_signed',
            'double_unsigned',
        ]
        cur.execute((
            "CREATE TABLE {table} ("
            "`id` int NOT NULL AUTO_INCREMENT,"
            "`float_signed` FLOAT(6,5) SIGNED,"
            "`float_unsigned` FLOAT(6,5) UNSIGNED,"
            "`double_signed` DOUBLE(15,10) SIGNED,"
            "`double_unsigned` DOUBLE(15,10) UNSIGNED,"
            "PRIMARY KEY (id))"
        ).format(table=tbl)
        )
        insert = _get_insert_stmt(tbl, columns)
        select = _get_select_stmt(tbl, columns)
        data = [
            (-3.402823466, 0, -1.7976931348623157, 0,),
            (-1.175494351, 3.402823466,
             1.7976931348623157, 2.2250738585072014),
            (-1.23455678, 2.999999, -1.3999999999999999, 1.9999999999999999),
        ]
        cur.executemany(insert, data)
        cur.execute(select)
        rows = cur.fetchall()
        for j in range(0, len(data)):
            # FLOAT(6,5) columns: compare at 5 decimals of precision.
            for i, col in enumerate(columns[0:2]):
                self.compare(col, round(data[j][i], 5), rows[j][i])
            # DOUBLE(15,10) columns: compare at 10 decimals of precision.
            # BUG FIX: the original iterated columns[2:2] -- an empty slice --
            # so the DOUBLE columns were never compared at all.  Enumerate
            # from index 2 so i matches the position within each data row.
            for i, col in enumerate(columns[2:], 2):
                self.compare(col, round(data[j][i], 10), rows[j][i])
        cur.close()

    @foreach_cnx()
    def test_numeric_decimal(self):
        """DECIMAL(65,30) values round-trip exactly as decimal.Decimal."""
        tbl = self.tables['decimal']
        self.cnx.cmd_query("DROP TABLE IF EXISTS {0}".format(tbl))
        cur = self.cnx.cursor()
        columns = [
            'decimal_signed',
            'decimal_unsigned',
        ]
        cur.execute((
            "CREATE TABLE {table} ("
            "`id` int NOT NULL AUTO_INCREMENT,"
            "`decimal_signed` DECIMAL(65,30) SIGNED,"
            "`decimal_unsigned` DECIMAL(65,30) UNSIGNED,"
            "PRIMARY KEY (id))"
        ).format(table=tbl)
        )
        insert = _get_insert_stmt(tbl, columns)
        select = _get_select_stmt(tbl, columns)
        data = [
            (Decimal(
                '-9999999999999999999999999.999999999999999999999999999999'),
             Decimal(
                 '+9999999999999999999999999.999999999999999999999999999999')),
            (Decimal('-1234567.1234'),
             Decimal('+123456789012345.123456789012345678901')),
            (Decimal(
                '-1234567890123456789012345.123456789012345678901234567890'),
             Decimal(
                 '+1234567890123456789012345.123456789012345678901234567890')),
        ]
        cur.executemany(insert, data)
        cur.execute(select)
        rows = cur.fetchall()
        self.assertEqual(data, rows)
        cur.close()

    @foreach_cnx()
    def test_temporal_datetime(self):
        """DATE/DATETIME/TIME/TIMESTAMP/YEAR round-trips at their boundaries."""
        tbl = self.tables['temporal']
        self.cnx.cmd_query("DROP TABLE IF EXISTS {0}".format(tbl))
        cur = self.cnx.cursor()
        # Pin the session to UTC so TIMESTAMP values compare deterministically.
        cur.execute("SET SESSION time_zone = '+00:00'")
        columns = [
            't_date',
            't_datetime',
            't_time',
            't_timestamp',
            't_year_4',
        ]
        cur.execute((
            "CREATE TABLE {table} ("
            "`id` int NOT NULL AUTO_INCREMENT,"
            "`t_date` DATE,"
            "`t_datetime` DATETIME,"
            "`t_time` TIME,"
            "`t_timestamp` TIMESTAMP DEFAULT 0,"
            "`t_year_4` YEAR(4),"
            "PRIMARY KEY (id))"
        ).format(table=tbl)
        )
        insert = _get_insert_stmt(tbl, columns)
        select = _get_select_stmt(tbl, columns)
        data = [
            (datetime.date(2010, 1, 17),
             datetime.datetime(2010, 1, 17, 19, 31, 12),
             datetime.timedelta(hours=43, minutes=32, seconds=21),
             datetime.datetime(2010, 1, 17, 19, 31, 12),
             0),
            (datetime.date(1000, 1, 1),
             datetime.datetime(1000, 1, 1, 0, 0, 0),
             datetime.timedelta(hours=-838, minutes=59, seconds=59),
             datetime.datetime(*time.gmtime(1)[:6]),
             1901),
            (datetime.date(9999, 12, 31),
             datetime.datetime(9999, 12, 31, 23, 59, 59),
             datetime.timedelta(hours=838, minutes=59, seconds=59),
             datetime.datetime(2038, 1, 19, 3, 14, 7),
             2155),
        ]
        cur.executemany(insert, data)
        cur.execute(select)
        rows = cur.fetchall()
        for j in (range(0, len(data))):
            for i, col in enumerate(columns):
                self.compare("{column} (data[{count}])".format(
                    column=col, count=j), data[j][i], rows[j][i])
        # Testing YEAR(2), which is now obsolete since MySQL 5.6.6
        tblname = self.tables['temporal_year']
        cur.execute("DROP TABLE IF EXISTS {0}".format(tblname))
        stmt = (
            "CREATE TABLE {table} ("
            "`id` int NOT NULL AUTO_INCREMENT KEY, "
            "`t_year_2` YEAR(2))".format(table=tblname)
        )
        if tests.MYSQL_VERSION >= (5, 7, 5):
            # Support for YEAR(2) removed in MySQL 5.7.5
            self.assertRaises(errors.DatabaseError, cur.execute, stmt)
        else:
            cur.execute(stmt)
            cur.execute(_get_insert_stmt(tblname, ['t_year_2']), (10,))
            cur.execute(_get_select_stmt(tblname, ['t_year_2']))
            row = cur.fetchone()
            if tests.MYSQL_VERSION >= (5, 6, 6):
                # 5.6.6+ expands two-digit years to four digits on read.
                self.assertEqual(2010, row[0])
            else:
                self.assertEqual(10, row[0])
        cur.close()

    @cnx_config(consume_results=True)
    @foreach_cnx()
    def test_set(self):
        """SET columns come back as Python sets of their member strings."""
        # BUG FIX: the original used self.tables['temporal'] here, clobbering
        # the table used by test_temporal_datetime; the tables dict defines a
        # dedicated (previously unused) 'set' entry for this test.
        tbl = self.tables['set']
        self.cnx.cmd_query("DROP TABLE IF EXISTS {0}".format(tbl))
        cur = self.cnx.cursor()
        cur.execute((
            "CREATE TABLE {table} ("
            "`id` int NOT NULL AUTO_INCREMENT,"
            "c1 SET ('a', 'b', 'c'),"
            "c2 SET ('1', '2', '3'),"
            "c3 SET ('ham', 'spam'),"
            "PRIMARY KEY (id))"
        ).format(table=tbl)
        )
        insert = (
            "INSERT INTO {table} (c1, c2, c3) VALUES "
            "('a,c', '1,3', 'spam'), ('b', '3,2', 'spam,spam,ham')"
        ).format(table=tbl)
        cur.execute(insert)
        cur.execute("SELECT * FROM {table}".format(table=tbl))
        exp = [
            (1, set([u'a', u'c']), set([u'1', u'3']), set([u'spam'])),
            (2, set([u'b']), set([u'3', u'2']), set([u'ham', u'spam']))
        ]
        self.assertEqual(exp, cur.fetchall())
        cur.close()
| gpl-2.0 |
Grirrane/odoo | openerp/addons/base/ir/ir_model.py | 4 | 61958 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2004-2014 OpenERP S.A. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from collections import defaultdict
import logging
import re
import time
import types
import openerp
from openerp import SUPERUSER_ID
from openerp import models, tools, api
from openerp.modules.registry import RegistryManager
from openerp.osv import fields, osv
from openerp.osv.orm import BaseModel, Model, MAGIC_COLUMNS
from openerp.exceptions import UserError, AccessError
from openerp.tools import config
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
MODULE_UNINSTALL_FLAG = '_force_unlink'
def _get_fields_type(self, cr, uid, context=None):
    """Return the selectable field types as sorted (name, name) pairs.

    Keeps only concrete, non-deprecated column classes; the abstract
    `_column` base and function fields are excluded.
    """
    def _selectable(cls):
        # Kept as a flat conjunction; deeply nested `if`s broke RedHat's
        # Python 2.6 (bug 939653).
        return (type(cls) == types.TypeType
                and issubclass(cls, fields._column)
                and cls != fields._column
                and not cls._deprecated
                and not issubclass(cls, fields.function))
    return sorted([(name, name) for name, cls in fields.__dict__.iteritems()
                   if _selectable(cls)])
def _in_modules(self, cr, uid, ids, field_name, arg, context=None):
    """fields.function helper shared by ir.model / ir.model.fields:
    maps each record id to a comma-separated list of the *installed*
    modules that define or inherit it."""
    module_pool = self.pool["ir.module.module"]
    installed_ids = module_pool.search(cr, uid, [('state', '=', 'installed')])
    installed_rows = module_pool.read(cr, uid, installed_ids, ['name'], context=context)
    installed = set(row['name'] for row in installed_rows)
    xml_ids = osv.osv._get_xml_ids(self, cr, uid, ids)
    result = {}
    for record_id, external_ids in xml_ids.iteritems():
        # The module name is the prefix of each fully-qualified xml id.
        declaring = set(xml_id.split('.')[0] for xml_id in external_ids)
        result[record_id] = ', '.join(sorted(installed & declaring))
    return result
class unknown(models.AbstractModel):
    """
    Abstract model used as a substitute for relational fields with an unknown
    comodel.
    """
    # Placeholder technical name referenced wherever the real comodel
    # cannot be resolved.
    _name = '_unknown'
class ir_model(osv.osv):
    """Registry of all models known to the system (base and custom)."""
    _name = 'ir.model'
    _description = "Models"
    _order = 'model'
    def _is_osv_memory(self, cr, uid, ids, field_name, arg, context=None):
        # fields.function getter: record id -> whether the model is transient.
        models = self.browse(cr, uid, ids, context=context)
        res = dict.fromkeys(ids)
        for model in models:
            if model.model in self.pool:
                res[model.id] = self.pool[model.model].is_transient()
            else:
                # Registry entry without a loaded class: leave None and log.
                _logger.error('Missing model %s' % (model.model, ))
        return res
    def _search_osv_memory(self, cr, uid, model, name, domain, context=None):
        # fields.function search: only = / != comparisons are supported,
        # implemented by evaluating the getter over all models.
        if not domain:
            return []
        __, operator, value = domain[0]
        if operator not in ['=', '!=']:
            raise UserError(_("Invalid Search Criteria") + ": " + _("The osv_memory field can only be compared with = and != operator."))
        value = bool(value) if operator == '=' else not bool(value)
        all_model_ids = self.search(cr, uid, [], context=context)
        is_osv_mem = self._is_osv_memory(cr, uid, all_model_ids, 'osv_memory', arg=None, context=context)
        return [('id', 'in', [id for id in is_osv_mem if bool(is_osv_mem[id]) == value])]
    def _view_ids(self, cr, uid, ids, field_name, arg, context=None):
        # fields.function getter: record id -> ids of views defined on the model.
        models = self.browse(cr, uid, ids)
        res = {}
        for model in models:
            res[model.id] = self.pool["ir.ui.view"].search(cr, uid, [('model', '=', model.model)])
        return res
    def _inherited_models(self, cr, uid, ids, field_name, arg, context=None):
        # fields.function getter: record id -> ids of models listed in _inherits.
        res = {}
        for model in self.browse(cr, uid, ids, context=context):
            res[model.id] = []
            inherited_models = [model_name for model_name in self.pool[model.model]._inherits]
            if inherited_models:
                res[model.id] = self.search(cr, uid, [('model', 'in', inherited_models)], context=context)
        return res
    _columns = {
        'name': fields.char('Model Description', translate=True, required=True),
        'model': fields.char('Model', required=True, select=1),
        'info': fields.text('Information'),
        'field_id': fields.one2many('ir.model.fields', 'model_id', 'Fields', required=True, copy=True),
        'inherited_model_ids': fields.function(_inherited_models, type="many2many", obj="ir.model", string="Inherited models",
            help="The list of models that extends the current model."),
        'state': fields.selection([('manual','Custom Object'),('base','Base Object')],'Type', readonly=True),
        'access_ids': fields.one2many('ir.model.access', 'model_id', 'Access'),
        'osv_memory': fields.function(_is_osv_memory, string='Transient Model', type='boolean',
            fnct_search=_search_osv_memory,
            help="This field specifies whether the model is transient or not (i.e. if records are automatically deleted from the database or not)"),
        'modules': fields.function(_in_modules, type='char', string='In Modules', help='List of modules in which the object is defined or inherited'),
        'view_ids': fields.function(_view_ids, type='one2many', obj='ir.ui.view', string='Views'),
    }
    _defaults = {
        'model': 'x_',
        'state': lambda self,cr,uid,ctx=None: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
    }
    def _check_model_name(self, cr, uid, ids, context=None):
        # Manually-created models must use the x_ prefix and a restricted charset.
        for model in self.browse(cr, uid, ids, context=context):
            if model.state=='manual':
                if not model.model.startswith('x_'):
                    return False
            if not re.match('^[a-z_A-Z0-9.]+$',model.model):
                return False
        return True
    def _model_name_msg(self, cr, uid, ids, context=None):
        # Error message shown when _check_model_name fails.
        return _('The Object name must start with x_ and not contain any special character !')
    _constraints = [
        (_check_model_name, _model_name_msg, ['model']),
    ]
    _sql_constraints = [
        ('obj_name_uniq', 'unique (model)', 'Each model must be unique!'),
    ]
    # overridden to allow searching both on model name (model field)
    # and model description (name field)
    def _name_search(self, cr, uid, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
        if args is None:
            args = []
        domain = args + ['|', ('model', operator, name), ('name', operator, name)]
        return self.name_get(cr, name_get_uid or uid,
                             super(ir_model, self).search(cr, uid, domain, limit=limit, context=context),
                             context=context)
    def _drop_table(self, cr, uid, ids, context=None):
        # Drop the backing table or view of each model before unlinking it.
        for model in self.browse(cr, uid, ids, context):
            model_pool = self.pool[model.model]
            cr.execute('select relkind from pg_class where relname=%s', (model_pool._table,))
            result = cr.fetchone()
            # relkind 'v' = SQL view, 'r' = ordinary table.
            if result and result[0] == 'v':
                cr.execute('DROP view %s' % (model_pool._table,))
            elif result and result[0] == 'r':
                cr.execute('DROP TABLE %s CASCADE' % (model_pool._table,))
        return True
    def unlink(self, cr, user, ids, context=None):
        # Prevent manual deletion of module tables
        if context is None: context = {}
        if isinstance(ids, (int, long)):
            ids = [ids]
        if not context.get(MODULE_UNINSTALL_FLAG):
            for model in self.browse(cr, user, ids, context):
                if model.state != 'manual':
                    raise UserError(_("Model '%s' contains module data and cannot be removed!") % (model.name,))
        self._drop_table(cr, user, ids, context)
        res = super(ir_model, self).unlink(cr, user, ids, context)
        if not context.get(MODULE_UNINSTALL_FLAG):
            # only reload pool for normal unlink. For module uninstall the
            # reload is done independently in openerp.modules.loading
            cr.commit() # must be committed before reloading registry in new cursor
            api.Environment.reset()
            RegistryManager.new(cr.dbname)
            RegistryManager.signal_registry_change(cr.dbname)
        return res
    def write(self, cr, user, ids, vals, context=None):
        if context:
            context = dict(context)
            context.pop('__last_update', None)
        # Filter out operations 4 link from field id, because openerp-web
        # always write (4,id,False) even for non dirty items
        if 'field_id' in vals:
            vals['field_id'] = [op for op in vals['field_id'] if op[0] != 4]
        return super(ir_model,self).write(cr, user, ids, vals, context)
    def create(self, cr, user, vals, context=None):
        # Creating a 'manual' model also registers it in the pool and
        # initializes its database schema immediately.
        if context is None:
            context = {}
        if context and context.get('manual'):
            vals['state']='manual'
        res = super(ir_model,self).create(cr, user, vals, context)
        if vals.get('state','base')=='manual':
            # add model in registry
            self.instanciate(cr, user, vals['model'], context)
            self.pool.setup_models(cr, partial=(not self.pool.ready))
            # update database schema
            model = self.pool[vals['model']]
            ctx = dict(context,
                field_name=vals['name'],
                field_state='manual',
                select=vals.get('select_level', '0'),
                update_custom_fields=True)
            model._auto_init(cr, ctx)
            model._auto_end(cr, ctx) # actually create FKs!
            RegistryManager.signal_registry_change(cr.dbname)
        return res
    def instanciate(self, cr, user, model, context=None):
        # Build and register a fresh Model class for a manual model name.
        if isinstance(model, unicode):
            model = model.encode('utf-8')
        class CustomModel(models.Model):
            _name = model
            _module = False
            _custom = True
        CustomModel._build_model(self.pool, cr)
class ir_model_fields(osv.osv):
    """Metadata about model fields.

    Fields in state 'manual' (custom fields) can be created, renamed and
    dropped at runtime; doing so updates both the registry and the
    database schema.
    """
    _name = 'ir.model.fields'
    _description = "Fields"
    _rec_name = 'field_description'

    _columns = {
        'name': fields.char('Name', required=True, select=1),
        'complete_name': fields.char('Complete Name', select=1),
        'model': fields.char('Object Name', required=True, select=1,
            help="The technical name of the model this field belongs to"),
        'relation': fields.char('Object Relation',
            help="For relationship fields, the technical name of the target model"),
        'relation_field': fields.char('Relation Field',
            help="For one2many fields, the field on the target model that implement the opposite many2one relationship"),
        'model_id': fields.many2one('ir.model', 'Model', required=True, select=True, ondelete='cascade',
            help="The model this field belongs to"),
        'field_description': fields.char('Field Label', required=True),
        'ttype': fields.selection(_get_fields_type, 'Field Type', required=True),
        'selection': fields.char('Selection Options', help="List of options for a selection field, "
            "specified as a Python expression defining a list of (key, label) pairs. "
            "For example: [('blue','Blue'),('yellow','Yellow')]"),
        'required': fields.boolean('Required'),
        'readonly': fields.boolean('Readonly'),
        'select_level': fields.selection([('0','Not Searchable'),('1','Always Searchable'),('2','Advanced Search (deprecated)')],'Searchable', required=True),
        'translate': fields.boolean('Translatable', help="Whether values for this field can be translated (enables the translation mechanism for that field)"),
        'size': fields.integer('Size'),
        'state': fields.selection([('manual','Custom Field'),('base','Base Field')],'Type', required=True, readonly=True, select=1),
        'on_delete': fields.selection([('cascade', 'Cascade'), ('set null', 'Set NULL'), ('restrict', 'Restrict')],
                                      'On Delete', help='On delete property for many2one fields'),
        'domain': fields.char('Domain', help="The optional domain to restrict possible values for relationship fields, "
            "specified as a Python expression defining a list of triplets. "
            "For example: [('color','=','red')]"),
        'groups': fields.many2many('res.groups', 'ir_model_fields_group_rel', 'field_id', 'group_id', 'Groups'),
        'selectable': fields.boolean('Selectable'),
        'modules': fields.function(_in_modules, type='char', string='In Modules', help='List of modules in which the field is defined'),
        'serialization_field_id': fields.many2one('ir.model.fields', 'Serialization Field', domain = "[('ttype','=','serialized')]",
                                                  ondelete='cascade', help="If set, this field will be stored in the sparse "
                                                                           "structure of the serialization field, instead "
                                                                           "of having its own database column. This cannot be "
                                                                           "changed after creation."),
    }
    _defaults = {
        'selection': "",
        'domain': "[]",
        'name': 'x_',
        'state': lambda self,cr,uid,ctx=None: (ctx and ctx.get('manual',False)) and 'manual' or 'base',
        'on_delete': 'set null',
        'select_level': '0',
        'field_description': '',
        'selectable': 1,
    }
    _order = "name"

    def _check_selection(self, cr, uid, selection, context=None):
        """Validate that *selection* evaluates to a non-empty list of
        (key, label) pairs; raise UserError otherwise."""
        try:
            # NOTE: eval of user-provided text; access to this model is
            # restricted to administrators, but this remains sensitive.
            selection_list = eval(selection)
        except Exception:
            _logger.info('Invalid selection list definition for fields.selection', exc_info=True)
            raise UserError(_("The Selection Options expression is not a valid Pythonic expression. "
                              "Please provide an expression in the [('key','Label'), ...] format."))
        check = True
        if not (isinstance(selection_list, list) and selection_list):
            check = False
        else:
            for item in selection_list:
                if not (isinstance(item, (tuple,list)) and len(item) == 2):
                    check = False
                    break
        if not check:
            raise UserError(_("The Selection Options expression must be in the [('key','Label'), ...] format!"))
        return True

    def _size_gt_zero_msg(self, cr, user, ids, context=None):
        # message for the size_gt_zero SQL constraint below
        return _('Size of the field can never be less than 0 !')

    _sql_constraints = [
        ('size_gt_zero', 'CHECK (size>=0)',_size_gt_zero_msg ),
    ]

    def _drop_column(self, cr, uid, ids, context=None):
        """Drop the database column (and, for custom m2m fields, the
        relation table) backing each field, then remove the field from the
        registry model."""
        for field in self.browse(cr, uid, ids, context):
            if field.name in MAGIC_COLUMNS:
                continue
            model = self.pool[field.model]
            cr.execute('select relkind from pg_class where relname=%s', (model._table,))
            result = cr.fetchone()
            # use query parameters rather than interpolating the names into
            # the SQL string (the original broke on quoted identifiers)
            cr.execute("SELECT column_name FROM information_schema.columns "
                       "WHERE table_name = %s AND column_name = %s",
                       (model._table, field.name))
            column_name = cr.fetchone()
            # only ALTER real tables ('r'), not views
            if column_name and (result and result[0] == 'r'):
                cr.execute('ALTER table "%s" DROP column "%s" cascade' % (model._table, field.name))
            # remove m2m relation table for custom fields
            # we consider the m2m relation is only one way as it's not possible
            # to specify the relation table in the interface for custom fields
            # TODO master: maybe use ir.model.relations for custom fields
            if field.state == 'manual' and field.ttype == 'many2many':
                rel_name = model._fields[field.name].relation
                cr.execute('DROP table "%s"' % (rel_name))
            model._pop_field(field.name)
        return True

    def unlink(self, cr, user, ids, context=None):
        """Delete fields; base (module-owned) fields may only be removed
        during a module uninstall. Reloads the registry afterwards."""
        # Prevent manual deletion of module columns
        if context is None: context = {}
        if isinstance(ids, (int, long)):
            ids = [ids]
        if not context.get(MODULE_UNINSTALL_FLAG) and \
                any(field.state != 'manual' for field in self.browse(cr, user, ids, context)):
            raise UserError(_("This column contains module data and cannot be removed!"))
        self._drop_column(cr, user, ids, context)
        res = super(ir_model_fields, self).unlink(cr, user, ids, context)
        if not context.get(MODULE_UNINSTALL_FLAG):
            # The field we just deleted might have be inherited, and registry is
            # inconsistent in this case; therefore we reload the registry.
            cr.commit()  # must be committed before reloading registry in new cursor
            api.Environment.reset()
            RegistryManager.new(cr.dbname)
            RegistryManager.signal_registry_change(cr.dbname)
        return res

    def create(self, cr, user, vals, context=None):
        """Create a field; for 'manual' fields, validate the definition,
        re-initialize the owning model and update the database schema."""
        if 'model_id' in vals:
            model_data = self.pool['ir.model'].browse(cr, user, vals['model_id'])
            vals['model'] = model_data.model
        if context is None:
            context = {}
        if context and context.get('manual',False):
            vals['state'] = 'manual'
        if vals.get('ttype', False) == 'selection':
            if not vals.get('selection',False):
                raise UserError(_('For selection fields, the Selection Options must be given!'))
            self._check_selection(cr, user, vals['selection'], context=context)
        res = super(ir_model_fields,self).create(cr, user, vals, context)
        if vals.get('state','base') == 'manual':
            if not vals['name'].startswith('x_'):
                raise UserError(_("Custom fields must have a name that starts with 'x_' !"))
            if vals.get('relation',False) and not self.pool['ir.model'].search(cr, user, [('model','=',vals['relation'])]):
                raise UserError(_("Model %s does not exist!") % vals['relation'])
            self.pool.clear_manual_fields()
            if vals['model'] in self.pool:
                model = self.pool[vals['model']]
                if vals['model'].startswith('x_') and vals['name'] == 'x_name':
                    model._rec_name = 'x_name'
                # re-initialize model in registry
                model.__init__(self.pool, cr)
                self.pool.setup_models(cr, partial=(not self.pool.ready))
                # update database schema
                model = self.pool[vals['model']]
                ctx = dict(context,
                    field_name=vals['name'],
                    field_state='manual',
                    select=vals.get('select_level', '0'),
                    update_custom_fields=True)
                model._auto_init(cr, ctx)
                model._auto_end(cr, ctx) # actually create FKs!
                RegistryManager.signal_registry_change(cr.dbname)
        return res

    def write(self, cr, user, ids, vals, context=None):
        """Write field definitions; only 'manual' fields may be altered.

        Supports renaming one column at a time and patching a fixed set of
        field properties, then syncs registry and database schema.
        """
        if context is None:
            context = {}
        if context and context.get('manual',False):
            vals['state'] = 'manual'

        #For the moment renaming a sparse field or changing the storing system is not allowed. This may be done later
        if 'serialization_field_id' in vals or 'name' in vals:
            for field in self.browse(cr, user, ids, context=context):
                if 'serialization_field_id' in vals and field.serialization_field_id.id != vals['serialization_field_id']:
                    raise UserError(_('Changing the storing system for field "%s" is not allowed.') % field.name)
                # guard 'name' lookup: vals may contain only
                # serialization_field_id (the original raised KeyError here)
                if field.serialization_field_id and 'name' in vals and (field.name != vals['name']):
                    raise UserError(_('Renaming sparse field "%s" is not allowed') % field.name)

        # if set, *one* column can be renamed here
        column_rename = None

        # field patches {model: {field_name: {prop_name: prop_value, ...}, ...}, ...}
        patches = defaultdict(lambda: defaultdict(dict))

        # static table of properties
        # NOTE: 'domain' and 'selection' are eval'ed (admin-provided text)
        model_props = [ # (our-name, fields.prop, set_fn)
            ('field_description', 'string', tools.ustr),
            ('required', 'required', bool),
            ('readonly', 'readonly', bool),
            ('domain', 'domain', eval),
            ('size', 'size', int),
            ('on_delete', 'ondelete', str),
            ('translate', 'translate', bool),
            ('select_level', 'index', lambda x: bool(int(x))),
            ('selection', 'selection', eval),
        ]

        if vals and ids:
            checked_selection = False # need only check it once, so defer
            for item in self.browse(cr, user, ids, context=context):
                obj = self.pool.get(item.model)
                field = getattr(obj, '_fields', {}).get(item.name)

                if item.state != 'manual':
                    raise UserError(_('Properties of base fields cannot be altered in this manner! '
                                      'Please modify them through Python code, '
                                      'preferably through a custom addon!'))

                if item.ttype == 'selection' and 'selection' in vals \
                        and not checked_selection:
                    self._check_selection(cr, user, vals['selection'], context=context)
                    checked_selection = True

                final_name = item.name
                if 'name' in vals and vals['name'] != item.name:
                    # We need to rename the column
                    if column_rename:
                        raise UserError(_('Can only rename one column at a time!'))
                    if vals['name'] in obj._columns:
                        raise UserError(_('Cannot rename column to %s, because that column already exists!') % vals['name'])
                    if vals.get('state', 'base') == 'manual' and not vals['name'].startswith('x_'):
                        raise UserError(_('New column name must still start with x_ , because it is a custom field!'))
                    if '\'' in vals['name'] or '"' in vals['name'] or ';' in vals['name']:
                        raise ValueError('Invalid character in column name')
                    column_rename = (obj, (obj._table, item.name, vals['name']))
                    final_name = vals['name']

                if 'model_id' in vals and vals['model_id'] != item.model_id.id:
                    raise UserError(_("Changing the model of a field is forbidden!"))

                if 'ttype' in vals and vals['ttype'] != item.ttype:
                    raise UserError(_("Changing the type of a column is not yet supported. "
                                      "Please drop it and create it again!"))

                # We don't check the 'state', because it might come from the context
                # (thus be set for multiple fields) and will be ignored anyway.
                if obj is not None and field is not None:
                    # find out which properties (per model) we need to update
                    for field_name, prop_name, func in model_props:
                        if field_name in vals:
                            prop_value = func(vals[field_name])
                            if getattr(field, prop_name) != prop_value:
                                patches[obj][final_name][prop_name] = prop_value

        # These shall never be written (modified)
        for column_name in ('model_id', 'model', 'state'):
            if column_name in vals:
                del vals[column_name]

        res = super(ir_model_fields,self).write(cr, user, ids, vals, context=context)

        if column_rename:
            obj, rename = column_rename
            cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % rename)
            # This is VERY risky, but let us have this feature:
            # we want to change the key of field in obj._fields and obj._columns
            field = obj._pop_field(rename[1])
            obj._add_field(rename[2], field)
            self.pool.setup_models(cr, partial=(not self.pool.ready))

        if patches:
            # We have to update _columns of the model(s) and then call their
            # _auto_init to sync the db with the model. Hopefully, since write()
            # was called earlier, they will be in-sync before the _auto_init.
            # Anything we don't update in _columns now will be reset from
            # the model into ir.model.fields (db).
            ctx = dict(context,
                select=vals.get('select_level', '0'),
                update_custom_fields=True,
            )
            for obj, model_patches in patches.iteritems():
                for field_name, field_patches in model_patches.iteritems():
                    # update field properties, and adapt corresponding column
                    field = obj._fields[field_name]
                    attrs = dict(field._attrs, **field_patches)
                    obj._add_field(field_name, field.new(**attrs))
                # update database schema
                self.pool.setup_models(cr, partial=(not self.pool.ready))
                obj._auto_init(cr, ctx)
                obj._auto_end(cr, ctx) # actually create FKs!

        if column_rename or patches:
            RegistryManager.signal_registry_change(cr.dbname)

        return res
class ir_model_constraint(Model):
    """
    This model tracks PostgreSQL foreign keys and constraints used by OpenERP
    models.
    """
    _name = 'ir.model.constraint'
    _columns = {
        'name': fields.char('Constraint', required=True, select=1,
            help="PostgreSQL constraint or foreign key name."),
        'definition': fields.char('Definition', help="PostgreSQL constraint definition"),
        'model': fields.many2one('ir.model', string='Model',
            required=True, select=1),
        'module': fields.many2one('ir.module.module', string='Module',
            required=True, select=1),
        'type': fields.char('Constraint Type', required=True, size=1, select=1,
            help="Type of the constraint: `f` for a foreign key, "
                "`u` for other constraints."),
        'date_update': fields.datetime('Update Date'),
        'date_init': fields.datetime('Initialization Date')
    }
    _sql_constraints = [
        ('module_name_uniq', 'unique(name, module)',
            'Constraints with the same name are unique per module.'),
    ]

    def _module_data_uninstall(self, cr, uid, ids, context=None):
        """
        Delete PostgreSQL foreign keys and constraints tracked by this model.

        Only drops a schema element when *ids* covers every tracking record
        for it (i.e. no still-installed module owns it). Requires admin
        rights. Finally unlinks the tracking records themselves.
        """
        if uid != SUPERUSER_ID and not self.pool['ir.model.access'].check_groups(cr, uid, "base.group_system"):
            raise AccessError(_('Administrator access is required to uninstall a module'))

        context = dict(context or {})

        ids_set = set(ids)
        # process in reverse-id (roughly reverse-creation) order so
        # dependent constraints go first
        ids.sort()
        ids.reverse()
        for data in self.browse(cr, uid, ids, context):
            model = data.model.model
            model_obj = self.pool[model]
            name = openerp.tools.ustr(data.name)
            typ = data.type

            # double-check we are really going to delete all the owners of this schema element
            cr.execute("""SELECT id from ir_model_constraint where name=%s""", (data.name,))
            external_ids = [x[0] for x in cr.fetchall()]
            if set(external_ids)-ids_set:
                # as installed modules have defined this element we must not delete it!
                continue

            if typ == 'f':
                # test if FK exists on this table (it could be on a related m2m table, in which case we ignore it)
                cr.execute("""SELECT 1 from pg_constraint cs JOIN pg_class cl ON (cs.conrelid = cl.oid)
                              WHERE cs.contype=%s and cs.conname=%s and cl.relname=%s""", ('f', name, model_obj._table))
                if cr.fetchone():
                    cr.execute('ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (model_obj._table, name),)
                    _logger.info('Dropped FK CONSTRAINT %s@%s', name, model)

            if typ == 'u':
                # test if constraint exists
                cr.execute("""SELECT 1 from pg_constraint cs JOIN pg_class cl ON (cs.conrelid = cl.oid)
                              WHERE cs.contype=%s and cs.conname=%s and cl.relname=%s""", ('u', name, model_obj._table))
                if cr.fetchone():
                    cr.execute('ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (model_obj._table, name),)
                    _logger.info('Dropped CONSTRAINT %s@%s', name, model)

        self.unlink(cr, uid, ids, context)
class ir_model_relation(Model):
    """
    This model tracks PostgreSQL tables used to implement OpenERP many2many
    relations.
    """
    _name = 'ir.model.relation'
    _columns = {
        'name': fields.char('Relation Name', required=True, select=1,
            help="PostgreSQL table name implementing a many2many relation."),
        'model': fields.many2one('ir.model', string='Model',
            required=True, select=1),
        'module': fields.many2one('ir.module.module', string='Module',
            required=True, select=1),
        'date_update': fields.datetime('Update Date'),
        'date_init': fields.datetime('Initialization Date')
    }

    def _module_data_uninstall(self, cr, uid, ids, context=None):
        """
        Delete PostgreSQL many2many relations tracked by this model.

        A relation table is only dropped when *ids* covers every tracking
        record referencing it (no still-installed module owns it).
        Requires admin rights.
        """
        if uid != SUPERUSER_ID and not self.pool['ir.model.access'].check_groups(cr, uid, "base.group_system"):
            raise AccessError(_('Administrator access is required to uninstall a module'))

        ids_set = set(ids)
        to_drop_table = []
        # process in reverse-id (roughly reverse-creation) order
        ids.sort()
        ids.reverse()
        for data in self.browse(cr, uid, ids, context):
            model = data.model
            name = openerp.tools.ustr(data.name)

            # double-check we are really going to delete all the owners of this schema element
            cr.execute("""SELECT id from ir_model_relation where name = %s""", (data.name,))
            external_ids = [x[0] for x in cr.fetchall()]
            if set(external_ids)-ids_set:
                # as installed modules have defined this element we must not delete it!
                continue

            cr.execute("SELECT 1 FROM information_schema.tables WHERE table_name=%s", (name,))
            if cr.fetchone() and name not in to_drop_table:
                to_drop_table.append(name)

        self.unlink(cr, uid, ids, context)

        # drop m2m relation tables
        # NOTE(review): table name is interpolated unquoted; names come from
        # this model's own records, not user input — confirm before quoting.
        for table in to_drop_table:
            cr.execute('DROP TABLE %s CASCADE'% table,)
            _logger.info('Dropped table %s', table)

        cr.commit()
class ir_model_access(osv.osv):
    """Access control lists (ACLs): per-model, per-group CRUD permissions."""
    _name = 'ir.model.access'
    _columns = {
        'name': fields.char('Name', required=True, select=True),
        'active': fields.boolean('Active', help='If you uncheck the active field, it will disable the ACL without deleting it (if you delete a native ACL, it will be re-created when you reload the module.'),
        'model_id': fields.many2one('ir.model', 'Object', required=True, domain=[('osv_memory','=', False)], select=True, ondelete='cascade'),
        'group_id': fields.many2one('res.groups', 'Group', ondelete='cascade', select=True),
        'perm_read': fields.boolean('Read Access'),
        'perm_write': fields.boolean('Write Access'),
        'perm_create': fields.boolean('Create Access'),
        'perm_unlink': fields.boolean('Delete Access'),
    }
    _defaults = {
        'active': True,
    }

    def check_groups(self, cr, uid, group):
        """Return True if user *uid* belongs to the group whose external id
        is *group* ('module.name' form).

        NOTE(review): a *group* value without a dot raises IndexError on
        grouparr[1] — the `if not grouparr` guard can never trigger since
        str.split always returns at least one element. All in-tree callers
        pass dotted ids; confirm before tightening.
        """
        grouparr  = group.split('.')
        if not grouparr:
            return False
        cr.execute("select 1 from res_groups_users_rel where uid=%s and gid IN (select res_id from ir_model_data where module=%s and name=%s)", (uid, grouparr[0], grouparr[1],))
        return bool(cr.fetchone())

    def check_group(self, cr, uid, model, mode, group_ids):
        """ Check if a specific group has the access mode to the specified model"""
        assert mode in ['read','write','create','unlink'], 'Invalid access mode'

        if isinstance(model, BaseModel):
            assert model._name == 'ir.model', 'Invalid model object'
            model_name = model.name
        else:
            model_name = model

        if isinstance(group_ids, (int, long)):
            group_ids = [group_ids]
        for group_id in group_ids:
            # first look for a rule specific to this group ...
            cr.execute("SELECT perm_" + mode + " "
                   "  FROM ir_model_access a "
                   "  JOIN ir_model m ON (m.id = a.model_id) "
                   " WHERE m.model = %s AND a.active IS True "
                   " AND a.group_id = %s", (model_name, group_id)
                   )
            r = cr.fetchone()
            if r is None:
                # ... then fall back to the global (group-less) rule
                cr.execute("SELECT perm_" + mode + " "
                       "  FROM ir_model_access a "
                       "  JOIN ir_model m ON (m.id = a.model_id) "
                       " WHERE m.model = %s AND a.active IS True "
                       " AND a.group_id IS NULL", (model_name, )
                       )
                r = cr.fetchone()

            access = bool(r and r[0])
            if access:
                return True
        # pass no groups -> no access
        return False

    def group_names_with_access(self, cr, model_name, access_mode):
        """Returns the names of visible groups which have been granted ``access_mode`` on
           the model ``model_name``.
           :rtype: list
        """
        assert access_mode in ['read','write','create','unlink'], 'Invalid access mode: %s' % access_mode
        # access_mode is validated against a whitelist above, so the string
        # concatenation below is safe
        cr.execute('''SELECT
                        c.name, g.name
                      FROM
                        ir_model_access a
                        JOIN ir_model m ON (a.model_id=m.id)
                        JOIN res_groups g ON (a.group_id=g.id)
                        LEFT JOIN ir_module_category c ON (c.id=g.category_id)
                      WHERE
                        m.model=%s AND
                        a.active IS True AND
                        a.perm_''' + access_mode, (model_name,))
        return [('%s/%s' % x) if x[0] else x[1] for x in cr.fetchall()]

    # The context parameter is useful when the method translates error messages.
    # But as the method raises an exception in that case,  the key 'lang' might
    # not be really necessary as a cache key, unless the `ormcache_context`
    # decorator catches the exception (it does not at the moment.)
    @tools.ormcache_context(accepted_keys=('lang',))
    def check(self, cr, uid, model, mode='read', raise_exception=True, context=None):
        """Check whether user *uid* may perform *mode* on *model*.

        Returns a boolean; when access is denied and *raise_exception* is
        set, raises AccessError with a translated explanation instead.
        Results are cached per (uid, model, mode) via ormcache_context.
        """
        if uid==1:
            # User root have all accesses
            # TODO: exclude xml-rpc requests
            return True

        assert mode in ['read','write','create','unlink'], 'Invalid access mode'

        if isinstance(model, BaseModel):
            assert model._name == 'ir.model', 'Invalid model object'
            model_name = model.model
        else:
            model_name = model

        # TransientModel records have no access rights, only an implicit access rule
        if model_name not in self.pool:
            _logger.error('Missing model %s' % (model_name, ))
        elif self.pool[model_name].is_transient():
            return True

        # We check if a specific rule exists
        cr.execute('SELECT MAX(CASE WHEN perm_' + mode + ' THEN 1 ELSE 0 END) '
                   '  FROM ir_model_access a '
                   '  JOIN ir_model m ON (m.id = a.model_id) '
                   '  JOIN res_groups_users_rel gu ON (gu.gid = a.group_id) '
                   ' WHERE m.model = %s '
                   '   AND gu.uid = %s '
                   '   AND a.active IS True '
                   , (model_name, uid,)
                   )
        r = cr.fetchone()[0]

        if r is None:
            # there is no specific rule. We check the generic rule
            cr.execute('SELECT MAX(CASE WHEN perm_' + mode + ' THEN 1 ELSE 0 END) '
                       '  FROM ir_model_access a '
                       '  JOIN ir_model m ON (m.id = a.model_id) '
                       ' WHERE a.group_id IS NULL '
                       '   AND m.model = %s '
                       '   AND a.active IS True '
                       , (model_name,)
                       )
            r = cr.fetchone()[0]

        if not r and raise_exception:
            groups = '\n\t'.join('- %s' % g for g in self.group_names_with_access(cr, model_name, mode))
            msg_heads = {
                # Messages are declared in extenso so they are properly exported in translation terms
                'read': _("Sorry, you are not allowed to access this document."),
                'write':  _("Sorry, you are not allowed to modify this document."),
                'create': _("Sorry, you are not allowed to create this kind of document."),
                'unlink': _("Sorry, you are not allowed to delete this document."),
            }
            if groups:
                msg_tail = _("Only users with the following access level are currently allowed to do that") + ":\n%s\n\n(" + _("Document model") + ": %s)"
                msg_params = (groups, model_name)
            else:
                msg_tail = _("Please contact your system administrator if you think this is an error.") + "\n\n(" + _("Document model") + ": %s)"
                msg_params = (model_name,)
            _logger.info('Access Denied by ACLs for operation: %s, uid: %s, model: %s', mode, uid, model_name)
            msg = '%s %s' % (msg_heads[mode], msg_tail)
            raise openerp.exceptions.AccessError(msg % msg_params)

        return bool(r)

    # (model_name, method_name) pairs whose caches must be flushed whenever
    # ACLs change; shared at class level across instances
    __cache_clearing_methods = []

    def register_cache_clearing_method(self, model, method):
        self.__cache_clearing_methods.append((model, method))

    def unregister_cache_clearing_method(self, model, method):
        try:
            i = self.__cache_clearing_methods.index((model, method))
            del self.__cache_clearing_methods[i]
        except ValueError:
            pass

    def call_cache_clearing_methods(self, cr):
        """Invalidate this model's cache, the check() ormcache, and every
        registered third-party cache-clearing method."""
        self.invalidate_cache(cr, SUPERUSER_ID)
        self.check.clear_cache(self)    # clear the cache of check function
        for model, method in self.__cache_clearing_methods:
            if model in self.pool:
                getattr(self.pool[model], method)()

    #
    # Check rights on actions
    #
    def write(self, cr, uid, ids, values, context=None):
        # ACL change: flush caches before delegating
        self.call_cache_clearing_methods(cr)
        res = super(ir_model_access, self).write(cr, uid, ids, values, context=context)
        return res

    def create(self, cr, uid, values, context=None):
        # ACL change: flush caches before delegating
        self.call_cache_clearing_methods(cr)
        res = super(ir_model_access, self).create(cr, uid, values, context=context)
        return res

    def unlink(self, cr, uid, ids, context=None):
        # ACL change: flush caches before delegating
        self.call_cache_clearing_methods(cr)
        res = super(ir_model_access, self).unlink(cr, uid, ids, context=context)
        return res
class ir_model_data(osv.osv):
"""Holds external identifier keys for records in the database.
This has two main uses:
* allows easy data integration with third-party systems,
making import/export/sync of data possible, as records
can be uniquely identified across multiple systems
* allows tracking the origin of data installed by OpenERP
modules themselves, thus making it possible to later
update them seamlessly.
"""
_name = 'ir.model.data'
_order = 'module,model,name'
def name_get(self, cr, uid, ids, context=None):
    """Display the target record's own name_get() when available, falling
    back to the external identifier's complete_name."""
    bymodel = defaultdict(dict)
    names = {}
    for res in self.browse(cr, uid, ids, context=context):
        bymodel[res.model][res.res_id] = res
        names[res.id] = res.complete_name
        #result[res.model][res.res_id] = res.id
    for model, id_map in bymodel.iteritems():
        try:
            ng = dict(self.pool[model].name_get(cr, uid, id_map.keys(), context=context))
        except Exception:
            # best effort: keep complete_name when the target model's
            # name_get fails (missing records, access errors, ...)
            pass
        else:
            for r in id_map.itervalues():
                names[r.id] = ng.get(r.res_id, r.complete_name)
    return [(i, names[i]) for i in ids]
def _complete_name_get(self, cr, uid, ids, prop, unknow_none, context=None):
    """Functional getter for ``complete_name``: '<module>.<name>', or just
    the name when the record has no module."""
    records = self.browse(cr, uid, ids, context=context)
    return {
        rec.id: ('%s.%s' % (rec.module, rec.name)) if rec.module else rec.name
        for rec in records
    }
# Column definitions, defaults and constraints for ir.model.data.
_columns = {
    'name': fields.char('External Identifier', required=True, select=1,
                        help="External Key/Identifier that can be used for "
                             "data integration with third-party systems"),
    'complete_name': fields.function(_complete_name_get, type='char', string='Complete ID'),
    'model': fields.char('Model Name', required=True, select=1),
    'module': fields.char('Module', required=True, select=1),
    'res_id': fields.integer('Record ID', select=1,
                             help="ID of the target record in the database"),
    'noupdate': fields.boolean('Non Updatable'),
    'date_update': fields.datetime('Update Date'),
    'date_init': fields.datetime('Init Date')
}
_defaults = {
    'date_init': fields.datetime.now,
    'date_update': fields.datetime.now,
    'noupdate': False,
    'module': ''
}
# an external id is unique within its module
_sql_constraints = [
    ('module_name_uniq', 'unique(name, module)', 'You cannot have multiple records with the same external ID in the same module!'),
]
def __init__(self, pool, cr):
    """Initialize and attach the shared xmlid load map to pool and class."""
    osv.osv.__init__(self, pool, cr)
    # also stored in pool to avoid being discarded along with this osv instance
    if getattr(pool, 'model_data_reference_ids', None) is None:
        self.pool.model_data_reference_ids = {}
    # put loads on the class, in order to share it among all instances
    type(self).loads = self.pool.model_data_reference_ids
def _auto_init(self, cr, context=None):
    """Standard schema init, plus a composite (module, name) index used by
    xmlid lookups."""
    super(ir_model_data, self)._auto_init(cr, context)
    cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = \'ir_model_data_module_name_index\'')
    if not cr.fetchone():
        cr.execute('CREATE INDEX ir_model_data_module_name_index ON ir_model_data (module, name)')
# NEW V8 API
@tools.ormcache(skiparg=3)
def xmlid_lookup(self, cr, uid, xmlid):
    """Low level xmlid lookup
    Return (id, res_model, res_id) or raise ValueError if not found
    """
    module, name = xmlid.split('.', 1)
    ids = self.search(cr, uid, [('module','=',module), ('name','=', name)])
    if not ids:
        raise ValueError('External ID not found in the system: %s' % (xmlid))
    # the sql constraints ensure us we have only one result
    res = self.read(cr, uid, ids[0], ['model', 'res_id'])
    # an entry may exist but point at nothing (res_id unset)
    if not res['res_id']:
        raise ValueError('External ID not found in the system: %s' % (xmlid))
    return ids[0], res['model'], res['res_id']
def xmlid_to_res_model_res_id(self, cr, uid, xmlid, raise_if_not_found=False):
    """Return (res_model, res_id) for *xmlid*; (False, False) when missing,
    unless *raise_if_not_found* is set, in which case ValueError propagates."""
    try:
        _record_id, res_model, res_id = self.xmlid_lookup(cr, uid, xmlid)
    except ValueError:
        if not raise_if_not_found:
            return (False, False)
        raise
    return (res_model, res_id)
def xmlid_to_res_id(self, cr, uid, xmlid, raise_if_not_found=False):
    """ Returns res_id (False when not found, unless raise_if_not_found) """
    return self.xmlid_to_res_model_res_id(cr, uid, xmlid, raise_if_not_found)[1]
def xmlid_to_object(self, cr, uid, xmlid, raise_if_not_found=False, context=None):
    """ Return a browse_record
    if not found and raise_if_not_found is False return None
    """
    t = self.xmlid_to_res_model_res_id(cr, uid, xmlid, raise_if_not_found)
    res_model, res_id = t

    if res_model and res_id:
        record = self.pool[res_model].browse(cr, uid, res_id, context=context)
        # the xmlid entry may point at an already-deleted record
        if record.exists():
            return record
        if raise_if_not_found:
            raise ValueError('No record found for unique ID %s. It may have been deleted.' % (xmlid))
    return None
# OLD API
def _get_id(self, cr, uid, module, xml_id):
    """Returns the id of the ir.model.data record corresponding to a given module and xml_id (cached) or raise a ValueError if not found"""
    return self.xmlid_lookup(cr, uid, "%s.%s" % (module, xml_id))[0]
def get_object_reference(self, cr, uid, module, xml_id):
    """Returns (model, res_id) corresponding to a given module and xml_id (cached) or raise ValueError if not found"""
    return self.xmlid_lookup(cr, uid, "%s.%s" % (module, xml_id))[1:3]
def check_object_reference(self, cr, uid, module, xml_id, raise_on_access_error=False):
    """Returns (model, res_id) corresponding to a given module and xml_id (cached), if and only if the user has the necessary access rights
    to see that object, otherwise raise a ValueError if raise_on_access_error is True or returns a tuple (model found, False)"""
    model, res_id = self.get_object_reference(cr, uid, module, xml_id)
    #search on id found in result to check if current user has read access right
    check_right = self.pool.get(model).search(cr, uid, [('id', '=', res_id)])
    if check_right:
        return model, res_id
    if raise_on_access_error:
        # NOTE(review): docstring above says ValueError but an AccessError
        # is raised here — confirm which callers depend on.
        raise AccessError('Not enough access rights on the external ID: %s.%s' % (module, xml_id))
    return model, False
def get_object(self, cr, uid, module, xml_id, context=None):
    """ Returns a browsable record for the given module name and xml_id.
        Raises ValueError when the external id is unknown or points at a
        deleted record (raise_if_not_found is always enabled here).
    """
    return self.xmlid_to_object(cr, uid, "%s.%s" % (module, xml_id), raise_if_not_found=True, context=context)
def _update_dummy(self,cr, uid, model, module, xml_id=False, store=True):
    """Register an existing (module, xml_id) pair (and its _inherits
    parents) into self.loads without touching the database; returns the
    target record id, or False on any failure (best effort)."""
    if not xml_id:
        return False
    id = False
    try:
        # One step to check the ID is defined and the record actually exists
        record = self.get_object(cr, uid, module, xml_id)
        if record:
            id = record.id
            self.loads[(module,xml_id)] = (model,id)
            # also register the delegated (_inherits) parent records under
            # their conventional '<xml_id>_<table>' external ids
            for table, inherit_field in self.pool[model]._inherits.iteritems():
                parent_id = record[inherit_field].id
                parent_xid = '%s_%s' % (xml_id, table.replace('.', '_'))
                self.loads[(module, parent_xid)] = (table, parent_id)
    except Exception:
        # deliberately best-effort: any lookup failure just yields False
        pass
    return id
def clear_caches(self):
    """ Clears all orm caches on the object's methods
    :returns: itself
    """
    self.xmlid_lookup.clear_cache(self)
    return self
def unlink(self, cr, uid, ids, context=None):
    """ Regular unlink method, but make sure to clear the caches. """
    self.clear_caches()
    return super(ir_model_data,self).unlink(cr, uid, ids, context=context)
def _update(self,cr, uid, model, module, values, xml_id=False, store=True, noupdate=False, mode='init', res_id=False, context=None):
    """Create or update a record identified by an external id.

    Used by the data-file loaders: looks up the existing ir.model.data
    entry for (module, xml_id), then writes the target record (honouring
    noupdate in 'update' mode) or creates it — together with ir.model.data
    entries for the record and its _inherits parents. Returns the target
    record id (possibly False when nothing was created/updated).
    """
    model_obj = self.pool[model]
    if not context:
        context = {}

    # records created during module install should not display the messages of OpenChatter
    context = dict(context, install_mode=True)
    if xml_id and ('.' in xml_id):
        assert len(xml_id.split('.'))==2, _("'%s' contains too many dots. XML ids should not contain dots ! These are used to refer to other modules data, as in module.reference_id") % xml_id
        module, xml_id = xml_id.split('.')
    action_id = False

    if xml_id:
        # fetch the existing external id entry together with the target
        # record (LEFT JOIN: real_id2 is NULL when the target is gone)
        cr.execute('''SELECT imd.id, imd.res_id, md.id, imd.model, imd.noupdate
                      FROM ir_model_data imd LEFT JOIN %s md ON (imd.res_id = md.id)
                      WHERE imd.module=%%s AND imd.name=%%s''' % model_obj._table,
                      (module, xml_id))
        results = cr.fetchall()
        for imd_id2,res_id2,real_id2,real_model,noupdate_imd in results:
            # In update mode, do not update a record if it's ir.model.data is flagged as noupdate
            if mode == 'update' and noupdate_imd:
                return res_id2
            if not real_id2:
                # stale entry: target record was deleted; drop the entry
                self.clear_caches()
                cr.execute('delete from ir_model_data where id=%s', (imd_id2,))
                res_id = False
            else:
                assert model == real_model, "External ID conflict, %s already refers to a `%s` record,"\
                    " you can't define a `%s` record with this ID." % (xml_id, real_model, model)
                res_id,action_id = res_id2,imd_id2

    if action_id and res_id:
        # existing record with a live external id: update both
        model_obj.write(cr, uid, [res_id], values, context=context)
        self.write(cr, uid, [action_id], {
            'date_update': time.strftime('%Y-%m-%d %H:%M:%S'),
            },context=context)
    elif res_id:
        # explicit res_id given but no external id entry yet
        model_obj.write(cr, uid, [res_id], values, context=context)
        if xml_id:
            if model_obj._inherits:
                # create entries for the delegated parents first
                for table in model_obj._inherits:
                    inherit_id = model_obj.browse(cr, uid,
                            res_id,context=context)[model_obj._inherits[table]]
                    self.create(cr, uid, {
                        'name': xml_id + '_' + table.replace('.', '_'),
                        'model': table,
                        'module': module,
                        'res_id': inherit_id.id,
                        'noupdate': noupdate,
                        },context=context)
            self.create(cr, uid, {
                'name': xml_id,
                'model': model,
                'module':module,
                'res_id':res_id,
                'noupdate': noupdate,
                },context=context)
    else:
        # no record yet: only create in 'init' mode, or in 'update' mode
        # when an external id is provided
        if mode=='init' or (mode=='update' and xml_id):
            res_id = model_obj.create(cr, uid, values, context=context)
            if xml_id:
                if model_obj._inherits:
                    for table in model_obj._inherits:
                        inherit_id = model_obj.browse(cr, uid,
                                res_id,context=context)[model_obj._inherits[table]]
                        self.create(cr, uid, {
                            'name': xml_id + '_' + table.replace('.', '_'),
                            'model': table,
                            'module': module,
                            'res_id': inherit_id.id,
                            'noupdate': noupdate,
                            },context=context)
                self.create(cr, uid, {
                    'name': xml_id,
                    'model': model,
                    'module': module,
                    'res_id': res_id,
                    'noupdate': noupdate
                    },context=context)

    if xml_id and res_id:
        # remember the mapping for this load session (incl. parents)
        self.loads[(module, xml_id)] = (model, res_id)
        for table, inherit_field in model_obj._inherits.iteritems():
            inherit_id = model_obj.read(cr, uid, [res_id],
                    [inherit_field])[0][inherit_field]
            self.loads[(module, xml_id + '_' + table.replace('.', '_'))] = (table, inherit_id)

    return res_id
def ir_set(self, cr, uid, key, key2, name, models, value, replace=True, isobject=False, meta=None, xml_id=False):
if isinstance(models[0], (list, tuple)):
model,res_id = models[0]
else:
res_id=None
model = models[0]
if res_id:
where = ' and res_id=%s' % (res_id,)
else:
where = ' and (res_id is null)'
if key2:
where += ' and key2=\'%s\'' % (key2,)
else:
where += ' and (key2 is null)'
cr.execute('select * from ir_values where model=%s and key=%s and name=%s'+where,(model, key, name))
res = cr.fetchone()
ir_values_obj = openerp.registry(cr.dbname)['ir.values']
if not res:
ir_values_obj.set(cr, uid, key, key2, name, models, value, replace, isobject, meta)
elif xml_id:
cr.execute('UPDATE ir_values set value=%s WHERE model=%s and key=%s and name=%s'+where,(value, model, key, name))
ir_values_obj.invalidate_cache(cr, uid, ['value'])
return True
    def _module_data_uninstall(self, cr, uid, modules_to_remove, context=None):
        """Deletes all the records referenced by the ir.model.data entries
        ``ids`` along with their corresponding database backed (including
        dropping tables, columns, FKs, etc, as long as there is no other
        ir.model.data entry holding a reference to them (which indicates that
        they are still owned by another module).
        Attempts to perform the deletion in an appropriate order to maximize
        the chance of gracefully deleting all records.
        This step is performed as part of the full uninstallation of a module.

        :param modules_to_remove: list of technical module names whose data
                                  records must be deleted
        """
        ids = self.search(cr, uid, [('module', 'in', modules_to_remove)])
        # Only the superuser or members of base.group_system may uninstall.
        if uid != 1 and not self.pool['ir.model.access'].check_groups(cr, uid, "base.group_system"):
            raise AccessError(_('Administrator access is required to uninstall a module'))
        # Work on a private copy of the context so the caller's dict is untouched.
        context = dict(context or {})
        context[MODULE_UNINSTALL_FLAG] = True # enable model/field deletion
        ids_set = set(ids)
        wkf_todo = []
        to_unlink = []
        # Delete in reverse id order: records created later tend to depend on
        # earlier ones, so dependants are removed before their dependencies.
        ids.sort()
        ids.reverse()
        for data in self.browse(cr, uid, ids, context):
            model = data.model
            res_id = data.res_id
            pair_to_unlink = (model, res_id)
            if pair_to_unlink not in to_unlink:
                to_unlink.append(pair_to_unlink)
            if model == 'workflow.activity':
                # Special treatment for workflow activities: temporarily revert their
                # incoming transition and trigger an update to force all workflow items
                # to move out before deleting them
                cr.execute('select res_type,res_id from wkf_instance where id IN (select inst_id from wkf_workitem where act_id=%s)', (res_id,))
                wkf_todo.extend(cr.fetchall())
                cr.execute("update wkf_transition set condition='True', group_id=NULL, signal=NULL,act_to=act_from,act_from=%s where act_to=%s", (res_id,res_id))
                self.invalidate_cache(cr, uid, context=context)
        # Re-evaluate every workflow instance touched above so work items
        # leave the activities that are about to be deleted.
        for model,res_id in wkf_todo:
            try:
                openerp.workflow.trg_write(uid, model, res_id, cr)
            except Exception:
                _logger.info('Unable to force processing of workflow for item %s@%s in order to leave activity to be deleted', res_id, model, exc_info=True)
        def unlink_if_refcount(to_unlink):
            # Delete each (model, res_id) pair unless another module (outside
            # ids_set) still references it through its own ir.model.data entry.
            for model, res_id in to_unlink:
                external_ids = self.search(cr, uid, [('model', '=', model),('res_id', '=', res_id)])
                if set(external_ids)-ids_set:
                    # if other modules have defined this record, we must not delete it
                    continue
                if model == 'ir.model.fields':
                    # Don't remove the LOG_ACCESS_COLUMNS unless _log_access
                    # has been turned off on the model.
                    field = self.pool[model].browse(cr, uid, [res_id], context=context)[0]
                    if not field.exists():
                        _logger.info('Deleting orphan external_ids %s', external_ids)
                        self.unlink(cr, uid, external_ids)
                        continue
                    if field.name in openerp.models.LOG_ACCESS_COLUMNS and self.pool[field.model]._log_access:
                        continue
                    if field.name == 'id':
                        continue
                _logger.info('Deleting %s@%s', res_id, model)
                try:
                    # Savepoint so one failed deletion cannot poison the
                    # whole transaction: roll back just this record on error.
                    cr.execute('SAVEPOINT record_unlink_save')
                    self.pool[model].unlink(cr, uid, [res_id], context=context)
                except Exception:
                    _logger.info('Unable to delete %s@%s', res_id, model, exc_info=True)
                    cr.execute('ROLLBACK TO SAVEPOINT record_unlink_save')
                else:
                    cr.execute('RELEASE SAVEPOINT record_unlink_save')
        # Remove non-model records first, then model fields, and finish with models
        unlink_if_refcount((model, res_id) for model, res_id in to_unlink
                              if model not in ('ir.model','ir.model.fields','ir.model.constraint'))
        unlink_if_refcount((model, res_id) for model, res_id in to_unlink
                              if model == 'ir.model.constraint')
        # Drop the SQL constraints owned by the removed modules before the
        # fields they apply to are deleted.
        ir_module_module = self.pool['ir.module.module']
        ir_model_constraint = self.pool['ir.model.constraint']
        modules_to_remove_ids = ir_module_module.search(cr, uid, [('name', 'in', modules_to_remove)], context=context)
        constraint_ids = ir_model_constraint.search(cr, uid, [('module', 'in', modules_to_remove_ids)], context=context)
        ir_model_constraint._module_data_uninstall(cr, uid, constraint_ids, context)
        unlink_if_refcount((model, res_id) for model, res_id in to_unlink
                              if model == 'ir.model.fields')
        # Drop the many2many relation tables owned by the removed modules.
        ir_model_relation = self.pool['ir.model.relation']
        relation_ids = ir_model_relation.search(cr, uid, [('module', 'in', modules_to_remove_ids)])
        ir_model_relation._module_data_uninstall(cr, uid, relation_ids, context)
        unlink_if_refcount((model, res_id) for model, res_id in to_unlink
                              if model == 'ir.model')
        cr.commit()
        # Finally remove the ir.model.data entries themselves.
        self.unlink(cr, uid, ids, context)
def _process_end(self, cr, uid, modules):
""" Clear records removed from updated module data.
This method is called at the end of the module loading process.
It is meant to removed records that are no longer present in the
updated data. Such records are recognised as the one with an xml id
and a module in ir_model_data and noupdate set to false, but not
present in self.loads.
"""
if not modules or config.get('import_partial'):
return True
bad_imd_ids = []
context = {MODULE_UNINSTALL_FLAG: True}
cr.execute("""SELECT id,name,model,res_id,module FROM ir_model_data
WHERE module IN %s AND res_id IS NOT NULL AND noupdate=%s ORDER BY id DESC
""", (tuple(modules), False))
for (id, name, model, res_id, module) in cr.fetchall():
if (module, name) not in self.loads:
if model in self.pool:
_logger.info('Deleting %s@%s (%s.%s)', res_id, model, module, name)
if self.pool[model].exists(cr, uid, [res_id], context=context):
self.pool[model].unlink(cr, uid, [res_id], context=context)
else:
bad_imd_ids.append(id)
if bad_imd_ids:
self.unlink(cr, uid, bad_imd_ids, context=context)
self.loads.clear()
class wizard_model_menu(osv.osv_memory):
    """Wizard creating a menu entry (plus its window action) for a model."""
    _name = 'wizard.ir.model.menu.create'
    _columns = {
        'menu_id': fields.many2one('ir.ui.menu', 'Parent Menu', required=True),
        'name': fields.char('Menu Name', required=True),
    }

    def menu_create(self, cr, uid, ids, context=None):
        """For each wizard record, create an act_window on the model given by
        ``context['model_id']`` and hang it under the chosen parent menu.
        """
        context = context or {}
        ir_model = self.pool.get('ir.model')
        actions_pool = self.pool.get('ir.actions.act_window')
        menus_pool = self.pool.get('ir.ui.menu')
        for wizard in self.browse(cr, uid, ids, context):
            model = ir_model.browse(cr, uid, context.get('model_id'), context=context)
            action_id = actions_pool.create(cr, uid, {
                'name': wizard.name,
                'res_model': model.model,
                'view_type': 'form',
                'view_mode': 'tree,form',
            })
            menus_pool.create(cr, uid, {
                'name': wizard.name,
                'parent_id': wizard.menu_id.id,
                'action': 'ir.actions.act_window,%d' % (action_id,),
                'icon': 'STOCK_INDENT',
            }, context)
        return {'type': 'ir.actions.act_window_close'}
| agpl-3.0 |
hendradarwin/VTK | Common/ComputationalGeometry/Testing/Python/TestParametricFunctions.py | 15 | 25978 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import vtk
import vtk.test.Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# ------------------------------------------------------------
# Purpose: Test the parametric functions.
# ------------------------------------------------------------
class TestParametricFunctions(vtk.test.Testing.vtkTest):
    """Exercise every vtkParametric* source.

    One actor plus one text label is built for each parametric surface/curve;
    everything is rendered in a grid and compared against the stored baseline
    image.
    """

    def _makeLabel(self, text, x, y):
        """Return a vtkActor2D displaying *text* centred at world position (x, y, 0).

        Every label in this test shares the same appearance: centred red
        14 pt text, positioned in world coordinates below its object.
        """
        textMapper = vtk.vtkTextMapper()
        textMapper.SetInput(text)
        textProperty = textMapper.GetTextProperty()
        textProperty.SetJustificationToCentered()
        textProperty.SetVerticalJustificationToCentered()
        textProperty.SetColor(1, 0, 0)
        textProperty.SetFontSize(14)
        textActor = vtk.vtkActor2D()
        textActor.SetMapper(textMapper)
        textActor.GetPositionCoordinate().SetCoordinateSystemToWorld()
        textActor.GetPositionCoordinate().SetValue(x, y, 0)
        return textActor

    def testParametricFunctions(self):
        """Build one actor and one label per parametric function and render."""
        # ------------------------------------------------------------
        # Get a texture (shared by the textured surfaces below)
        # ------------------------------------------------------------
        textureReader = vtk.vtkJPEGReader()
        textureReader.SetFileName(VTK_DATA_ROOT + "/Data/beach.jpg")
        texture = vtk.vtkTexture()
        texture.SetInputConnection(textureReader.GetOutputPort())

        # For each parametric surface:
        #   1) Create it
        #   2) Assign mappers and actors
        #   3) Position the object
        #   4) Build its label with self._makeLabel()

        # ------------------------------------------------------------
        # Create a torus
        # ------------------------------------------------------------
        torus = vtk.vtkParametricTorus()
        torusSource = vtk.vtkParametricFunctionSource()
        torusSource.SetParametricFunction(torus)
        torusSource.SetScalarModeToPhase()
        torusMapper = vtk.vtkPolyDataMapper()
        torusMapper.SetInputConnection(torusSource.GetOutputPort())
        torusMapper.SetScalarRange(0, 360)
        torusActor = vtk.vtkActor()
        torusActor.SetMapper(torusMapper)
        torusActor.SetPosition(0, 12, 0)
        torusTextActor = self._makeLabel("Torus", 0, 9.5)

        # ------------------------------------------------------------
        # Create a Klein bottle
        # ------------------------------------------------------------
        klein = vtk.vtkParametricKlein()
        kleinSource = vtk.vtkParametricFunctionSource()
        kleinSource.SetParametricFunction(klein)
        kleinSource.SetScalarModeToU0V0()
        kleinMapper = vtk.vtkPolyDataMapper()
        kleinMapper.SetInputConnection(kleinSource.GetOutputPort())
        kleinMapper.SetScalarRange(0, 3)
        kleinActor = vtk.vtkActor()
        kleinActor.SetMapper(kleinMapper)
        kleinActor.SetPosition(8, 10.5, 0)
        kleinTextActor = self._makeLabel("Klein", 8, 9.5)

        # ------------------------------------------------------------
        # Create a Figure-8 Klein
        # ------------------------------------------------------------
        klein2 = vtk.vtkParametricFigure8Klein()
        klein2Source = vtk.vtkParametricFunctionSource()
        klein2Source.SetParametricFunction(klein2)
        klein2Source.GenerateTextureCoordinatesOn()
        klein2Mapper = vtk.vtkPolyDataMapper()
        klein2Mapper.SetInputConnection(klein2Source.GetOutputPort())
        klein2Mapper.SetScalarRange(0, 3)
        klein2Actor = vtk.vtkActor()
        klein2Actor.SetMapper(klein2Mapper)
        klein2Actor.SetPosition(16, 12, 0)
        klein2Actor.SetTexture(texture)
        fig8KleinTextActor = self._makeLabel("Fig-8.Klein", 16, 9.5)

        # ------------------------------------------------------------
        # Create a Mobius strip
        # ------------------------------------------------------------
        mobius = vtk.vtkParametricMobius()
        mobiusSource = vtk.vtkParametricFunctionSource()
        mobiusSource.SetParametricFunction(mobius)
        mobiusSource.GenerateTextureCoordinatesOn()
        mobiusMapper = vtk.vtkPolyDataMapper()
        mobiusMapper.SetInputConnection(mobiusSource.GetOutputPort())
        mobiusActor = vtk.vtkActor()
        mobiusActor.SetMapper(mobiusMapper)
        mobiusActor.RotateX(45)
        mobiusActor.SetPosition(24, 12, 0)
        mobiusActor.SetTexture(texture)
        mobiusTextActor = self._makeLabel("Mobius", 24, 9.5)

        # ------------------------------------------------------------
        # Create a super toroid
        # ------------------------------------------------------------
        toroid = vtk.vtkParametricSuperToroid()
        toroid.SetN1(2)
        toroid.SetN2(3)
        toroidSource = vtk.vtkParametricFunctionSource()
        toroidSource.SetParametricFunction(toroid)
        toroidSource.SetScalarModeToU()
        toroidMapper = vtk.vtkPolyDataMapper()
        toroidMapper.SetInputConnection(toroidSource.GetOutputPort())
        toroidMapper.SetScalarRange(0, 6.28)
        toroidActor = vtk.vtkActor()
        toroidActor.SetMapper(toroidMapper)
        toroidActor.SetPosition(0, 4, 0)
        superToroidTextActor = self._makeLabel("Super.Toroid", 0, 1.5)

        # ------------------------------------------------------------
        # Create a super ellipsoid
        # ------------------------------------------------------------
        superEllipsoid = vtk.vtkParametricSuperEllipsoid()
        superEllipsoid.SetXRadius(1.25)
        superEllipsoid.SetYRadius(1.5)
        superEllipsoid.SetZRadius(1.0)
        superEllipsoid.SetN1(1.1)
        superEllipsoid.SetN2(1.75)
        superEllipsoidSource = vtk.vtkParametricFunctionSource()
        superEllipsoidSource.SetParametricFunction(superEllipsoid)
        superEllipsoidSource.SetScalarModeToV()
        superEllipsoidMapper = vtk.vtkPolyDataMapper()
        superEllipsoidMapper.SetInputConnection(superEllipsoidSource.GetOutputPort())
        superEllipsoidMapper.SetScalarRange(0, 3.14)
        superEllipsoidActor = vtk.vtkActor()
        superEllipsoidActor.SetMapper(superEllipsoidMapper)
        superEllipsoidActor.SetPosition(8, 4, 0)
        superEllipsoidTextActor = self._makeLabel("Super.Ellipsoid", 8, 1.5)

        # ------------------------------------------------------------
        # Create an open 1D spline
        # ------------------------------------------------------------
        splinePoints = [
          [0.50380158308139134, -0.60679315105396936, -0.37248976406291578],
          [-0.4354646054261665, -0.85362339758017258, -0.84844312996065385],
          [0.2163147512899315, -0.39797507012168643, -0.76700353518454523],
          [0.97158415334838644, -0.58513467367046257, -0.35846037946569753],
          [-0.64359767997804918, -0.94620739107309249, -0.90762176546623086],
          [-0.39901219094126117, -0.1978931497772658, 0.0098316934936828471],
          [-0.75872745167404765, 0.067719714281950116, 0.165237936733867],
          [-0.84599731389712418, -0.67685466896596114, 0.10357868909071133],
          [0.84702754758625654, -0.0080077177882230677, -0.58571286666473044],
          [-0.076150034124101484, 0.14637647622561856, 0.1494359239700418] ]
        inputPoints = vtk.vtkPoints()
        for i, point in enumerate(splinePoints):
            inputPoints.InsertPoint(i, point)

        spline = vtk.vtkParametricSpline()
        spline.SetPoints(inputPoints)
        spline.ClosedOff()
        splineSource = vtk.vtkParametricFunctionSource()
        splineSource.SetParametricFunction(spline)
        splineMapper = vtk.vtkPolyDataMapper()
        splineMapper.SetInputConnection(splineSource.GetOutputPort())
        splineActor = vtk.vtkActor()
        splineActor.SetMapper(splineMapper)
        splineActor.SetPosition(16, 4, 0)
        splineActor.GetProperty().SetColor(0, 0, 0)
        splineTextActor = self._makeLabel("Open.Spline", 16, 1.5)

        # ------------------------------------------------------------
        # Create a closed 1D spline (same points as above, but closed)
        # ------------------------------------------------------------
        spline2 = vtk.vtkParametricSpline()
        spline2.SetPoints(inputPoints)
        spline2.ClosedOn()
        spline2Source = vtk.vtkParametricFunctionSource()
        spline2Source.SetParametricFunction(spline2)
        spline2Mapper = vtk.vtkPolyDataMapper()
        spline2Mapper.SetInputConnection(spline2Source.GetOutputPort())
        spline2Actor = vtk.vtkActor()
        spline2Actor.SetMapper(spline2Mapper)
        spline2Actor.SetPosition(24, 4, 0)
        spline2Actor.GetProperty().SetColor(0, 0, 0)
        spline2TextActor = self._makeLabel("Closed.Spline", 24, 1.5)

        # ------------------------------------------------------------
        # Create a spiral conic
        # ------------------------------------------------------------
        sconic = vtk.vtkParametricConicSpiral()
        sconic.SetA(0.8)
        sconic.SetB(2.5)
        sconic.SetC(0.4)
        sconicSource = vtk.vtkParametricFunctionSource()
        sconicSource.SetParametricFunction(sconic)
        sconicSource.SetScalarModeToDistance()
        sconicMapper = vtk.vtkPolyDataMapper()
        sconicMapper.SetInputConnection(sconicSource.GetOutputPort())
        sconicMapper.SetScalarRange(0, 9)
        sconicActor = vtk.vtkActor()
        sconicActor.SetMapper(sconicMapper)
        sconicActor.SetPosition(0, -4, 0)
        sconicActor.SetScale(1.2, 1.2, 1.2)
        sconicTextActor = self._makeLabel("Spiral.Conic", 0, -6.5)

        # ------------------------------------------------------------
        # Create Boy's surface
        # ------------------------------------------------------------
        boy = vtk.vtkParametricBoy()
        boySource = vtk.vtkParametricFunctionSource()
        boySource.SetParametricFunction(boy)
        boySource.SetScalarModeToModulus()
        boyMapper = vtk.vtkPolyDataMapper()
        boyMapper.SetInputConnection(boySource.GetOutputPort())
        boyMapper.SetScalarRange(0, 2)
        boyActor = vtk.vtkActor()
        boyActor.SetMapper(boyMapper)
        boyActor.SetPosition(8, -4, 0)
        boyActor.SetScale(1.5, 1.5, 1.5)
        boyTextActor = self._makeLabel("Boy", 8, -6.5)

        # ------------------------------------------------------------
        # Create a cross cap
        # ------------------------------------------------------------
        crossCap = vtk.vtkParametricCrossCap()
        crossCapSource = vtk.vtkParametricFunctionSource()
        crossCapSource.SetParametricFunction(crossCap)
        crossCapSource.SetScalarModeToY()
        crossCapMapper = vtk.vtkPolyDataMapper()
        crossCapMapper.SetInputConnection(crossCapSource.GetOutputPort())
        crossCapActor = vtk.vtkActor()
        crossCapActor.SetMapper(crossCapMapper)
        crossCapActor.RotateX(65)
        crossCapActor.SetPosition(16, -4, 0)
        crossCapActor.SetScale(1.5, 1.5, 1.5)
        crossCapTextActor = self._makeLabel("Cross.Cap", 16, -6.5)

        # ------------------------------------------------------------
        # Create Dini's surface
        # ------------------------------------------------------------
        dini = vtk.vtkParametricDini()
        diniSource = vtk.vtkParametricFunctionSource()
        diniSource.SetParametricFunction(dini)
        diniSource.SetScalarModeToDistance()
        diniMapper = vtk.vtkPolyDataMapper()
        diniMapper.SetInputConnection(diniSource.GetOutputPort())
        diniActor = vtk.vtkActor()
        diniActor.SetMapper(diniMapper)
        diniActor.RotateX(-90)
        diniActor.SetPosition(24, -3, 0)
        diniActor.SetScale(1.5, 1.5, 0.5)
        diniTextActor = self._makeLabel("Dini", 24, -6.5)

        # ------------------------------------------------------------
        # Create Enneper's surface
        # ------------------------------------------------------------
        enneper = vtk.vtkParametricEnneper()
        enneperSource = vtk.vtkParametricFunctionSource()
        enneperSource.SetParametricFunction(enneper)
        enneperSource.SetScalarModeToQuadrant()
        enneperMapper = vtk.vtkPolyDataMapper()
        enneperMapper.SetInputConnection(enneperSource.GetOutputPort())
        enneperMapper.SetScalarRange(1, 4)
        enneperActor = vtk.vtkActor()
        enneperActor.SetMapper(enneperMapper)
        enneperActor.SetPosition(0, -12, 0)
        enneperActor.SetScale(0.25, 0.25, 0.25)
        enneperTextActor = self._makeLabel("Enneper", 0, -14.5)

        # ------------------------------------------------------------
        # Create an ellipsoidal surface
        # ------------------------------------------------------------
        ellipsoid = vtk.vtkParametricEllipsoid()
        ellipsoid.SetXRadius(1)
        ellipsoid.SetYRadius(0.75)
        ellipsoid.SetZRadius(0.5)
        ellipsoidSource = vtk.vtkParametricFunctionSource()
        ellipsoidSource.SetParametricFunction(ellipsoid)
        ellipsoidSource.SetScalarModeToZ()
        ellipsoidMapper = vtk.vtkPolyDataMapper()
        ellipsoidMapper.SetInputConnection(ellipsoidSource.GetOutputPort())
        ellipsoidMapper.SetScalarRange(-0.5, 0.5)
        ellipsoidActor = vtk.vtkActor()
        ellipsoidActor.SetMapper(ellipsoidMapper)
        ellipsoidActor.SetPosition(8, -12, 0)
        ellipsoidActor.SetScale(1.5, 1.5, 1.5)
        ellipsoidTextActor = self._makeLabel("Ellipsoid", 8, -14.5)

        # ------------------------------------------------------------
        # Create a surface with random hills on it.
        # ------------------------------------------------------------
        randomHills = vtk.vtkParametricRandomHills()
        randomHills.AllowRandomGenerationOn()
        randomHillsSource = vtk.vtkParametricFunctionSource()
        randomHillsSource.SetParametricFunction(randomHills)
        randomHillsSource.GenerateTextureCoordinatesOn()
        randomHillsMapper = vtk.vtkPolyDataMapper()
        randomHillsMapper.SetInputConnection(randomHillsSource.GetOutputPort())
        randomHillsActor = vtk.vtkActor()
        randomHillsActor.SetMapper(randomHillsMapper)
        randomHillsActor.SetPosition(16, -14, 0)
        randomHillsActor.SetScale(0.2, 0.2, 0.2)
        randomHillsActor.SetTexture(texture)
        randomHillsTextActor = self._makeLabel("Random.Hills", 16, -14.5)

        # ------------------------------------------------------------
        # Create Steiner's Roman Surface.
        # ------------------------------------------------------------
        roman = vtk.vtkParametricRoman()
        roman.SetRadius(1.5)
        romanSource = vtk.vtkParametricFunctionSource()
        romanSource.SetParametricFunction(roman)
        romanSource.SetScalarModeToX()
        romanMapper = vtk.vtkPolyDataMapper()
        romanMapper.SetInputConnection(romanSource.GetOutputPort())
        romanActor = vtk.vtkActor()
        romanActor.SetMapper(romanMapper)
        romanActor.SetPosition(24, -12, 0)
        romanTextActor = self._makeLabel("Roman", 24, -14.5)

        # ------------------------------------------------------------
        # Create the RenderWindow, Renderer and add all the actors
        # ------------------------------------------------------------
        ren = vtk.vtkRenderer()
        renWin = vtk.vtkRenderWindow()
        renWin.AddRenderer(ren)
        iren = vtk.vtkRenderWindowInteractor()
        iren.SetRenderWindow(renWin)

        # surface actors, in the same order they were added originally
        surfaceActors = [
            torusActor, kleinActor, klein2Actor, toroidActor,
            superEllipsoidActor, mobiusActor, splineActor, spline2Actor,
            sconicActor, boyActor, crossCapActor, diniActor,
            enneperActor, ellipsoidActor, randomHillsActor, romanActor,
        ]
        # text actors, in the same order they were added originally
        textActors = [
            torusTextActor, kleinTextActor, fig8KleinTextActor,
            mobiusTextActor, superToroidTextActor, superEllipsoidTextActor,
            splineTextActor, spline2TextActor, sconicTextActor, boyTextActor,
            crossCapTextActor, diniTextActor, enneperTextActor,
            ellipsoidTextActor, randomHillsTextActor, romanTextActor,
        ]
        for prop in surfaceActors + textActors:
            ren.AddViewProp(prop)

        ren.SetBackground(0.7, 0.8, 1)
        renWin.SetSize(500, 500)
        ren.ResetCamera()
        ren.GetActiveCamera().Zoom(1.3)

        iren.Initialize()
        renWin.Render()
        img_file = "TestParametricFunctions.png"
        # NOTE: this test has a companion .tcl test. The threshold set
        # here should be the same as the threshold in the .tcl
        # test. Both tests should produce exactly the same results.
        vtk.test.Testing.compareImage(iren.GetRenderWindow(), vtk.test.Testing.getAbsImagePath(img_file), threshold=10)
        vtk.test.Testing.interact()
# Run through VTK's testing harness when executed directly.
if __name__ == "__main__":
     vtk.test.Testing.main([(TestParametricFunctions, 'test')])
| bsd-3-clause |
ivanhorvath/openshift-tools | openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_openshift/src/ansible/oc_label.py | 84 | 1037 | # pylint: skip-file
# flake8: noqa
def main():
    ''' ansible oc module for labels '''
    # Declare the accepted module parameters up-front, then hand them to
    # AnsibleModule together with the check-mode / exclusivity settings.
    argument_spec = dict(
        kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
        state=dict(default='present', type='str',
                   choices=['present', 'absent', 'list', 'add']),
        debug=dict(default=False, type='bool'),
        kind=dict(default='node', type='str',
                  choices=['node', 'pod', 'namespace']),
        name=dict(default=None, type='str'),
        namespace=dict(default=None, type='str'),
        labels=dict(default=None, type='list'),
        selector=dict(default=None, type='str'),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        mutually_exclusive=(['name', 'selector']),
    )

    # Delegate the actual work to OCLabel and report back to ansible.
    results = OCLabel.run_ansible(module.params, module.check_mode)
    if 'failed' in results:
        module.fail_json(**results)
    module.exit_json(**results)
# Entry point when invoked as an ansible module.
if __name__ == '__main__':
    main()
| apache-2.0 |
nghia-huynh/gem5-stable | src/arch/x86/isa/insts/general_purpose/compare_and_test/test.py | 91 | 2776 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop TEST_M_R
{
ld t1, seg, sib, disp
and t0, t1, reg, flags=(OF, SF, ZF, PF, CF)
};
def macroop TEST_P_R
{
rdip t7
ld t1, seg, riprel, disp
and t0, t1, reg, flags=(OF, SF, ZF, PF, CF)
};
def macroop TEST_R_R
{
and t0, reg, regm, flags=(OF, SF, ZF, PF, CF)
};
def macroop TEST_M_I
{
ld t1, seg, sib, disp
limm t2, imm
and t0, t1, t2, flags=(OF, SF, ZF, PF, CF)
};
def macroop TEST_P_I
{
rdip t7
ld t1, seg, riprel, disp
limm t2, imm
and t0, t1, t2, flags=(OF, SF, ZF, PF, CF)
};
def macroop TEST_R_I
{
limm t1, imm
and t0, reg, t1, flags=(OF, SF, ZF, PF, CF)
};
'''
| bsd-3-clause |
tbinjiayou/Odoo | addons/l10n_be_coda/wizard/account_coda_import.py | 45 | 23190 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2012 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp import tools
import logging
_logger = logging.getLogger(__name__)
class account_coda_import(osv.osv_memory):
    # Transient wizard: the user uploads a CODA file which is parsed into
    # account.bank.statement records plus their statement lines.
    _name = 'account.coda.import'
    _description = 'Import CODA File'
    _columns = {
        'coda_data': fields.binary('CODA File', required=True),
        'coda_fname': fields.char('CODA Filename', required=True),
        'note': fields.text('Log'),
    }
    _defaults = {
        'coda_fname': 'coda.txt',
    }

    def coda_parsing(self, cr, uid, ids, context=None, batch=False, codafile=None, codafilename=None):
        """Parse a base64-encoded CODA file and create bank statements.

        When ``batch`` is True the file content/name come from the
        ``codafile``/``codafilename`` arguments; otherwise they are read
        from this wizard record.  The parser dispatches on the CODA record
        type found in the first character(s) of each fixed-width line:
        '0' statement header, '1' statement details and opening balance,
        '2' movement records (2.1/2.2/2.3), '3' information records,
        '4' free communication, '8' closing balance, '9' totals.  It first
        accumulates plain dicts per statement, then (second pass) validates
        balances, creates the statements and their lines in the database,
        and returns the bank-reconciliation client action for them.

        Raises osv.except_osv with an R-coded message on any unsupported
        or malformed record.
        """
        if context is None:
            context = {}
        if batch:
            codafile = str(codafile)
            codafilename = codafilename
        else:
            data = self.browse(cr, uid, ids)[0]
            try:
                codafile = data.coda_data
                codafilename = data.coda_fname
            except:
                raise osv.except_osv(_('Error'), _('Wizard in incorrect state. Please hit the Cancel button'))
                return {}
        # CODA files are windows-1252 encoded; one fixed-width record per line.
        recordlist = unicode(base64.decodestring(codafile), 'windows-1252', 'strict').split('\n')
        statements = []
        # movement ref -> communication of its globalisation line, so detail
        # lines without their own communication can inherit it
        globalisation_comm = {}
        for line in recordlist:
            if not line:
                pass
            elif line[0] == '0':
                #Begin of a new Bank statement
                statement = {}
                statements.append(statement)
                statement['version'] = line[127]
                if statement['version'] not in ['1', '2']:
                    raise osv.except_osv(_('Error') + ' R001', _('CODA V%s statements are not supported, please contact your bank') % statement['version'])
                statement['lines'] = []
                statement['date'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[5:11]), '%d%m%y'))
                statement['separateApplication'] = rmspaces(line[83:88])
            elif line[0] == '1':
                #Statement details
                if statement['version'] == '1':
                    statement['acc_number'] = rmspaces(line[5:17])
                    statement['currency'] = rmspaces(line[18:21])
                elif statement['version'] == '2':
                    if line[1] == '0': # Belgian bank account BBAN structure
                        statement['acc_number'] = rmspaces(line[5:17])
                        statement['currency'] = rmspaces(line[18:21])
                    elif line[1] == '1': # foreign bank account BBAN structure
                        raise osv.except_osv(_('Error') + ' R1001', _('Foreign bank accounts with BBAN structure are not supported '))
                    elif line[1] == '2': # Belgian bank account IBAN structure
                        statement['acc_number'] = rmspaces(line[5:21])
                        statement['currency'] = rmspaces(line[39:42])
                    elif line[1] == '3': # foreign bank account IBAN structure
                        raise osv.except_osv(_('Error') + ' R1002', _('Foreign bank accounts with IBAN structure are not supported '))
                    else: # Something else, not supported
                        raise osv.except_osv(_('Error') + ' R1003', _('Unsupported bank account structure '))
                statement['journal_id'] = False
                statement['bank_account'] = False
                # Belgian Account Numbers are composed of 12 digits.
                # In OpenERP, the user can fill the bank number in any format: With or without IBan code, with or without spaces, with or without '-'
                # The two following sql requests handle those cases.
                if len(statement['acc_number']) >= 12:
                    # If the Account Number is >= 12 digits, it is mostlikely a Belgian Account Number (With or without IBAN).
                    # The following request try to find the Account Number using a 'like' operator.
                    # So, if the Account Number is stored with IBAN code, it can be found thanks to this.
                    cr.execute("select id from res_partner_bank where replace(replace(acc_number,' ',''),'-','') like %s", ('%' + statement['acc_number'] + '%',))
                else:
                    # This case is necessary to avoid cases like the Account Number in the CODA file is set to a single or few digits,
                    # and so a 'like' operator would return the first account number in the database which matches.
                    cr.execute("select id from res_partner_bank where replace(replace(acc_number,' ',''),'-','') = %s", (statement['acc_number'],))
                bank_ids = [id[0] for id in cr.fetchall()]
                # Filter bank accounts which are not allowed
                bank_ids = self.pool.get('res.partner.bank').search(cr, uid, [('id', 'in', bank_ids)])
                if bank_ids and len(bank_ids) > 0:
                    bank_accs = self.pool.get('res.partner.bank').browse(cr, uid, bank_ids)
                    for bank_acc in bank_accs:
                        # A candidate journal is only usable when its currency
                        # matches the statement currency (journal currency, or
                        # company currency when the journal has none).
                        if bank_acc.journal_id.id and ((bank_acc.journal_id.currency.id and bank_acc.journal_id.currency.name == statement['currency']) or (not bank_acc.journal_id.currency.id and bank_acc.journal_id.company_id.currency_id.name == statement['currency'])):
                            statement['journal_id'] = bank_acc.journal_id
                            statement['bank_account'] = bank_acc
                            break
                if not statement['bank_account']:
                    raise osv.except_osv(_('Error') + ' R1004', _("No matching Bank Account (with Account Journal) found.\n\nPlease set-up a Bank Account with as Account Number '%s' and as Currency '%s' and an Account Journal.") % (statement['acc_number'], statement['currency']))
                statement['description'] = rmspaces(line[90:125])
                # Amounts are stored with 3 implied decimals, hence / 1000.
                statement['balance_start'] = float(rmspaces(line[43:58])) / 1000
                if line[42] == '1': #1 = Debit, the starting balance is negative
                    statement['balance_start'] = - statement['balance_start']
                statement['balance_start_date'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[58:64]), '%d%m%y'))
                statement['accountHolder'] = rmspaces(line[64:90])
                statement['paperSeqNumber'] = rmspaces(line[2:5])
                statement['codaSeqNumber'] = rmspaces(line[125:128])
            elif line[0] == '2':
                if line[1] == '1':
                    #New statement line
                    statementLine = {}
                    statementLine['ref'] = rmspaces(line[2:10])
                    statementLine['ref_move'] = rmspaces(line[2:6])
                    statementLine['ref_move_detail'] = rmspaces(line[6:10])
                    statementLine['sequence'] = len(statement['lines']) + 1
                    statementLine['transactionRef'] = rmspaces(line[10:31])
                    statementLine['debit'] = line[31] # 0 = Credit, 1 = Debit
                    statementLine['amount'] = float(rmspaces(line[32:47])) / 1000
                    if statementLine['debit'] == '1':
                        statementLine['amount'] = - statementLine['amount']
                    statementLine['transactionDate'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[47:53]), '%d%m%y'))
                    statementLine['transaction_family'] = rmspaces(line[54:56])
                    statementLine['transaction_code'] = rmspaces(line[56:58])
                    statementLine['transaction_category'] = rmspaces(line[58:61])
                    if line[61] == '1':
                        #Structured communication
                        statementLine['communication_struct'] = True
                        statementLine['communication_type'] = line[62:65]
                        statementLine['communication'] = '+++' + line[65:68] + '/' + line[68:72] + '/' + line[72:77] + '+++'
                    else:
                        #Non-structured communication
                        statementLine['communication_struct'] = False
                        statementLine['communication'] = rmspaces(line[62:115])
                    statementLine['entryDate'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[115:121]), '%d%m%y'))
                    statementLine['type'] = 'normal'
                    statementLine['globalisation'] = int(line[124])
                    if statementLine['globalisation'] > 0:
                        # A globalisation line groups the detail lines that
                        # follow; remember its communication so that empty
                        # detail lines below can inherit it.
                        statementLine['type'] = 'globalisation'
                        globalisation_comm[statementLine['ref_move']] = statementLine['communication']
                    if not statementLine.get('communication'):
                        statementLine['communication'] = globalisation_comm.get(statementLine['ref_move'], '')
                    statement['lines'].append(statementLine)
                elif line[1] == '2':
                    # Movement record 2.2: continuation of the previous 2.1 line.
                    if statement['lines'][-1]['ref'][0:4] != line[2:6]:
                        raise osv.except_osv(_('Error') + 'R2004', _('CODA parsing error on movement data record 2.2, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
                    statement['lines'][-1]['communication'] += rmspaces(line[10:63])
                    statement['lines'][-1]['payment_reference'] = rmspaces(line[63:98])
                    statement['lines'][-1]['counterparty_bic'] = rmspaces(line[98:109])
                elif line[1] == '3':
                    # Movement record 2.3: counterparty details for the last line.
                    if statement['lines'][-1]['ref'][0:4] != line[2:6]:
                        raise osv.except_osv(_('Error') + 'R2005', _('CODA parsing error on movement data record 2.3, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
                    if statement['version'] == '1':
                        statement['lines'][-1]['counterpartyNumber'] = rmspaces(line[10:22])
                        statement['lines'][-1]['counterpartyName'] = rmspaces(line[47:73])
                        statement['lines'][-1]['counterpartyAddress'] = rmspaces(line[73:125])
                        statement['lines'][-1]['counterpartyCurrency'] = ''
                    else:
                        if line[22] == ' ':
                            statement['lines'][-1]['counterpartyNumber'] = rmspaces(line[10:22])
                            statement['lines'][-1]['counterpartyCurrency'] = rmspaces(line[23:26])
                        else:
                            statement['lines'][-1]['counterpartyNumber'] = rmspaces(line[10:44])
                            statement['lines'][-1]['counterpartyCurrency'] = rmspaces(line[44:47])
                        statement['lines'][-1]['counterpartyName'] = rmspaces(line[47:82])
                        statement['lines'][-1]['communication'] += rmspaces(line[82:125])
                else:
                    # movement data record 2.x (x != 1,2,3)
                    raise osv.except_osv(_('Error') + 'R2006', _('\nMovement data records of type 2.%s are not supported ') % line[1])
            elif line[0] == '3':
                if line[1] == '1':
                    # Information record 3.1: extra info attached as its own
                    # pseudo-line of type 'information'.
                    infoLine = {}
                    infoLine['entryDate'] = statement['lines'][-1]['entryDate']
                    infoLine['type'] = 'information'
                    infoLine['sequence'] = len(statement['lines']) + 1
                    infoLine['ref'] = rmspaces(line[2:10])
                    infoLine['transactionRef'] = rmspaces(line[10:31])
                    infoLine['transaction_family'] = rmspaces(line[32:34])
                    infoLine['transaction_code'] = rmspaces(line[34:36])
                    infoLine['transaction_category'] = rmspaces(line[36:39])
                    infoLine['communication'] = rmspaces(line[40:113])
                    statement['lines'].append(infoLine)
                elif line[1] == '2':
                    # 3.2/3.3 continue the communication of the last 3.1 record.
                    if infoLine['ref'] != rmspaces(line[2:10]):
                        raise osv.except_osv(_('Error') + 'R3004', _('CODA parsing error on information data record 3.2, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
                    statement['lines'][-1]['communication'] += rmspaces(line[10:100])
                elif line[1] == '3':
                    if infoLine['ref'] != rmspaces(line[2:10]):
                        raise osv.except_osv(_('Error') + 'R3005', _('CODA parsing error on information data record 3.3, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
                    statement['lines'][-1]['communication'] += rmspaces(line[10:100])
            elif line[0] == '4':
                # Free communication record, kept as a pseudo-line.
                comm_line = {}
                comm_line['type'] = 'communication'
                comm_line['sequence'] = len(statement['lines']) + 1
                comm_line['ref'] = rmspaces(line[2:10])
                comm_line['communication'] = rmspaces(line[32:112])
                statement['lines'].append(comm_line)
            elif line[0] == '8':
                # new balance record
                statement['debit'] = line[41]
                statement['paperSeqNumber'] = rmspaces(line[1:4])
                statement['balance_end_real'] = float(rmspaces(line[42:57])) / 1000
                statement['balance_end_realDate'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[57:63]), '%d%m%y'))
                if statement['debit'] == '1': # 1=Debit
                    statement['balance_end_real'] = - statement['balance_end_real']
                # Look up the accounting period from the new-balance date,
                # falling back to the statement header date.
                if statement['balance_end_realDate']:
                    period_id = self.pool.get('account.period').search(cr, uid, [('company_id', '=', statement['journal_id'].company_id.id), ('date_start', '<=', statement['balance_end_realDate']), ('date_stop', '>=', statement['balance_end_realDate'])])
                else:
                    period_id = self.pool.get('account.period').search(cr, uid, [('company_id', '=', statement['journal_id'].company_id.id), ('date_start', '<=', statement['date']), ('date_stop', '>=', statement['date'])])
                if not period_id and len(period_id) == 0:
                    raise osv.except_osv(_('Error') + 'R0002', _("The CODA Statement New Balance date doesn't fall within a defined Accounting Period! Please create the Accounting Period for date %s for the company %s.") % (statement['balance_end_realDate'], statement['journal_id'].company_id.name))
                statement['period_id'] = period_id[0]
            elif line[0] == '9':
                # Totals record: debit/credit totals, used to derive the
                # closing balance when no '8' record provided one.
                statement['balanceMin'] = float(rmspaces(line[22:37])) / 1000
                statement['balancePlus'] = float(rmspaces(line[37:52])) / 1000
                if not statement.get('balance_end_real'):
                    statement['balance_end_real'] = statement['balance_start'] + statement['balancePlus'] - statement['balanceMin']
        # Second pass: check the starting balances, then create the statements
        # and their lines in the database.
        for i, statement in enumerate(statements):
            statement['coda_note'] = ''
            balance_start_check_date = (len(statement['lines']) > 0 and statement['lines'][0]['entryDate']) or statement['date']
            # Closing balance of the latest previous statement on this journal.
            cr.execute('SELECT balance_end_real \
                FROM account_bank_statement \
                WHERE journal_id = %s and date <= %s \
                ORDER BY date DESC,id DESC LIMIT 1', (statement['journal_id'].id, balance_start_check_date))
            res = cr.fetchone()
            balance_start_check = res and res[0]
            if balance_start_check == None:
                # No previous statement found: fall back to the journal's
                # default account balance, provided debit and credit default
                # accounts coincide.
                if statement['journal_id'].default_debit_account_id and (statement['journal_id'].default_credit_account_id == statement['journal_id'].default_debit_account_id):
                    balance_start_check = statement['journal_id'].default_debit_account_id.balance
                else:
                    raise osv.except_osv(_('Error'), _("Configuration Error in journal %s!\nPlease verify the Default Debit and Credit Account settings.") % statement['journal_id'].name)
            if balance_start_check != statement['balance_start']:
                # A mismatch is only logged into the statement note, not fatal.
                statement['coda_note'] = _("The CODA Statement %s Starting Balance (%.2f) does not correspond with the previous Closing Balance (%.2f) in journal %s!") % (statement['description'] + ' #' + statement['paperSeqNumber'], statement['balance_start'], balance_start_check, statement['journal_id'].name)
            if not(statement.get('period_id')):
                raise osv.except_osv(_('Error') + ' R3006', _(' No transactions or no period in coda file !'))
            data = {
                'name': statement['paperSeqNumber'],
                'date': statement['date'],
                'journal_id': statement['journal_id'].id,
                'period_id': statement['period_id'],
                'balance_start': statement['balance_start'],
                'balance_end_real': statement['balance_end_real'],
            }
            statement['id'] = self.pool.get('account.bank.statement').create(cr, uid, data, context=context)
            for line in statement['lines']:
                if line['type'] == 'information':
                    statement['coda_note'] = "\n".join([statement['coda_note'], line['type'].title() + ' with Ref. ' + str(line['ref']), 'Date: ' + str(line['entryDate']), 'Communication: ' + line['communication'], ''])
                elif line['type'] == 'communication':
                    statement['coda_note'] = "\n".join([statement['coda_note'], line['type'].title() + ' with Ref. ' + str(line['ref']), 'Ref: ', 'Communication: ' + line['communication'], ''])
                elif line['type'] == 'normal':
                    note = []
                    if 'counterpartyName' in line and line['counterpartyName'] != '':
                        note.append(_('Counter Party') + ': ' + line['counterpartyName'])
                    else:
                        line['counterpartyName'] = False
                    if 'counterpartyNumber' in line and line['counterpartyNumber'] != '':
                        try:
                            # An all-zero counterparty account number is
                            # treated as absent.
                            if int(line['counterpartyNumber']) == 0:
                                line['counterpartyNumber'] = False
                        except:
                            pass
                        if line['counterpartyNumber']:
                            note.append(_('Counter Party Account') + ': ' + line['counterpartyNumber'])
                    else:
                        line['counterpartyNumber'] = False
                    if 'counterpartyAddress' in line and line['counterpartyAddress'] != '':
                        note.append(_('Counter Party Address') + ': ' + line['counterpartyAddress'])
                    partner_id = None
                    structured_com = False
                    bank_account_id = False
                    # A type-101 structured communication becomes the line name.
                    if line['communication_struct'] and 'communication_type' in line and line['communication_type'] == '101':
                        structured_com = line['communication']
                    if 'counterpartyNumber' in line and line['counterpartyNumber']:
                        ids = self.pool.get('res.partner.bank').search(cr, uid, [('acc_number', '=', str(line['counterpartyNumber']))])
                        if ids:
                            bank_account_id = ids[0]
                            partner_id = self.pool.get('res.partner.bank').browse(cr, uid, bank_account_id, context=context).partner_id.id
                        else:
                            #create the bank account, not linked to any partner. The reconciliation will link the partner manually
                            #chosen at the bank statement final confirmation time.
                            try:
                                type_model, type_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'bank_normal')
                                type_id = self.pool.get('res.partner.bank.type').browse(cr, uid, type_id, context=context)
                                bank_code = type_id.code
                            except ValueError:
                                bank_code = 'bank'
                            bank_account_id = self.pool.get('res.partner.bank').create(cr, uid, {'acc_number': str(line['counterpartyNumber']), 'state': bank_code}, context=context)
                    if line.get('communication', ''):
                        note.append(_('Communication') + ': ' + line['communication'])
                    data = {
                        'name': structured_com or (line.get('communication', '') != '' and line['communication'] or '/'),
                        'note': "\n".join(note),
                        'date': line['entryDate'],
                        'amount': line['amount'],
                        'partner_id': partner_id,
                        'partner_name': line['counterpartyName'],
                        'statement_id': statement['id'],
                        'ref': line['ref'],
                        'sequence': line['sequence'],
                        'bank_account_id': bank_account_id,
                    }
                    self.pool.get('account.bank.statement.line').create(cr, uid, data, context=context)
            if statement['coda_note'] != '':
                self.pool.get('account.bank.statement').write(cr, uid, [statement['id']], {'coda_note': statement['coda_note']}, context=context)
        # Open the reconciliation client action on all created statements.
        model, action_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', 'action_bank_reconcile_bank_statements')
        action = self.pool[model].browse(cr, uid, action_id, context=context)
        statements_ids = [statement['id'] for statement in statements]
        return {
            'name': action.name,
            'tag': action.tag,
            'context': {'statement_ids': statements_ids},
            'type': 'ir.actions.client',
        }
def rmspaces(s):
    """Collapse every run of whitespace in *s* to a single space, trimming both ends."""
    tokens = s.split()
    return " ".join(tokens)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
class FileProxyMixin:
    """
    A mixin class used to forward file methods to an underlaying file
    object. The internal file object has to be called "file"::

        class FileProxy(FileProxyMixin):
            def __init__(self, file):
                self.file = file
    """

    # Forwarded attributes are declared as properties (rather than bound
    # once in __init__) so every access is delegated to whatever object
    # self.file refers to at that moment.
    encoding = property(lambda self: self.file.encoding)
    fileno = property(lambda self: self.file.fileno)
    flush = property(lambda self: self.file.flush)
    isatty = property(lambda self: self.file.isatty)
    newlines = property(lambda self: self.file.newlines)
    read = property(lambda self: self.file.read)
    readinto = property(lambda self: self.file.readinto)
    readline = property(lambda self: self.file.readline)
    readlines = property(lambda self: self.file.readlines)
    seek = property(lambda self: self.file.seek)
    softspace = property(lambda self: self.file.softspace)
    tell = property(lambda self: self.file.tell)
    truncate = property(lambda self: self.file.truncate)
    write = property(lambda self: self.file.write)
    writelines = property(lambda self: self.file.writelines)
    xreadlines = property(lambda self: self.file.xreadlines)

    @property
    def closed(self):
        # A proxy without an underlying file behaves like a closed file.
        return not self.file or self.file.closed

    def readable(self):
        """Return True if the underlying file is open and readable."""
        if self.closed:
            return False
        if hasattr(self.file, 'readable'):
            return self.file.readable()
        return True

    def writable(self):
        """Return True if the underlying file is open and writable."""
        if self.closed:
            return False
        if hasattr(self.file, 'writable'):
            return self.file.writable()
        # Fall back to inspecting the open mode for file-likes that
        # predate the io writable() API.
        return 'w' in getattr(self.file, 'mode', '')

    def seekable(self):
        """Return True if the underlying file is open and seekable."""
        if self.closed:
            return False
        if hasattr(self.file, 'seekable'):
            return self.file.seekable()
        return True

    def __iter__(self):
        # Iterate directly over the underlying file (typically by line).
        return iter(self.file)
| bsd-3-clause |
proticom/google-python-exercises | basic/solution/list2.py | 207 | 2774 | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Additional basic list exercises
# D. Given a list of numbers, return a list where
# all adjacent == elements have been reduced to a single element,
# so [1, 2, 2, 3] returns [1, 2, 3]. You may create a new list or
# modify the passed in list.
def remove_adjacent(nums):
  """Collapse each run of adjacent equal elements in nums to one element.

  Returns a new list, leaving the input untouched:
  [1, 2, 2, 3] -> [1, 2, 3].
  """
  # Keep an element only when it is the first one or differs from its
  # immediate predecessor.
  return [num for pos, num in enumerate(nums) if pos == 0 or num != nums[pos - 1]]
# E. Given two lists sorted in increasing order, create and return a merged
# list of all the elements in sorted order. You may modify the passed in lists.
# Ideally, the solution should work in "linear" time, making a single
# pass of both lists.
def linear_merge(list1, list2):
  """Merge two lists sorted in increasing order into one sorted list.

  Runs in true linear time.  The original solution repeatedly called
  list.pop(0), which is O(n) per call in CPython (every pop shifts the
  remaining elements), making the whole merge quadratic.  Two index
  cursors walk the inputs instead, which also leaves the input lists
  unmodified.  On equal elements the one from list2 is taken first,
  matching the original pop-based behaviour.
  """
  i = j = 0
  merged = []
  # Consume from whichever list has the smaller front element.
  while i < len(list1) and j < len(list2):
    if list1[i] < list2[j]:
      merged.append(list1[i])
      i += 1
    else:
      merged.append(list2[j])
      j += 1
  # At most one of these slices is non-empty; it holds the sorted remainder.
  merged.extend(list1[i:])
  merged.extend(list2[j:])
  return merged
# Simple provided test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected))
# Calls the above functions with interesting inputs.
def main():
  # Exercise each solution function against its expected result;
  # test() prints one OK/X line per case.
  print 'remove_adjacent'
  test(remove_adjacent([1, 2, 2, 3]), [1, 2, 3])
  test(remove_adjacent([2, 2, 3, 3, 3]), [2, 3])
  test(remove_adjacent([]), [])

  print
  print 'linear_merge'
  test(linear_merge(['aa', 'xx', 'zz'], ['bb', 'cc']),
       ['aa', 'bb', 'cc', 'xx', 'zz'])
  test(linear_merge(['aa', 'xx'], ['bb', 'cc', 'zz']),
       ['aa', 'bb', 'cc', 'xx', 'zz'])
  test(linear_merge(['aa', 'aa'], ['aa', 'bb', 'bb']),
       ['aa', 'aa', 'aa', 'bb', 'bb'])

if __name__ == '__main__':
  main()
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.