| text (string, 12–1.05M chars) | repo_name (string, 5–86 chars) | path (string, 4–191 chars) | language (string, 1 class) | license (string, 15 classes) | size (int32, 12–1.05M) | keyword (list, 1–23 items) | text_hash (string, 64 chars) |
|---|---|---|---|---|---|---|---|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2016 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
import os
import atexit
from . import core
# NumPy placeholder for scratch files and their cleanup at exit
numpy_files = []
def register_numpy_file(filename):
if filename not in numpy_files:
numpy_files.append(filename)
def clean_numpy_files():
for nfile in numpy_files:
os.unlink(nfile)
atexit.register(clean_numpy_files)
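# Usage sketch (hypothetical file name): any scratch .npy file registered here
# is removed automatically at interpreter exit by the atexit hook above.
#
#     import numpy as np
#     np.save('scratch.npy', np.zeros(3))   # hypothetical scratch file
#     register_numpy_file('scratch.npy')    # deleted by clean_numpy_files()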
# Exit printing
def exit_printing():
if _success_flag_:
core.print_out( "\n*** Psi4 exiting successfully. Buy a developer a beer!\n")
else:
core.print_out( "\n*** Psi4 encountered an error. Buy a developer more coffee!\n")
core.print_out( "*** Resources and help at github.com/psi4/psi4.\n")
_success_flag_ = False
# Working directory
_input_dir_ = os.getcwd()
def get_input_directory():
return _input_dir_
| kannon92/psi4 | psi4/extras.py | Python | gpl-2.0 | 1,727 | ["Psi4"] | 93238e2dfc0b5a2287eb113838da4b68251ab8bd8dc33623afdd84f053bdc600 |
#!/home/e89/e89/zd242/src/anaconda2/bin/python
# Application for mechanical properties calculation and analysis using VASP
# Zeyu Deng 16.03.2017
import os
import sys
import shutil
import subprocess
import numpy as np
import argparse
import sympy
#import matplotlib.pyplot as plt
###################################################### Constants #########
# Info
info_text = "Mechanical Properties Calculation and Analysis Application ver 1.05\n" +\
"Zeyu Deng <zd242@cam.ac.uk or dengzeyu@gmail.com>\n" +\
"Department of Materials Science and Metallurgy\n" +\
"University of Cambridge\n" +\
"16.03.2017"
def printInfo():
print info_text
# POSCAR
poscar = "POSCAR"
# Files to copy to the calculation directories
file_list = [poscar]
c11,c22,c33,c44,c55,c66,c12,c13,c14,c15,c16,c23,c24,c25,c26,c34,c35,c36,c45,c46,c56,d = sympy.symbols("c11,c22,c33,c44,c55,c66,c12,c13,c14,c15,c16,c23,c24,c25,c26,c34,c35,c36,c45,c46,c56,d")
def get_strain_pattern(crystSys):
if crystSys == 1: # cubic -> e1+e4
pattern = [[1, 0, 0, 1, 0, 0]]
# tetragonal high 4mm, -42m, 422, 4/mmm; tetragonal low 4, -4, 4/m ->
# e1+e4, e3+e6
elif crystSys in [2, 21]:
pattern = [[1, 0, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 1]]
    elif crystSys == 3:  # orthorhombic -> e1+e4, e2+e5, e3+e6
pattern = [[1, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 1, 0],
[0, 0, 1, 0, 0, 1]]
elif crystSys == 4: # monoclinic type I Diad || x2 -> e1+e4, e3+e6, e2, e5
pattern = [[1, 0, 0, 1, 0, 0],
[0, 0, 1, 0, 0, 1],
[0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0]]
# monoclinic type II Diad || x3 -> e1+e4, e3+e5, e2, e6
elif crystSys == 41:
pattern = [[1, 0, 0, 1, 0, 0],
[0, 0, 1, 0, 1, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1]]
elif crystSys == 5: # triclinic -> e1, e2, e3, e4, e5, e6
pattern = [[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 0, 0],
[0, 0, 0, 0, 1, 0],
[0, 0, 0, 0, 0, 1]]
    # hexagonal; trigonal high 32, -3m, 3m; trigonal low 3, -3 -> e1, e3+e4
elif crystSys in [6, 7, 71]:
pattern = [[1, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0]]
return pattern
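# Usage sketch: each row above is a strain pattern in Voigt notation;
# get_deform() scales it by the step size `deform` and adds it to the
# identity, e.g. for the cubic system:
#
#     pattern = get_strain_pattern(1)[0]                  # [1, 0, 0, 1, 0, 0]
#     deformation = np.identity(3) + tran(pattern) * 0.005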
def get_strain_pattern_sym(crystSys):
return sympy.Matrix(get_strain_pattern(crystSys))*d
def get_stress_pattern_sym(crystSys):  # derive the symbolic stress expressions by linear algebra
strain_pattern_sym=get_strain_pattern_sym(crystSys)
cij_pattern_sym = get_cij_pattern_sym(crystSys)
stress_pattern_sym = sympy.Matrix([[0, 0, 0, 0, 0, 0]])
for i in range(strain_pattern_sym.shape[0]):
stress_pattern_sym = stress_pattern_sym.row_insert(-1, (cij_pattern_sym*strain_pattern_sym.row(i).T).T)
stress_pattern_sym.row_del(-1)
return sympy.simplify(stress_pattern_sym)
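# Illustrative example: for the cubic system (crystSys=1) the single pattern
# d*(e1+e4) yields the symbolic stress row
#     [c11*d, c12*d, c12*d, c44*d, 0, 0]
# so linear fits of stress versus strain amplitude give c11, c12 and c44 directly.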
def get_cij_pattern_sym(crystSys): # convert cij_pattern into symbolic matrix
cij_to_string = {1: 'c11', 2: 'c22', 3: 'c33', 4: 'c44', 5: 'c55', 6: 'c66',
7: 'c12', 8: 'c13', 9: 'c14', 10: 'c15', 11: 'c16',
12: 'c23', 13: 'c24', 14: 'c25', 15: 'c26',
16: 'c34', 17: 'c35', 18: 'c36',
19: 'c45', 20: 'c46',
21: 'c56', 0: 0, -9: '-c14', -14: '-c25', -11: '-c16'}
cij_pattern = get_cij_pattern(crystSys)
cij_pattern_sym = []
for index_i, i in enumerate(cij_pattern):
cij_pattern_sym_row = []
for index_j, j in enumerate(i):
cij_pattern_sym_row.append(cij_to_string[cij_pattern[index_i][index_j]])
cij_pattern_sym.append(cij_pattern_sym_row)
cij_pattern_sym = sympy.Matrix(cij_pattern_sym)
return cij_pattern_sym
def get_deform(crystSys, strain_index=0, deform=0):
# Cubic volume-conserving for energy method
if crystSys == 100:
deformation = [[[1+deform, 0, 0], [0, 1+deform, 0], [0, 0, 1/((1+deform)**2)]],
[[1+deform, 0, 0], [0, 1+deform, 0], [0, 0, 1+deform]],
[[1, deform/2, 0], [deform/2, 1, 0], [0, 0, 1+(deform**2)/(4-deform**2)]]]
# Stress-Strain methods
if crystSys < 100:
if crystSys != 0:
strain = get_strain_pattern(crystSys)
deformation = np.identity(3)+tran(strain[strain_index])*deform
return deformation
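# Note: the crystSys == 100 deformations above are volume-conserving by
# construction, e.g. det(diag(1+d, 1+d, 1/(1+d)**2)) = 1 exactly, and the shear
# matrix satisfies det = (1 - d**2/4) * (1 + d**2/(4 - d**2)) = 1.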
def get_deform_file():
f = open("deformation.dat", 'r')
lines = f.readlines()
f.close()
strain_list = [[float(num) for num in line.strip().split()]
for line in lines]
deformation = [np.identity(3)+tran(strain_list[strain_index])
for strain_index in range(len(strain_list))]
return deformation
# See J.F. Nye, Physical Properties of Crystals, pp. 140-141
def get_cij_pattern(crystSys):
if crystSys == 1: # cubic -> e1+e4
pattern = [[1, 7, 7, 0, 0, 0],
[7, 1, 7, 0, 0, 0],
[7, 7, 1, 0, 0, 0],
[0, 0, 0, 4, 0, 0],
[0, 0, 0, 0, 4, 0],
[0, 0, 0, 0, 0, 4]]
# tetragonal high 4mm, -42m, 422, 4/mmm -> e1+e4, e3+e6
elif crystSys == 2:
pattern = [[1, 7, 8, 0, 0, 0],
[7, 1, 8, 0, 0, 0],
[8, 8, 3, 0, 0, 0],
[0, 0, 0, 4, 0, 0],
[0, 0, 0, 0, 4, 0],
[0, 0, 0, 0, 0, 6]]
elif crystSys == 21: # tetragonal low 4, -4, 4/m -> e1+e4, e3+e6
pattern = [[1, 7, 8, 0, 0, 11],
[7, 1, 8, 0, 0, -11],
[8, 8, 3, 0, 0, 0],
[0, 0, 0, 4, 0, 0],
[0, 0, 0, 0, 4, 0],
[11, -11, 0, 0, 0, 6]]
    elif crystSys == 3:  # orthorhombic -> e1+e4, e2+e5, e3+e6
pattern = [[1, 7, 8, 0, 0, 0],
[7, 2, 12, 0, 0, 0],
[8, 12, 3, 0, 0, 0],
[0, 0, 0, 4, 0, 0],
[0, 0, 0, 0, 5, 0],
[0, 0, 0, 0, 0, 6]]
elif crystSys == 4: # monoclinic type I Diad || x2 -> e1+e4, e3+e6, e2, e5
pattern = [[1, 7, 8, 0, 10, 0],
[7, 2, 12, 0, 14, 0],
[8, 12, 3, 0, 17, 0],
[0, 0, 0, 4, 0, 20],
[10, 14, 17, 0, 5, 0],
[0, 0, 0, 20, 0, 6]]
# monoclinic type II Diad || x3 -> e1+e4, e3+e5, e2, e6
elif crystSys == 41:
pattern = [[1, 7, 8, 0, 0, 11],
[7, 2, 12, 0, 0, 15],
[8, 12, 3, 0, 0, 18],
[0, 0, 0, 4, 19, 0],
[0, 0, 0, 19, 5, 0],
[11, 15, 18, 0, 0, 6]]
elif crystSys == 5: # triclinic -> e1, e2, e3, e4, e5, e6
pattern = [[1, 7, 8, 9, 10, 11],
[7, 2, 12, 13, 14, 15],
[8, 12, 3, 16, 17, 18],
[9, 13, 16, 4, 19, 20],
[10, 14, 17, 19, 5, 21],
[11, 15, 18, 20, 21, 6]]
elif crystSys == 6: # hexagonal -> e1, e3+e4
pattern = [[1, 7, 8, 0, 0, 0],
[7, 1, 8, 0, 0, 0],
[8, 8, 3, 0, 0, 0],
[0, 0, 0, 4, 0, 0],
[0, 0, 0, 0, 4, 0],
[0, 0, 0, 0, 0, 6]]
    elif crystSys == 7:  # trigonal high 32, -3m, 3m -> e1, e3+e4
pattern = [[1, 7, 8, 9, 0, 0],
[7, 1, 8, -9, 0, 0],
[8, 8, 3, 0, 0, 0],
[9, -9, 0, 4, 0, 0],
[0, 0, 0, 0, 4, 9],
[0, 0, 0, 0, 9, 6]]
    elif crystSys == 71:  # trigonal low 3, -3 -> e1, e3+e4
pattern = [[1, 7, 8, 9, -14, 0],
[7, 1, 8, -9, 14, 0],
[8, 8, 3, 0, 0, 0],
[9, -9, 0, 4, 0, 14],
[-14, 14, 0, 0, 4, 9],
[0, 0, 0, 14, 9, 6]]
return pattern
def get_num_strain(crystSys):
    # Number of strain patterns per crystal system (matches get_strain_pattern)
    num_strain = {1: 1, 2: 2, 21: 2, 3: 3, 4: 4, 41: 4,
                  5: 6, 6: 2, 7: 2, 71: 2}
    return num_strain[crystSys]
voigt_mat = np.array([[0, 5, 4], [5, 1, 3], [4, 3, 2]])
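# voigt_mat maps tensor indices (i, j) to 0-based Voigt indices:
# (0,0)->0, (1,1)->1, (2,2)->2, (1,2)->3, (0,2)->4, (0,1)->5.
# Illustrative check (commented out since tran() is defined further down):
# unpacking the pure-shear pattern e4 gives eps_yz = eps_zy = 0.5, the
# engineering shear strain halved for the tensor form.
#
#     assert np.allclose(tran([0, 0, 0, 1, 0, 0]),
#                        [[0, 0, 0], [0, 0, 0.5], [0, 0.5, 0]])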
###################################################### Preprocessing Funct
def preprocess(crystSys, num_calcPoint, delta):
[p, bot, top] = readPoscar()
deform_matrix = []
shutil.move(poscar, 'POSCAR_backup')
num_strain = get_num_strain(crystSys)
if num_calcPoint % 2 != 0:
num_calcPoint = num_calcPoint-1
for index_Strain in range(num_strain):
deform = -num_calcPoint/2*delta
mkdir("./str"+str(index_Strain+1))
for j in range(num_calcPoint/2):
deform_dir = "./str"+str(index_Strain+1)+"/d"+str(deform)
mkdir(deform_dir)
deformation = get_deform(crystSys, index_Strain, deform)
pos = apply_deform(p, deformation)
writePoscar(pos, bot, top)
deform = deform+delta
copyfiles(file_list, deform_dir)
deform = delta
for j in range(num_calcPoint/2):
deform_dir = "./str"+str(index_Strain+1)+"/d"+str(deform)
mkdir(deform_dir)
deformation = get_deform(crystSys, index_Strain, deform)
pos = apply_deform(p, deformation)
writePoscar(pos, bot, top)
deform = deform+delta
copyfiles(file_list, deform_dir)
print "Strain complete! (%d/%d)" % (index_Strain+1, num_strain)
shutil.move('POSCAR_backup', poscar)
def readPoscar():
f = open(poscar, 'r')
lines = f.readlines()
f.close()
    scal = float(lines[1].strip().split()[0])
    p = np.zeros((3, 3))
    for i in range(3):
        for j in range(3):
            p[i][j] = float(lines[2 + i].strip().split()[j])
print "POSCAR has been read!"
print "\n-----------Lattice Vector-------------"
print scal
print p
print "--------------------------------------"
top = lines[0:2]
bot = lines[5:len(lines)]
return p, bot, top
def writePoscar(pos, bot, top, file_name='POSCAR'):
np.savetxt('poscar', pos, fmt='%21.16f')
f = open('poscar', 'r')
lines = f.readlines()
f.close()
posstring = top+lines+bot
pos_file = open(file_name, 'w')
pos_file.write(''.join(posstring))
pos_file.close()
os.remove('poscar')
def copyfiles(file_list, dst):
for file in file_list:
try:
shutil.copy2(file, dst+'/'+file)
        except IOError:
            print "WARNING! Problem copying file: " + file
def mkdir(folder):
try:
os.mkdir(folder)
    except OSError:
        print "WARNING! File/Folder exists! (" + folder + ")"
def tran(mat):
mat_tr = np.zeros((3, 3))
for i in range(3):
for j in range(3):
if voigt_mat[i, j] > 2:
coeff = 0.5
else:
coeff = 1
mat_tr[i][j] = coeff*mat[voigt_mat[i, j]]
return np.array(mat_tr)
def apply_deform(p, deformation):
print "\n-------------Deformation--------------"
print np.array(deformation)
pos = np.transpose(np.dot(deformation, np.transpose(p)))
print "----------Deformed Structure----------"
print np.array(pos)
print "--------------------------------------"
return pos
###################################################### Postprocessing Func
def postprocess(arguments):
crystSys = arguments.crystSys
    c, std_err = fitCij(crystSys, arguments)
Cij = elast_consts(arguments, c, std_err)
def postprocess_test(arguments):
crystSys = arguments.crystSys
c, std_err = fitCij(crystSys,arguments)
Cij = elast_consts(arguments, c, std_err)
def postprocess_read_cij(arguments):
print "Reading data...."
c = readCij()
Cij = elast_consts(arguments, c)
def fitCij(crystSys, arguments):
print "\n------------------------------------Fitted Results-------------------------------------"
def fit(i, j):
        from scipy import stats
stress_fit = np.array(stress[i, j])
slope, intercept, r, p, stderr = stats.linregress(delta, stress_fit)
print '\n'
print 'Fitted result ', Stress_string[i, j], ' : ', slope
print 'Error : ', stderr
if abs(r) > 0.9:
print 'Correlation coefficient (r) : ', r
else:
print 'Correlation coefficient (r) : ', r, ' <----- WARNING: BAD FIT'
return slope, stderr
def createCij():
CijMatrix = np.zeros((6, 6))
CijErrorMatrix = np.zeros((6, 6))
c = np.array(get_cij_pattern(crystSys))
for i in range(0, 6):
for j in range(0, 6):
index = int(c[i, j])
if index > 0:
CijMatrix[i, j] = Cij_list[index-1]
CijErrorMatrix[i, j] = abs(Cij_errors_list[index-1])
elif index < 0:
CijMatrix[i, j] = -Cij_list[-index-1]
CijErrorMatrix[i, j] = abs(Cij_errors_list[-index-1])
return CijMatrix, CijErrorMatrix
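    # Mapping used by createCij: entry k of get_cij_pattern() refers to
    # Cij_list[k-1] (e.g. 1 -> c11, 7 -> c12); negative entries flip the sign,
    # e.g. -9 -> -c14, matching cij_to_string in get_cij_pattern_sym().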
# get stress from OUTCAR
command = (
"grep 'in kB' outcar.2 | tail -1 | awk '{print -$3/10.0, -$4/10.0, -$5/10.0, -$7/10.0, -$8/10.0, -$6/10.0}'")
stress = []
strain = []
delta_list = []
str_list = os.listdir("./")
str_list = [int(elem[3:]) for elem in str_list if "str" in elem]
str_list.sort()
os.chdir("./rlx")
process = subprocess.Popen(command, stdout=subprocess.PIPE, shell=True)
stress0 = [float(elem) for elem in process.stdout.read().strip().split()]
os.chdir("../")
    for this_strain in str_list:
        stress_str = []
        delta_list_in = []
        os.chdir("./str" + str(this_strain))
delta_string = os.listdir("./")
delta = [float(elem[1:]) for elem in delta_string if "d" in elem]
delta.sort()
for this_delta in delta:
delta_list_in.append(this_delta)
os.chdir("./d"+str(this_delta))
process = subprocess.Popen(
command, stdout=subprocess.PIPE, shell=True)
stress_str.append(
[float(elem) for elem in process.stdout.read().strip().split()])
os.chdir("../")
os.chdir("../")
stress.append(stress_str)
delta_list.append(delta_list_in)
strain.append(get_strain_pattern(crystSys))
delta = np.array(delta)
stress = (np.array(stress)-np.array(stress0)).tolist()
c = np.zeros((6, 6))
re_stress = np.zeros((6, 6))
r_value = np.zeros((6, 6))
p_value = np.zeros((6, 6))
std_err = np.zeros((6, 6))
stress = np.array([np.array(stress_elem).T for stress_elem in stress])
# Start fitting Cijs
Stress_string = np.chararray((6, 6), itemsize=15)
Cij_list = np.zeros(21)
Cij_errors_list = np.zeros(21)
print "This code is trying to evaluate: "
sympy.init_printing()
    stress_sym = get_stress_pattern_sym(arguments.crystSys) / d
    strain_sym = get_strain_pattern_sym(arguments.crystSys)
    cij_sym = get_cij_pattern_sym(arguments.crystSys)
    sympy.pprint(sympy.relational.Eq(
        (stress_sym * d).T, sympy.MatMul(cij_sym, strain_sym.T)))
    # Initialize strings for stresses using symbolic notation, e.g. C11+C12
    stress_sym = np.array(stress_sym)
    for i in range(stress_sym.shape[0]):
        for j in range(stress_sym.shape[1]):
            Stress_string[i][j] = stress_sym[i][j]
    # Fitted slopes are stored in Fit_results and their errors in Fit_errors
    Fit_results = np.zeros(stress_sym.shape)
    Fit_errors = np.zeros(stress_sym.shape)
    for i in range(stress_sym.shape[0]):
        for j in range(stress_sym.shape[1]):
            if stress_sym[i][j] != 0:
                Fit_results[i][j], Fit_errors[i][j] = fit(i, j)
# Solve symbolic equations to get values for each Cijs
# if crystSys == 1: # cubic
# Stress_string[0, 0] = "C11"
# Stress_string[0, 1] = "C12"
# Stress_string[0, 2] = "C12"
# Stress_string[0, 3] = "C44"
# Cij_list[0], Cij_errors_list[0] = fit(0, 0) # c11
# Cij_list[3], Cij_errors_list[3] = fit(0, 3) # c44
# fit01, error01 = fit(0, 1) # 12
# fit02, error02 = fit(0, 2) # 12
# Cij_list[6] = (fit01+fit02)/2 # c12
# Cij_errors_list[6] = (error01+error02)/2
# elif crystSys == 2: # tetragonal high 4mm,-42m,422,4/mmm e1+e4, e3+e6
# Stress_string[0, 0] = "C11"
# Stress_string[0, 1] = "C12"
# Stress_string[0, 2] = "C13"
# Stress_string[0, 3] = "C44"
# Stress_string[1, 0] = "C13"
# Stress_string[1, 1] = "C13"
# Stress_string[1, 2] = "C33"
# Stress_string[1, 5] = "C66"
# Cij_list[0], Cij_errors_list[0] = fit(0, 0) # c11
# Cij_list[6], Cij_errors_list[6] = fit(0, 1) # c12
# fit02, error02 = fit(0, 2) # 13
# Cij_list[3], Cij_errors_list[3] = fit(0, 3) # c44
# fit10, error10 = fit(1, 0) # 13
# fit11, error11 = fit(1, 1) # 13
# Cij_list[2], Cij_errors_list[2] = fit(1, 2) # c33
# Cij_list[5], Cij_errors_list[5] = fit(1, 5) # c66
# Cij_list[7] = (fit02+fit10+fit11)/3 # c13
# Cij_errors_list[7] = (error02+error10+error11)/3
# elif crystSys == 21: # tetragonal low 4, -4, 4/m e1+e4, e3+e6
# Stress_string[0, 0] = "C11"
# Stress_string[0, 1] = "C12"
# Stress_string[0, 2] = "C13"
# Stress_string[0, 3] = "C44"
# Stress_string[0, 5] = "C16"
# Stress_string[1, 0] = "C13 + C16"
# Stress_string[1, 1] = "C13 - C16"
# Stress_string[1, 2] = "C33"
# Stress_string[1, 5] = "C66"
# Cij_list[0], Cij_errors_list[0] = fit(0, 0) # c11
# Cij_list[6], Cij_errors_list[6] = fit(0, 1) # c12
# fit02, error02 = fit(0, 2) # 13
# Cij_list[3], Cij_errors_list[3] = fit(0, 3) # c44
# fit05, error05 = fit(0, 5) # 16
# fit10, error10 = fit(1, 0) # 13+16
# fit11, error11 = fit(1, 1) # 13-16
# Cij_list[2], Cij_errors_list[2] = fit(1, 2) # c33
# Cij_list[5], Cij_errors_list[5] = fit(1, 5) # c66
# Cij_list[7] = (fit02+fit10+fit11)/3 # c13
# Cij_errors_list[7] = (error02+error10+error11)/3
# Cij_list[10] = (fit10-fit11+fit05)/3 # c16
# Cij_errors_list[10] = (error10-error11+error05)/3
    # elif crystSys == 3:  # orthorhombic
# Stress_string[0, 0] = "C11"
# Stress_string[0, 1] = "C12"
# Stress_string[0, 2] = "C13"
# Stress_string[0, 3] = "C44"
# Stress_string[1, 0] = "C12"
# Stress_string[1, 1] = "C22"
# Stress_string[1, 2] = "C23"
# Stress_string[1, 4] = "C55"
# Stress_string[2, 0] = "C13"
# Stress_string[2, 1] = "C23"
# Stress_string[2, 2] = "C33"
# Stress_string[2, 5] = "C66"
# Cij_list[0], Cij_errors_list[0] = fit(0, 0) # c11
# fit01, error01 = fit(0, 1) # 12
# fit02, error02 = fit(0, 2) # 13
# Cij_list[3], Cij_errors_list[3] = fit(0, 3) # c44
# fit10, error10 = fit(1, 0) # 12
# Cij_list[1], Cij_errors_list[1] = fit(1, 1) # c22
# fit12, error12 = fit(1, 2) # 23
# Cij_list[4], Cij_errors_list[4] = fit(1, 4) # c55
# fit20, error20 = fit(2, 0) # 13
# fit21, error21 = fit(2, 1) # 23
# Cij_list[2], Cij_errors_list[2] = fit(2, 2) # c33
# Cij_list[5], Cij_errors_list[5] = fit(2, 5) # c66
# Cij_list[6] = (fit10+fit01)/2 # c12
# Cij_errors_list[6] = (error10+error01)/2
# Cij_list[7] = (fit02+fit20)/2 # c13
# Cij_errors_list[7] = (error02+error20)/2
# Cij_list[11] = (fit12+fit21)/2 # c23
# Cij_errors_list[11] = (error12+error21)/2
# elif crystSys == 4: # monoclinic
# Stress_string[0, 0] = "C11"
# Stress_string[0, 1] = "C12"
# Stress_string[0, 2] = "C13"
# Stress_string[0, 3] = "C44"
# Stress_string[0, 4] = "C15"
# Stress_string[0, 5] = "C46"
# Stress_string[1, 0] = "C13"
# Stress_string[1, 1] = "C23"
# Stress_string[1, 2] = "C33"
# Stress_string[1, 3] = "C46"
# Stress_string[1, 4] = "C53"
# Stress_string[1, 5] = "C66"
# Stress_string[2, 0] = "C12"
# Stress_string[2, 1] = "C22"
# Stress_string[2, 2] = "C23"
# Stress_string[2, 4] = "C25"
# Stress_string[3, 0] = "C15"
# Stress_string[3, 1] = "C25"
# Stress_string[3, 2] = "C35"
# Stress_string[3, 4] = "C55"
# Cij_list[0], Cij_errors_list[0] = fit(0, 0) # c11
# fit01, error01 = fit(0, 1) # 12
# fit02, error02 = fit(0, 2) # 13
# Cij_list[3], Cij_errors_list[3] = fit(0, 3) # c44
# fit04, error04 = fit(0, 4) # 15
# fit05, error05 = fit(0, 5) # 46
# fit10, error10 = fit(1, 0) # 13
# fit11, error11 = fit(1, 1) # 23
# Cij_list[2], Cij_errors_list[2] = fit(1, 2) # c33
# fit13, error13 = fit(1, 3) # 46
# fit14, error14 = fit(1, 4) # 35
# Cij_list[5], Cij_errors_list[5] = fit(1, 5) # c66
# fit20, error20 = fit(2, 0) # 12
# Cij_list[1], Cij_errors_list[1] = fit(2, 1) # c22
# fit22, error22 = fit(2, 2) # 23
# fit24, error24 = fit(2, 4) # 25
# fit30, error30 = fit(3, 0) # 15
# fit31, error31 = fit(3, 1) # 25
# fit32, error32 = fit(3, 2) # 35
# Cij_list[4], Cij_errors_list[4] = fit(3, 4) # c55
# Cij_list[6] = (fit20+fit01)/2 # c12
# Cij_errors_list[6] = (error20+error01)/2
# Cij_list[7] = (fit02+fit10)/2 # c13
# Cij_errors_list[7] = (error02+error10)/2
# Cij_list[11] = (fit11+fit22)/2 # c23
# Cij_errors_list[11] = (error11+error22)/2
# Cij_list[9] = (fit04+fit30)/2 # c15
# Cij_errors_list[9] = (error04+error30)/2
# Cij_list[13] = (fit24+fit31)/2 # c25
# Cij_errors_list[13] = (error24+error31)/2
# Cij_list[16] = (fit14+fit32)/2 # c35
# Cij_errors_list[16] = (error14+error32)/2
# Cij_list[19] = (fit13+fit05)/2 # c46
# Cij_errors_list[19] = (error13+error05)/2
# elif crystSys == 41: # monoclinic
# Stress_string[0, 0] = "C11"
# Stress_string[0, 1] = "C12"
# Stress_string[0, 2] = "C13"
# Stress_string[0, 3] = "C44"
# Stress_string[0, 4] = "C45"
# Stress_string[0, 5] = "C16"
# Stress_string[1, 0] = "C13"
# Stress_string[1, 1] = "C23"
# Stress_string[1, 2] = "C33"
# Stress_string[1, 3] = "C45"
# Stress_string[1, 4] = "C55"
# Stress_string[1, 5] = "C36"
# Stress_string[2, 0] = "C12"
# Stress_string[2, 1] = "C22"
# Stress_string[2, 2] = "C23"
# Stress_string[2, 5] = "C26"
# Stress_string[3, 0] = "C16"
# Stress_string[3, 1] = "C26"
# Stress_string[3, 2] = "C36"
# Stress_string[3, 5] = "C66"
# Cij_list[0], Cij_errors_list[0] = fit(0, 0) # c11
# fit01, error01 = fit(0, 1) # 12
# fit02, error02 = fit(0, 2) # 13
# Cij_list[3], Cij_errors_list[3] = fit(0, 3) # c44
# fit04, error04 = fit(0, 4) # 45
# fit05, error05 = fit(0, 5) # 16
# fit10, error10 = fit(1, 0) # 13
# fit11, error11 = fit(1, 1) # 23
# Cij_list[2], Cij_errors_list[2] = fit(1, 2) # c33
# fit13, error13 = fit(1, 3) # 45
# Cij_list[4], Cij_errors_list[4] = fit(1, 4) # c55
# fit15, error15 = fit(1, 5) # 36
# fit20, error20 = fit(2, 0) # 12
# Cij_list[1], Cij_errors_list[1] = fit(2, 1) # c22
# fit22, error22 = fit(2, 2) # 23
# fit25, error25 = fit(2, 5) # 26
# fit30, error30 = fit(3, 0) # 16
# fit31, error31 = fit(3, 1) # 26
# fit32, error32 = fit(3, 2) # 36
# Cij_list[5], Cij_errors_list[5] = fit(3, 5) # c66
# Cij_list[6] = (fit20+fit01)/2 # c12
# Cij_errors_list[6] = (error20+error01)/2
# Cij_list[7] = (fit02+fit10)/2 # c13
# Cij_errors_list[7] = (error02+error10)/2
# Cij_list[11] = (fit22+fit11)/2 # c23
# Cij_errors_list[11] = (error22+error11)/2
# Cij_list[10] = (fit05+fit30)/2 # c16
# Cij_errors_list[10] = (error05+error30)/2
# Cij_list[14] = (fit25+fit31)/2 # c26
# Cij_errors_list[14] = (error25+error31)/2
    # Cij_list[17] = (fit15+fit32)/2 # c36
    # Cij_errors_list[17] = (error15+error32)/2
# Cij_list[18] = (fit13+fit04)/2 # c45
# Cij_errors_list[18] = (error13+error04)/2
# elif crystSys == 5: # triclinic
# index = 0
# c = np.array(get_cij_pattern(crystSys))
# temp = np.zeros((6, 6))
# temp_err = np.zeros((6, 6))
# for i in range(6):
# for j in range(6):
# Stress_string[i, j] = "C"+str(i+1)+str(j+1)
# temp[i, j], temp_err[i, j] = fit(i, j)
# temp = (temp+temp.T)/2
# temp_err = (temp_err+temp_err.T)/2
# for i in range(6):
# for j in range(i, 6):
# Cij_list[c[i, j]-1] = temp[i, j]
# Cij_errors_list[c[i, j]-1] = temp_err[i, j]
# index += 1
# elif crystSys == 6: # hexagonal
# Stress_string[0, 0] = "C11"
# Stress_string[0, 1] = "C12"
# Stress_string[0, 2] = "C13"
# Stress_string[1, 0] = "C13"
# Stress_string[1, 1] = "C13"
# Stress_string[1, 2] = "C33"
# Stress_string[1, 3] = "C44"
# Cij_list[0], Cij_errors_list[0] = fit(0, 0) # c11
# Cij_list[6], Cij_errors_list[6] = fit(0, 1) # c12
# fit02, error02 = fit(0, 2) # 13
# fit10, error10 = fit(1, 0) # 13
# fit11, error11 = fit(1, 1) # 13
# Cij_list[2], Cij_errors_list[2] = fit(1, 2) # c33
# Cij_list[3], Cij_errors_list[3] = fit(1, 3) # c44
# Cij_list[7] = (fit02+fit10+fit11)/3 # c13
# Cij_errors_list[7] = (error02+error10+error11)/3
# Cij_list[5] = 0.5*(Cij_list[0]-Cij_list[6]) # c66
# Cij_errors_list[5] = 0.5*(Cij_errors_list[0]-Cij_errors_list[6])
    # elif crystSys == 7:  # trigonal high
# Stress_string[0, 0] = "C11"
# Stress_string[0, 1] = "C12"
# Stress_string[0, 2] = "C13"
# Stress_string[0, 3] = "C14"
# Stress_string[1, 0] = "C13 + C14"
# Stress_string[1, 1] = "C13 - C14"
# Stress_string[1, 2] = "C33"
# Stress_string[1, 3] = "C44"
# Cij_list[0], Cij_errors_list[0] = fit(0, 0) # c11
# Cij_list[6], Cij_errors_list[6] = fit(0, 1) # c12
# fit02, error02 = fit(0, 2) # 13
# fit03, error03 = fit(0, 3) # 14
# fit10, error10 = fit(1, 0) # 13+14
# fit11, error11 = fit(1, 1) # 13-14
# Cij_list[2], Cij_errors_list[2] = fit(1, 2) # c33
# Cij_list[3], Cij_errors_list[3] = fit(1, 3) # c44
# Cij_list[7] = (fit02+fit10+fit11)/3 # c13
# Cij_errors_list[7] = (error02+error10+error11)/3
# Cij_list[8] = (fit03+fit10-fit11)/3 # c14
# Cij_errors_list[8] = (error03+error10-error11)/3
# Cij_list[5] = 0.5*(Cij_list[0]-Cij_list[6]) # c66
# Cij_errors_list[5] = 0.5*(Cij_errors_list[0]-Cij_errors_list[6])
    # elif crystSys == 71:  # trigonal low
# Stress_string[0, 0] = "C11"
# Stress_string[0, 1] = "C12"
# Stress_string[0, 2] = "C13"
# Stress_string[0, 3] = "C14"
# Stress_string[0, 4] = "-C25"
# Stress_string[1, 0] = "C13 + C14"
# Stress_string[1, 1] = "C13 - C14"
# Stress_string[1, 2] = "C33"
# Stress_string[1, 3] = "C44"
# Stress_string[1, 5] = "C25"
# Cij_list[0], Cij_errors_list[0] = fit(0, 0) # c11
# Cij_list[6], Cij_errors_list[6] = fit(0, 1) # c12
# fit02, error02 = fit(0, 2) # 13
# fit03, error03 = fit(0, 3) # 14
# fit04, error04 = fit(0, 4) # -25
# fit10, error10 = fit(1, 0) # 13+14
# fit11, error11 = fit(1, 1) # 13-14
# Cij_list[2], Cij_errors_list[2] = fit(1, 2) # c33
# Cij_list[3], Cij_errors_list[3] = fit(1, 3) # c44
# fit15, error15 = fit(1, 5) # 25
# Cij_list[7] = (fit02+fit10+fit11)/3 # c13
# Cij_errors_list[7] = (error02+error10+error11)/3
# Cij_list[8] = (fit03+fit10-fit11)/3 # c14
# Cij_errors_list[8] = (error03+error10-error11)/3
# Cij_list[13] = (-fit04+fit15)/2 # c25
# Cij_errors_list[13] = (-error04+error15)/2
# Cij_list[5] = 0.5*(Cij_list[0]-Cij_list[6]) # c66
# Cij_errors_list[5] = 0.5*(Cij_errors_list[0]-Cij_errors_list[6])
c, std_err = createCij()
return c, std_err
def readCij():
f = open("cij.dat", 'r')
lines = f.readlines()
f.close()
c = np.zeros((6, 6))
for i in range(6):
for j in range(6):
c[i][j] = float(lines[i].strip().split()[j])
return c
###################################################### Analysis ##########
class elast_consts:
def __init__(self, arguments, cvoigt=np.zeros((6, 6)), std_err=np.zeros((6, 6))):
print "\n----------------------------------------Analysis----------------------------------------"
self.cvoigt = cvoigt
self.std_err = std_err
self.svoigt = np.linalg.inv(cvoigt)
self.smat = self.getSmat()
self.cmat = self.getCmat()
if arguments.isPrintCijs:
self.print_cvoigt()
if arguments.isCheckStability:
self.check_stability()
if arguments.isCalcPolyModulus:
self.calc_poly_modulus()
if arguments.isWriteCijs:
self.write_cvoigt()
if arguments.isCalcMaxMin:
self.minimum_elastic_moduli()
self.maximum_elastic_moduli()
if arguments.isCalcDirYoung:
self.calc_dir_youngs_modulus()
if arguments.isCalcDirLinCompress:
self.calc_dir_lin_compress()
self.show_dir_youngs_modulus(arguments.angles)
def print_cvoigt(self):
print "\nSymmetrized Elastic Constant (C +- err) (GPa):"
for i in range(6):
for j in range(6):
print "%5.2f +- %5.2f\t" % (self.cvoigt[i, j], self.std_err[i, j]),
print "\n",
print ""
def coeff(self, i, j):
if i < 3 and j < 3:
return 1.0
if (i > 2 and j < 3) or (i < 3 and j > 2):
return 0.5
if i > 2 and j > 2:
return 0.25
def getSmat(self):
smat = np.zeros((3, 3, 3, 3))
for i in range(3):
for j in range(3):
for k in range(3):
for l in range(3):
smat[i, j, k, l] = self.coeff(
voigt_mat[i, j], voigt_mat[k, l])*self.svoigt[voigt_mat[i, j], voigt_mat[k, l]]
return smat
def getCmat(self):
voigt_mat = np.array([[0, 5, 4], [5, 1, 3], [4, 3, 2]])
cmat = np.zeros((3, 3, 3, 3))
for i in range(3):
for j in range(3):
for k in range(3):
for l in range(3):
cmat[i, j, k, l] = self.cvoigt[
voigt_mat[i, j], voigt_mat[k, l]]
return cmat
def check_stability(self):
eigvals = np.linalg.eigvals(self.cvoigt)
        if any(eig < 0 for eig in eigvals):
            print "\nThe structure is unstable! (the Cij matrix has a negative eigenvalue)"
        else:
            print "\nThe structure is mechanically stable! (the Cij matrix has no negative eigenvalues)"
print "Eigenvalues of Cij matrix:\n", eigvals
def calc_poly_modulus(self):
s = self.svoigt
c = self.cvoigt
Br = ((s[0, 0]+s[1, 1]+s[2, 2])+2*(s[0, 1]+s[0, 2]+s[1, 2]))**(-1)
Bv = (1.0/9)*(c[0, 0]+c[1, 1]+c[2, 2]+2*(c[0, 1]+c[0, 2]+c[1, 2]))
B = 0.5*(Bv+Br)
Gr = 15*(4*(s[0, 0]+s[1, 1]+s[2, 2])-4 *
(s[0, 1]+s[0, 2]+s[1, 2])+3*(s[3, 3]+s[4, 4]+s[5, 5]))**(-1)
Gv = (1.0/15)*((c[0, 0]+c[1, 1]+c[2, 2])+3 *
(c[3, 3]+c[4, 4]+c[5, 5])-(c[0, 1]+c[0, 2]+c[1, 2]))
G = 0.5*(Gv+Gr)
E = 9*B*G/(3*B+G)
v = (3*B-2*G)/(6*B+2*G)
print "\nIsotropic Elastic Modulus (GPa):"
print "Young's Modulus: E=", "%6.2f" % E
print "Shear Modulus: G=", "%6.2f" % G
print "Bulk Modulus: B=", "%6.2f" % B
print "Poisson's Ratio: v=", "%6.2f" % v
# a and b are directional vectors
def a(self, theta, phi):
return np.sin(theta)*np.cos(phi), np.sin(theta)*np.sin(phi), np.cos(theta)
def b(self, theta, phi, chi):
return np.cos(theta)*np.cos(chi)*np.cos(phi)-np.sin(chi)*np.sin(phi), np.cos(theta)*np.cos(chi)*np.sin(phi)+np.sin(chi)*np.cos(phi), -np.sin(theta)*np.cos(chi)
def dir_youngs_moduli(self, angles):
theta, phi = angles
a = self.a(theta, phi)
youngs_moduli = 0
for i in range(3):
for j in range(3):
for k in range(3):
for l in range(3):
youngs_moduli += a[i]*a[j]*a[k] * \
a[l]*self.smat[i, j, k, l]
return 1.0/youngs_moduli
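    # Note: this evaluates E(theta, phi) = 1 / (a_i a_j a_k a_l S_ijkl), the
    # usual compliance-tensor contraction for the directional Young's modulus.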
def show_dir_youngs_modulus(self, angles):
        if len(angles) == 2:
angles = [float(ang) for ang in angles]
angles_rad = [float(ang)*np.pi/180 for ang in angles]
print "Young's Modulus along theta = %5.2f, phi = %5.2f : E = %6.2f" % (angles[0], angles[1], self.dir_youngs_moduli(angles_rad))
def dir_shear_moduli(self, angles):
theta, phi, chi = angles
a = self.a(theta, phi)
b = self.b(theta, phi, chi)
shear_moduli = 0
for i in range(3):
for j in range(3):
for k in range(3):
for l in range(3):
shear_moduli += a[i]*a[k]*b[j] * \
b[l]*self.smat[i, j, k, l]
return 1.0/(4*shear_moduli)
def minimum_elastic_moduli(self):
from scipy.optimize import minimize
x0 = np.array([0, 0, 0])
x1 = np.array([0, 0])
poisson_min = minimize(
self.dir_poisson_ratio, x0, method='Powell', options={'xtol': 1e-8}).x
shear_min = minimize(
self.dir_shear_moduli, x0, method='Nelder-Mead', options={'xtol': 1e-8}).x
youngs_min = minimize(
self.dir_youngs_moduli, x1, method='Powell', options={'xtol': 1e-8}).x
print "\nMinimum Modulus:\tEmin\tat\t(Theta,\tPhi)"
print "Young's Modulus:\t%6.2f\tat\t%6.2f\t%6.2f" % (self.dir_youngs_moduli(youngs_min), youngs_min[0]*180/np.pi, youngs_min[1]*180/np.pi)
print "Shear Modulus: \t%6.2f\tat\t%6.2f\t%6.2f" % (self.dir_shear_moduli(shear_min), shear_min[0]*180/np.pi, shear_min[1]*180/np.pi)
print "Poisson's Ratio:\t%6.2f\tat\t%6.2f\t%6.2f" % (self.dir_poisson_ratio(poisson_min), poisson_min[0]*180/np.pi, poisson_min[1]*180/np.pi)
def maximum_elastic_moduli(self):
from scipy.optimize import minimize
dir_poisson_ratio = lambda angles: -self.dir_poisson_ratio(angles)
dir_shear_moduli = lambda angles: -self.dir_shear_moduli(angles)
dir_youngs_moduli = lambda angles: -self.dir_youngs_moduli(angles)
x0 = np.array([0, 0, 0])
x1 = np.array([0, 0])
        # Maximize by minimizing the negated moduli; the optimizer's .x gives
        # the angles at the maximum (the previous -1 * ... negated the angles
        # themselves, which is not valid for low-symmetry crystals).
        poisson_max = minimize(
            dir_poisson_ratio, x0, method='Nelder-Mead',
            options={'xtol': 1e-8}).x
        shear_max = minimize(
            dir_shear_moduli, x0, method='Nelder-Mead',
            options={'xtol': 1e-8}).x
        youngs_max = minimize(
            dir_youngs_moduli, x1, method='Nelder-Mead',
            options={'xtol': 1e-8}).x
print "\nMaximum Modulus:\tEmax\tat\t(Theta,\tPhi)"
print "Young's Modulus:\t%6.2f\tat\t%6.2f\t%6.2f" % (self.dir_youngs_moduli(youngs_max), youngs_max[0]*180/np.pi, youngs_max[1]*180/np.pi)
print "Shear Modulus: \t%6.2f\tat\t%6.2f\t%6.2f" % (self.dir_shear_moduli(shear_max), shear_max[0]*180/np.pi, shear_max[1]*180/np.pi)
print "Poisson's Ratio:\t%6.2f\tat\t%6.2f\t%6.2f" % (self.dir_poisson_ratio(poisson_max), poisson_max[0]*180/np.pi, poisson_max[1]*180/np.pi)
def dir_poisson_ratio(self, angles):
theta, phi, chi = angles
a = self.a(theta, phi)
b = self.b(theta, phi, chi)
        p_up = 0
for i in range(3):
for j in range(3):
for k in range(3):
for l in range(3):
p_up += a[i]*a[j]*b[k]*b[l]*self.smat[i, j, k, l]
return -p_up*self.dir_youngs_moduli([theta, phi])
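    # Note: this is nu(theta, phi, chi) = -(a_i a_j b_k b_l S_ijkl) * E(theta, phi),
    # the directional Poisson's ratio for longitudinal direction a and
    # transverse direction b.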
def dir_lin_compress(self, angles):
theta, phi = angles
a = self.a(theta, phi)
lin_compress = 0
for i in range(3):
for j in range(3):
for k in range(3):
lin_compress += a[i]*a[j]*self.smat[i, j, k, k]
return lin_compress
def calc_dir_youngs_modulus(self, npt=200):
print "\nCalculating Directional Young's Modulus Data....."
e = open("e.dat", 'w')
p = np.linspace(0, 2*np.pi, npt)
t = np.linspace(0, np.pi, npt)
for theta in t:
for phi in p:
r = self.dir_youngs_moduli([theta, phi])
e.write("%6.4f %6.4f %6.4f %6.4f\n" % (
r*np.sin(theta)*np.cos(phi), r*np.sin(theta)*np.sin(phi), r*np.cos(theta), r))
e.write("\n")
e.close()
print "\nCalculating projections of Young's Modulus"
p = np.linspace(0, 2*np.pi, npt)
t = np.linspace(0, 2*np.pi, npt)
np.savetxt('e_xy.dat', np.c_[
p, self.dir_youngs_moduli([np.pi/2, p])], delimiter='\t', fmt='%6.4f')
np.savetxt('e_yz.dat', np.c_[
t, self.dir_youngs_moduli([t, np.pi/2])], delimiter='\t', fmt='%6.4f')
np.savetxt('e_xz.dat', np.c_[
t, self.dir_youngs_moduli([t, 0])], delimiter='\t', fmt='%6.4f')
print "Complete!"
def calc_dir_lin_compress(self, npt=200):
print "\nCalculating Directional Linear Compressibility Data....."
beta = open("beta.dat", 'w')
p = np.linspace(0, 2*np.pi, npt)
t = np.linspace(0, np.pi, npt)
for theta in t:
for phi in p:
r = self.dir_lin_compress([theta, phi])
beta.write("%6.4f %6.4f %6.4f %6.4f\n" % (
r*np.sin(theta)*np.cos(phi), r*np.sin(theta)*np.sin(phi), r*np.cos(theta), r))
beta.write("\n")
beta.close()
print "Complete!"
def write_cvoigt(self):
np.savetxt(
'cij.dat', np.array(self.cvoigt), delimiter='\t', fmt='%5.2f')
def write_svoigt(self):
np.savetxt(
'sij.dat', np.array(self.svoigt), delimiter='\t', fmt='%5.2f')
###################################################### Apply strain ######
def task_applystrain():
deformation = get_deform_file()
[p, bot, top] = readPoscar()
shutil.move(poscar, 'POSCAR_backup')
num_deform = len(deformation)
for index_deform in range(num_deform):
pos = apply_deform(p, deformation[index_deform])
writePoscar(pos, bot, top, 'POSCAR'+str(index_deform+1))
print "Applied deformation complete! (%d/%d)" % (index_deform+1, num_deform)
index_deform = index_deform+1
shutil.move('POSCAR_backup', poscar)
###################################################### Main Program ######
def main():
options = argparse.ArgumentParser(
description=info_text, formatter_class=argparse.RawTextHelpFormatter)
options.add_argument(
dest='task_opt', action='store', type=int, metavar='task_option',
help="Select task (required):\n"
"1) Calculation preparation\n"
"2) Extract data from DFT calculation and analysis\n"
"3) Read Cijs from cij.dat file and anaylsis\n"
"4) Read strain from pattern.dat and apply strain")
options.add_argument(
dest='crystSys', action='store', type=int, metavar='crystal_system',
help="Select crystal system (required):\n"
"1) Cubic\n"
"2) Tetragonal (4mm, -42m, 422, 4/mmm)\n"
"21) Tetragonal (4, -4, 4/m)\n"
"3) Orthorhombic\n"
"4) Monoclinic (beta <> 90)\n"
"41) Monoclinic (gamma <> 90)\n"
"5) Triclinic\n"
"6) Hexagonal\n"
"7) Trigonal (32, -3m, 3m)\n"
"71) Trigonal (3, -3)")
options.add_argument(
'-n', dest='num_calcPoint', action='store', type=int, metavar='num_calcPoint', default=4,
help="Number of calculation points per group of deformation (default: 4)")
options.add_argument(
'-e', dest='angles', action='append', metavar='theta_and_phi', default=[],
help="Calculate Young's Modulus along specific direction (theta, phi) (first theta then phi in degree)")
options.add_argument(
'-d', dest='delta', action='store', type=float, metavar='delta', default=0.005,
help="Magnitude of deformations intervals (default: 0.005 (0.5 percentage))")
options.add_argument(
'-cy', dest='isCalcDirYoung', action='store_true',
help="Analysis: Calculate directional Young\'s modulus")
options.add_argument(
'-cl', dest='isCalcDirLinCompress', action='store_true',
help="Analysis: Calculate directional linear compressiblity")
options.add_argument(
'-cm', dest='isCalcMaxMin', action='store_true',
help="Analysis: Find the maximum and minimum of directional elastic modulus")
options.add_argument(
'-no-cs', dest='isCheckStability', action='store_false',
help="Disable Analysis: Check stability")
options.add_argument(
'-no-cp', dest='isCalcPolyModulus', action='store_false',
help="Disable Analysis: Calculate polycrystalline elastic modulus")
options.add_argument(
        '-p', dest='isPrintCijs', action='store_false',
        help="Disable Analysis: Print Cij (printed by default)")
options.add_argument(
'-no-cw', dest='isWriteCijs', action='store_false',
help="Disable Analysis: Write Cij to cij.dat")
options.add_argument(
        '-debug', dest='isDebug', action='store_true',
        help="Debug flag to write all strain and stress data to stress.txt")
arguments = options.parse_args()
printInfo()
if arguments.task_opt == 1:
        try:
            preprocess(
                arguments.crystSys, arguments.num_calcPoint, arguments.delta)
        except Exception:
            # Restore the original POSCAR if preparation failed part-way.
            try:
                shutil.move('POSCAR_backup', poscar)
            except (IOError, OSError):
                pass
            sys.exit(1)
elif arguments.task_opt == 2:
postprocess(arguments)
elif arguments.task_opt == 3:
postprocess_read_cij(arguments)
elif arguments.task_opt == 4:
task_applystrain()
elif arguments.task_opt == 5:
postprocess_test(arguments)
else:
print "Error in Selection!"
sys.exit(1)
# Invoke main program
if __name__ == '__main__':
main()
| dengzeyu/Elastic | elasticity.py | Python | mit | 43,961 | ["CRYSTAL", "VASP"] | 38d897452f1c231b3c9d9736f61eca0ae480e1d63e4f9fde9a238f237d4cbd19 |
"""
================================================
Kernel Density Estimate of Species Distributions
================================================
This shows an example of a neighbors-based query (in particular a kernel
density estimate) on geospatial data, using a Ball Tree built upon the
Haversine distance metric -- i.e. distances over points in latitude/longitude.
The dataset is provided by Phillips et al. (2006).
If available, the example uses
`basemap <http://matplotlib.sourceforge.net/basemap/doc/html/>`_
to plot the coast lines and national boundaries of South America.
This example does not perform any learning over the data
(see :ref:`example_applications_plot_species_distribution_modeling.py` for
an example of classification based on the attributes in this dataset). It
simply shows the kernel density estimate of observed data points in
geospatial coordinates.
The two species are:
- `"Bradypus variegatus"
<http://www.iucnredlist.org/apps/redlist/details/3038/0>`_ ,
the Brown-throated Sloth.
- `"Microryzomys minutus"
<http://www.iucnredlist.org/apps/redlist/details/13408/0>`_ ,
   also known as the Forest Small Rice Rat, a rodent that lives in
   Colombia, Ecuador, Peru, and Venezuela.
References
----------
* `"Maximum entropy modeling of species geographic distributions"
<http://www.cs.princeton.edu/~schapire/papers/ecolmod.pdf>`_
S. J. Phillips, R. P. Anderson, R. E. Schapire - Ecological Modelling,
190:231-259, 2006.
"""
# Author: Jake Vanderplas <jakevdp@cs.washington.edu>
#
# License: BSD 3 clause
import matplotlib.pyplot as plt
import numpy as np
from sklearn.datasets import fetch_species_distributions
from sklearn.datasets.species_distributions import construct_grids
from sklearn.neighbors import KernelDensity
# if basemap is available, we'll use it.
# otherwise, we'll improvise later...
try:
from mpl_toolkits.basemap import Basemap
basemap = True
except ImportError:
basemap = False
# Get matrices/arrays of species IDs and locations
data = fetch_species_distributions()
species_names = ['Bradypus Variegatus', 'Microryzomys Minutus']
Xtrain = np.vstack([data['train']['dd lat'],
data['train']['dd long']]).T
ytrain = np.array([d.decode('ascii').startswith('micro')
for d in data['train']['species']], dtype='int')
Xtrain *= np.pi / 180. # Convert lat/long to radians
# Set up the data grid for the contour plot
xgrid, ygrid = construct_grids(data)
X, Y = np.meshgrid(xgrid[::5], ygrid[::5][::-1])
land_reference = data.coverages[6][::5, ::5]
land_mask = (land_reference > -9999).ravel()
xy = np.vstack([Y.ravel(), X.ravel()]).T
xy = xy[land_mask]
xy *= np.pi / 180.
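# (scikit-learn's 'haversine' metric expects (latitude, longitude) pairs in
# radians, hence the degree-to-radian conversions of Xtrain and xy above.)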
# Plot map of South America with distributions of each species
fig = plt.figure()
fig.subplots_adjust(left=0.05, right=0.95, wspace=0.05)
for i in range(2):
plt.subplot(1, 2, i + 1)
# construct a kernel density estimate of the distribution
print(" - computing KDE in spherical coordinates")
kde = KernelDensity(bandwidth=0.04, metric='haversine',
kernel='gaussian', algorithm='ball_tree')
kde.fit(Xtrain[ytrain == i])
# evaluate only on the land: -9999 indicates ocean
Z = -9999 + np.zeros(land_mask.shape[0])
Z[land_mask] = np.exp(kde.score_samples(xy))
Z = Z.reshape(X.shape)
# plot contours of the density
levels = np.linspace(0, Z.max(), 25)
plt.contourf(X, Y, Z, levels=levels, cmap=plt.cm.Reds)
if basemap:
print(" - plot coastlines using basemap")
m = Basemap(projection='cyl', llcrnrlat=Y.min(),
urcrnrlat=Y.max(), llcrnrlon=X.min(),
urcrnrlon=X.max(), resolution='c')
m.drawcoastlines()
m.drawcountries()
else:
print(" - plot coastlines from coverage")
plt.contour(X, Y, land_reference,
levels=[-9999], colors="k",
linestyles="solid")
plt.xticks([])
plt.yticks([])
plt.title(species_names[i])
plt.show()
| DailyActie/Surrogate-Model | 01-codes/scikit-learn-master/examples/neighbors/plot_species_kde.py | Python | mit | 4,061 | ["Gaussian"] | c16471670053c97fbf1141106dfdec50f64a25164a7e165fa7f03ca29c3e8d47 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import copy
import functools
import itertools
import multiprocessing.pool
import sys
import time
import weakref
from absl.testing import parameterized
import numpy
from tensorflow.core.protobuf import config_pb2
from tensorflow.core.protobuf import rewriter_config_pb2
from tensorflow.python.autograph.core import ag_ctx
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.eager import backprop
from tensorflow.python.eager import cancellation
from tensorflow.python.eager import context
from tensorflow.python.eager import def_function
from tensorflow.python.eager import function
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import config
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import func_graph
from tensorflow.python.framework import function as tf_function
from tensorflow.python.framework import indexed_slices
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import test_ops
from tensorflow.python.framework import test_util
from tensorflow.python.framework import type_spec
from tensorflow.python.layers import convolutional
from tensorflow.python.module import module
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import gen_functional_ops
from tensorflow.python.ops import gen_random_ops
from tensorflow.python.ops import gen_resource_variable_ops
from tensorflow.python.ops import gen_sendrecv_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import list_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import string_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.ops.ragged import ragged_factory_ops
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.ops.structured import structured_tensor
from tensorflow.python.platform import test
from tensorflow.python.training import training_ops
from tensorflow.python.util import compat
from tensorflow.python.util import nest
from tensorflow.python.util import tf_inspect
try:
import attr # pylint:disable=g-import-not-at-top
except ImportError:
attr = None
def total_function_cache(defined):
# pylint: disable=protected-access
return (set(defined._function_cache.primary)
| set(defined._function_cache.arg_relaxed))
# pylint: enable=protected-access
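# Usage sketch (mirrors the assertions in the tests below): the cache size
# counts how many concrete traces a traced function has accumulated, e.g.
#
#   @function.defun
#   def f(x):
#     return x + 1
#   f(constant_op.constant(1.0))
#   assert len(total_function_cache(f)) == 1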
def _example_indexed_slices_with_dense_shape():
return indexed_slices.IndexedSlices(
constant_op.constant([1, 2]), constant_op.constant([0, 1]),
constant_op.constant([2]))
def _example_indexed_slices_without_dense_shape():
return indexed_slices.IndexedSlices(
constant_op.constant([1, 2]), constant_op.constant([0, 1]))
def _spec_for_value(value):
"""Returns the (nested) TypeSpec for a value."""
if nest.is_sequence(value):
return nest.map_structure(_spec_for_value, value)
elif isinstance(value, (ops.Tensor, composite_tensor.CompositeTensor)):
return type_spec.type_spec_from_value(value)
else:
return value
class FunctionTest(test.TestCase, parameterized.TestCase):
def setUp(self):
super(FunctionTest, self).setUp()
cpus = config.list_physical_devices('CPU')
# Set 4 virtual CPUs
config.set_logical_device_configuration(cpus[0], [
context.LogicalDeviceConfiguration(),
context.LogicalDeviceConfiguration(),
context.LogicalDeviceConfiguration(),
context.LogicalDeviceConfiguration()
])
def testBasic(self):
matmul = def_function.function(math_ops.matmul)
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
sq = matmul(t, t, transpose_a=True)
sq2 = matmul(sq, t, transpose_a=True)
self.assertAllEqual(sq.numpy().reshape(-1), [10, 14, 14, 20])
self.assertAllEqual(sq2.numpy().reshape(-1), [52, 76, 74, 108])
def testOnExitCallback(self):
values = []
def append_1():
values.append(1)
def append_2():
values.append(2)
def g(x):
old_values = list(values)
ops.add_exit_callback_to_default_func_graph(append_1)
self.assertEqual(old_values, values)
return x + 1
tf_g = def_function.function(g)
def f(x):
old_values = list(values)
ops.add_exit_callback_to_default_func_graph(append_2)
self.assertEqual(old_values, values)
return tf_g(x)
tf_f = def_function.function(f)
self.assertEmpty(values)
tf_f(constant_op.constant(1.0))
self.assertEqual(values, [1, 2]) # Once for g, once for f.
tf_f(constant_op.constant([1.0])) # force a retrace
self.assertEqual(values, [1, 2, 1, 2]) # And again.
def testCannotAddExitCallbackWhenNotInFunctionScope(self):
with self.assertRaisesRegex(RuntimeError, 'when not building a function.'):
ops.add_exit_callback_to_default_func_graph(lambda: None)
def testVariable(self):
v1 = variables.Variable(1.0)
add = def_function.function(lambda x, v: x + v1 + v)
v2 = variables.Variable(1.0)
x = constant_op.constant(1.0)
r = add(x, v2)
self.assertEqual(3.0, self.evaluate(r))
def testVariableOnly(self):
v = variables.Variable(1.0)
add = def_function.function(lambda x: x.assign_add(1.0))
r1 = add(v)
self.assertEqual(2.0, self.evaluate(r1))
c = constant_op.constant(1.0)
with self.assertRaisesRegex(AttributeError, 'no attribute'):
add(c)
def testPackedVariable(self):
with ops.device('/cpu:0'):
v0_0 = resource_variable_ops.ResourceVariable(1.0)
with ops.device('/cpu:1'):
v0_1 = resource_variable_ops.ResourceVariable(2.0)
v1_0 = resource_variable_ops.ResourceVariable(3.0)
with ops.device('/cpu:2'):
v1_1 = resource_variable_ops.ResourceVariable(4.0)
packed_var_0 = ops.pack_eager_tensors([v0_0.handle, v0_1.handle])
packed_var_1 = ops.pack_eager_tensors([v1_0.handle, v1_1.handle])
# TODO(b/145922293): use ResourceVariable.assign_add and
# ResourceVariable.read_value directly once we support packing multiple
# ResourceVariable into one ResourceVariable.
@def_function.function
def read_var():
resource_variable_ops.assign_add_variable_op(
packed_var_0, constant_op.constant(5.0))
resource_variable_ops.assign_add_variable_op(
packed_var_1, constant_op.constant(6.0))
with ops.device('/cpu:0'):
read0 = resource_variable_ops.read_variable_op(
packed_var_0, dtype=dtypes.float32)
with ops.device('/cpu:1'):
read1 = resource_variable_ops.read_variable_op(
packed_var_0, dtype=dtypes.float32)
read2 = resource_variable_ops.read_variable_op(
packed_var_1, dtype=dtypes.float32)
with ops.device('/cpu:2'):
read3 = resource_variable_ops.read_variable_op(
packed_var_1, dtype=dtypes.float32)
return read0, read1, read2, read3
arg_attrs = read_var.get_concrete_function().function_def.arg_attr
self.assertLen(arg_attrs, 2)
self.assertEqual(arg_attrs[0].attr['_composite_device'].s,
compat.as_bytes(packed_var_0.device))
self.assertEqual(arg_attrs[1].attr['_composite_device'].s,
compat.as_bytes(packed_var_1.device))
self.assertAllEqual(read_var(), (1 + 5, 2 + 5, 3 + 6, 4 + 6))
def testImplementsAttributeBasic(self):
v = def_function.function(
experimental_implements='func')(lambda x, y: x + y)
with context.graph_mode(), self.cached_session():
a = array_ops.placeholder(dtypes.float32, ())
b = array_ops.placeholder(dtypes.float32, ())
v(a, b)
gradients_impl.gradients(v(a, b), [a, b])
fdefs = ops.get_default_graph().as_graph_def().library.function
self.assertLen(fdefs, 3)
not_present = 0
present = 0
for f in fdefs:
name = f.signature.name
if 'forward' in name or 'backward' in name:
not_present += 1
self.assertNotIn(function.IMPLEMENTS_ATTRIBUTE_NAME, f.attr, f)
else:
present += 1
self.assertEqual(f.attr[function.IMPLEMENTS_ATTRIBUTE_NAME].s,
'func'.encode('ascii'), f)
self.assertEqual(not_present, 2, fdefs)
self.assertEqual(present, 1, fdefs)
def testImplementsAttributeAssertsOnSideInput(self):
with context.graph_mode(), self.cached_session():
z = array_ops.zeros(0)
v = def_function.function(
experimental_implements='func')(lambda x, y: x + y + z)
a = array_ops.ones((1.0,))
b = array_ops.ones((1.0,))
with self.assertRaisesRegex(AssertionError,
'variables are always captured'):
v(a, b)
functions = ops.get_default_graph().as_graph_def().library.function
self.assertEmpty(functions)
def testImplementsAttributeWorksOnVariables(self):
with context.graph_mode(), self.cached_session():
v = def_function.function(
experimental_implements='func')(lambda x, y: x + y)
a = variables.Variable((1.0,))
b = variables.Variable((1.0,))
r1 = v(a, b)
_ = v(a, a)
functions = ops.get_default_graph().as_graph_def().library.function
# Verify that we created only one function
self.assertLen(functions, 1)
# Verify that eval() reads the current values.
a.initializer.run()
b.initializer.run()
self.assertEqual(r1.eval(), 2)
a.assign_add([1]).eval()
self.assertEqual(r1.eval(), 3)
def testImplementsAttributeWorksOnConstants(self):
with context.graph_mode(), self.cached_session():
v = def_function.function(
experimental_implements='func')(lambda x, y: x + y)
a = variables.Variable(1.0)
r1 = v(a, 2.)
r2 = v(2., a)
functions = ops.get_default_graph().as_graph_def().library.function
self.assertLen(functions, 1)
self.assertLen(functions[0].signature.input_arg, 2)
# Verify that eval() reads the current values.
a.initializer.run()
self.assertEqual(r1.eval(), 3)
self.assertEqual(r2.eval(), 3)
def testImplementsAttributeSpecializes(self):
with context.graph_mode(), self.cached_session():
v = def_function.function(
experimental_implements='func')(lambda x, y: x + y)
a = variables.Variable(1.0)
r1 = v(a, [2.])
r2 = v([2., 2], a)
functions = ops.get_default_graph().as_graph_def().library.function
self.assertLen(functions, 2)
# Ensure that all parameters are still there and haven't been inlined!
self.assertLen(functions[0].signature.input_arg, 2)
self.assertLen(functions[1].signature.input_arg, 2)
# Verify that eval() reads the current values.
a.initializer.run()
numpy.testing.assert_equal(r1.eval(), [3.])
numpy.testing.assert_equal(r2.eval(), [3., 3.])
def testImplementsAttributeAsNameAttrList(self):
implements_attr = (
'name: "embedding_matmul" attr { key: "key1" value { i: 2 } '
'} attr { key: "key2" value { b: false } }')
v = def_function.function(
experimental_implements=implements_attr)(lambda x, y: x + y)
with context.graph_mode(), self.cached_session():
a = array_ops.placeholder(dtypes.float32, ())
b = array_ops.placeholder(dtypes.float32, ())
v(a, b)
gradients_impl.gradients(v(a, b), [a, b])
fdefs = ops.get_default_graph().as_graph_def().library.function
self.assertLen(fdefs, 3)
not_present = 0
present = 0
for f in fdefs:
name = f.signature.name
if 'forward' in name or 'backward' in name:
not_present += 1
self.assertNotIn(function.IMPLEMENTS_ATTRIBUTE_NAME, f.attr, f)
else:
present += 1
attr_value = f.attr[function.IMPLEMENTS_ATTRIBUTE_NAME]
self.assertIsNotNone(attr_value.func, f)
self.assertEqual(attr_value.func.name, 'embedding_matmul')
name_attrs = attr_value.func.attr
self.assertLen(name_attrs, 2)
self.assertEqual(not_present, 2, fdefs)
self.assertEqual(present, 1, fdefs)
def testExternalControlDependency(self):
with ops.Graph().as_default(), self.test_session():
v = variables.Variable(1.0)
v.initializer.run()
op = v.assign_add(1.0)
@function.defun
def f():
with ops.control_dependencies([op]):
return 1.0
self.evaluate(f())
self.assertAllEqual(self.evaluate(v), 2.0)
def testInputShapeFunctionRelaxation(self):
unknown_dim = [False]
@function.defun(experimental_relax_shapes=True)
def func(a):
if a._shape_tuple()[0] is None:
unknown_dim[0] = True
return a + 1
func(constant_op.constant([]))
self.assertFalse(unknown_dim[0])
self.assertLen(total_function_cache(func), 1)
func(constant_op.constant([1.0]))
self.assertFalse(unknown_dim[0])
self.assertLen(total_function_cache(func), 2)
func(constant_op.constant([1.0, 2.0]))
self.assertTrue(unknown_dim[0])
self.assertLen(total_function_cache(func), 2)
def testInputShapeRelaxationOnInstanceMethod(self):
    # Test that experimental_relax_shapes is passed during
    # instance method binding.
unknown_dim = [False]
class Foo(object):
@def_function.function(experimental_relax_shapes=True)
def func(self, a):
if a._shape_tuple()[0] is None:
unknown_dim[0] = True
return a + 1
foo = Foo()
foo.func(constant_op.constant([]))
self.assertFalse(unknown_dim[0])
foo.func(constant_op.constant([1.0]))
self.assertFalse(unknown_dim[0])
foo.func(constant_op.constant([1.0, 2.0]))
self.assertTrue(unknown_dim[0])
def testInputShapeFunctionRelaxationWithRaggedTensors(self):
traced_type_spec = [None]
@def_function.function(experimental_relax_shapes=True)
def func(x):
traced_type_spec[0] = x._type_spec
return x
def check_trace(x, expected_trace):
traced_type_spec[0] = None
func(x)
self.assertEqual(traced_type_spec[0], expected_trace)
check_trace( # Initial call gets traced.
ragged_factory_ops.constant([[1], [2, 3, 4]]),
ragged_tensor.RaggedTensorSpec([2, None], dtypes.int32))
check_trace( # Input TypeSpec is the same -> no retrace.
ragged_factory_ops.constant([[1, 2], [3, 4]]), None)
check_trace( # Even if component tensor shapes change -> no retrace.
ragged_factory_ops.constant([[1, 2], [3, 4, 5, 6]]), None)
check_trace( # Different TypeSpec shape (nrows): retrace
ragged_factory_ops.constant([[1], [2], [3]]),
ragged_tensor.RaggedTensorSpec([3, None], dtypes.int32))
check_trace( # Different nrows again: relax & retrace
ragged_factory_ops.constant([[1], [2], [3], [4]]),
ragged_tensor.RaggedTensorSpec([None, None], dtypes.int32))
    check_trace(  # Different nrows yet again: no retrace (shape already relaxed)
ragged_factory_ops.constant([[1]]), None)
check_trace( # Different ragged_rank: retrace
ragged_factory_ops.constant([[[1]]]),
ragged_tensor.RaggedTensorSpec([1, None, None], dtypes.int32))
check_trace( # Different ragged_rank again: retrace & relax
ragged_factory_ops.constant([[[1]], [[2]]]),
ragged_tensor.RaggedTensorSpec([None, None, None], dtypes.int32))
def testInputShapeFunctionRelaxationWithStructuredTensors(self):
traced_type_spec = [None]
@def_function.function(experimental_relax_shapes=True)
def func(x):
traced_type_spec[0] = x._type_spec
return x
def check_trace(x, expected_trace):
traced_type_spec[0] = None
func(x)
self.assertEqual(traced_type_spec[0], expected_trace)
# If we have TypeSpecs that differ in ways other than just their shape,
# then retrace each time.
check_trace(
structured_tensor.StructuredTensor.from_pyval({'a': [1]}),
structured_tensor.StructuredTensorSpec(
[], {'a': tensor_spec.TensorSpec((1,), dtypes.int32)}))
check_trace(
structured_tensor.StructuredTensor.from_pyval({'b': [1]}),
structured_tensor.StructuredTensorSpec(
[], {'b': tensor_spec.TensorSpec((1,), dtypes.int32)}))
check_trace(
structured_tensor.StructuredTensor.from_pyval({'c': [1]}),
structured_tensor.StructuredTensorSpec(
[], {'c': tensor_spec.TensorSpec((1,), dtypes.int32)}))
    # But if we call again with only the shape differing, relaxation kicks in:
check_trace( # retrace
structured_tensor.StructuredTensor.from_pyval({'a': [1, 2]}),
structured_tensor.StructuredTensorSpec(
[], {'a': tensor_spec.TensorSpec((2,), dtypes.int32)}))
check_trace( # relax & retrace
structured_tensor.StructuredTensor.from_pyval({'a': [1, 2, 3]}),
structured_tensor.StructuredTensorSpec(
[], {'a': tensor_spec.TensorSpec((None,), dtypes.int32)}))
check_trace( # use relaxed graph
structured_tensor.StructuredTensor.from_pyval({'a': [1, 2, 3, 4]}),
None)
def testInputShapeFunctionRelaxationWithDatasetIterators(self):
# For dataset iterators, the TypeSpec includes type information that's
# not derivable from the component tensors. Make sure that the TypeSpec
# shapes get relaxed as appropriate.
traced_type_spec = [None]
@def_function.function(experimental_relax_shapes=True)
def func(x):
traced_type_spec[0] = x._type_spec
return x
def check_trace(x, expected_trace):
traced_type_spec[0] = None
func(x)
self.assertEqual(traced_type_spec[0], expected_trace)
ds_1_2 = dataset_ops.DatasetV2.from_tensors(array_ops.zeros([1, 2]))
ds_2_2 = dataset_ops.DatasetV2.from_tensors(array_ops.zeros([2, 2]))
ds_3_2 = dataset_ops.DatasetV2.from_tensors(array_ops.zeros([3, 2]))
ds_4_2 = dataset_ops.DatasetV2.from_tensors(array_ops.zeros([4, 2]))
ds_2_1 = dataset_ops.DatasetV2.from_tensors(array_ops.zeros([2, 1]))
check_trace( # shape=[1, 2]: retrace
dataset_ops.make_one_shot_iterator(ds_1_2),
iterator_ops.IteratorSpec(
tensor_spec.TensorSpec([1, 2], dtypes.float32)))
check_trace( # shape=[1, 2]: no retrace (use the [1, 2] graph)
dataset_ops.make_one_shot_iterator(ds_1_2), None)
check_trace( # shape=[2, 2]: retrace
dataset_ops.make_one_shot_iterator(ds_2_2),
iterator_ops.IteratorSpec(
tensor_spec.TensorSpec([2, 2], dtypes.float32)))
check_trace( # shape=[3, 2]: relax to [None, 2] and retrace
dataset_ops.make_one_shot_iterator(ds_3_2),
iterator_ops.IteratorSpec(
tensor_spec.TensorSpec([None, 2], dtypes.float32)))
check_trace( # shape=[4, 2]: no retrace (use the [None, 2] graph)
dataset_ops.make_one_shot_iterator(ds_4_2), None)
check_trace( # shape=[2, 1]: relax to [None, None] and retrace
dataset_ops.make_one_shot_iterator(ds_2_1),
iterator_ops.IteratorSpec(
tensor_spec.TensorSpec([None, None], dtypes.float32)))
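  # A concrete function tracks every variable it captures, including one
  # created lazily during the first trace; `trainable_variables` omits the
  # variable created with trainable=False.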
def testCapturesVariables(self):
a = variables.Variable(1.0, trainable=False)
b = variables.Variable(1.0)
cc = [None]
@def_function.function
def f():
c = cc[0]
if c is None:
c = cc[0] = variables.Variable(1.)
return a + b + c + 1
cf = f.get_concrete_function()
c = cc[0]
captured_variables = {v.ref() for v in (a, b, c)}
trainable_variables = {v.ref() for v in (b, c)}
self.assertEqual({v.ref() for v in cf.variables}, captured_variables)
self.assertEqual({v.ref() for v in cf.trainable_variables},
trainable_variables)
self.assertEqual(cf.variables, cf.graph.variables)
self.assertEqual(cf.trainable_variables, cf.graph.trainable_variables)
def testNestedInputShapeFunctionRelaxation(self):
unknown_dim = [False]
@function.defun(experimental_relax_shapes=True)
def func(a_, b_=None):
del a_ # Only used to check which cache is used.
self.assertEqual(b_[0]._shape_tuple(), ())
if b_[1]._shape_tuple()[0] is None:
unknown_dim[0] = True
return b_[0] + 1
a = 'hi'
b0 = constant_op.constant(1.0)
func(a, b_=[b0, constant_op.constant([])])
self.assertFalse(unknown_dim[0])
self.assertLen(total_function_cache(func), 1)
func(a, b_=[b0, constant_op.constant([1.0])])
self.assertFalse(unknown_dim[0])
self.assertLen(total_function_cache(func), 2)
func(a, b_=[b0, constant_op.constant([1.0, 1.0])])
self.assertTrue(unknown_dim[0])
self.assertLen(total_function_cache(func), 2)
unknown_dim[0] = False
    # Now do the same, except with a new value of `a` which is not a tensor;
    # this should change the cache key.
a = 'bye'
func(a, b_=[b0, constant_op.constant([])])
self.assertFalse(unknown_dim[0])
self.assertLen(total_function_cache(func), 3)
# Since we already marked a cache miss for a function with the same
# non-input signatures, here we will immediately start relaxing shapes.
func(a, b_=[b0, constant_op.constant([1.0])])
self.assertTrue(unknown_dim[0])
self.assertLen(total_function_cache(func), 3)
def testNestedShapeFunctionRelaxation(self):
got_shape = [None]
# The inner function will go through shape relaxation because the shapes it
# receives will be [1], [2], [3], ...
@def_function.function(experimental_relax_shapes=True)
def bar(x_shape):
got_shape[0] = x_shape._shape_tuple()
return x_shape
# The outer function will not go through shape relaxation because the shapes
# it receives will be [1], [[1]], [[[1]]], ...
@def_function.function(experimental_relax_shapes=True)
def foo(ones):
return bar(array_ops.shape(ones))
for rank in range(1, 6):
x_shape = self.evaluate(foo(array_ops.ones([1] * rank)))
self.assertAllEqual(x_shape, [1] * rank)
if rank < 3:
self.assertEqual(got_shape[0], (rank,))
else:
self.assertEqual(got_shape[0], (None,))
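  # Inputs must be encodable as cache keys; an unsupported argument type such
  # as `set` is rejected with a ValueError that names the offending type.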
def testNoHash(self):
@def_function.function()
def f(_):
return 1.0
with self.assertRaisesRegex(ValueError, r'Got type: set'):
f(set([]))
def testFuncName(self):
@function.defun_with_attributes(attributes={'func_name': 'multiply'})
def add(x, y):
_ = x * y
return x + y
@function.defun
def add_2(x, y):
_ = x * y
return x + y
self.assertEqual(add._name, 'multiply')
self.assertEqual(add_2._name, 'add_2')
def testBasicGraphMode(self):
matmul = def_function.function(math_ops.matmul)
@def_function.function
def sq(a):
return matmul(a, a)
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
out = sq(t)
self.assertAllEqual(out, math_ops.matmul(t, t).numpy())
def testNestedInputsGraphMode(self):
matmul = def_function.function(math_ops.matmul)
pair = collections.namedtuple('pair', ['a', 'b'])
@def_function.function
def a_times_b(inputs):
return matmul(inputs.a['a'], inputs.b['b'])
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
out = a_times_b(pair({'a': t}, {'b': t}))
self.assertAllEqual(out, math_ops.matmul(t, t).numpy())
def testNestedOutputsGraphMode(self):
matmul = def_function.function(math_ops.matmul)
pair = collections.namedtuple('pair', ['a', 'b'])
@def_function.function()
def pairs_mul(pair_a, pair_b):
return pair(matmul(pair_a.a, pair_b.a), matmul(pair_a.b, pair_b.b))
a = constant_op.constant([[1.0, 2.0], [1.0, 2.0]])
b = constant_op.constant([[3.0, 4.0], [3.0, 4.0]])
out = pairs_mul(pair(a, b), pair(b, a))
expected = pair(math_ops.matmul(a, b).numpy(),
math_ops.matmul(b, a).numpy())
self.assertAllClose(out, expected)
@parameterized.named_parameters(
dict(testcase_name='Defun',
function_decorator=function.defun),
dict(testcase_name='DefFunction',
function_decorator=def_function.function))
def testNestedFunctionGraphNotOutOfDate(self, function_decorator):
@function_decorator
def f():
return constant_op.constant(1.)
class _Model(object):
@function_decorator
def g(self):
self.f = f.get_concrete_function()
model = _Model()
model.g()
concrete = model.f
weak_g_graph = weakref.ref(model.g.get_concrete_function().graph)
self.assertIs(weak_g_graph(), concrete.graph.outer_graph)
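    # Dropping the model releases `g` and its graph, but the concrete
    # function's outer graph must not dangle: it falls back to the current
    # default graph.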
weak_g = weakref.ref(model.g)
del model
self.assertIsNone(weak_g())
self.assertIsNone(weak_g_graph())
self.assertIsNotNone(concrete.graph.outer_graph)
self.assertIs(ops.get_default_graph(), concrete.graph.outer_graph)
def testGraphEagerIsolation(self):
@function.defun
def f():
self.v = variables.Variable(1.0)
return self.v.read_value()
self.assertAllEqual(f(), 1.0)
with ops.Graph().as_default():
self.assertEqual(f().shape, ())
def testBasicGraphFunction(self):
matmul = def_function.function(math_ops.matmul)
@def_function.function
def sq(a):
return matmul(a, a)
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
sq_op = sq.get_concrete_function(t)
self.assertEqual(sq_op.output_shapes, tensor_shape.TensorShape([2, 2]))
out = sq_op(t)
self.assertAllEqual(out, math_ops.matmul(t, t).numpy())
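  # get_concrete_function must be thread-safe: 100 concurrent calls with the
  # same (empty) signature should all resolve to a single concrete function.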
def testGetConcreteFunctionThreadSafety(self):
@def_function.function
def sq():
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
return math_ops.matmul(t, t)
concrete_functions = []
def thread_func(_):
cf = sq.get_concrete_function()
concrete_functions.append(cf)
num_threads = 100
pool = multiprocessing.pool.ThreadPool(num_threads)
_ = pool.map(thread_func, list(range(num_threads)))
self.assertLen(set(concrete_functions), 1)
def testGetConcreteFunctionThreadSafetyWithArgs(self):
@def_function.function
def add_100(*args):
return math_ops.add_n(args)
p = multiprocessing.pool.ThreadPool(2)
args = (constant_op.constant(1.),) * 100
f1, f2 = p.map(add_100.get_concrete_function, [args] * 2)
# I see about len(args) + max(0, len(args) - 3) arguments expected.
f1(*args)
del f2
def testInputSpecGraphFunction(self):
matmul = def_function.function(math_ops.matmul)
@def_function.function
def sq(a):
return matmul(a, a)
sq_op = sq.get_concrete_function(
tensor_spec.TensorSpec((None, None), dtypes.float32))
self.assertEqual([None, None], sq_op.output_shapes.as_list())
t1 = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
out1 = sq_op(t1)
self.assertAllEqual(out1, math_ops.matmul(t1, t1).numpy())
t2 = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
out2 = sq_op(t2)
self.assertAllEqual(out2, math_ops.matmul(t2, t2).numpy())
def testNestedInputSpecGraphFunction(self):
matmul = def_function.function(math_ops.matmul)
@def_function.function
def sq(mats):
((a, b),) = mats
return matmul(a, b)
sq_op_autonamed = sq.get_concrete_function(
[(tensor_spec.TensorSpec((None, None), dtypes.float32),
tensor_spec.TensorSpec((None, None), dtypes.float32))])
self.assertEqual([None, None], sq_op_autonamed.output_shapes.as_list())
sq_op = sq.get_concrete_function(
[(tensor_spec.TensorSpec((None, None), dtypes.float32,
name='first_mat'),
tensor_spec.TensorSpec((None, None), dtypes.float32,
name='second_mat'))])
self.assertEqual([None, None], sq_op.output_shapes.as_list())
t1 = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
t2 = constant_op.constant([[1.4, 2.4], [3.4, 4.4]])
out = sq_op(first_mat=t1, second_mat=t2)
self.assertAllEqual(out, math_ops.matmul(t1, t2).numpy())
self.assertAllEqual(sq_op_autonamed(t1, t2),
math_ops.matmul(t1, t2).numpy())
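  # A traced function without state can safely be executed from many threads
  # at once; every call just multiplies its input by 2.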
def testExecutingStatelessDefunConcurrently(self):
@def_function.function
def stateless(x):
return math_ops.multiply(2.0, x)
pool = multiprocessing.pool.ThreadPool()
inputs = [constant_op.constant(1.0 * x) for x in range(100)]
outputs = [float(out) for out in pool.map(stateless, inputs)]
expected = [float(2.0 * x) for x in inputs]
self.assertSequenceEqual(outputs, expected)
def testExecutingManyStatelessDefunsConcurrently(self):
@def_function.function
def stateless(x):
del x
return math_ops.multiply(2.0, 2.0)
pool = multiprocessing.pool.ThreadPool()
# `pool.map` below instantiates 100 functions, one for each object.
objects = [object() for _ in range(100)]
outputs = [float(out) for out in pool.map(stateless, objects)]
expected = [4.0] * 100
self.assertSequenceEqual(outputs, expected)
def testExecutingStatefulDefunConcurrently(self):
v = resource_variable_ops.ResourceVariable(1.0)
@def_function.function
def stateful(x):
v.assign(x)
pool = multiprocessing.pool.ThreadPool()
inputs = [constant_op.constant(0.0)] * 100
pool.map(stateful, inputs)
self.assertEqual(float(v.read_value()), 0.0)
def testExecutingManyStatefulDefunsConcurrently(self):
v = resource_variable_ops.ResourceVariable(1.0)
@def_function.function
def stateful(x):
del x
return v.assign(0.0)
pool = multiprocessing.pool.ThreadPool()
# `pool.map` below instantiates 100 functions, one for each object.
pool.map(stateful, [object() for _ in range(100)])
self.assertEqual(float(v.read_value()), 0.0)
def testShareRendezvous(self):
    # Prevent grappler from inlining the functions. Note that we run the send
    # & recv in graph mode, since in eager mode the function would
    # automatically be inlined.
context.context().set_optimizer_experimental_options(
{'disable_meta_optimizer': True})
cpu = '/device:CPU:0'
signature = [tensor_spec.TensorSpec([], dtypes.int32)]
@def_function.function
def send():
x = constant_op.constant(1)
gen_sendrecv_ops.send(x, 'x', cpu, 0, cpu)
return x
send._shared_rendezvous = True # pylint: disable=protected-access
@def_function.function(input_signature=signature)
def send_body(n):
send()
return n - 1
@def_function.function
def recv():
return gen_sendrecv_ops.recv(dtypes.int32, 'x', cpu, 0, cpu)
recv._shared_rendezvous = True # pylint: disable=protected-access
@def_function.function(input_signature=signature)
def recv_body(n):
recv()
return n - 1
@def_function.function(input_signature=signature)
def cond(n):
return n > 0
# Instead of calling the send & recv functions directly we want to call them
# through a functional while to ensure the rendezvous is shared across the
# while boundary.
@def_function.function
def fn(n):
functional_ops.While([n], cond.get_concrete_function(),
send_body.get_concrete_function())
return functional_ops.While([n], cond.get_concrete_function(),
recv_body.get_concrete_function())
# Use a graph context since functions will not be automatically inlined
with context.graph_mode(), self.cached_session():
self.evaluate(fn(2))
def disabled_testRandomSeed(self):
@def_function.function
def f():
return random_ops.random_normal(())
random_seed.set_random_seed(1)
x = f()
self.assertNotEqual(x, f())
random_seed.set_random_seed(1)
self.assertAllEqual(f(), x)
def testNestedInputsGraphFunction(self):
matmul = def_function.function(math_ops.matmul)
pair = collections.namedtuple('pair', ['a', 'b'])
@def_function.function
def a_times_b(inputs):
return matmul(inputs.a['a'], inputs.b['b'])
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
sq_op = a_times_b.get_concrete_function(
pair(dict(a=tensor_spec.TensorSpec([2, 2], dtypes.float32, 'a')),
dict(b=tensor_spec.TensorSpec([2, 2], dtypes.float32, 'b'))))
self.assertEqual(sq_op.output_shapes, tensor_shape.TensorShape([2, 2]))
out = sq_op(a=t, b=t)
self.assertAllEqual(out, math_ops.matmul(t, t).numpy())
def testNestedOutputGraphFunction(self):
matmul = def_function.function(math_ops.matmul)
@def_function.function
def sq(a):
return (matmul(a, a), {'b': constant_op.constant(1.0)})
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
sq_op = sq.get_concrete_function(t)
self.assertEqual(sq_op.output_shapes,
(tensor_shape.TensorShape([2, 2]),
{'b': tensor_shape.TensorShape([])}))
self.assertEqual(sq_op.output_dtypes,
(dtypes.float32, {'b': dtypes.float32}))
(a, b) = sq_op(t)
self.assertAllEqual(a, math_ops.matmul(t, t).numpy())
self.assertAllEqual(b['b'].numpy(), 1.0)
def testGraphFunctionNoneOutput(self):
@def_function.function
def fn(unused_a, unused_b):
return None
x = constant_op.constant(1)
fn_op = fn.get_concrete_function(x, x)
self.assertEqual(fn_op.output_dtypes, None)
self.assertEqual(fn_op.output_shapes, None)
self.assertAllEqual(fn_op(x, x), None)
def testDefunNumpyArraysConvertedToTensors(self):
def f(x):
self.assertIsInstance(x, ops.Tensor)
return x
x = random_ops.random_uniform([2, 2]).numpy()
defined = function.defun(f)
defined(x)
self.assertLen(total_function_cache(defined), 1)
x = random_ops.random_uniform([2, 2]).numpy()
defined(x)
# A NumPy array with different values but the same shape and dtype
# shouldn't trigger another function definition.
self.assertLen(total_function_cache(defined), 1)
np_ones = numpy.ones([], numpy.float32)
np_zeros = numpy.zeros([], numpy.float32)
tf_ones = array_ops.ones([])
tf_zeros = array_ops.zeros([])
    # Test that the numpy array is properly passed as an argument to the
    # graph function.
self.assertEqual(1., defined(np_ones).numpy())
self.assertLen(total_function_cache(defined), 2)
self.assertEqual(0., defined(np_zeros).numpy())
self.assertEqual(1., defined(tf_ones).numpy())
self.assertEqual(0., defined(tf_zeros).numpy())
self.assertLen(total_function_cache(defined), 2)
# Test that mutable inputs are supported.
mutable = numpy.ones([], numpy.float32)
self.assertEqual(1., defined(mutable).numpy())
mutable.fill(0)
self.assertEqual(0., defined(mutable).numpy())
class MyNdarray(numpy.ndarray):
pass
# Test that the subclasses of ndarray are converted too.
self.assertEqual(1., defined(np_ones.view(MyNdarray)).numpy())
self.assertEqual(0., defined(np_zeros.view(MyNdarray)).numpy())
# We should not have triggered any re-tracing of the python function.
self.assertLen(total_function_cache(defined), 2)
def testNumpyDtypeInputSupported(self):
@function.defun
def f(x, dtype):
return constant_op.constant(dtype(x))
self.assertEqual(f(1, numpy.float32).numpy(), numpy.float32(1))
self.assertEqual(f(2, numpy.float32).numpy(), numpy.float32(2))
self.assertEqual(f(1, numpy.int32).numpy(), numpy.int32(1))
self.assertEqual(f(2, numpy.int32).numpy(), numpy.int32(2))
def testDefunNumpyArraysConvertedToTensorsInKwargs(self):
def f(**kwargs):
x = kwargs.pop('x')
self.assertIsInstance(x, ops.Tensor)
return x
x = random_ops.random_uniform([2, 2]).numpy()
defined = function.defun(f)
defined(x=x)
self.assertLen(total_function_cache(defined), 1)
x = random_ops.random_uniform([2, 2]).numpy()
defined(x=x)
# A NumPy array with different values but the same shape and dtype
# shouldn't trigger another function definition.
self.assertLen(total_function_cache(defined), 1)
    # Test that the numpy array is properly passed as an argument to the
    # graph function.
self.assertEqual(1., defined(x=numpy.ones([])).numpy())
self.assertEqual(0., defined(x=numpy.zeros([])).numpy())
self.assertEqual(1., defined(x=array_ops.ones([])).numpy())
self.assertEqual(0., defined(x=array_ops.zeros([])).numpy())
def testDefunCapturedInt32(self):
x = constant_op.constant(1, dtype=dtypes.int32)
@def_function.function
def add_int32s():
return x + x
self.assertEqual(2, int(add_int32s()))
def testDefunReadVariable(self):
v = resource_variable_ops.ResourceVariable(1.0)
@def_function.function
def f():
return v.read_value()
self.assertEqual(1.0, float(f()))
def testDefunAssignAddVariable(self):
v = resource_variable_ops.ResourceVariable(1.0)
x = constant_op.constant(2.0)
@def_function.function
def test_assign_add():
v.assign_add(x)
return v.read_value()
self.assertEqual(3.0, float(test_assign_add()))
@test_util.run_in_graph_and_eager_modes
def testTensorInitializationInFunctionRaisesError(self):
error_msg = ('Tensor-typed variable initializers must either be '
'wrapped in an init_scope or callable.*')
@def_function.function
def tensor_init():
with self.assertRaisesRegex(ValueError, error_msg):
resource_variable_ops.ResourceVariable(constant_op.constant(2.0))
tensor_init()
@test_util.run_in_graph_and_eager_modes
def testCallableTensorInitializationInFunction(self):
@def_function.function
def tensor_init():
self.v = resource_variable_ops.ResourceVariable(
lambda: constant_op.constant(2.0))
return self.v.read_value()
value = tensor_init()
if not context.executing_eagerly():
self.evaluate(variables.global_variables_initializer())
self.assertEqual(self.evaluate(value), 2.0)
@test_util.also_run_as_tf_function
def testInitScopeTensorInitializationInFunction(self):
@def_function.function
def tensor_init():
with ops.init_scope():
const = constant_op.constant(2.0)
      # Note: this variable sidesteps tf.function's variable-creation
      # requirements by bypassing variable_creator_scope, using
      # ResourceVariable instead of Variable.
self.v = resource_variable_ops.ResourceVariable(const)
return self.v.read_value()
value = tensor_init()
self.assertAllEqual(value, 2.0)
@test_util.run_in_graph_and_eager_modes
def testGetConcreteFunctionCreatesVariables(self):
v_holder = []
@def_function.function
def tensor_init():
if not v_holder:
v_holder.append(variables.Variable(5.))
return v_holder[0].read_value()
concrete = tensor_init.get_concrete_function()
self.evaluate(variables.global_variables_initializer())
self.assertAllEqual(5., self.evaluate(concrete()))
self.assertAllEqual(5., self.evaluate(tensor_init()))
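  # With capture_by_value=True the variable's value is frozen into the graph
  # at trace time, so a later assign does not change the function's result.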
def testFuncGraphCaptureByValue(self):
v = variables.Variable(1.0)
def trivial_function():
return v.read_value()
graph_function = function.Function(
trivial_function, 'test', capture_by_value=True)
self.assertAllEqual(graph_function(), 1.0)
v.assign(2.0)
self.assertAllEqual(graph_function(), 1.0)
def testFuncGraphCaptureByValueNested(self):
v = variables.Variable(1.0)
def trivial_function():
return control_flow_ops.cond(
array_ops.placeholder_with_default(True, ()),
v.read_value, v.read_value)
graph_function = function.Function(
trivial_function, 'test', capture_by_value=True)
self.assertAllEqual(graph_function(), 1.0)
v.assign(2.0)
self.assertAllEqual(graph_function(), 1.0)
def testDefunShapeInferenceWithCapturedResourceVariable(self):
v = resource_variable_ops.ResourceVariable([[1, 2], [3, 4]])
def f():
x = constant_op.constant([[1, 2], [3, 4]])
out = math_ops.matmul(v, x)
self.assertEqual(out.shape, tensor_shape.TensorShape([2, 2]))
# We do not return v directly since the tensor conversion function of
# ResourceVariable returns the read value and not the resource itself.
return v._handle
compiled = def_function.function(f)
var_handle = compiled()
self.assertEqual(var_handle.dtype, dtypes.resource)
self.assertEqual(var_handle.shape, tensor_shape.TensorShape([]))
var_t = resource_variable_ops.read_variable_op(var_handle, dtype=v.dtype)
self.assertEqual(var_t.shape, tensor_shape.TensorShape([2, 2]))
def testShapeInferenceForMoreSpecificInput(self):
def f(a):
return array_ops.reshape(a, [-1, 3])
signature = [tensor_spec.TensorSpec(None, dtypes.float32)]
compiled = def_function.function(f, input_signature=signature)
@def_function.function
def use_f():
inputs = array_ops.zeros([10, 10, 3])
self.assertAllEqual(f(inputs).shape, compiled(inputs).shape)
use_f()
def testFuncListAttr(self):
@function.defun
def test_function(val):
def fn1():
return array_ops.ones([10])
fn2 = lambda: array_ops.ones([10]) * 2
def fn3(x=3):
return array_ops.ones([10]) * x
fn4 = functools.partial(fn3, x=4)
fn5 = functools.partial(fn3, 5)
return gen_functional_ops.case(val, [], [dtypes.float32],
[function.defun(f).get_concrete_function()
for f in (fn1, fn2, fn3, fn4, fn5)])
ones = array_ops.ones([10])
self.assertAllEqual([ones], test_function(0))
self.assertAllEqual([ones * 2], test_function(1))
self.assertAllEqual([ones * 3], test_function(2))
self.assertAllEqual([ones * 4], test_function(3))
self.assertAllEqual([ones * 5], test_function(4))
self.assertAllEqual([ones * 5], test_function(22)) # default branch
@test_util.enable_control_flow_v2
def testVariableInLoopInFunction(self):
@function.defun
def test_function():
def loop_test(_):
return False
def loop_body(_):
return variable_scope.get_variable('a', shape=())
return control_flow_ops.while_loop(loop_test, loop_body, [0.0])
self.assertEqual(test_function().shape, [])
def testDefunShapeInferenceWithCapturedResourceVariableInGraphMode(self):
with context.graph_mode():
v = resource_variable_ops.ResourceVariable([[1, 2], [3, 4]])
def f():
x = constant_op.constant([[1, 2], [3, 4]])
out = math_ops.matmul(v, x)
self.assertEqual(out.shape, tensor_shape.TensorShape([2, 2]))
# We do not return v directly since the tensor conversion function of
# ResourceVariable returns the read value and not the resource itself.
return v._handle
compiled = def_function.function(f)
var_handle = compiled()
self.assertEqual(var_handle.dtype, dtypes.resource)
self.assertEqual(var_handle.shape, tensor_shape.TensorShape([]))
var_t = resource_variable_ops.read_variable_op(var_handle, dtype=v.dtype)
self.assertEqual(var_t.shape, tensor_shape.TensorShape([2, 2]))
def testDefunShapeInferenceWithCapturedVariableInGraphMode(self):
with context.graph_mode():
v = variables.Variable([[1, 2], [3, 4]])
def f():
x = constant_op.constant([[1, 2], [3, 4]])
out = math_ops.matmul(v, x)
self.assertEqual(out.shape, tensor_shape.TensorShape([2, 2]))
# Check that shape inference works while creating the defun
compiled = def_function.function(f)
compiled()
def testDefunShapeInferenceWithCapturedTensorListInGraphMode(self):
with context.graph_mode():
tensor_list = list_ops.empty_tensor_list(
element_dtype=dtypes.float32,
element_shape=ops.convert_to_tensor([], dtype=dtypes.int32))
tensor_list = list_ops.tensor_list_push_back(tensor_list,
constant_op.constant(1.0))
tensor_list = list_ops.tensor_list_push_back(tensor_list,
constant_op.constant(2.0))
def f():
tl, value = list_ops.tensor_list_pop_back(
tensor_list, element_dtype=dtypes.float32)
self.assertEqual(value.shape, tensor_shape.TensorShape([]))
return tl
compiled = def_function.function(f)
output_tensor_list = compiled()
_, value = list_ops.tensor_list_pop_back(
output_tensor_list, element_dtype=dtypes.float32)
self.assertEqual(value.shape, tensor_shape.TensorShape([]))
@test_util.run_in_graph_and_eager_modes
def testDefunForcesResourceVariables(self):
def variable_creator():
self.v = variables.Variable(0.0)
return self.v.read_value()
self.v = None
defined = function.defun(variable_creator)
defined() # Create the variable.
self.assertIsInstance(
self.v, resource_variable_ops.ResourceVariable)
def testRunMetadata(self):
@def_function.function
def f(x):
return x * x
with ops.device('cpu:0'):
context.enable_run_metadata()
f(constant_op.constant(1.0))
run_metadata = context.export_run_metadata()
context.disable_run_metadata()
self.assertLen(run_metadata.partition_graphs, 1)
def testGraphModeCaptureVariable(self):
with context.graph_mode(), self.cached_session():
class HasAVar(object):
def __init__(self):
self.v = resource_variable_ops.ResourceVariable(1.0)
def call(self):
return self.v * 2
o = HasAVar()
self.evaluate(variables.global_variables_initializer())
call = def_function.function(o.call)
op = call()
self.assertAllEqual(self.evaluate(op), 2.0)
def testGraphModeManyFunctions(self):
with ops.Graph().as_default(), self.cached_session():
@def_function.function
def f(x):
return x * x
@def_function.function
def g(x):
return f(x) + 1
self.assertAllEqual(g(constant_op.constant(2.0)), 5.0)
def testDict(self):
@def_function.function
def f(x):
return {'name': x + 1}
self.assertAllEqual(f(constant_op.constant(1.0))['name'], 2.0)
def testTensorConversionWithDefun(self):
@def_function.function
def f(x):
return math_ops.add(x, constant_op.constant(3))
self.assertAllEqual(5, f(constant_op.constant(2)))
def testTensorConversionCall(self):
@def_function.function
def f(x):
return math_ops.add(x, constant_op.constant(3))
@def_function.function
def g(x):
return f(f(x))
self.assertAllEqual(8, g(constant_op.constant(2)))
def testCallShape(self):
@def_function.function
def f(x):
return x + 1
@def_function.function
def g(x):
x = f(x)
self.assertEqual(x.shape.as_list(), [])
return None
g(constant_op.constant(1.0))
def testNestedDefunWithNoOutputAndTapedInput(self):
three = resource_variable_ops.ResourceVariable(3.0, name='v')
@def_function.function
def f(x):
      # This function intentionally takes a taped variable as input, but does
      # not return any values.
math_ops.add(x, three)
@def_function.function
def g(x):
y = math_ops.add(x, three)
f(y)
g(three)
def testGatherResourceWithDefun(self):
with ops.device('cpu:0'):
v = resource_variable_ops.ResourceVariable([0.0, 1.0, 2.0])
def sum_gather():
return math_ops.reduce_sum(array_ops.gather(v, [1, 2]))
defined = def_function.function(sum_gather)
self.assertAllEqual(sum_gather(), defined())
@parameterized.named_parameters([
('IndexedSlicesWithDenseShape',
_example_indexed_slices_with_dense_shape,),
('IndexedSlicesWithoutDenseShape',
_example_indexed_slices_without_dense_shape,),
('RaggedTensorRaggedRank1', ragged_tensor.RaggedTensor.from_row_lengths,
{'values': [1, 2, 3], 'row_lengths': [2, 0, 1]}),
('RaggedTensorRaggedRank2',
ragged_tensor.RaggedTensor.from_nested_row_lengths,
{'flat_values': [1, 2, 3], 'nested_row_lengths': [[1, 2], [2, 0, 1]]}),
('SparseTensor', sparse_tensor.SparseTensor,
{'values': [1, 2, 3], 'indices': [[0], [8], [10]], 'dense_shape': [20]}),
]) # pyformat: disable
def testReturnCompositeTensorWithDefun(self,
factory_fn,
factory_kwargs={},
input_signature=None):
input_ct = factory_fn(**factory_kwargs)
@def_function.function(input_signature=input_signature)
def f():
return input_ct
output_ct = f()
self.assertIsInstance(output_ct, type(input_ct))
nest.assert_same_structure(input_ct, output_ct, expand_composites=True)
input_flat = nest.flatten(input_ct, expand_composites=True)
output_flat = nest.flatten(output_ct, expand_composites=True)
for (input_component, output_component) in zip(input_flat, output_flat):
self.assertAllEqual(input_component, output_component)
@parameterized.named_parameters([
('IndexedSlicesWithDenseShape',
_example_indexed_slices_with_dense_shape,),
('IndexedSlicesWithoutDenseShape',
_example_indexed_slices_without_dense_shape,),
('RaggedTensorRaggedRank1',
ragged_tensor.RaggedTensor.from_row_lengths,
{'values': [1, 2, 3], 'row_lengths': [2, 0, 1]}),
('RaggedTensorRaggedRank2',
ragged_tensor.RaggedTensor.from_nested_row_lengths,
{'flat_values': [1, 2, 3], 'nested_row_lengths': [[1, 2], [2, 0, 1]]}),
('SparseTensor',
sparse_tensor.SparseTensor,
{'values': [1, 2, 3], 'indices': [[0], [8], [10]], 'dense_shape': [20]}),
('RaggedTensorRaggedRank1WithSignature',
ragged_tensor.RaggedTensor.from_row_lengths,
{'values': [1, 2, 3], 'row_lengths': [2, 0, 1]},
[ragged_tensor.RaggedTensorSpec([None, None], dtypes.int32)]),
('RaggedTensorRaggedRank2WithSignature',
ragged_tensor.RaggedTensor.from_nested_row_lengths,
{'flat_values': [1, 2, 3], 'nested_row_lengths': [[1, 2], [2, 0, 1]]},
[ragged_tensor.RaggedTensorSpec([None, None, None], dtypes.int32)]),
('SparseTensorWithSignature',
sparse_tensor.SparseTensor,
{'values': [1, 2, 3], 'indices': [[0], [8], [10]], 'dense_shape': [20]},
[sparse_tensor.SparseTensorSpec([None], dtypes.int32)]),
]) # pyformat: disable
def testCompositeAsArgumentTensorWithDefun(self,
factory_fn,
factory_kwargs={},
input_signature=None):
input_ct = factory_fn(**factory_kwargs)
@def_function.function(input_signature=input_signature)
def f(x):
return x
output_ct = f(input_ct)
self.assertIsInstance(output_ct, type(input_ct))
nest.assert_same_structure(input_ct, output_ct, expand_composites=True)
input_flat = nest.flatten(input_ct, expand_composites=True)
output_flat = nest.flatten(output_ct, expand_composites=True)
for (input_component, output_component) in zip(input_flat, output_flat):
self.assertAllEqual(input_component, output_component)
def testTracedCompositeDiscardsShapeInfo(self):
# SparseTensorSpec intentionally excludes info about the number of elements
# that are in a sparse tensor (which is recorded as st.indices.shape[0] and
# st.values.shape[0]). Similarly, RaggedTensorSpec intentionally excludes
# info about the total number of values in a RaggedTensor (stored as
# rt.values.shape[0]). This test checks that the placeholders created by
# tf.function() properly mask this shape info.
@def_function.function
def f(rt, st):
self.assertEqual(st.indices.shape.as_list()[:1], [None])
self.assertEqual(st.values.shape.as_list(), [None])
return (rt, st)
rt = ragged_factory_ops.constant([[1, 2], [3]])
st = sparse_tensor.SparseTensor([[0]], [0], [10])
f(rt, st)
@test_util.run_gpu_only
def testFunctionOnDevice(self):
x = constant_op.constant([1.]).gpu()
f = def_function.function(math_ops.add)
y = f(x, x).cpu()
self.assertAllEqual(y, [2.])
@test_util.run_gpu_only
@test_util.run_in_graph_and_eager_modes
def testFunctionWithResourcesOnDifferentDevices(self):
with ops.device('/cpu:0'):
v_cpu = resource_variable_ops.ResourceVariable([0.0, 1.0, 2.0])
with ops.device('/gpu:0'):
v_gpu = resource_variable_ops.ResourceVariable([0.0, 1.0, 2.0])
def sum_gather():
cpu_result = math_ops.reduce_sum(array_ops.gather(v_cpu, [1, 2]))
gpu_result = math_ops.reduce_sum(array_ops.gather(v_gpu, [1, 2]))
return cpu_result, gpu_result
defined = function.defun(sum_gather)
if not context.executing_eagerly():
self.evaluate(variables.global_variables_initializer())
expected = self.evaluate(sum_gather())
self.assertAllEqual(expected, self.evaluate(defined()))
@test_util.run_gpu_only
@test_util.run_in_graph_and_eager_modes
def testOpInFunctionWithConflictingResourceInputs(self):
with ops.device('/cpu:0'):
v_cpu = resource_variable_ops.ResourceVariable(
[0.0, 1.0, 2.0], name='cpu')
v_also_cpu = resource_variable_ops.ResourceVariable(
[0.0, 1.0, 2.0], name='also_cpu')
with ops.device('/gpu:0'):
v_gpu = resource_variable_ops.ResourceVariable(
[0.0, 1.0, 2.0], name='gpu')
@def_function.function
def resource_apply_adam():
training_ops.resource_apply_adam(
v_cpu.handle,
v_gpu.handle,
v_also_cpu.handle,
1.0, # beta1_power
1.0, # beta2_power
1.0, # learning_rate
1.0, # beta1
1.0, # beta2
1.0, # epsilon,
[1.0, 1.0, 1.0], # grad
False) # use_locking
return None
with self.assertRaisesRegex(
errors.InvalidArgumentError,
'Cannot place the graph because a reference or resource edge connects '
'colocation groups with incompatible assigned devices'):
if not context.executing_eagerly():
self.evaluate(variables.global_variables_initializer())
self.evaluate(resource_apply_adam())
@test_util.run_gpu_only
def testFunctionHandlesInputsOnDifferentDevices(self):
# The Reshape op requires the shape tensor to be placed in host memory.
reshape = def_function.function(array_ops.reshape)
value = constant_op.constant([1., 2.]).gpu()
shape = constant_op.constant([2, 1])
reshaped = reshape(value, shape).cpu()
self.assertAllEqual(reshaped, [[1], [2]])
@test_util.run_gpu_only
def testFunctionHandlesInputsPlacedOnTheWrongDeviceGracefully(self):
# The Reshape op requires the shape tensor to be placed in host memory.
reshape = def_function.function(array_ops.reshape)
value = constant_op.constant([1., 2.])
shape = constant_op.constant([2, 1]).gpu()
reshape(value, shape) # No error is raised
def testNoneOutput(self):
@def_function.function
def my_function(_):
return None
self.assertAllEqual(my_function(1), None)
def testNestedFunctions(self):
# TensorFlow function (which is what would be used in TensorFlow graph
# construction).
@tf_function.Defun(dtypes.int32, dtypes.int32)
def add(a, b):
return math_ops.add(a, b)
@def_function.function
def add_one(x):
return add(x, 1)
self.assertAllEqual(3, add_one(constant_op.constant(2)))
def testVariableCaptureInNestedFunctions(self):
v = resource_variable_ops.ResourceVariable(1, dtype=dtypes.int32)
@def_function.function
def inner_read():
return v.read_value()
@def_function.function
def outer():
return inner_read()
self.assertEqual(1, int(outer()))
def testReturnCapturedEagerTensor(self):
t = constant_op.constant(1)
@def_function.function
def read():
return t
self.assertEqual(1, int(read()))
def testReturnCapturedGraphTensor(self):
with context.graph_mode(), self.cached_session():
t = constant_op.constant(1)
@def_function.function
def read():
return t
self.assertEqual(1, int(self.evaluate(read())))
def testSequenceInputs(self):
clip_by_global_norm = def_function.function(clip_ops.clip_by_global_norm)
t_list = [constant_op.constant(1.0), constant_op.constant(2.0)]
clipped_list, global_norm = clip_by_global_norm(t_list,
constant_op.constant(.2))
for t in clipped_list:
self.assertIsInstance(t, ops.Tensor)
self.assertIsInstance(global_norm, ops.Tensor)
def testNestedSequenceInputs(self):
def my_op(inputs):
a, b, c = inputs
e, f = b
g, h = e
return [a + a, [tuple([f + f, g + g]), h + h], c + c], a + f + g + h + c
my_eager_op = def_function.function(my_op)
ret = my_eager_op([
constant_op.constant(1), [(constant_op.constant(2),
constant_op.constant(3)),
constant_op.constant(4)],
constant_op.constant(5)
])
self.assertLen(ret, 2)
self.assertAllEqual(ret[0][0], 2)
self.assertAllEqual(ret[0][1][0][0], 8)
self.assertAllEqual(ret[0][1][0][1], 4)
self.assertIsInstance(ret[0][1][0], tuple)
self.assertAllEqual(ret[0][1][1], 6)
self.assertAllEqual(ret[0][2], 10)
self.assertAllEqual(ret[1], 15)
def testVariableNamesRespectNameScopesWithDefun(self):
@def_function.function
def create_variable():
with ops.name_scope('foo', skip_on_eager=False):
v = resource_variable_ops.ResourceVariable(0.0, name='bar')
self.assertEqual(v.name, 'foo/bar:0')
create_variable()
def testVariableNamesRespectNameScopesWithDefunInGraph(self):
with context.graph_mode():
@def_function.function
def create_variable():
with ops.name_scope('foo', skip_on_eager=False):
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name='bar')
self.assertEqual(v.name, 'foo/bar:0')
with ops.get_default_graph().as_default():
create_variable()
@test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
def testLayerInDefun(self):
conv = convolutional.Conv2D(
filters=1,
kernel_size=2,
kernel_initializer=init_ops.ones_initializer(),
bias_initializer=init_ops.zeros_initializer())
@function.defun
def model(x):
return conv(x)
x = array_ops.ones([1, 2, 2, 1])
y = model(x)
if not context.executing_eagerly():
self.evaluate(variables.global_variables_initializer())
self.assertAllClose([[[[4.0]]]], self.evaluate(y))
# Variable lifting is somewhat different between defun/tf.function, so testing
# device placement on both makes sense.
@parameterized.named_parameters(
dict(testcase_name='Defun',
function_decorator=function.defun),
dict(testcase_name='DefFunction',
function_decorator=def_function.function))
@test_util.run_in_graph_and_eager_modes
def testVariablesPlacedOnOutsideDevice(self, function_decorator):
class _Obj(object):
def __init__(self):
self.v = None
@function_decorator
def f(self):
if self.v is None:
self.v = variables.Variable(1.)
return self.v + 1.
has_device = _Obj()
with ops.device('cpu:0'):
has_device.f()
self.assertIn('CPU', has_device.v.device)
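  # Device annotations inside the function body are honored regardless of the
  # call-site device; only the unannotated op follows the caller's device,
  # and no retracing occurs.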
@test_util.run_in_graph_and_eager_modes
def testDeviceAnnotationsRespected(self):
def multi_device_fn():
with ops.device('/cpu:0'):
s0 = test_ops.device_placement_op()
with ops.device('/cpu:1'):
s1 = test_ops.device_placement_op()
with ops.device('/cpu:2'):
s2 = test_ops.device_placement_op()
s3 = test_ops.device_placement_op()
return s0, s1, s2, s3
defined = function.defun(multi_device_fn)
outputs = self.evaluate(defined())
self.assertLen(total_function_cache(defined), 1)
self.assertIn(compat.as_bytes('CPU:0'), outputs[0])
self.assertIn(compat.as_bytes('CPU:1'), outputs[1])
self.assertIn(compat.as_bytes('CPU:2'), outputs[2])
with ops.device('/cpu:3'):
outputs = self.evaluate(defined())
# All function definitions are agnostic to call site devices.
self.assertLen(total_function_cache(defined), 1)
self.assertIn(compat.as_bytes('CPU:0'), outputs[0])
self.assertIn(compat.as_bytes('CPU:1'), outputs[1])
self.assertIn(compat.as_bytes('CPU:2'), outputs[2])
self.assertIn(compat.as_bytes('CPU:3'), outputs[3])
with ops.device('/cpu:0'):
outputs = self.evaluate(defined())
self.assertLen(total_function_cache(defined), 1)
self.assertIn(compat.as_bytes('CPU:0'), outputs[0])
self.assertIn(compat.as_bytes('CPU:1'), outputs[1])
self.assertIn(compat.as_bytes('CPU:2'), outputs[2])
self.assertIn(compat.as_bytes('CPU:0'), outputs[3])
@test_util.run_in_graph_and_eager_modes
def testCallingGraphFunctionOnDifferentDevice(self):
def func():
return constant_op.constant(0)
defined = def_function.function(func)
with ops.device('cpu:0'):
cpu_graph_function = defined.get_concrete_function()
with ops.device('cpu:0'):
self.assertEqual(
self.evaluate(cpu_graph_function()), self.evaluate(func()))
with ops.device('cpu:1'):
self.assertEqual(0., self.evaluate(cpu_graph_function()))
with ops.device(None):
self.assertEqual(0., self.evaluate(cpu_graph_function()))
default_graph_function = defined.get_concrete_function()
self.assertEqual(
self.evaluate(default_graph_function()), self.evaluate(func()))
with ops.device('cpu:1'):
self.assertEqual(0., self.evaluate(default_graph_function()))
@test_util.run_gpu_only
@test_util.run_in_graph_and_eager_modes
def testColocateWithRespected(self):
# TODO(b/113291792): Use multiple CPUs instead of a GPU.
with ops.device('cpu:0'):
x = array_ops.identity(1.0)
with ops.device('gpu:0'):
y = array_ops.identity(1.0)
@def_function.function
def foo():
return test_ops.device_placement_op()
with ops.colocate_with(x):
self.assertIn(compat.as_bytes('CPU:0'), self.evaluate(foo()))
with ops.colocate_with(y):
self.assertIn(compat.as_bytes('GPU:0'), self.evaluate(foo()))
def testVariablesAreTracked(self):
v = resource_variable_ops.ResourceVariable(1.0)
def foo(x):
return v * x
defined = def_function.function(foo)
x = constant_op.constant([1.0])
self.assertEqual(1., self.evaluate(defined(x)))
v.assign(2.)
x = constant_op.constant([1.0, 2.0])
self.assertAllEqual([2., 4.], self.evaluate(defined(x)))
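  # The function cache must distinguish distinct Python objects even when
  # their __hash__ values collide, so each Foo() below gets its own entry.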
def testCacheObjectHashCollisions(self):
class Foo(object):
def __hash__(self):
return 42
def func(foo):
del foo
return
defined = function.defun(func)
defined(Foo())
self.assertLen(total_function_cache(defined), 1)
defined(Foo())
self.assertLen(total_function_cache(defined), 2)
def testCacheTensorDtypeCollision(self):
def func(t):
return t + t
defined = function.defun(func)
t = constant_op.constant([[1.0]], dtype=dtypes.complex64)
defined(t)
self.assertLen(total_function_cache(defined), 1)
t = constant_op.constant([[1.0]], dtype=dtypes.complex128)
defined(t)
self.assertLen(total_function_cache(defined), 2)
def testCacheTensorShapeCollision(self):
def func(t):
return t + t
defined = function.defun(func)
t = constant_op.constant([[1.0]], dtype=dtypes.complex64)
defined(t)
self.assertLen(total_function_cache(defined), 1)
t = constant_op.constant([1.0], dtype=dtypes.complex64)
defined(t)
self.assertLen(total_function_cache(defined), 2)
def testCacheTensorShapeDtypeCollision(self):
def func(t):
return t + t
defined = function.defun(func)
t = constant_op.constant([[1.0]], dtype=dtypes.complex64)
defined(t)
self.assertLen(total_function_cache(defined), 1)
t = constant_op.constant([1.0], dtype=dtypes.complex128)
defined(t)
self.assertLen(total_function_cache(defined), 2)
def testCacheTensorUnknownShapesCollisionRelaxedShapes(self):
def func(t):
return t + t
with context.graph_mode(), self.cached_session():
defined = function.defun(func, experimental_relax_shapes=True)
p = array_ops.placeholder(dtype=dtypes.float32, shape=[])
defined(p)
self.assertLen(total_function_cache(defined), 1)
p = array_ops.placeholder(dtype=dtypes.float32, shape=[1])
defined(p)
self.assertLen(total_function_cache(defined), 2)
p = array_ops.placeholder(dtype=dtypes.float32, shape=[2])
defined(p)
      # Gradual shape relaxation is performed; the common shape between
      # [1] and [2] is one containing unknown dimensions.
self.assertLen(total_function_cache(defined), 2)
# pylint: disable=protected-access
self.assertLen(defined._function_cache.arg_relaxed_specs, 1)
relaxed_specs = (
list(defined._function_cache.arg_relaxed_specs.values())[0])
self.assertLen(relaxed_specs, 1)
relaxed_shape = relaxed_specs[0].shape
# pylint: enable=protected-access
self.assertEqual(relaxed_shape.rank, 1)
self.assertEqual(tensor_shape.dimension_value(relaxed_shape[0]), None)
t = constant_op.constant([1.0, 1.0, 1.0], dtype=dtypes.float32)
defined(t)
# Shape (3,) matches the relaxed shape TensorShape([None])
self.assertLen(total_function_cache(defined), 2)
def testPythonFunctionWithDefaultArgs(self):
def func(foo, bar=1, baz=2):
del foo
del bar
del baz
return
defined = function.defun(func)
defined(0, baz=20)
def cache_keys():
"""Sanitizes cache keys of non-input metadata."""
return tuple(key[0] for key in total_function_cache(defined))
# `True` corresponds to the fact that we're executing eagerly
self.assertIn(('URRRu', (0, 1, 20)), cache_keys())
defined(1) # bar=1, baz=2
self.assertIn(('URRRu', (1, 1, 2)), cache_keys())
# This matches the previous call.
defined(foo=1)
self.assertLen(total_function_cache(defined), 2)
defined(1, 2, 3)
self.assertLen(total_function_cache(defined), 3)
self.assertIn(('URRRu', (1, 2, 3)), cache_keys())
# This matches the previous call.
defined(1, bar=2, baz=3)
self.assertLen(total_function_cache(defined), 3)
# This matches the previous call.
defined(1, baz=3, bar=2)
self.assertLen(total_function_cache(defined), 3)
def testFunctoolsPartialUnwrappedCorrectly(self):
def full_function(a, b, c=3):
return a, b, c
partial = functools.partial(full_function, 1, c=4)
a, b, c = partial(2)
defined = function.defun(partial)
func_a, func_b, func_c = defined(2)
self.assertEqual(func_a.numpy(), a)
self.assertEqual(func_b.numpy(), b)
self.assertEqual(func_c.numpy(), c)
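  # An explicit input_signature pins the trace: all compatible calls,
  # including get_concrete_function with matching specs, reuse a single cache
  # entry.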
def testInputSignatureWithMatchingInputs(self):
def foo(a):
self.assertEqual(a.shape, (2,))
return a
signature = [tensor_spec.TensorSpec(shape=(2,), dtype=dtypes.float32)]
defined = function.defun(foo, input_signature=signature)
a = array_ops.ones([2])
self.assertAllEqual(a, defined(a))
self.assertLen(total_function_cache(defined), 1)
self.assertAllEqual(a, defined.get_concrete_function()(a))
self.assertAllEqual(a, defined.get_concrete_function(a)(a))
self.assertAllEqual(a, defined.get_concrete_function(
tensor_spec.TensorSpec((2,), dtype=dtypes.float32))(a))
self.assertLen(total_function_cache(defined), 1)
def bar(a):
self.assertEqual(a._shape_tuple(), (2, None))
return a
signature = [tensor_spec.TensorSpec((2, None), dtypes.float32)]
defined = function.defun(bar, input_signature=signature)
a = array_ops.ones([2, 1])
out = defined(a)
self.assertLen(total_function_cache(defined), 1)
self.assertAllEqual(out, a)
# Changing the second dimension shouldn't create a new function.
b = array_ops.ones([2, 3])
out = defined(b)
self.assertLen(total_function_cache(defined), 1)
self.assertAllEqual(out, b)
def testInputSignatureWithCompatibleInputs(self):
rank2_spec = tensor_spec.TensorSpec(shape=(None, None),
dtype=dtypes.float32)
@function.defun(input_signature=[rank2_spec])
def func(a):
self.assertEqual([None, None], a.shape.as_list())
return array_ops.shape(a)
self.assertAllEqual([3, 1], func([[0], [1.0], [1]]))
self.assertAllEqual([2, 2], func(numpy.array([[1, 1], [2, 2]])))
with self.assertRaisesRegex(ValueError, 'incompatible'):
func([0.0, 1.0, 2.0]) # Wrong shape.
with self.assertRaisesRegex(ValueError, 'incompatible'):
func([['wrong dtype']])
def testNoKeywordOnlyArgumentsWithInputSignature(self):
if sys.version_info[0] < 3:
self.skipTest('keyword_only arguments only exist in Python 3.')
func = eval('lambda x, *, y: x') # pylint: disable=eval-used
signature = [tensor_spec.TensorSpec(None, dtypes.int32)]
with self.assertRaisesRegex(
ValueError, 'Cannot define a TensorFlow function from a Python '
'function with keyword-only arguments when input_signature is '
'provided.'):
def_function.function(func, signature)
def testNestedInputSignatures(self):
def expected_foo(a, b):
return [a, b]
@function.defun(input_signature=[
[tensor_spec.TensorSpec((2, None), dtypes.float32)] * 2,
tensor_spec.TensorSpec((1,), dtypes.float32),
])
def foo(a, b):
self.assertEqual(a[0]._shape_tuple(), (2, None))
self.assertEqual(a[1]._shape_tuple(), (2, None))
self.assertEqual(b._shape_tuple(), (1,))
return [a, b]
a = array_ops.ones([2, 1])
b = array_ops.ones([1])
expected = expected_foo([a, a], b)
out = foo([a, a], b)
self.assertLen(total_function_cache(foo), 1)
nest.assert_same_structure(out, expected)
self.assertAllEqual(out[0][0], a)
self.assertAllEqual(out[0][1], a)
self.assertAllEqual(out[1], b)
# Changing the unspecified dimensions shouldn't create a new function.
a = array_ops.ones([2, 3])
b = array_ops.ones([2, 5])
c = array_ops.ones([1])
expected = expected_foo([a, b], c)
out = foo([a, b], c)
self.assertLen(total_function_cache(foo), 1)
nest.assert_same_structure(out, expected)
self.assertAllEqual(out[0][0], a)
self.assertAllEqual(out[0][1], b)
self.assertAllEqual(out[1], c)
# Passing compatible inputs should work.
a = a.numpy().tolist()
b = b.numpy().tolist()
c = c.numpy().tolist()
out = foo([a, b], c)
self.assertLen(total_function_cache(foo), 1)
nest.assert_same_structure(out, expected)
self.assertAllEqual(out[0][0], a)
self.assertAllEqual(out[0][1], b)
self.assertAllEqual(out[1], c)
def testNestedInputSignaturesWithDict(self):
def expected_bar(a):
return a
@function.defun(input_signature=[{
'a': tensor_spec.TensorSpec((2, None), dtypes.float32),
'b': tensor_spec.TensorSpec((2, None), dtypes.float32),
'c': tensor_spec.TensorSpec((1,), dtypes.float32)}])
def bar(a):
self.assertEqual(a['a']._shape_tuple(), (2, None))
self.assertEqual(a['b']._shape_tuple(), (2, None))
self.assertEqual(a['c']._shape_tuple(), (1,))
return a
a = array_ops.ones([2, 3])
b = array_ops.ones([1])
inputs = {'a': a, 'b': a, 'c': b}
expected = expected_bar(inputs)
out = bar(inputs)
nest.assert_same_structure(out, expected)
self.assertAllEqual(out['a'], expected['a'])
self.assertAllEqual(out['b'], expected['b'])
self.assertAllEqual(out['c'], expected['c'])
# Passing compatible inputs should work.
a = a.numpy().tolist()
b = b.numpy().tolist()
inputs = {'a': a, 'b': a, 'c': b}
out = bar(inputs)
nest.assert_same_structure(out, expected)
self.assertAllEqual(out['a'], expected['a'])
self.assertAllEqual(out['b'], expected['b'])
self.assertAllEqual(out['c'], expected['c'])
def testInputSignatureMustBeSequenceOfTensorSpecs(self):
def foo(a, b):
del a
del b
# Signatures must consist exclusively of `TensorSpec` objects.
signature = [(2, 3), tensor_spec.TensorSpec([2, 3], dtypes.float32)]
with self.assertRaisesRegex(TypeError, 'Invalid input_signature.*'):
def_function.function(foo, input_signature=signature)
# Signatures must be either lists or tuples on their outermost levels.
signature = {'t1': tensor_spec.TensorSpec([], dtypes.float32)}
with self.assertRaisesRegex(
TypeError, 'input_signature must be either a '
'tuple or a list.*'):
function.defun(foo, input_signature=signature)
@test_util.run_in_graph_and_eager_modes
def testInputsIncompatibleWithSignatureRaisesError(self):
def foo(a):
return a
signature = [tensor_spec.TensorSpec(shape=(2,), dtype=dtypes.float32)]
defined = def_function.function(foo, input_signature=signature)
# Invalid shapes.
with self.assertRaisesRegex(ValueError, 'Python inputs incompatible.*'):
defined(array_ops.ones([3]))
with self.assertRaisesRegex(ValueError, 'Python inputs incompatible.*'):
defined(array_ops.ones([2, 1]))
# Wrong number of arguments.
with self.assertRaisesRegex(
TypeError, r'takes 1 positional arguments \(as specified by the '
r'input_signature\) but 2 were given'):
defined(array_ops.ones([2]), array_ops.ones([2]))
with self.assertRaisesRegex(ValueError,
'Structure of Python function inputs.*'):
defined()
with self.assertRaisesRegex(ValueError,
'inputs incompatible with input_signature'):
defined.get_concrete_function(
tensor_spec.TensorSpec(shape=(3,), dtype=dtypes.float32))
def testInputsIncompatibleWithNestedSignatureRaisesError(self):
def foo(a, b):
return [a, b]
signature = [[tensor_spec.TensorSpec((1,), dtypes.float32)] * 2,
[tensor_spec.TensorSpec((1,), dtypes.float32)] * 2]
defined = function.defun(foo, input_signature=signature)
a = array_ops.ones([1])
with self.assertRaisesRegex(ValueError,
'Structure of Python function inputs.*'):
defined([a, a, a], [a])
with self.assertRaisesRegex(ValueError,
'Structure of Python function inputs.*'):
defined([a], [a, a, a])
defined([a, a], [a, a])
def testUnderspecifiedInputSignature(self):
@function.defun(input_signature=[
tensor_spec.TensorSpec([], dtypes.float32),
])
def foo(a, training=True):
if training:
return a
else:
return -1.0 * a
x = constant_op.constant(1.0)
with self.assertRaisesRegex(
TypeError, 'got keyword argument `training` '
'that was not included in input_signature'):
foo(x, training=True)
with self.assertRaisesRegex(
TypeError, 'got keyword argument `training` '
'that was not included in input_signature'):
foo(x, training=False)
self.assertAllEqual(x.numpy(), foo(x).numpy())
def testInputSignatureWithPartialFunction(self):
def full_function(a, b, c=3.0):
return a, b, c
partial = functools.partial(full_function, 1, c=4)
a, b, c = partial(2.0)
signature = [tensor_spec.TensorSpec([], dtypes.float32)]
defined = function.defun(partial, input_signature=signature)
x = constant_op.constant(2.0)
func_a, func_b, func_c = defined(x)
self.assertEqual(func_a.numpy(), a)
self.assertEqual(func_b.numpy(), b)
self.assertEqual(func_c.numpy(), c)
def testInputSignatureConversionWithDefaultArg(self):
def foo(a, training=True):
if training:
return a
else:
return -1.0 * a
signature = [
tensor_spec.TensorSpec([], dtypes.float32),
tensor_spec.TensorSpec([], dtypes.bool),
]
defined = def_function.function(foo, input_signature=signature)
a = constant_op.constant(1.0)
self.assertAllEqual(a.numpy(), defined(a))
self.assertAllEqual(a.numpy(), defined(a, training=True))
self.assertAllEqual(-a.numpy(), defined(a, training=False))
def testInputSignatureWithKeywordPositionalArgs(self):
@function.defun(input_signature=[
tensor_spec.TensorSpec([], dtypes.float32),
tensor_spec.TensorSpec([], dtypes.int64)
])
def foo(flt, integer):
return flt, integer
flt = constant_op.constant(1.0)
integer = constant_op.constant(2, dtypes.int64)
out1, out2 = foo(flt, integer)
self.assertLen(total_function_cache(foo), 1)
self.assertEqual(out1.numpy(), 1.0)
self.assertEqual(out2.numpy(), 2)
out1, out2 = foo(flt=flt, integer=integer)
self.assertLen(total_function_cache(foo), 1)
self.assertEqual(out1.numpy(), 1.0)
self.assertEqual(out2.numpy(), 2)
out1, out2 = foo(integer=integer, flt=flt)
self.assertLen(total_function_cache(foo), 1)
self.assertEqual(out1.numpy(), 1.0)
self.assertEqual(out2.numpy(), 2)
out1, out2 = foo(flt, integer=integer)
self.assertLen(total_function_cache(foo), 1)
self.assertEqual(out1.numpy(), 1.0)
self.assertEqual(out2.numpy(), 2)
def testInputSignatureWithKeywordArgs(self):
def foo(a, b, **kwargs):
del kwargs
return a, b
x = function.defun(
foo,
input_signature=[
tensor_spec.TensorSpec([], dtypes.float32),
tensor_spec.TensorSpec([], dtypes.int32)
]).get_concrete_function()
result = x(constant_op.constant(5.0), constant_op.constant(5))
self.assertAllEqual(result, [5.0, 5])
def testInputSignatureWithCompositeTensors(self):
def f(rt):
self.assertEqual(rt.values.shape.as_list(), [None])
self.assertEqual(rt.row_splits.shape.as_list(), [4])
return rt
signature = [ragged_tensor.RaggedTensorSpec(
shape=[3, None], dtype=dtypes.int32)]
defined = function.defun(f, input_signature=signature)
rt1 = ragged_factory_ops.constant([[1], [], [2, 3, 4]])
out1 = defined(rt1)
self.assertLen(total_function_cache(defined), 1)
self.assertAllEqual(out1.values, rt1.values)
self.assertAllEqual(out1.row_splits, rt1.row_splits)
# Changing the row lengths shouldn't create a new function.
rt2 = ragged_factory_ops.constant([[1, 2], [3, 4], [5]])
out2 = defined(rt2)
self.assertLen(total_function_cache(defined), 1)
self.assertAllEqual(out2.values, rt2.values)
self.assertAllEqual(out2.row_splits, rt2.row_splits)
# Different number of rows
rt3 = ragged_factory_ops.constant([[1, 2], [3, 4], [5], [6]])
with self.assertRaisesRegex(ValueError, 'incompatible'):
defined(rt3)
# Different dtype
rt4 = ragged_factory_ops.constant([[1.0, 2.0], [], [3.0]])
with self.assertRaisesRegex(ValueError, 'Structure .* does not match'):
defined(rt4)
# Different rank
rt5 = ragged_factory_ops.constant([[[1]], [[2]], [[3]]])
with self.assertRaisesRegex(ValueError, 'does not match'):
defined(rt5)
def testInputSignatureWithVariableArgs(self):
def f(v):
v.assign_add(1)
signature = [
resource_variable_ops.VariableSpec(shape=[], dtype=dtypes.int32)
]
defined = function.defun(f, input_signature=signature)
v1 = variables.Variable(0)
v2 = variables.Variable(0)
defined(v1)
self.assertEqual(v1.numpy(), 1)
self.assertEqual(v2.numpy(), 0)
defined(v=v2)
self.assertEqual(v1.numpy(), 1)
self.assertEqual(v2.numpy(), 1)
def testTensorKeywordArguments(self):
def foo(a, b):
del a
return b
defined = function.defun(foo)
a = constant_op.constant(2.0)
b = constant_op.constant([1.0, 2.0])
one = defined(a, b)
self.assertLen(total_function_cache(defined), 1)
two = defined(a=a, b=b)
self.assertLen(total_function_cache(defined), 1)
three = defined(b=b, a=a)
self.assertLen(total_function_cache(defined), 1)
four = defined(a, b=b)
self.assertLen(total_function_cache(defined), 1)
# The next call corresponds to a new input signature, hence
# we expect another function to be defined.
five = defined(b, a)
self.assertLen(total_function_cache(defined), 2)
six = defined(a=b, b=a)
self.assertLen(total_function_cache(defined), 2)
seven = defined(b=a, a=b)
self.assertLen(total_function_cache(defined), 2)
self.assertAllEqual(one, [1.0, 2.0])
self.assertAllEqual(two, [1.0, 2.0])
self.assertAllEqual(three, [1.0, 2.0])
self.assertAllEqual(four, [1.0, 2.0])
self.assertAllEqual(five, 2.0)
self.assertAllEqual(six, 2.0)
self.assertAllEqual(seven, 2.0)
def testDefuningInstanceMethod(self):
integer = constant_op.constant(2, dtypes.int64)
class Foo(object):
def one(self, tensor):
return tensor
@def_function.function
def two(self, tensor, other=integer):
return self.one(tensor), other
foo = Foo()
t = constant_op.constant(1.0)
one, two = foo.two(t)
self.assertEqual(one.numpy(), 1.0)
self.assertEqual(two.numpy(), 2)
def testDefuningInstanceMethodWithDefaultArgument(self):
integer = constant_op.constant(2, dtypes.int64)
class Foo(object):
@def_function.function
def func(self, other=integer):
return other
foo = Foo()
self.assertEqual(foo.func().numpy(), int(integer))
def testPythonCallWithSideEffects(self):
state = []
@def_function.function
def side_effecting_function():
state.append(0)
side_effecting_function()
self.assertAllEqual(state, [0])
# The second invocation should call the graph function, which shouldn't
# trigger the list append.
side_effecting_function()
self.assertAllEqual(state, [0])
# Whereas calling the python function directly should create a side-effect.
side_effecting_function.python_function()
self.assertAllEqual(state, [0, 0])
def testFunctionWithNestedFunctionCallAndSideEffects(self):
v1 = variables.Variable(1.0)
v2 = variables.Variable(1.0)
@def_function.function
def add_one(a):
a.assign_add(1.0)
    # Grappler will inline calls to `add_one` into the function body; we check
    # that all side effects were executed.
@def_function.function
def side_effecting_function(a, b):
add_one(a)
add_one(b)
return a + b
result = side_effecting_function(v1, v2)
self.assertEqual(result.numpy(), 4.0)
def testFunctionWithExtraAttributes(self):
@function.defun_with_attributes(attributes={'experimental_1': 'value1',
'experimental_2': 2})
def matmul(x, y):
return math_ops.matmul(x, y)
def add(x, y):
return math_ops.add(x, y)
defun_add = function.defun_with_attributes(
add, attributes={'experimental_3': True, 'experimental_4': 1.0})
with context.graph_mode(), self.cached_session():
with ops.get_default_graph().as_default():
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
sq = matmul(t, t)
double = defun_add(t, t)
self.assertAllEqual(sq.eval().reshape(-1), [7, 10, 15, 22])
self.assertAllEqual(double.eval().reshape(-1), [2, 4, 6, 8])
graph = ops.get_default_graph()
# pylint: disable=protected-access
self.assertLen(graph._functions, 2)
functions = list(graph._functions.values())
self.assertRegex(functions[0].definition.signature.name, '.*matmul.*')
attrs = functions[0].definition.attr
self.assertLen(attrs, 2)
self.assertEqual(attrs['experimental_1'].s, b'value1')
self.assertEqual(attrs['experimental_2'].i, 2)
self.assertRegex(functions[1].definition.signature.name, '.*add.*')
attrs = functions[1].definition.attr
self.assertLen(attrs, 2)
self.assertEqual(attrs['experimental_3'].b, True)
self.assertEqual(attrs['experimental_4'].f, 1.0)
# pylint: enable=protected-access
def testFunctionWithInvalidAttribute(self):
@function.defun_with_attributes(attributes={'experimental_1': ['value1']})
def add(x, y):
return math_ops.add(x, y)
with self.assertRaisesRegex(ValueError, '.*Unsupported attribute type.*'):
with context.graph_mode(), self.cached_session():
with ops.get_default_graph().as_default():
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
add(t, t)
def testRegisterFunction(self):
@function.defun
def add(x, y):
return math_ops.add(x, y)
def matmul(x, y):
return math_ops.matmul(x, y)
defun_matmul = function.defun(matmul)
with context.graph_mode(), self.cached_session():
with ops.get_default_graph().as_default():
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
function.register(defun_matmul, t, t)
function.register(add, t, t)
graph = ops.get_default_graph()
# pylint: disable=protected-access
self.assertLen(graph._functions, 6)
# two sets of functions, each of them are (inference, forward, backward)
functions = list(graph._functions.values())
captured_function_names = [
f.definition.signature.name for f in functions
]
expected_func_name_regex = [
'.*inference.*matmul.*',
'.*forward.*matmul.*',
'.*inference.*backward.*matmul.*',
'.*inference.*add.*',
'.*forward.*add.*',
'.*inference.*backward.*add.*',
]
for i in range(len(functions)):
self.assertRegex(captured_function_names[i],
expected_func_name_regex[i])
# Check the forward and backward function has the correct attributes.
self.assertEqual(
functions[1].definition.attr['backward_function_name'].s,
functions[2].name)
self.assertEqual(
functions[2].definition.attr['forward_function_name'].s,
functions[1].name)
self.assertEqual(
functions[4].definition.attr['backward_function_name'].s,
functions[5].name)
self.assertEqual(
functions[5].definition.attr['forward_function_name'].s,
functions[4].name)
sq = defun_matmul(t, t)
double = add(t, t)
self.assertAllEqual(sq.eval().reshape(-1), [7, 10, 15, 22])
self.assertAllEqual(double.eval().reshape(-1), [2, 4, 6, 8])
        # Make sure the pre-registered function is used, and no other function
        # is added.
self.assertLen(graph._functions, 6)
functions = list(graph._functions.values())
for i in range(len(functions)):
self.assertEqual(captured_function_names[i],
functions[i].definition.signature.name)
@parameterized.named_parameters(
dict(testcase_name='Defun',
function_decorator=function.defun),
dict(testcase_name='DefFunction',
function_decorator=def_function.function))
def testRegisterConcreteFunction(self, function_decorator):
@function_decorator
def py_add(x, y):
return math_ops.add(x, y)
py_add(array_ops.ones([]), array_ops.ones([]))
add = py_add.get_concrete_function(
tensor_spec.TensorSpec(None, dtypes.float32),
tensor_spec.TensorSpec(None, dtypes.float32))
@function_decorator
def py_composite(x, y):
return x, add(x, y)
py_composite(array_ops.ones([]), array_ops.ones([]))
composite = py_composite.get_concrete_function(
tensor_spec.TensorSpec(None, dtypes.float32),
tensor_spec.TensorSpec(None, dtypes.float32))
with context.graph_mode(), self.cached_session():
with ops.get_default_graph().as_default():
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
composite.add_to_graph()
composite.add_gradient_functions_to_graph()
graph = ops.get_default_graph()
# pylint: disable=protected-access
self.assertLen(graph._functions, 6)
# two sets of functions, each of them are (inference, forward, backward)
functions = list(graph._functions.values())
captured_function_names = [
f.definition.signature.name for f in functions
]
expected_func_name_regex = [
'.*inference.*py_composite.*',
'.*inference.*py_add.*',
'.*forward.*py_composite.*',
'.*forward.*py_add.*',
'.*inference.*backward.*py_composite.*',
'.*inference.*backward.*py_add.*',
]
for expected, found in zip(
expected_func_name_regex,
captured_function_names):
self.assertRegex(found, expected)
composite_t, composite_double = composite(t, t)
double = add(t, t)
self.assertAllEqual([[2, 4], [6, 8]], self.evaluate(double))
self.assertAllEqual([[2, 4], [6, 8]], self.evaluate(composite_double))
self.assertAllEqual([[1, 2], [3, 4]], self.evaluate(composite_t))
        # Make sure the pre-registered function is used, and no other function
        # is added.
self.assertLen(graph._functions, 6)
@parameterized.named_parameters(
dict(testcase_name='Defun',
function_decorator=function.defun),
dict(testcase_name='DefFunction',
function_decorator=def_function.function))
def testEagerCaptures(self, function_decorator):
with context.eager_mode():
large_tensor = array_ops.ones(shape=(256,))
self.assertGreater(256, func_graph._EAGER_CONST_THRESHOLD)
small_tensor = array_ops.ones(shape=(4,))
self.assertLessEqual(4, func_graph._EAGER_CONST_THRESHOLD)
v = resource_variable_ops.ResourceVariable(0.0)
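      # Eager tensors larger than _EAGER_CONST_THRESHOLD are captured by
      # reference (Placeholder); smaller ones are inlined as constants (Const).
      # Variables are always captured by reference.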
for captured, op_type in [(large_tensor, 'Placeholder'),
(small_tensor, 'Const'), (v, 'Placeholder')]:
@function_decorator
def test_fn():
return captured + 1 # pylint: disable=cell-var-from-loop
g = test_fn.get_concrete_function().graph
internal_captures = g.internal_captures
self.assertLen(internal_captures, 1)
self.assertEqual(internal_captures[0].op.type, op_type)
def testRegisterFunctionWithInputSignature(self):
def matmul(x, y):
return math_ops.matmul(x, y)
defun_matmul = function.defun(
matmul,
input_signature=[
tensor_spec.TensorSpec(shape=(2, 2), dtype=dtypes.float32),
tensor_spec.TensorSpec(shape=(2, 2), dtype=dtypes.float32)
])
with context.graph_mode(), self.cached_session():
with ops.get_default_graph().as_default():
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
function.register(defun_matmul, t, t)
graph = ops.get_default_graph()
# pylint: disable=protected-access
self.assertLen(graph._functions, 3)
        # Test registering the function with the cache; inputs are ignored.
function.register(defun_matmul)
graph = ops.get_default_graph()
self.assertLen(graph._functions, 3)
def testRegisterFunctionWithCache(self):
def matmul(x, y):
return math_ops.matmul(x, y)
defun_matmul = function.defun(matmul)
with context.graph_mode(), self.cached_session():
with ops.get_default_graph().as_default():
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
t2 = constant_op.constant([[2.0, 3.0], [4.0, 5.0]])
function.register(defun_matmul, t, t)
function.register(defun_matmul, t2, t2)
graph = ops.get_default_graph()
        # Only one function is registered since the input params are of the
        # same type.
# pylint: disable=protected-access
self.assertLen(graph._functions, 3)
def testCallingFunctionWithDifferentVariables(self):
@function.defun
def foo(v):
v.assign_add(1.0)
return v.read_value()
v = resource_variable_ops.ResourceVariable(0.0)
graph_function = foo.get_concrete_function(v)
self.assertLen(graph_function.inputs, 1)
self.assertEmpty(graph_function.captured_inputs)
self.assertEqual(float(graph_function(v)), 1.0)
self.assertEqual(float(graph_function(v)), 2.0)
w = resource_variable_ops.ResourceVariable(0.0)
@function.defun
def bar(v):
del v
return constant_op.constant(1.0)
graph_function = bar.get_concrete_function(v)
self.assertEqual(float(graph_function(v)), 1.0)
self.assertEqual(float(graph_function(w)), 1.0)
def testCallingFunctionWithNonTensorsFails(self):
@function.defun
def foo(x):
return x
graph_function = foo.get_concrete_function(constant_op.constant(1.0))
with self.assertRaises((TypeError, ValueError)):
graph_function('Not a Tensor.')
def testSwapImplementationWithGrapplerPlugin(self):
    # Set min_graph_nodes to -1 since the graph in this test is too small and
    # would otherwise be ignored by Grappler.
rewrites = rewriter_config_pb2.RewriterConfig()
rewrites.implementation_selector = rewriter_config_pb2.RewriterConfig.ON
rewrites.min_graph_nodes = -1
graph_options = config_pb2.GraphOptions(
rewrite_options=rewrites, build_cost_model=1)
config_proto = config_pb2.ConfigProto(graph_options=graph_options)
with context.graph_mode(), self.cached_session(
config=config_proto, graph=ops.Graph(), use_gpu=True):
@function.defun_with_attributes(
attributes={
'api_implements': 'random_boost',
'api_preferred_device': 'CPU'
})
def cpu_boost(x):
return math_ops.add(x, 2.0)
@function.defun_with_attributes(
attributes={
'api_implements': 'random_boost',
'api_preferred_device': 'GPU'
})
def gpu_boost(x):
return math_ops.add(x, 4.0)
x = constant_op.constant(1.0)
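      # Registering the CPU variant adds it to the graph so that Grappler's
      # implementation selector can consider it as an alternative
      # implementation of 'random_boost'.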
function.register(cpu_boost, x)
y = gpu_boost(x)
y_value = self.evaluate(y)
if test.is_gpu_available():
self.assertEqual(y_value, 5.0)
else:
        # Grappler falls back to the CPU impl even for the GPU function.
self.assertEqual(y_value, 3.0)
def testSwapImplementationInEager(self):
if not context.executing_eagerly():
self.skipTest('eager only')
    # testSharedRendezvous sets the disable_meta_optimizer flag to True. If
    # that subtest runs before this one, the flag would still be set and this
    # subtest would fail. To avoid that scenario, explicitly set
    # disable_meta_optimizer to False here.
context.context().set_optimizer_experimental_options({
'min_graph_nodes': -1,
'implementation_selector': True,
'disable_meta_optimizer': False
})
@function.defun_with_attributes(
attributes={'api_implements': 'foo',
'api_preferred_device': 'CPU'})
def on_cpu(x):
return x + 2
@function.defun_with_attributes(
attributes={'api_implements': 'foo',
'api_preferred_device': 'GPU'})
def on_gpu(x):
return x + 4
@function.defun
def run_on_cpu(t):
function.register(on_cpu, t)
with ops.device('CPU:0'):
return on_gpu(t)
    # Expect the on_cpu branch to run, regardless of whether a GPU is
    # available.
self.assertEqual(run_on_cpu(constant_op.constant(1)).numpy(), 3)
def testDefunFunctionSeparateGraphs(self):
with context.graph_mode():
@function.defun
def add(x):
return x + 5
@function.defun
def maybe_add(x, should_add):
if should_add:
return add(x)
else:
return x
with ops.Graph().as_default():
x = constant_op.constant(11)
maybe_add(x, True)
self.assertLen(total_function_cache(maybe_add), 1)
self.assertLen(total_function_cache(add), 1)
maybe_add(x, False)
self.assertLen(total_function_cache(maybe_add), 2)
self.assertLen(total_function_cache(add), 1)
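      # A new default graph gets its own cache entries, so both functions are
      # traced again.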
with ops.Graph().as_default():
x = constant_op.constant(11)
maybe_add(x, True)
self.assertLen(total_function_cache(maybe_add), 3)
self.assertLen(total_function_cache(add), 2)
def testCacheKeyOverlappingShapes(self):
@function.defun
def defined(t):
return t
defined(array_ops.zeros([12, 1]))
self.assertLen(total_function_cache(defined), 1)
defined(array_ops.zeros([1, 21]))
self.assertLen(total_function_cache(defined), 2)
def testCacheKeyNestedLists(self):
@function.defun
def defined(l):
return l
a = constant_op.constant(1.)
b = constant_op.constant(2.)
c = constant_op.constant(3.)
defined([[a], b, c])
self.assertLen(total_function_cache(defined), 1)
defined([[a, b], c])
self.assertLen(total_function_cache(defined), 2)
def testCacheKeyAttrsClass(self):
if attr is None:
self.skipTest('attr module is unavailable.')
@attr.s
class TestClass(object):
a = attr.ib()
b = attr.ib()
@function.defun
def defined(l):
return l
defined(
TestClass(
constant_op.constant(1.),
[constant_op.constant(2.),
constant_op.constant(3.)]))
self.assertLen(total_function_cache(defined), 1)
defined(
TestClass(
constant_op.constant(1.),
[constant_op.constant(2.),
constant_op.constant(3.)]))
self.assertLen(total_function_cache(defined), 1)
defined(
TestClass([constant_op.constant(1.),
constant_op.constant(2.)], constant_op.constant(3.)))
self.assertLen(total_function_cache(defined), 2)
def testCacheKeyVariables(self):
@function.defun
def defined(a, b, c):
return a + b + c
x = resource_variable_ops.ResourceVariable(0.0)
y = resource_variable_ops.ResourceVariable(0.0)
z = resource_variable_ops.ResourceVariable(0.0)
# If tensor equality is not enabled, we always get a cache miss if the
# function is called with different variables. With equality enabled we
# should only get a miss if the aliasing changed.
defined(x, y, z)
self.assertLen(total_function_cache(defined), 1)
defined(x, y, z)
self.assertLen(total_function_cache(defined), 1)
# Re-arranging arguments causes cache miss
defined(z, y, x)
self.assertLen(total_function_cache(defined), 2)
defined(z, y, x)
self.assertLen(total_function_cache(defined), 2)
# Aliasing causes cache miss
defined(x, x, z)
self.assertLen(total_function_cache(defined), 3)
defined(x, x, z)
self.assertLen(total_function_cache(defined), 3)
# Re-arranging arguments causes cache miss
defined(y, y, z)
self.assertLen(total_function_cache(defined), 4)
defined(y, y, z)
self.assertLen(total_function_cache(defined), 4)
# Different alias positions causes cache miss
defined(z, y, y)
self.assertLen(total_function_cache(defined), 5)
defined(z, y, y)
self.assertLen(total_function_cache(defined), 5)
x_copy = copy.deepcopy(x)
# Deep copy causes cache miss
defined(x_copy, y, z)
self.assertLen(total_function_cache(defined), 6)
defined(x_copy, y, z)
self.assertLen(total_function_cache(defined), 6)
def testVariableRetracing(self):
v1 = variables.Variable(1.)
v2 = variables.Variable(1.)
v3 = copy.deepcopy(variables.Variable(1.))
var_dict = {id(v1): constant_op.constant(1),
id(v2): constant_op.constant(2),
id(v3): constant_op.constant(3)}
@function.defun
def lookup_tensor(v):
return var_dict[id(v)]
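    # Each distinct variable triggers a retrace, so `id(v)` is re-evaluated at
    # trace time and every call returns the constant for that variable.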
self.assertEqual(1, lookup_tensor(v1).numpy())
self.assertEqual(2, lookup_tensor(v2).numpy())
self.assertEqual(3, lookup_tensor(v3).numpy())
def testDecoratedMethodInspect(self):
class DefunnedMiniModel(object):
@function.defun
def call(self, inputs, training=True):
pass
m = DefunnedMiniModel()
fullargspec = tf_inspect.getfullargspec(m.call)
self.assertIn('training', fullargspec.args)
def testFunctionModifiesInputList(self):
    # Tests `list` methods that modify the list in place, except `list.sort`,
    # which cannot even be "defunned" in the first place.
def get_list():
return [constant_op.constant(0.), constant_op.constant(1.)]
expected_msg = '.*() should not modify'
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def append(l):
l.append(constant_op.constant(0.))
append(get_list())
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def extend(l):
l.extend([constant_op.constant(0.)])
extend(get_list())
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def insert(l):
l.insert(0, constant_op.constant(0.))
insert(get_list())
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def pop(l):
l.pop()
pop(get_list())
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def reverse(l):
l.reverse()
reverse(get_list())
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def remove(l):
l.remove(l[0])
remove(get_list())
    # `list.clear` is a method that exists in Py3 but not in Py2.
if sys.version.startswith('3'):
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def clear(l):
l.clear()
clear(get_list())
# One last test for keyword arguments
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def kwdappend(**kwargs):
l = kwargs['l']
l.append(constant_op.constant(0.))
kwdappend(l=get_list())
def testFunctionModifiesInputDict(self):
def get_dict():
return {'t1': constant_op.constant(0.), 't2': constant_op.constant(1.)}
expected_msg = '.* should not modify'
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def clear(m):
m.clear()
clear(get_dict())
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def pop(m):
m.pop('t1')
pop(get_dict())
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def popitem(m):
m.popitem()
popitem(get_dict())
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def update(m):
m.update({'t1': constant_op.constant(3.)})
update(get_dict())
with self.assertRaisesRegex(ValueError, expected_msg):
@def_function.function
def setdefault(m):
m.setdefault('t3', constant_op.constant(3.))
setdefault(get_dict())
def testFunctionModifiesInputNest(self):
with self.assertRaisesRegex(ValueError, 'modify.* should not modify'):
@def_function.function
def modify(n):
n[0]['t1'].append(constant_op.constant(1.))
nested_input = [{
't1': [constant_op.constant(0.),
constant_op.constant(1.)],
},
constant_op.constant(2.)]
modify(nested_input)
with self.assertRaisesRegex(ValueError,
'modify_same_flat.* should not modify'):
# The flat list doesn't change whereas the true structure changes
@def_function.function
def modify_same_flat(n):
n[0].append(n[1].pop(0))
nested_input = [[constant_op.constant(0.)],
[constant_op.constant(1.),
constant_op.constant(2.)]]
modify_same_flat(nested_input)
def testExecutorType(self):
@function.defun
def add_five(x):
return x + 5
self.assertEqual(
5,
add_five(constant_op.constant(0, dtype=dtypes.int32)).numpy())
with self.assertRaisesRegex(errors.NotFoundError, 'NON_EXISTENT_EXECUTOR'):
with context.function_executor_type('NON_EXISTENT_EXECUTOR'):
add_five(constant_op.constant(0, dtype=dtypes.int32))
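    # The empty string, 'DEFAULT', and None should all select the default
    # executor.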
for executor_type in ('', 'DEFAULT', None):
with context.function_executor_type(executor_type):
self.assertAllEqual(
5,
add_five(constant_op.constant(0, dtype=dtypes.int32)).numpy())
@test_util.assert_no_garbage_created
def testReferenceCycles(self):
fn = function.defun(lambda x: 2. * x)
fn(constant_op.constant(4.0))
weak_fn = weakref.ref(fn)
del fn
# Tests that the weak reference we made to the function is now dead, which
# means the object has been deleted. This should be true as long as the
# function itself is not involved in a reference cycle.
self.assertIs(None, weak_fn())
def testFunctionStackInErrorMessage(self):
if context.executing_eagerly():
# TODO(b/122736651): Remove this skipTest once fixed.
self.skipTest('Error interpolation is not working when function is '
'invoked without PartitionedCallOp.')
@def_function.function()
def fn3(x):
return x + 2
@def_function.function()
def fn2(x):
check_ops.assert_equal(fn3(x), 3)
return 2
@def_function.function()
def fn(x):
return fn2(x)
with self.assertRaises(errors.InvalidArgumentError) as cm:
fn(2)
e = cm.exception
self.assertIn('fn -> fn2', e.message)
self.assertIn('node assert_equal/Assert/Assert (defined at', e.message)
self.assertNotIn('fn3', e.message)
@test_util.run_gpu_only
def testFunctionIsNotPinned(self):
"""Tests that functions aren't pinned to the CPU by the eager runtime."""
seed1, seed2 = 79, 25
shape = constant_op.constant([4, 7])
dtype = dtypes.float32
@def_function.function
def func():
with ops.device('GPU:0'):
return gen_random_ops.random_standard_normal(
shape, dtype=dtype, seed=seed1, seed2=seed2)
with ops.device('GPU:0'):
x = func()
self.assertRegex(x.device, 'GPU')
@test_util.run_in_graph_and_eager_modes
def testShapeCaching(self):
@function.defun
def func(x):
return array_ops.shape(x)
@function.defun(
input_signature=[tensor_spec.TensorSpec([None, None], dtypes.float32)])
def calls_func(x):
return func(x)
self.assertAllEqual([1, 1], self.evaluate(func(array_ops.zeros([1, 1]))))
self.assertAllEqual([2, 2], self.evaluate(func(array_ops.zeros([2, 2]))))
self.assertAllEqual(
[3, 3],
self.evaluate(calls_func(array_ops.zeros([3, 3]))))
def testLimitedRetracing(self):
trace_count = [0]
@function.defun
def func(x):
trace_count[0] += 1
return x
for _ in range(50):
func(constant_op.constant(3.))
func(constant_op.constant(4.))
func(constant_op.constant([[1., 2.]]))
func(constant_op.constant([[]]))
func(constant_op.constant([[3., 4.], [5., 6.]]))
func(constant_op.constant([[3., 4.], [5., 6.], [7., 8.]]))
# Tracing more than twice per input doesn't make sense.
self.assertLess(trace_count[0], 13)
def testLimitedRetracingWithCompositeTensors(self):
trace_count = [0]
@def_function.function
def f(x):
trace_count[0] += 1
return x
for i in range(10):
f(ragged_factory_ops.constant([[1, 2], [i]]))
f(ragged_factory_ops.constant([[1, 2], [], [3, 4, 5]]))
f(ragged_factory_ops.constant([[[1, 2], [3]], [[4, 5, 6]]]))
self.assertEqual(trace_count[0], 3)
def test_concrete_function_shape_mismatch(self):
@def_function.function
def f(argument_name):
return argument_name + 1.
f_concrete = f.get_concrete_function(constant_op.constant([1.]))
    # Calling a concrete function from eager mode doesn't do any shape checking
    # beyond what the kernels do while executing.
self.assertAllEqual(
[2., 3.],
f_concrete(constant_op.constant([1., 2.])).numpy())
@def_function.function
def g():
f_concrete(constant_op.constant([1., 2.]))
with self.assertRaisesRegex(ValueError, 'argument_name'):
g()
@test_util.run_in_graph_and_eager_modes
def test_shape_inference_with_symbolic_shapes(self):
@def_function.function
def _uses_symbolic_shapes(w, x, y):
x = array_ops.identity(x, name='name_collision')
x = array_ops.transpose(x, [1, 0, 2])
x_batch = array_ops.shape(x)[0]
y_batch = array_ops.shape(y)[0]
y *= w
n = y_batch // x_batch
return array_ops.reshape(y, [n, x_batch, -1])
conc = _uses_symbolic_shapes.get_concrete_function(
tensor_spec.TensorSpec(None, dtypes.float32),
tensor_spec.TensorSpec(None, dtypes.float32),
tensor_spec.TensorSpec(None, dtypes.float32))
@def_function.function
def _call_concrete():
c = constant_op.constant(1.)
array_ops.identity(c, name='name_collision')
output1 = conc(array_ops.ones([2]),
array_ops.ones([5, 4, 2]),
array_ops.ones([20, 2]))
self.assertEqual([5, 4, 2], output1.shape)
output2 = conc(array_ops.ones([3]),
array_ops.ones([5, 4, 3]),
array_ops.ones([40, 3]))
self.assertEqual([10, 4, 3], output2.shape)
return output1, output2
output1, output2 = _call_concrete()
self.assertEqual((5, 4, 2), self.evaluate(output1).shape)
self.assertEqual((10, 4, 3), self.evaluate(output2).shape)
def testAutoGraphContext(self):
@def_function.function
def test_fn():
self.assertEqual(
ag_ctx.control_status_ctx().status, ag_ctx.Status.ENABLED)
prev_status = ag_ctx.control_status_ctx().status
test_fn()
self.assertEqual(ag_ctx.control_status_ctx().status, prev_status)
def testCancelBeforeFunctionExecution(self):
if not context.executing_eagerly():
self.skipTest('eager only')
q = data_flow_ops.FIFOQueue(1, dtypes.int32)
@def_function.function
def f():
return q.dequeue()
c_mgr = cancellation.CancellationManager()
cancelable_func = c_mgr.get_cancelable_function(f.get_concrete_function())
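    # Cancelling before the call should make the dequeue raise CancelledError
    # rather than block on the empty queue.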
c_mgr.start_cancel()
with self.assertRaises(errors.CancelledError):
cancelable_func()
# TODO(b/162544929): Enable this test.
def DISABLE_testCancelBlockedFunctionExecution(self):
if not context.executing_eagerly():
self.skipTest('eager only')
q = data_flow_ops.FIFOQueue(1, dtypes.int32)
@def_function.function
def f():
return q.dequeue()
c_mgr = cancellation.CancellationManager()
cancelable_func = c_mgr.get_cancelable_function(f.get_concrete_function())
def cancel_thread():
time.sleep(0.5)
c_mgr.start_cancel()
t = self.checkedThread(cancel_thread)
t.start()
with self.assertRaises(errors.CancelledError):
cancelable_func()
t.join()
def testCancelAfterFunctionExecution(self):
if not context.executing_eagerly():
self.skipTest('eager only')
q = data_flow_ops.FIFOQueue(1, dtypes.int32)
q.enqueue(37)
@def_function.function
def f():
return q.dequeue()
c_mgr = cancellation.CancellationManager()
cancelable_func = c_mgr.get_cancelable_function(f.get_concrete_function())
self.assertAllEqual(37, cancelable_func().numpy())
# Cancellation after the function executes is a no-op.
c_mgr.start_cancel()
def testAddFunctionCallback(self):
functions = []
def function_callback(f):
functions.append(f)
@def_function.function
def plus_one(x):
return x + 1
try:
function.add_function_callback(function_callback)
x_float32 = numpy.array(3.0, dtype=numpy.float32)
self.assertAllClose(plus_one(x_float32), 4.0)
self.assertLen(functions, 1)
# Function is already created. Executing it again should not invoke the
# function callback.
self.assertAllClose(plus_one(x_float32), 4.0)
self.assertLen(functions, 1)
# Signature change leads to a new Function being built.
x_float64 = numpy.array(3.0, dtype=numpy.float64)
self.assertAllClose(plus_one(x_float64), 4.0)
self.assertLen(functions, 2)
finally:
function.clear_function_callbacks()
def testRemoveFunctionCallback(self):
functions_1 = []
def function_callback_1(f):
functions_1.append(f)
functions_2 = []
def function_callback_2(f):
functions_2.append(f)
@def_function.function
def plus_one(x):
return x + 1
try:
function.add_function_callback(function_callback_1)
function.add_function_callback(function_callback_2)
self.assertAllClose(plus_one(numpy.array(3.0, dtype=numpy.float32)), 4.0)
self.assertLen(functions_1, 1)
self.assertLen(functions_2, 1)
function.remove_function_callback(function_callback_1)
      # The 1st callback should not be invoked after remove_function_callback()
      # is called.
self.assertAllClose(plus_one(numpy.array(3.0, dtype=numpy.float64)), 4.0)
self.assertLen(functions_1, 1)
self.assertLen(functions_2, 2)
finally:
function.clear_function_callbacks()
def testClearFunctionCallbacks(self):
function.add_function_callback(lambda f: None)
function.add_function_callback(lambda f: None)
self.assertLen(function._function_callbacks, 2)
function.clear_function_callbacks()
self.assertEmpty(function._function_callbacks) # pylint:disable=protected-access
@test_util.run_in_graph_and_eager_modes
def testConcreteFunctionWithNestedTensorInputs(self):
@def_function.function
def f(x, y):
return (x['a'] + x['b'], y[0] + y[1])
a = constant_op.constant(1000)
b = constant_op.constant(200)
c = constant_op.constant(30)
d = {'a': a, 'b': b}
e = (c, 4)
# Test different argument signatures when constructing the concrete func.
for cf in [
f.get_concrete_function(d, e),
f.get_concrete_function(d, y=e),
f.get_concrete_function(y=e, x=d),
f.get_concrete_function(_spec_for_value(d), _spec_for_value(e)),
f.get_concrete_function(_spec_for_value(d), y=_spec_for_value(e)),
f.get_concrete_function(y=_spec_for_value(e), x=_spec_for_value(d))
]:
# Test different calling conventions when calling the concrete func.
for output in [
cf(d, e), # structured signature
cf(d, y=e), # structured signature w/ kwarg
cf(y=e, x=d), # structured signature w/ 2 kwargs
cf(a, b, c), # flat signature
cf(x=a, x_1=b, y=c) # flat signature w/ kwargs
]:
self.assertIsInstance(output, tuple)
self.assertLen(output, 2)
self.assertAllEqual(output[0], 1200)
self.assertAllEqual(output[1], 34)
@test_util.run_in_graph_and_eager_modes
def testConcreteFunctionWithNestedNonTensorInputs(self):
@def_function.function
def f(x, y):
return (x['a'] + x['b'], y[0] + y[1])
a = {'a': constant_op.constant(1000), 'b': constant_op.constant(200)}
b = (50, 3)
for cf in [ # argument y is bound to non-Tensor value (50, 3).
f.get_concrete_function(a, b),
f.get_concrete_function(a, y=b),
f.get_concrete_function(x=a, y=b)
]:
for output in [cf(a), cf(x=a), cf(a, b), cf(x=a, y=b)]:
self.assertAllEqual(output[0] + output[1], 1253)
@test_util.run_in_graph_and_eager_modes
def testConcreteFunctionWithBoundNestedNonTensorInputs(self):
@def_function.function
def f(x, y):
return (x['a'] + x['b'], y[0] + y[1])
a = {'a': 3000, 'b': 200, 'c': 9000}
b = (constant_op.constant(30), 4)
for cf in [ # argument x is bound to non-tensor value `a`
f.get_concrete_function(a, b),
f.get_concrete_function(a, y=b),
f.get_concrete_function(x=a, y=b)
]:
for output in [cf(a, b), cf(a, y=b), cf(y=b), cf(x=a, y=b)]:
self.assertAllEqual(output[0] + output[1], 3234)
@test_util.run_in_graph_and_eager_modes
def testConcreteFunctionWithAllBoundNestedNonTensorInputs(self):
@def_function.function
def f(x, y):
return (x['a'] + x['b'], y[0] + y[1])
a = {'a': 5000, 'b': 500}
b = (50, 5)
cf = f.get_concrete_function(a, b)
for output in [cf(), cf(a), cf(y=b)]:
self.assertAllEqual(output[0] + output[1], 5555)
@test_util.run_in_graph_and_eager_modes
def testConcreteFunctionMethodWithVarargs(self):
float32_scalar = tensor_spec.TensorSpec(shape=(), dtype=dtypes.float32)
class MyModel(module.Module):
@def_function.function(input_signature=[float32_scalar, float32_scalar])
def add(self, *arg):
return math_ops.add(*arg)
m = MyModel()
cf = m.add.get_concrete_function()
cf(-12.0, 3.0)
@test_util.run_in_graph_and_eager_modes
def testConcreteFunctionStructuredSignatureKeywordOrder(self):
# Check that keyword-only arguments are sorted appropriately, so that they
# feed the right tensor into each input.
@def_function.function
def g(**kwargs):
return string_ops.reduce_join(
string_ops.reduce_join(
ops.convert_to_tensor(sorted(kwargs.items())),
axis=1,
separator='='),
axis=0,
separator=', ')
s = constant_op.constant('s')
g.get_concrete_function(q=s, a=s, p=s, r=s, v=s, m=s, l=s)
self.assertAllEqual(
g(m='a', r='b', v='c', q='d', l='e', a='f', p='g'),
b'a=f, l=e, m=a, p=g, q=d, r=b, v=c')
self.assertAllEqual(
g(q='d', a='f', p='g', r='b', v='c', m='a', l='e'),
b'a=f, l=e, m=a, p=g, q=d, r=b, v=c')
self.assertAllEqual(
g(a='f', l='e', m='a', p='g', q='d', r='b', v='c'),
b'a=f, l=e, m=a, p=g, q=d, r=b, v=c')
# pylint: disable=g-long-lambda
@parameterized.named_parameters([
dict(
testcase_name='MissingArg',
conc_args=lambda: (1, constant_op.constant(2)),
call_args=lambda: (1,),
error=r'func\(x, y\) missing required arguments: y'),
dict(
testcase_name='MissingVararg',
conc_args=lambda: (1, 2, constant_op.constant(1.0)),
call_args=lambda: (1, 2),
error=r'func\(x, y, <arg3>\) missing required arguments: <arg3>'),
dict(
testcase_name='ExtraPositionalArg',
conc_args=lambda: (1, 2),
call_args=lambda: (1, 2, 3),
error=r'func\(x, y\) takes 2 positional arguments but 3 were given'),
dict(
testcase_name='MissingKeywordOnlyArg',
conc_args=lambda: (1, 2),
conc_kwargs=lambda: {'c': constant_op.constant(1.0)},
call_args=lambda: (1, 2),
error=r'func\(x, y, \*, c\) missing required arguments: c'),
dict(
testcase_name='ExtraKeywordArg',
conc_args=lambda: (1, 2),
call_args=lambda: (1, 2),
call_kwargs=lambda: {'c': constant_op.constant(1.0)},
error=r'func\(x, y\) got unexpected keyword arguments: c'),
dict(
testcase_name='ExpectedRaggedGotNest',
conc_args=lambda: (ragged_factory_ops.constant([[1, 2], [3]]),),
call_args=lambda: ({
'a': constant_op.constant([1, 2, 3])
},),
error=r'func\(x, y\): argument x had incorrect type\n'
r' expected: RaggedTensor\n'
r" got: {'a': (Eager)?Tensor}"),
dict(
testcase_name='WrongRaggedRank',
conc_args=lambda: (ragged_factory_ops.constant([[1, 2], [3]]),),
call_args=lambda: (ragged_factory_ops.constant([[[1]]]),),
error=r'func\(x, y\): argument x had incorrect type\n'),
dict(
testcase_name='WrongRaggedDType',
conc_args=lambda: (ragged_factory_ops.constant([[1]]),),
call_args=lambda: (ragged_factory_ops.constant([[1.0]]),),
error=r'func\(x, y\): argument x had incorrect type\n'),
dict(
testcase_name='ExpectedDictGotTensor',
conc_args=lambda: ({
'a': constant_op.constant(1),
'b': constant_op.constant(1)
},),
call_args=lambda: (constant_op.constant(1),),
error=r'func\(x, y\): argument x had incorrect type\n'),
dict(
testcase_name='ExpectedTupleGotTensor',
conc_args=lambda:
((constant_op.constant(1), constant_op.constant(2)),),
call_args=lambda: (constant_op.constant(1),),
error=r'func\(x, y\): argument x had incorrect type\n'),
dict(
testcase_name='WrongDType',
conc_args=lambda: (constant_op.constant(1),),
call_args=lambda: (constant_op.constant(1.0),),
exception=(ValueError, errors.InvalidArgumentError,
# on xla_gpu, we get InternalError instead.
errors.InternalError)),
dict(
testcase_name='ExpectedTensorGotInt',
conc_args=lambda: (constant_op.constant(1),),
call_args=lambda: (5,),
error=r'func\(x, y\) expected a Tensor in x, but got int value 5'),
dict(
testcase_name='ExpectedIntGotDifferentInt',
conc_args=lambda: (5,),
call_args=lambda: (8,),
error=r'ConcreteFunction func\(x, y\) was constructed with int '
r'value 5 in x, but was called with int value 8'),
dict(
testcase_name='ExpectedIntGotTensor',
conc_args=lambda: (5,),
call_args=lambda: (constant_op.constant(6),),
error=r'ConcreteFunction func\(x, y\) was constructed with int '
'value 5 in x, but was called with (Eager)?Tensor value .*'),
dict(
testcase_name='TwoValuesForArgument',
conc_args=lambda: (1, 2),
call_args=lambda: (1, 2),
call_kwargs=lambda: {'x': 3},
error=r"func\(x, y\) got two values for argument 'x'"),
])
# pylint: enable=g-long-lambda
@test_util.run_in_graph_and_eager_modes
def testConcreteFunctionStructuredSignatureError(self,
conc_args=(),
conc_kwargs=None,
call_args=(),
call_kwargs=None,
error='.*',
exception=TypeError):
"""Tests for errors in the structrued signature.
Args:
conc_args: Positional arguments used for get_concrete_function.
conc_kwargs: Keyword arguments used for get_concrete_function.
call_args: Positional arguments used to call the function.
call_kwargs: Keyword arguments used to call the function.
error: Expected exception message.
exception: Expected exception type.
"""
conc_args = conc_args() if callable(conc_args) else conc_args
conc_kwargs = conc_kwargs() if callable(conc_kwargs) else conc_kwargs or {}
call_args = call_args() if callable(call_args) else call_args
call_kwargs = call_kwargs() if callable(call_kwargs) else call_kwargs or {}
self.assertIsInstance(conc_args, tuple)
self.assertIsInstance(call_args, tuple)
self.assertIsInstance(conc_kwargs, dict)
self.assertIsInstance(call_kwargs, dict)
@def_function.function
def func(x, y=5, *varargs, **kwargs): # pylint: disable=keyword-arg-before-vararg
del y, varargs, kwargs
return x
conc = func.get_concrete_function(*conc_args, **conc_kwargs)
with self.assertRaisesRegex(exception, error):
self.evaluate(conc(*call_args, **call_kwargs))
# pylint: disable=g-long-lambda
@parameterized.named_parameters([
dict(
testcase_name='MissingArg',
conc_args=lambda: (constant_op.constant(1), constant_op.constant(2)),
call_args=lambda: (constant_op.constant(1),),
error=r'func\(x, y\) missing required arguments: y'),
dict(
testcase_name='TwoValuesForArg',
conc_args=lambda: (constant_op.constant(1), constant_op.constant(2)),
call_args=lambda: (constant_op.constant(1),),
call_kwargs=lambda: {
'x': constant_op.constant(1),
'y': constant_op.constant(1)
},
error=r"func\(x, y\) got two values for argument 'x'"),
dict(
testcase_name='ExtraPositionalArg',
conc_args=lambda: (constant_op.constant(1), constant_op.constant(2)),
call_args=lambda: (constant_op.constant(1), constant_op.constant(2),
constant_op.constant(3)),
error=r'func\(x, y\) takes 2 positional arguments but 3 were given'),
dict(
testcase_name='UnexpectedKeywordArg',
conc_args=lambda: (constant_op.constant(1),),
call_args=lambda: (constant_op.constant(1),),
call_kwargs=lambda: {'c': constant_op.constant(1)},
error=r'func\(x\) got unexpected keyword arguments: c'),
dict(
testcase_name='MissingVararg',
conc_args=lambda: (constant_op.constant(1), constant_op.constant(2),
constant_op.constant(3)),
call_args=lambda: (constant_op.constant(1), constant_op.constant(2)),
error=r'func\(x, y, varargs_0\) missing required '
r'arguments: varargs_0'),
dict(
testcase_name='MissingKeywordArg',
conc_args=lambda: (constant_op.constant(1), constant_op.constant(2)),
conc_kwargs=lambda: {'c': constant_op.constant(1)},
call_args=lambda: (constant_op.constant(1), constant_op.constant(2)),
error=r'func\(x, y, c\) missing required arguments: c'),
dict(
testcase_name='ExpectedTensorGotInt',
conc_args=lambda: (constant_op.constant(1), constant_op.constant(2)),
call_args=lambda: (5, constant_op.constant(2)),
error=r'func\(x, y\): expected argument #0\(zero-based\) to be '
r'a Tensor; got int \(5\)'),
dict(
testcase_name='WrongDType',
conc_args=lambda: (constant_op.constant(1),),
call_args=lambda: (constant_op.constant(1.0),),
exception=(ValueError, errors.InvalidArgumentError,
# on xla_gpu, we get InternalError instead.
errors.InternalError)),
dict(
testcase_name='MissingKeywordArgNestPiece',
conc_args=lambda: (constant_op.constant(1), constant_op.constant(2)),
conc_kwargs=lambda: {'c': ragged_factory_ops.constant([[1]])},
call_args=lambda: (constant_op.constant(1), constant_op.constant(2)),
call_kwargs=lambda: {'c': constant_op.constant(1)},
error=r'func\(x, y, c, c_1\) missing required arguments: c_1'),
])
# pylint: enable=g-long-lambda
@test_util.run_in_graph_and_eager_modes
def testConcreteFunctionFlatSignatureError(self,
conc_args=(),
conc_kwargs=None,
call_args=(),
call_kwargs=None,
error='.*',
exception=TypeError):
"""Tests for errors in the flat signature.
Args:
conc_args: Positional arguments used for get_concrete_function.
conc_kwargs: Keyword arguments used for get_concrete_function.
call_args: Positional arguments used to call the function.
call_kwargs: Keyword arguments used to call the function.
error: Expected exception message.
exception: Expected exception type.
"""
conc_args = conc_args() if callable(conc_args) else conc_args
conc_kwargs = conc_kwargs() if callable(conc_kwargs) else conc_kwargs or {}
call_args = call_args() if callable(call_args) else call_args
call_kwargs = call_kwargs() if callable(call_kwargs) else call_kwargs or {}
self.assertIsInstance(conc_args, tuple)
self.assertIsInstance(call_args, tuple)
self.assertIsInstance(conc_kwargs, dict)
self.assertIsInstance(call_kwargs, dict)
@def_function.function
def func(x, y=5, *varargs, **kwargs): # pylint: disable=keyword-arg-before-vararg
del y, varargs, kwargs
return x
conc = func.get_concrete_function(*conc_args, **conc_kwargs)
# Remove _function_spec, to disable the structured signature.
conc._set_function_spec(None) # pylint: disable=protected-access
with self.assertRaisesRegex(exception, error):
self.evaluate(conc(*call_args, **call_kwargs))
@test_util.run_in_graph_and_eager_modes
def testConcreteFunctionAmbiguousSignature(self):
    # When both the flat and structured signatures are applicable but they
    # give different results, we use the structured signature. Note: we expect
    # this to be extremely rare.
@def_function.function
def f(x, y):
return x * 10 + y
conc = f.get_concrete_function(
x=tensor_spec.TensorSpec(None, dtypes.int32, name='y'),
y=tensor_spec.TensorSpec(None, dtypes.int32, name='x'))
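    # Under the structured signature the keyword arguments bind to the Python
    # parameters (x=5, y=6), giving 5 * 10 + 6 = 56. The flat signature would
    # bind by tensor name ('x' and 'y' are swapped), giving 65.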
result = conc(x=constant_op.constant(5), y=constant_op.constant(6))
self.assertAllEqual(result, 56)
def testPrettyPrintedSignature(self):
@def_function.function
def func(x, kangaroo=None, octopus=7):
del octopus, kangaroo
return x
scalar = constant_op.constant(5)
vector = constant_op.constant([10, 10, 20])
ragged = ragged_factory_ops.constant([[10, 20], [40]])
c1 = func.get_concrete_function(scalar, vector)
c1_summary = r'func\(x, kangaroo, octopus=7\)'
c1_details = (r' Args:\n'
r' x: int32 Tensor, shape=\(\)\n'
r' kangaroo: int32 Tensor, shape=\(3,\)\n'
r' Returns:\n'
r' int32 Tensor, shape=\(\)')
self.assertRegex(c1.pretty_printed_signature(verbose=False), c1_summary)
self.assertRegex(
c1.pretty_printed_signature(verbose=True),
c1_summary + '\n' + c1_details)
self.assertRegex(
repr(c1), r'<ConcreteFunction func\(x, kangaroo, octopus=7\) at .*>')
self.assertRegex(
str(c1), 'ConcreteFunction {}\n{}'.format(c1_summary, c1_details))
c2 = func.get_concrete_function(scalar, ragged, 3)
c2_summary = r'func\(x, kangaroo, octopus=3\)'
c2_details = (r' Args:\n'
r' x: int32 Tensor, shape=\(\)\n'
r' kangaroo: RaggedTensorSpec\(.*\)\n'
r' Returns:\n'
r' int32 Tensor, shape=\(\)')
self.assertRegex(c2.pretty_printed_signature(),
c2_summary + '\n' + c2_details)
c3 = func.get_concrete_function({'a': scalar, 'b': [ragged, ragged]})
c3_summary = r'func\(x, kangaroo=None, octopus=7\)'
c3_details = (r' Args:\n'
r" x: {'a': <1>, 'b': \[<2>, <3>\]}\n"
r' <1>: int32 Tensor, shape=\(\)\n'
r' <2>: RaggedTensorSpec\(.*\)\n'
r' <3>: RaggedTensorSpec\(.*\)\n'
r' Returns:\n'
r" {'a': <1>, 'b': \[<2>, <3>\]}\n"
r' <1>: int32 Tensor, shape=\(\)\n'
r' <2>: RaggedTensorSpec\(.*\)\n'
r' <3>: RaggedTensorSpec\(.*\)')
    # Python 3.5 does not guarantee deterministic iteration over dict contents,
    # which can lead to a mismatch in the pretty_printed_signature output for
    # "Args".
if sys.version_info >= (3, 6):
self.assertRegex(c3.pretty_printed_signature(),
c3_summary + '\n' + c3_details)
# pylint: disable=keyword-arg-before-vararg
@def_function.function
def func2(x, y=3, *args, **kwargs):
return (x, y, args, kwargs)
c4 = func2.get_concrete_function(scalar, 4, 5, a=scalar)
c4_summary = 'func2(x, y=4, <arg3>=5, *, a)'
self.assertEqual(c4.pretty_printed_signature(verbose=False), c4_summary)
c5 = func2.get_concrete_function(8, vector)
c5_summary = 'func2(x=8, y)'
self.assertEqual(c5.pretty_printed_signature(verbose=False), c5_summary)
def testPrettyPrintedExplicitSignatureWithKeywordArg(self): # b/159639913
@def_function.function(input_signature=[tensor_spec.TensorSpec(None)])
def fn(a, b=1):
return a + b
concrete_fn = fn.get_concrete_function()
self.assertEqual(concrete_fn.pretty_printed_signature(False), 'fn(a)')
self.assertEqual(
concrete_fn.pretty_printed_signature(True), 'fn(a)\n'
' Args:\n'
' a: float32 Tensor, shape=<unknown>\n'
' Returns:\n'
' float32 Tensor, shape=<unknown>')
@test_util.run_in_graph_and_eager_modes
def testIndexedSlicesAsGradientsForConcreteFunctions(self):
@def_function.function
def summing_rnn(inputs):
return math_ops.reduce_sum(inputs, axis=1)
@def_function.function
def gradients(inputs):
with backprop.GradientTape() as tape:
tape.watch(inputs)
hidden = summing_rnn(inputs)
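      # `gather` makes the gradient with respect to `hidden` an IndexedSlices,
      # which must flow back through the concrete function without error.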
hidden = array_ops.gather(hidden, constant_op.constant([0]))
loss = math_ops.reduce_mean(hidden)
return tape.gradient(loss, inputs)
gradients(constant_op.constant([[[1.0], [2.0]]])) # No error is raised
def testFollowTypeHintsTraceBasic(self):
trace_count = [0]
def func(x: ops.Tensor):
trace_count[0] += 1
return x
enabled = def_function.function(func, experimental_follow_type_hints=True)
disabled = def_function.function(func, experimental_follow_type_hints=False)
enabled(1) # Initial call gets traced
enabled(2)
enabled(3)
self.assertEqual(trace_count[0], 1)
trace_count = [0]
disabled(1)
disabled(2) # Retrace
disabled(3) # Retrace
self.assertEqual(trace_count[0], 3)
def testFollowTypeHintsTraceWithArgs(self):
trace_count = [0]
def func(*args: ops.Tensor):
trace_count[0] += 1
return args
enabled = def_function.function(func, experimental_follow_type_hints=True)
disabled = def_function.function(func, experimental_follow_type_hints=False)
args = (
'abc',
'def',
) * 20
args2 = (
'def',
'abc',
) * 20
enabled(args)
enabled(args2)
self.assertEqual(trace_count[0], 1)
trace_count = [0]
disabled(args)
disabled(args2) # Retrace
self.assertEqual(trace_count[0], 2)
def testFollowTypeHintsTraceWithKwargs(self):
trace_count = [0]
def func(t: ops.Tensor, **kwargs: ops.Tensor):
del kwargs
trace_count[0] += 1
return t
enabled = def_function.function(func, experimental_follow_type_hints=True)
disabled = def_function.function(func, experimental_follow_type_hints=False)
enabled(1, x=1, y=1.0, z='one')
enabled(2, x=2, y=2.0, z='two')
self.assertEqual(trace_count[0], 1)
trace_count = [0]
disabled(1, x=1, y=1.0, z='one')
disabled(2, x=2, y=2.0, z='two') # Retrace
self.assertEqual(trace_count[0], 2)
def testFollowTypeHintsTraceWithMultipleInputTypes(self):
trace_count = [0]
def func(t: ops.Tensor, *args: ops.Tensor, **kwargs: ops.Tensor):
del args, kwargs
trace_count[0] += 1
return t
enabled = def_function.function(func, experimental_follow_type_hints=True)
disabled = def_function.function(func, experimental_follow_type_hints=False)
enabled(1, constant_op.constant(1), 'str', x=4.0)
enabled(2, constant_op.constant(2), 'str2', x=5.0)
self.assertEqual(trace_count[0], 1)
trace_count = [0]
disabled(1, constant_op.constant(1), 'str', x=4.0)
disabled(2, constant_op.constant(2), 'str2', x=5.0) # Retrace
self.assertEqual(trace_count[0], 2)
def testFollowTypeHintsTraceWithOnlyArgNamed(self):
trace_count = [0]
def func(t: ops.Tensor, i: int = 1, **kwargs): # pylint: disable=bad-whitespace
del i, kwargs
trace_count[0] += 1
return t
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(1, 3, x=4.0, y='str')
enabled(2, 4, x=4.0, y='str') # Retrace
self.assertEqual(trace_count[0], 2)
def testFollowTypeHintsTraceWithNotAllNamed(self):
trace_count = [0]
def func(x, y: ops.Tensor, z: int):
del y, z
trace_count[0] += 1
return x
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(1, 2, 3)
enabled(1, 20, 3) # No retrace - change in ops.Tensor typed arg
enabled(2, 2, 3) # Retrace - change in untyped arg
enabled(2, 2, 4) # Retrace - change in typed arg
self.assertEqual(trace_count[0], 3)
def testFollowTypeHintsTraceWithOnlyArgsNamed(self):
trace_count = [0]
def func(x, y, *args: ops.Tensor):
del y, args
trace_count[0] += 1
return x
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(1, 20, 3, 4, 5, 6)
enabled(1, 20, 3, 4, 5, 60) # No retrace - change in *args
enabled(1, 30, 7, 8, 9, 10) # Retrace - change in args
self.assertEqual(trace_count[0], 2)
def testFollowTypeHintsTraceWithOnlyKwargsNamed(self):
trace_count = [0]
def func(x, y, *args, **kwargs: ops.Tensor):
del y, args, kwargs
trace_count[0] += 1
return x
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(1, 2, 3, 4, 5, 6, a=1.0, b=2.0, c=3.0)
enabled(
1, 2, 3, 4, 5, 6, a=1.5, b=2.5,
c=3.5) # No retrace - change in **kwargs
enabled(100, 2, 3, 4, 5, 6, a=1.0, b=2.0, c=3.0) # Retrace - change in args
enabled(
1, 2, 3, 4, 5, 100, a=1.0, b=2.0, c=3.0) # Retrace - change in *args
self.assertEqual(trace_count[0], 3)
def testFollowTypeHintsTraceWithArgsEquals(self):
trace_count = [0]
def func(
x: ops.Tensor = 0, # pylint:disable=bad-whitespace
y: int = 1, # pylint:disable=bad-whitespace
**kwargs: ops.Tensor):
del y, kwargs
trace_count[0] += 1
return x
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(x=1, y=2, z=3)
enabled(x=1, y=3, z=3) # Retrace - change in args
enabled(x=2, y=2, z=4) # No retrace - change in args and **kwargs
enabled(x=2, y=2, z=4, u=5) # Retrace - change in **kwargs
self.assertEqual(trace_count[0], 3)
def testFollowTypeHintsTraceWithArgsEqualsTypedKwargs(self):
trace_count = [0]
def func(x, y, **kwargs: ops.Tensor):
del y, kwargs
trace_count[0] += 1
return x
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(x=1, y=2, z=3)
enabled(x=1, y=3, z=3) # Retrace
enabled(x=1, y=2, z=4) # No retrace
enabled(x=2, y=2, z=4) # Retrace
enabled(x=2, y=2, z=4, u=5) # Retrace
self.assertEqual(trace_count[0], 4)
def testFollowTypeHintsTraceWithArgsEqualsTypedArgs(self):
trace_count = [0]
def func(x: ops.Tensor, y: int, **kwargs):
del y, kwargs
trace_count[0] += 1
return x
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(x=1, y=2, z=3)
enabled(x=1, y=3, z=3) # Retrace
enabled(x=1, y=2, z=4) # Retrace
enabled(x=2, y=2, z=3) # No retrace
enabled(x=2, y=2, z=4, u=5) # Retrace
self.assertEqual(trace_count[0], 4)
def testFollowTypeHintsTraceWithKwOnlyArgsBasic(self):
trace_count = [0]
def func(*, a: ops.Tensor = None, b=1): # pylint: disable=bad-whitespace
del b
trace_count[0] += 1
return a
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(a=1, b=2)
enabled(a=2, b=2) # No retrace
enabled(a=1, b=1) # Retrace
self.assertEqual(trace_count[0], 2)
def testFollowTypeHintsTraceWithArgsKwOnlyArgsKwargsAndTypedArg(self):
trace_count = [0]
def func(arg: ops.Tensor, *args, kwonly, **kwargs):
del args, kwonly, kwargs
trace_count[0] += 1
return arg
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(1, 2, 3, 4, kwonly=5, kwarg1=6, kwarg2=7)
enabled(100, 2, 3, 4, kwonly=5, kwarg1=6, kwarg2=7) # No retrace
enabled(1000, 2, 3, 4, kwonly=5, kwarg1=6, kwarg2=7) # No retrace
enabled(1, 20, 30, 40, kwonly=5, kwarg1=6, kwarg2=7) # Retrace
enabled(1, 2, 3, 4, kwonly=50, kwarg1=6, kwarg2=7) # Retrace
enabled(1, 2, 3, 4, kwonly=5, kwarg1=60, kwarg2=70) # Retrace
self.assertEqual(trace_count[0], 4)
def testFollowTypeHintsTraceWithArgsKwOnlyArgsKwargsAndTypedArgs(self):
trace_count = [0]
def func(arg, *args: ops.Tensor, kwonly, **kwargs):
del args, kwonly, kwargs
trace_count[0] += 1
return arg
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(1, 2, 3, 4, kwonly=5, kwarg1=6, kwarg2=7)
enabled(100, 2, 3, 4, kwonly=5, kwarg1=6, kwarg2=7) # Retrace
enabled(1, 20, 30, 40, kwonly=5, kwarg1=6, kwarg2=7) # No retrace
enabled(1, 200, 300, 400, kwonly=5, kwarg1=6, kwarg2=7) # No retrace
enabled(1, 2, 3, 4, kwonly=50, kwarg1=6, kwarg2=7) # Retrace
enabled(1, 2, 3, 4, kwonly=5, kwarg1=60, kwarg2=70) # Retrace
self.assertEqual(trace_count[0], 4)
def testFollowTypeHintsTraceWithArgsKwOnlyArgsKwargsAndTypedKwOnlyArg(self):
trace_count = [0]
def func(arg, *args, kwonly: ops.Tensor, **kwargs):
del args, kwonly, kwargs
trace_count[0] += 1
return arg
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(1, 2, 3, 4, kwonly=5, kwarg1=6, kwarg2=7)
enabled(100, 2, 3, 4, kwonly=5, kwarg1=6, kwarg2=7) # Retrace
enabled(1, 20, 30, 40, kwonly=5, kwarg1=6, kwarg2=7) # Retrace
enabled(1, 2, 3, 4, kwonly=50, kwarg1=6, kwarg2=7) # No retrace
enabled(1, 2, 3, 4, kwonly=500, kwarg1=6, kwarg2=7) # No retrace
enabled(1, 2, 3, 4, kwonly=5, kwarg1=60, kwarg2=70) # Retrace
self.assertEqual(trace_count[0], 4)
def testFollowTypeHintsTraceWithArgsKwOnlyArgsKwargsAndTypedKwargs(self):
trace_count = [0]
def func(arg, *args, kwonly, **kwargs: ops.Tensor):
del args, kwonly, kwargs
trace_count[0] += 1
return arg
enabled = def_function.function(func, experimental_follow_type_hints=True)
enabled(1, 2, 3, 4, kwonly=5, kwarg1=6, kwarg2=7)
enabled(100, 2, 3, 4, kwonly=5, kwarg1=6, kwarg2=7) # Retrace
enabled(1, 20, 30, 40, kwonly=5, kwarg1=6, kwarg2=7) # Retrace
enabled(1, 2, 3, 4, kwonly=50, kwarg1=6, kwarg2=7) # Retrace
enabled(1, 2, 3, 4, kwonly=5, kwarg1=60, kwarg2=70) # No retrace
enabled(1, 2, 3, 4, kwonly=5, kwarg1=600, kwarg2=700) # No retrace
self.assertEqual(trace_count[0], 4)
def testWithModuleNameScope(self):
    self.skipTest('b/166158748: function does not handle this case correctly.')
class Foo(module.Module):
def __init__(self):
super().__init__()
self.var = None
@def_function.function
@module.Module.with_name_scope
def add(self, x, y, z=1):
if self.var is None:
return x + y + z
foo = Foo()
self.assertEqual(foo.add(2, 3), 6)
def testWithModuleNameScopeRedundantArgs(self):
    self.skipTest('b/166158748: function does not handle this case correctly.')
class Foo(module.Module):
def __init__(self):
super().__init__()
self.var = None
@def_function.function
@module.Module.with_name_scope
def add(self, x, y):
if self.var is None:
return x + y
foo = Foo()
with self.assertRaisesRegex(TypeError, 'got two values for argument'):
foo.add(2, x=3) # pylint: disable=redundant-keyword-arg,no-value-for-parameter
def testWithModuleNameScopeMissingArgs(self):
    self.skipTest('b/166158748: function does not handle this case correctly.')
class Foo(module.Module):
def __init__(self):
super().__init__()
self.var = None
@def_function.function
@module.Module.with_name_scope
def add(self, x, y):
if self.var is None:
return x + y
foo = Foo()
with self.assertRaisesRegex(TypeError, 'missing required arguments: y'):
foo.add(2) # pylint: disable=no-value-for-parameter
class MultiDeviceTest(test.TestCase, parameterized.TestCase):
@test_util.run_gpu_only
def testMultiDeviceOutput(self):
"""Tests that functions can produce outputs on multiple devices."""
@function.defun
def func(a, b, transpose_a):
with ops.device('/device:CPU:0'):
m1 = math_ops.matmul(a, b, transpose_a=transpose_a)
with ops.device('/device:GPU:0'):
m2 = math_ops.matmul(a, b, transpose_a=transpose_a)
return m1, m2
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
m1, m2 = func(t, t, transpose_a=True)
self.assertAllEqual(m1.numpy(), [[10, 14], [14, 20]])
self.assertRegex(m1.backing_device, 'CPU')
self.assertAllEqual(m2.numpy(), [[10, 14], [14, 20]])
self.assertRegex(m2.backing_device, 'GPU')
@test_util.run_gpu_only
def testEmptyBody(self):
@function.defun
def func(a, b):
return b, a
with ops.device('/device:CPU:0'):
a = array_ops.identity(3.0)
with ops.device('/device:GPU:0'):
b = array_ops.identity(5.0)
m1, m2 = func(a, b)
self.assertAllEqual(m1.numpy(), 5.0)
self.assertRegex(m1.backing_device, 'GPU')
self.assertAllEqual(m2.numpy(), 3.0)
self.assertRegex(m2.backing_device, 'CPU')
@test_util.run_gpu_only
def testMultiDeviceInt32(self):
"""Tests that multi-device functions can take and output INT32s.
When an INT32 device tensor is fed into a function, it is copied to CPU
by the eager runtime. The function sees all INT32 inputs on CPU.
We set allocator attribute 'on_host' for INT32 outputs. They can be
partitioned into the GPU component function, but will be allocated on
CPU nevertheless.
    There is now experimental support for `ints_on_device` in
    FunctionLibraryRuntime; we could try that in the future.
"""
with ops.device('/device:CPU:0'):
int_cpu = constant_op.constant(3, dtype=dtypes.int32)
resource = resource_variable_ops.ResourceVariable(5, dtype=dtypes.int32)
with ops.device('/device:GPU:0'):
int_gpu = constant_op.constant(7, dtype=dtypes.int32)
@function.defun
def func(int_cpu, resource, int_gpu):
with ops.device('/device:CPU:0'):
m1 = int_cpu * resource + int_gpu
with ops.device('/device:GPU:0'):
# This computation will happen on GPU but m2 will be copied to CPU.
m2 = int_gpu * resource + int_cpu + 1
return m1, m2
m1, m2 = func(int_cpu, resource, int_gpu)
self.assertAllEqual(m1.numpy(), 22)
self.assertRegex(m1.backing_device, 'CPU')
self.assertAllEqual(m2.numpy(), 39)
self.assertRegex(m2.backing_device, 'CPU')
# flip arguments
m1, m2 = func(int_gpu, resource, int_cpu)
self.assertAllEqual(m1.numpy(), 38)
self.assertRegex(m1.backing_device, 'CPU')
self.assertAllEqual(m2.numpy(), 23)
self.assertRegex(m2.backing_device, 'CPU')
@test_util.run_gpu_only
def testMultiDeviceColocateWith(self):
"""Tests that function's outputs respect colocation constraints."""
@function.defun
def func(a, b):
with ops.colocate_with(a):
ra = 2 * a
with ops.colocate_with(b):
rb = 3 * b
return ra, rb
devices = ['/device:CPU:0', '/device:GPU:0']
for dev1, dev2 in itertools.product(devices, devices):
with ops.device(dev1):
a = array_ops.identity(1.0)
with ops.device(dev2):
b = array_ops.identity(10.0)
ra, rb = func(a, b)
self.assertEqual(ra.numpy(), 2.0)
self.assertRegex(ra.backing_device, dev1)
self.assertEqual(rb.numpy(), 30.0)
self.assertRegex(rb.backing_device, dev2)
@test_util.run_gpu_only
def testMultiDeviceResources(self):
with ops.device('/device:CPU:0'):
c1 = resource_variable_ops.ResourceVariable(2.0)
c2 = resource_variable_ops.ResourceVariable(7.0)
with ops.device('/device:GPU:0'):
g1 = resource_variable_ops.ResourceVariable(3.0)
g2 = resource_variable_ops.ResourceVariable(5.0)
@function.defun
def func(resource1, resource2):
with ops.device('/device:CPU:0'):
result1 = resource1 * g2
with ops.device('/device:GPU:0'):
result2 = resource2 * c2
return result1, result2
r1, r2 = func(c1, g1)
self.assertEqual(r1.numpy(), 10.0)
self.assertRegex(r1.backing_device, 'CPU')
self.assertEqual(r2.numpy(), 21.0)
self.assertRegex(r2.backing_device, 'GPU')
# Call with flipped inputs. Check that we look at the resource's
# device and reinstantiate the function when the inputs' devices change.
r1, r2 = func(g1, c1)
self.assertEqual(r1.numpy(), 15.0)
self.assertRegex(r1.backing_device, 'CPU')
self.assertEqual(r2.numpy(), 14.0)
self.assertRegex(r2.backing_device, 'GPU')
@test_util.run_gpu_only
def testOutputResources(self):
with ops.device('/device:CPU:0'):
c1 = resource_variable_ops.ResourceVariable(2.0)
with ops.device('/device:GPU:0'):
g1 = resource_variable_ops.ResourceVariable(3.0)
@function.defun
def func(resource1, resource2):
with ops.device('/device:CPU:0'):
result1 = resource1 * 5
with ops.device('/device:GPU:0'):
result2 = resource2 * 7
return result1, resource1.handle, result2, resource2.handle
r1, res1, r2, res2 = func(c1, g1)
self.assertEqual(r1.numpy(), 10.0)
self.assertRegex(r1.backing_device, 'CPU')
self.assertEqual(r2.numpy(), 21.0)
self.assertRegex(r2.backing_device, 'GPU')
def check_handle(handle, expected_value):
self.assertRegex(handle.backing_device, 'CPU')
tensor = gen_resource_variable_ops.read_variable_op(
handle, dtypes.float32)
self.assertEqual(tensor.numpy(), expected_value)
# Check that handles returned from functions are on CPU and an op using
# the resource handle is correctly placed on the device backing the
# resource.
check_handle(res1, 2.0)
check_handle(res2, 3.0)
# Call with flipped inputs to make sure the function is reinstantiated
# and the eager runtime does not mess up the device assignment for ops
# consuming handles returned from defuns.
r1, res1, r2, res2 = func(g1, c1)
self.assertEqual(r1.numpy(), 15.0)
self.assertRegex(r1.backing_device, 'CPU')
self.assertEqual(r2.numpy(), 14.0)
self.assertRegex(r2.backing_device, 'GPU')
check_handle(res1, 3.0)
check_handle(res2, 2.0)
@test_util.run_gpu_only
def testPassResourceThroughNestedFunctionCall(self):
"""Test passing GPU resource to noinline function call placed on CPU.
PartitionedCallOp must not enforce any particular device assignment for the
resource output. Inner function marked as `_nospecialize`, so Grappler would
not prune unused function output.
"""
with ops.device('/device:GPU:0'):
g1 = resource_variable_ops.ResourceVariable(3.0)
@function.defun_with_attributes(attributes={
'_noinline': True,
'_nospecialize': True
})
def inner(resource1):
return resource1 * 2, resource1.handle
@function.defun
def outer(resource1):
with ops.device('/device:CPU:0'):
r1, _ = inner(resource1)
return r1
r1 = outer(g1)
self.assertEqual(r1.numpy(), 6.0)
self.assertRegex(r1.backing_device, 'CPU')
@test_util.run_gpu_only
def testReturnResourceFromNestedFunctionCall(self):
"""Test returning GPU resource from noinline function call placed on CPU.
When inferring output devices for the return value, do not set a device for
returns of DT_RESOURCE data type based on the device assignment of the node
that produced that resource. As an example function call placed on CPU can
return resources on GPU.
"""
with ops.device('/device:GPU:0'):
g1 = resource_variable_ops.ResourceVariable(3.0)
@function.defun_with_attributes(attributes={
'_noinline': True
})
def inner(resource1):
resource1.assign_add(2.0)
return resource1 * 2, resource1.handle
@function.defun
def outer(resource1):
with ops.device('/device:CPU:0'):
r1, res1 = inner(resource1)
return r1, res1
r1, res1 = outer(g1)
self.assertEqual(r1.numpy(), 10.0)
self.assertRegex(r1.backing_device, 'CPU')
def check_handle(handle, expected_value):
self.assertRegex(handle.backing_device, 'CPU')
tensor = gen_resource_variable_ops.read_variable_op(
handle, dtypes.float32)
self.assertEqual(tensor.numpy(), expected_value)
# Check that handles returned from functions are on CPU and an op using
# the resource handle is correctly placed on the device backing the
# resource.
check_handle(res1, 5.0)
@test_util.run_gpu_only
def testComplexInputOutputDevicePattern(self):
"""Tests input/output mapping logic in partitioning."""
with ops.device('/device:CPU:0'):
rc0 = resource_variable_ops.ResourceVariable(2.0)
rc1 = resource_variable_ops.ResourceVariable(3.0)
cc0 = array_ops.identity(5.0)
cc1 = array_ops.identity(7.0)
with ops.device('/device:GPU:0'):
rg0 = resource_variable_ops.ResourceVariable(11.0)
rg1 = resource_variable_ops.ResourceVariable(13.0)
cg0 = array_ops.identity(17.0)
cg1 = array_ops.identity(19.0)
# Make sure tensors are on expected devices.
for tensor in [cc0, cc1]:
self.assertRegex(tensor.backing_device, 'CPU:0')
for tensor in [cg0, cg1]:
self.assertRegex(tensor.backing_device, 'GPU:0')
@function.defun
def func(rc0, cc0, cg0, rc1, cg1, rg0, rg1, cc1):
with ops.device('/device:CPU:0'):
m1 = rc0 * cg0
with ops.device('/device:GPU:0'):
m2 = rg0 * cc0
with ops.device('/device:CPU:0'):
r1 = 1000.0 * m2 + rc1 * cg1
with ops.device('/device:GPU:0'):
r2 = 1000.0 * m1 + rg1 * cc1
return r1, r2, m2, m1
r1, r2, m2, m1 = func(rc0, cc0, cg0, rc1, cg1, rg0, rg1, cc1)
self.assertRegex(m1.backing_device, 'CPU')
self.assertRegex(r1.backing_device, 'CPU')
self.assertRegex(m2.backing_device, 'GPU')
self.assertRegex(r2.backing_device, 'GPU')
self.assertEqual(m1.numpy(), 34.0)
self.assertEqual(r1.numpy(), 55000.0 + 3.0 * 19.0)
self.assertEqual(m2.numpy(), 55.0)
self.assertEqual(r2.numpy(), 34000.0 + 13.0 * 7.0)
@test_util.run_gpu_only
def testArgumentPruning(self):
"""Tests functions taking unnecessary arguments."""
with ops.device('/device:CPU:0'):
c1 = constant_op.constant(5.0)
c2 = constant_op.constant(7.0)
with ops.device('/device:GPU:0'):
g1 = constant_op.constant(11.0)
g2 = constant_op.constant(13.0)
g3 = constant_op.constant(17.0)
@function.defun
def func(g1, g2, c1, g3, c2): # pylint: disable=unused-argument
# arguments g1 and g2 are unused and can be pruned by grappler.
return c1 * g3 * c2
result = func(g1, g2, c1, g3, c2)
self.assertEqual(result.numpy(), 5.0 * 7.0 * 17.0)
def testNestedCallWatchedVariables(self):
v = variables.Variable(4.)
@def_function.function
def f():
return v ** 2.
with backprop.GradientTape() as tape:
f()
self.assertEqual((v,), tape.watched_variables())
@def_function.function
def g():
return f()
with backprop.GradientTape() as tape:
g()
self.assertEqual((v,), tape.watched_variables())
# f() can rely on the variable being read during its trace. g() checks that
# variables from a function which knows about them are recorded on the
# tape. h() tests that functions forward knowledge of variables to callers.
@def_function.function
def h():
return g()
with backprop.GradientTape() as tape:
h()
self.assertEqual((v,), tape.watched_variables())
def testDeferredCapture(self):
value = 1.0
@def_function.function
def lazy_capture(x):
y = ops.get_default_graph().capture_call_time_value(
lambda: value, tensor_spec.TensorSpec(None))
return x + y
self.assertAllEqual(lazy_capture(2.0), 3.0)
# After changing the value of `value` the function call should return a
# different result.
value = 2.0
self.assertAllEqual(lazy_capture(2.0), 4.0)
def testDeferredCaptureWithKey(self):
value0 = 1.0
value1 = 2.0
@def_function.function
def lazy_capture(x):
w = ops.get_default_graph().capture_call_time_value(
lambda: value0, tensor_spec.TensorSpec(None), key=0)
y = ops.get_default_graph().capture_call_time_value(
lambda: value1, tensor_spec.TensorSpec(None), key=1)
def bad_closure():
raise ValueError('Should not run')
z = ops.get_default_graph().capture_call_time_value(
bad_closure, tensor_spec.TensorSpec(None), key=1)
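# Since key=1 was already captured for y above, bad_closure is never
# invoked; z simply reuses the same call-time value as y.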
return x + y + w + z
self.assertAllEqual(lazy_capture(2.0), 7.0)
value0 = 2.0
value1 = 3.0
self.assertAllEqual(lazy_capture(2.0), 10.0)
def testDeferredCaptureTypeError(self):
value = constant_op.constant(1.0)
@def_function.function
def lazy_capture(x):
y = ops.get_default_graph().capture_call_time_value(
lambda: value, tensor_spec.TensorSpec(()))
return x + y
self.assertAllEqual(lazy_capture(2.0), 3.0)
# dtype mismatch
value = constant_op.constant(1)
with self.assertRaisesRegex(ValueError, 'Value .* to a tensor with dtype'):
lazy_capture(2.0)
# shape mismatch
value = constant_op.constant([1.0])
with self.assertRaisesRegex(ValueError, 'Value .* shape'):
lazy_capture(2.0)
def testDeferredCaptureReturnNestWithCompositeTensor(self):
i_s = indexed_slices.IndexedSlices(
constant_op.constant([1, 2]),
constant_op.constant([0, 1], dtype=dtypes.int64),
constant_op.constant([2]))
r_t = ragged_factory_ops.constant([[[1, 2], [3]], [[4, 5, 6]]])
s_t = sparse_tensor.SparseTensor(
values=[1, 2, 3], indices=[[0], [8], [10]], dense_shape=[20])
@def_function.function
def lazy_capture():
y = ops.get_default_graph().capture_call_time_value(
lambda: {'i': i_s, 't': (r_t, s_t)},
{'i': indexed_slices.IndexedSlicesSpec(
dtype=dtypes.int32, dense_shape_dtype=dtypes.int32),
't': (ragged_tensor.RaggedTensorSpec([2, None, None], dtypes.int32),
sparse_tensor.SparseTensorSpec([None], dtypes.int32))})
return y['i'], y['t']
i, (r, s) = lazy_capture()
self.assertAllEqual(i_s.values, i.values)
self.assertAllEqual(i_s.indices, i.indices)
self.assertAllEqual(i_s.dense_shape, i.dense_shape)
self.assertAllEqual(r_t, r)
self.assertAllEqual(s_t.indices, s.indices)
self.assertAllEqual(s_t.values, s.values)
self.assertAllEqual(s_t.dense_shape, s.dense_shape)
def testDeferredCaptureCompositeTensorSpecTypeMismatch(self):
value = indexed_slices.IndexedSlices(
constant_op.constant([1, 2]),
constant_op.constant([0, 1], dtype=dtypes.int64))
@def_function.function
def lazy_capture():
return ops.get_default_graph().capture_call_time_value(
lambda: value,
indexed_slices.IndexedSlicesSpec(dtype=dtypes.int32))
# Type matches spec.
lazy_capture()
# Extra dense shape component.
value = indexed_slices.IndexedSlices(
constant_op.constant([1, 2]),
constant_op.constant([0, 1], dtype=dtypes.int64),
constant_op.constant([2]))
with self.assertRaises(ValueError):
lazy_capture()
# Index dtype mismatch int32 vs. int64.
value = indexed_slices.IndexedSlices(
constant_op.constant([1, 2]),
constant_op.constant([0, 1]))
with self.assertRaises(ValueError):
lazy_capture()
if __name__ == '__main__':
ops.enable_eager_execution()
test.main()
|
davidzchen/tensorflow
|
tensorflow/python/eager/function_test.py
|
Python
|
apache-2.0
| 162,845
|
[
"Octopus"
] |
1e9dcfe6b1b3d77d87a2aa8819d166250afa81d65526ecfb0832e96e8a97049d
|
#!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import unittest
import mms
class TestMMS(unittest.TestCase):
def testBasic(self):
f,s = mms.evaluate('div(grad(u))', 'x**3')
fs = mms.fparser(f)
self.assertEqual(fs, '6*x')
ss = mms.fparser(s)
self.assertEqual(ss, 'x^3')
def testEvaluate(self):
f,_ = mms.evaluate('diff(h, t) + div(u*h) + div(grad(r*h))',
'cos(x*y*t)', variable='h', scalars=['r'], vectors=['u'])
s = mms.fparser(f)
self.assertEqual(s, '-x^2*r*t^2*cos(x*y*t) - x*y*sin(x*y*t) - x*t*u_y*sin(x*y*t) - ' \
'y^2*r*t^2*cos(x*y*t) - y*t*u_x*sin(x*y*t)')
s = mms.moosefunction(f)
self.assertEqual(s, '-std::pow(p(0), 2)*_r*std::pow(t, 2)*std::cos(p(0)*p(1)*t) - ' \
'p(0)*p(1)*std::sin(p(0)*p(1)*t) - p(0)*t*_u(1)*std::sin(p(0)*p(1)*t) ' \
'- std::pow(p(1), 2)*_r*std::pow(t, 2)*std::cos(p(0)*p(1)*t) - ' \
'p(1)*t*_u(0)*std::sin(p(0)*p(1)*t)')
def testCylindricalEvaluate(self):
f,_ = mms.evaluate('div(u)', 'r*phi*z*(e_i+e_j+e_k)', transformation='cylindrical',
coordinate_names=('r','phi','z'))
s = mms.fparser(f)
self.assertEqual(s, 'phi*r + 2*phi*z + z')
def testEvaluateWithScalarFunction(self):
f, _ = mms.evaluate('diff(h*u, t)', 'cos(x*t)', functions=['h'])
s = mms.fparser(f)
self.assertEqual(s, '-x*h(R.x, R.y, R.z, t)*sin(x*t) + ' \
'cos(x*t)*Derivative(h(R.x, R.y, R.z, t), t)')
def testEvaluateWithVectorFunction(self):
f, _ = mms.evaluate('div(h*u)', 'cos(x*t)', vectorfunctions=['h'])
s = mms.fparser(f)
self.assertEqual(s, '-t*h_x(R.x, R.y, R.z, t)*sin(x*t) + ' \
'cos(x*t)*Derivative(h_x(R.x, R.y, R.z, t), R.x) + ' \
'cos(x*t)*Derivative(h_y(R.x, R.y, R.z, t), R.y) + ' \
'cos(x*t)*Derivative(h_z(R.x, R.y, R.z, t), R.z)')
def testEvaluateWithKwargs(self):
f, _ = mms.evaluate('div(h*u)', 'cos(x*t)*e_i', scalars=['k'], h='k*x*x')
s = mms.fparser(f)
self.assertEqual(s, '-x^2*k*t*sin(x*t) + 2*x*k*cos(x*t)')
def testEvaluateVectorFunction(self):
f, e = mms.evaluate('div(u.outer(u))', 'cos(x*t)*e_i')
s = mms.fparser(f)
self.assertEqual(s, '[-2*t*sin(x*t)*cos(x*t), 0, 0]')
s = mms.fparser(e)
self.assertEqual(s, '[cos(x*t), 0, 0]')
def testExceptions(self):
try:
mms.evaluate('div(h*u)', 'cos(x*t)*e_i', scalars=['R'], h='k*x*x')
except SyntaxError as e:
self.assertIn("name 'R'", str(e))
try:
mms.evaluate('div(h*u)', 'cos(x*t)*e_i', scalars=['x'], h='k*x*x')
except SyntaxError as e:
self.assertIn("name 'x'", str(e))
try:
mms.evaluate('div(h*u)', 'cos(x*t)*e_i', scalars=['t'], h='k*x*x')
except SyntaxError as e:
self.assertIn("name 't'", str(e))
try:
mms.evaluate('div(h*u)', 'cos(x*t)*e_i', scalars=['e_k'], h='k*x*x')
except SyntaxError as e:
self.assertIn("name 'e_k'", str(e))
def testHit(self):
f,s = mms.evaluate('a*div(k*grad(u))', 'x**3', scalars=['k', 'a'])
n = mms.build_hit(f, 'force', a=42).render()
self.assertIn('[force]', n)
self.assertIn('type = ParsedFunction', n)
self.assertIn("value = '6*x*a*k'", n)
self.assertIn("vars = 'a k'", n)
self.assertIn("vals = '42 1.0'", n)
if __name__ == '__main__':
unittest.main(module=__name__, verbosity=2)
|
nuclear-wizard/moose
|
python/mms/test/test_mms.py
|
Python
|
lgpl-2.1
| 4,051
|
[
"MOOSE"
] |
bb2a5ce7e010b4a3a3a11916eaa503ae96438baecc8d2bb77c6f15d00782f219
|
"""Options manager for :class:`~.Poly` and public API functions. """
from __future__ import print_function, division
__all__ = ["Options"]
from sympy.core import S, Basic, sympify
from sympy.core.compatibility import string_types, with_metaclass
from sympy.polys.polyerrors import GeneratorsError, OptionError, FlagError
from sympy.utilities import numbered_symbols, topological_sort, public
from sympy.utilities.iterables import has_dups
import sympy.polys
import re
class Option(object):
"""Base class for all kinds of options. """
option = None
is_Flag = False
requires = []
excludes = []
after = []
before = []
@classmethod
def default(cls):
return None
@classmethod
def preprocess(cls, option):
return None
@classmethod
def postprocess(cls, options):
pass
class Flag(Option):
"""Base class for all kinds of flags. """
is_Flag = True
class BooleanOption(Option):
"""An option that must have a boolean value or equivalent assigned. """
@classmethod
def preprocess(cls, value):
if value in [True, False]:
return bool(value)
else:
raise OptionError("'%s' must have a boolean value assigned, got %s" % (cls.option, value))
class OptionType(type):
"""Base type for all options that does registers options. """
def __init__(cls, *args, **kwargs):
@property
def getter(self):
try:
return self[cls.option]
except KeyError:
return cls.default()
setattr(Options, cls.option, getter)
Options.__options__[cls.option] = cls
@public
class Options(dict):
"""
Options manager for polynomial manipulation module.
Examples
========
>>> from sympy.polys.polyoptions import Options
>>> from sympy.polys.polyoptions import build_options
>>> from sympy.abc import x, y, z
>>> Options((x, y, z), {'domain': 'ZZ'})
{'auto': False, 'domain': ZZ, 'gens': (x, y, z)}
>>> build_options((x, y, z), {'domain': 'ZZ'})
{'auto': False, 'domain': ZZ, 'gens': (x, y, z)}
**Options**
* Expand --- boolean option
* Gens --- option
* Wrt --- option
* Sort --- option
* Order --- option
* Field --- boolean option
* Greedy --- boolean option
* Domain --- option
* Split --- boolean option
* Gaussian --- boolean option
* Extension --- option
* Modulus --- option
* Symmetric --- boolean option
* Strict --- boolean option
**Flags**
* Auto --- boolean flag
* Frac --- boolean flag
* Formal --- boolean flag
* Polys --- boolean flag
* Include --- boolean flag
* All --- boolean flag
* Gen --- flag
* Series --- boolean flag
"""
__order__ = None
__options__ = {}
def __init__(self, gens, args, flags=None, strict=False):
dict.__init__(self)
if gens and args.get('gens', ()):
raise OptionError(
"both '*gens' and keyword argument 'gens' supplied")
elif gens:
args = dict(args)
args['gens'] = gens
defaults = args.pop('defaults', {})
def preprocess_options(args):
for option, value in args.items():
try:
cls = self.__options__[option]
except KeyError:
raise OptionError("'%s' is not a valid option" % option)
if issubclass(cls, Flag):
if flags is None or option not in flags:
if strict:
raise OptionError("'%s' flag is not allowed in this context" % option)
if value is not None:
self[option] = cls.preprocess(value)
preprocess_options(args)
for key, value in dict(defaults).items():
if key in self:
del defaults[key]
else:
for option in self.keys():
cls = self.__options__[option]
if key in cls.excludes:
del defaults[key]
break
preprocess_options(defaults)
for option in self.keys():
cls = self.__options__[option]
for require_option in cls.requires:
if self.get(require_option) is None:
raise OptionError("'%s' option is only allowed together with '%s'" % (option, require_option))
for exclude_option in cls.excludes:
if self.get(exclude_option) is not None:
raise OptionError("'%s' option is not allowed together with '%s'" % (option, exclude_option))
for option in self.__order__:
self.__options__[option].postprocess(self)
@classmethod
def _init_dependencies_order(cls):
"""Resolve the order of options' processing. """
if cls.__order__ is None:
vertices, edges = [], set([])
for name, option in cls.__options__.items():
vertices.append(name)
for _name in option.after:
edges.add((_name, name))
for _name in option.before:
edges.add((name, _name))
try:
cls.__order__ = topological_sort((vertices, list(edges)))
except ValueError:
raise RuntimeError(
"cycle detected in sympy.polys options framework")
def clone(self, updates={}):
"""Clone ``self`` and update specified options. """
obj = dict.__new__(self.__class__)
for option, value in self.items():
obj[option] = value
for option, value in updates.items():
obj[option] = value
return obj
def __setattr__(self, attr, value):
if attr in self.__options__:
self[attr] = value
else:
super(Options, self).__setattr__(attr, value)
@property
def args(self):
args = {}
for option, value in self.items():
if value is not None and option != 'gens':
cls = self.__options__[option]
if not issubclass(cls, Flag):
args[option] = value
return args
@property
def options(self):
options = {}
for option, cls in self.__options__.items():
if not issubclass(cls, Flag):
options[option] = getattr(self, option)
return options
@property
def flags(self):
flags = {}
for option, cls in self.__options__.items():
if issubclass(cls, Flag):
flags[option] = getattr(self, option)
return flags
class Expand(with_metaclass(OptionType, BooleanOption)):
"""``expand`` option to polynomial manipulation functions. """
option = 'expand'
requires = []
excludes = []
@classmethod
def default(cls):
return True
class Gens(with_metaclass(OptionType, Option)):
"""``gens`` option to polynomial manipulation functions. """
option = 'gens'
requires = []
excludes = []
@classmethod
def default(cls):
return ()
@classmethod
def preprocess(cls, gens):
if isinstance(gens, Basic):
gens = (gens,)
elif len(gens) == 1 and hasattr(gens[0], '__iter__'):
gens = gens[0]
if gens == (None,):
gens = ()
elif has_dups(gens):
raise GeneratorsError("duplicated generators: %s" % str(gens))
elif any(gen.is_commutative is False for gen in gens):
raise GeneratorsError("non-commutative generators: %s" % str(gens))
return tuple(gens)
class Wrt(with_metaclass(OptionType, Option)):
"""``wrt`` option to polynomial manipulation functions. """
option = 'wrt'
requires = []
excludes = []
_re_split = re.compile(r"\s*,\s*|\s+")
@classmethod
def preprocess(cls, wrt):
if isinstance(wrt, Basic):
return [str(wrt)]
elif isinstance(wrt, str):
wrt = wrt.strip()
if wrt.endswith(','):
raise OptionError('Bad input: missing parameter.')
if not wrt:
return []
return [ gen for gen in cls._re_split.split(wrt) ]
elif hasattr(wrt, '__getitem__'):
return list(map(str, wrt))
else:
raise OptionError("invalid argument for 'wrt' option")
class Sort(with_metaclass(OptionType, Option)):
"""``sort`` option to polynomial manipulation functions. """
option = 'sort'
requires = []
excludes = []
@classmethod
def default(cls):
return []
@classmethod
def preprocess(cls, sort):
if isinstance(sort, str):
return [ gen.strip() for gen in sort.split('>') ]
elif hasattr(sort, '__getitem__'):
return list(map(str, sort))
else:
raise OptionError("invalid argument for 'sort' option")
class Order(with_metaclass(OptionType, Option)):
"""``order`` option to polynomial manipulation functions. """
option = 'order'
requires = []
excludes = []
@classmethod
def default(cls):
return sympy.polys.orderings.lex
@classmethod
def preprocess(cls, order):
return sympy.polys.orderings.monomial_key(order)
class Field(with_metaclass(OptionType, BooleanOption)):
"""``field`` option to polynomial manipulation functions. """
option = 'field'
requires = []
excludes = ['domain', 'split', 'gaussian']
class Greedy(with_metaclass(OptionType, BooleanOption)):
"""``greedy`` option to polynomial manipulation functions. """
option = 'greedy'
requires = []
excludes = ['domain', 'split', 'gaussian', 'extension', 'modulus', 'symmetric']
class Composite(with_metaclass(OptionType, BooleanOption)):
"""``composite`` option to polynomial manipulation functions. """
option = 'composite'
@classmethod
def default(cls):
return None
requires = []
excludes = ['domain', 'split', 'gaussian', 'extension', 'modulus', 'symmetric']
class Domain(with_metaclass(OptionType, Option)):
"""``domain`` option to polynomial manipulation functions. """
option = 'domain'
requires = []
excludes = ['field', 'greedy', 'split', 'gaussian', 'extension']
after = ['gens']
_re_realfield = re.compile(r"^(R|RR)(_(\d+))?$")
_re_complexfield = re.compile(r"^(C|CC)(_(\d+))?$")
_re_finitefield = re.compile(r"^(FF|GF)\((\d+)\)$")
_re_polynomial = re.compile(r"^(Z|ZZ|Q|QQ|R|RR|C|CC)\[(.+)\]$")
_re_fraction = re.compile(r"^(Z|ZZ|Q|QQ)\((.+)\)$")
_re_algebraic = re.compile(r"^(Q|QQ)\<(.+)\>$")
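# Illustrative examples of specifications matched by the regexes above:
# 'RR_53' -> RealField(53), 'FF(7)' -> GF(7), 'ZZ[x,y]' -> polynomial ring
# over ZZ, 'QQ(x)' -> fraction field over QQ, 'QQ<sqrt(2)>' -> algebraic
# field QQ(sqrt(2)).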
@classmethod
def preprocess(cls, domain):
if isinstance(domain, sympy.polys.domains.Domain):
return domain
elif hasattr(domain, 'to_domain'):
return domain.to_domain()
elif isinstance(domain, string_types):
if domain in ['Z', 'ZZ']:
return sympy.polys.domains.ZZ
if domain in ['Q', 'QQ']:
return sympy.polys.domains.QQ
if domain == 'EX':
return sympy.polys.domains.EX
r = cls._re_realfield.match(domain)
if r is not None:
_, _, prec = r.groups()
if prec is None:
return sympy.polys.domains.RR
else:
return sympy.polys.domains.RealField(int(prec))
r = cls._re_complexfield.match(domain)
if r is not None:
_, _, prec = r.groups()
if prec is None:
return sympy.polys.domains.CC
else:
return sympy.polys.domains.ComplexField(int(prec))
r = cls._re_finitefield.match(domain)
if r is not None:
return sympy.polys.domains.FF(int(r.groups()[1]))
r = cls._re_polynomial.match(domain)
if r is not None:
ground, gens = r.groups()
gens = list(map(sympify, gens.split(',')))
if ground in ['Z', 'ZZ']:
return sympy.polys.domains.ZZ.poly_ring(*gens)
elif ground in ['Q', 'QQ']:
return sympy.polys.domains.QQ.poly_ring(*gens)
elif ground in ['R', 'RR']:
return sympy.polys.domains.RR.poly_ring(*gens)
else:
return sympy.polys.domains.CC.poly_ring(*gens)
r = cls._re_fraction.match(domain)
if r is not None:
ground, gens = r.groups()
gens = list(map(sympify, gens.split(',')))
if ground in ['Z', 'ZZ']:
return sympy.polys.domains.ZZ.frac_field(*gens)
else:
return sympy.polys.domains.QQ.frac_field(*gens)
r = cls._re_algebraic.match(domain)
if r is not None:
gens = list(map(sympify, r.groups()[1].split(',')))
return sympy.polys.domains.QQ.algebraic_field(*gens)
raise OptionError('expected a valid domain specification, got %s' % domain)
@classmethod
def postprocess(cls, options):
if 'gens' in options and 'domain' in options and options['domain'].is_Composite and \
(set(options['domain'].symbols) & set(options['gens'])):
raise GeneratorsError(
"ground domain and generators interfere together")
elif ('gens' not in options or not options['gens']) and \
'domain' in options and options['domain'] == sympy.polys.domains.EX:
raise GeneratorsError("you have to provide generators because EX domain was requested")
class Split(with_metaclass(OptionType, BooleanOption)):
"""``split`` option to polynomial manipulation functions. """
option = 'split'
requires = []
excludes = ['field', 'greedy', 'domain', 'gaussian', 'extension',
'modulus', 'symmetric']
@classmethod
def postprocess(cls, options):
if 'split' in options:
raise NotImplementedError("'split' option is not implemented yet")
class Gaussian(with_metaclass(OptionType, BooleanOption)):
"""``gaussian`` option to polynomial manipulation functions. """
option = 'gaussian'
requires = []
excludes = ['field', 'greedy', 'domain', 'split', 'extension',
'modulus', 'symmetric']
@classmethod
def postprocess(cls, options):
if 'gaussian' in options and options['gaussian'] is True:
options['extension'] = set([S.ImaginaryUnit])
Extension.postprocess(options)
class Extension(with_metaclass(OptionType, Option)):
"""``extension`` option to polynomial manipulation functions. """
option = 'extension'
requires = []
excludes = ['greedy', 'domain', 'split', 'gaussian', 'modulus',
'symmetric']
@classmethod
def preprocess(cls, extension):
if extension == 1:
return bool(extension)
elif extension == 0:
raise OptionError("'False' is an invalid argument for 'extension'")
else:
if not hasattr(extension, '__iter__'):
extension = set([extension])
else:
if not extension:
extension = None
else:
extension = set(extension)
return extension
@classmethod
def postprocess(cls, options):
if 'extension' in options and options['extension'] is not True:
options['domain'] = sympy.polys.domains.QQ.algebraic_field(
*options['extension'])
class Modulus(with_metaclass(OptionType, Option)):
"""``modulus`` option to polynomial manipulation functions. """
option = 'modulus'
requires = []
excludes = ['greedy', 'split', 'domain', 'gaussian', 'extension']
@classmethod
def preprocess(cls, modulus):
modulus = sympify(modulus)
if modulus.is_Integer and modulus > 0:
return int(modulus)
else:
raise OptionError(
"'modulus' must a positive integer, got %s" % modulus)
@classmethod
def postprocess(cls, options):
if 'modulus' in options:
modulus = options['modulus']
symmetric = options.get('symmetric', True)
options['domain'] = sympy.polys.domains.FF(modulus, symmetric)
class Symmetric(with_metaclass(OptionType, BooleanOption)):
"""``symmetric`` option to polynomial manipulation functions. """
option = 'symmetric'
requires = ['modulus']
excludes = ['greedy', 'domain', 'split', 'gaussian', 'extension']
class Strict(with_metaclass(OptionType, BooleanOption)):
"""``strict`` option to polynomial manipulation functions. """
option = 'strict'
@classmethod
def default(cls):
return True
class Auto(with_metaclass(OptionType, BooleanOption, Flag)):
"""``auto`` flag to polynomial manipulation functions. """
option = 'auto'
after = ['field', 'domain', 'extension', 'gaussian']
@classmethod
def default(cls):
return True
@classmethod
def postprocess(cls, options):
if ('domain' in options or 'field' in options) and 'auto' not in options:
options['auto'] = False
class Frac(with_metaclass(OptionType, BooleanOption, Flag)):
"""``auto`` option to polynomial manipulation functions. """
option = 'frac'
@classmethod
def default(cls):
return False
class Formal(with_metaclass(OptionType, BooleanOption, Flag)):
"""``formal`` flag to polynomial manipulation functions. """
option = 'formal'
@classmethod
def default(cls):
return False
class Polys(with_metaclass(OptionType, BooleanOption, Flag)):
"""``polys`` flag to polynomial manipulation functions. """
option = 'polys'
class Include(with_metaclass(OptionType, BooleanOption, Flag)):
"""``include`` flag to polynomial manipulation functions. """
option = 'include'
@classmethod
def default(cls):
return False
class All(with_metaclass(OptionType, BooleanOption, Flag)):
"""``all`` flag to polynomial manipulation functions. """
option = 'all'
@classmethod
def default(cls):
return False
class Gen(with_metaclass(OptionType, Flag)):
"""``gen`` flag to polynomial manipulation functions. """
option = 'gen'
@classmethod
def default(cls):
return 0
@classmethod
def preprocess(cls, gen):
if isinstance(gen, (Basic, int)):
return gen
else:
raise OptionError("invalid argument for 'gen' option")
class Series(with_metaclass(OptionType, BooleanOption, Flag)):
"""``series`` flag to polynomial manipulation functions. """
option = 'series'
@classmethod
def default(cls):
return False
class Symbols(with_metaclass(OptionType, Flag)):
"""``symbols`` flag to polynomial manipulation functions. """
option = 'symbols'
@classmethod
def default(cls):
return numbered_symbols('s', start=1)
@classmethod
def preprocess(cls, symbols):
if hasattr(symbols, '__iter__'):
return iter(symbols)
else:
raise OptionError("expected an iterator or iterable container, got %s" % symbols)
class Method(with_metaclass(OptionType, Flag)):
"""``method`` flag to polynomial manipulation functions. """
option = 'method'
@classmethod
def preprocess(cls, method):
if isinstance(method, str):
return method.lower()
else:
raise OptionError("expected a string, got %s" % method)
def build_options(gens, args=None):
"""Construct options from keyword arguments or ... options. """
if args is None:
gens, args = (), gens
if len(args) != 1 or 'opt' not in args or gens:
return Options(gens, args)
else:
return args['opt']
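# For example, build_options((x,), {'domain': 'ZZ'}) constructs a fresh
# Options instance, while build_options((), {'opt': opts}) simply returns
# the existing instance `opts` unchanged.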
def allowed_flags(args, flags):
"""
Allow specified flags to be used in the given context.
Examples
========
>>> from sympy.polys.polyoptions import allowed_flags
>>> from sympy.polys.domains import ZZ
>>> allowed_flags({'domain': ZZ}, [])
>>> allowed_flags({'domain': ZZ, 'frac': True}, [])
Traceback (most recent call last):
...
FlagError: 'frac' flag is not allowed in this context
>>> allowed_flags({'domain': ZZ, 'frac': True}, ['frac'])
"""
flags = set(flags)
for arg in args.keys():
try:
if Options.__options__[arg].is_Flag and arg not in flags:
raise FlagError(
"'%s' flag is not allowed in this context" % arg)
except KeyError:
raise OptionError("'%s' is not a valid option" % arg)
def set_defaults(options, **defaults):
"""Update options with default values. """
if 'defaults' not in options:
options = dict(options)
options['defaults'] = defaults
return options
Options._init_dependencies_order()
|
kaushik94/sympy
|
sympy/polys/polyoptions.py
|
Python
|
bsd-3-clause
| 21,330
|
[
"Gaussian"
] |
4f73b475c7c8f4201173fdd602f74a30ee481e1e5f1772322f0c7f7d02063579
|
#!/usr/bin/python
#
# This source file is part of appleseed.
# Visit http://appleseedhq.net/ for additional information and resources.
#
# This software is released under the MIT license.
#
# Copyright (c) 2015-2017 Hans Hoogenboom, The appleseedhq Organization
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
import json
import glob
import os.path
import sys
import argparse
try:
from ConfigParser import ConfigParser
except ImportError:
from configparser import ConfigParser
import datetime
import getpass
import subprocess
FileTypes = {'.oso': "openshadinglanguage"}
# metadata according to the OSL specification
_shaderTypes = ["surface", "displacement", "light", "volume", "shader"]
_dataTypes = ["int", "float", "point", "vector", "normal", "color", "matrix", "string", "void"]
_shaderKeys = ["name", "label", "type", "help", "url", "value", "page", "widget", "units"]
# These osl parameters are not part of the official shadinglanguage but more guidelines as how to
# make up the interface of the shader inside a 3rd party program. Not yet decided what to do with it...
#_parmWidgets = ["number", "string", "boolean", "checkBox", "popup", "mapper", "filename", "null"]
#_parmInteger = ["min", "max", "sensitivity", "slider"]
#_parmFloat = _parmInteger + ["digits"]
#_parmSlider = ["slidermin", "slidermax", "slidercenter", "sliderexponent"]
#_parmKeyword = ["output"]
#----------------------------------------------------------
# Functions to sanitize olsinfo output
#----------------------------------------------------------
def _error(msg, crash=False):
sys.stderr.write(msg)
sys.stderr.write('\n')
if crash:
sys.exit(1)
return False
def _fatalError(msg):
_error(msg, True)
def _formatVal(st):
value = st.replace('"', '', 2)
value = value.strip()
return value
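# _getKeyValue splits an oslinfo metadata line of the form
# 'metadata: <type> <key> = <value>' at the '=' sign, e.g.
# 'metadata: string help = "text"' yields ('help', ' "text"');
# the value is cleaned up afterwards by _formatVal.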
def _getKeyValue(st):
signPos = st.index('=')
value = st[signPos + 1:]
key = st[:signPos - 1]
key = key.split()
key = key[-1].strip()
return (key, value)
#----------------------------------------------------------
# File handling
#----------------------------------------------------------
def isValidFile(filename, filetypes):
(head, tail) = os.path.splitext(filename)
return (os.path.isfile(filename) and tail in filetypes)
def isValidExtension(fp, filetypes):
return (os.path.splitext(fp)[1] in filetypes)
def createFileList(filetypes, osl_cfg, recursive=False, args=None, pathfile=None):
filelist = list()
# files/dirs from external file
if pathfile:
for path in pathfile:
try:
fp = open(path)
for line in fp:
# strip the trailing newline so the glob expansion below works
filelist.append(line.strip())
fp.close()
except IOError:
_error("Could not read from file %s" % path)
# files/dirs from command line arguments
if args:
for arg in args:
filelist.append(arg)
# files/dirs from config file
osl_dir = osl_cfg.get('settings', 'osldir')
if len(osl_dir) > 0:
osldir_list = osl_dir.split(',')
for arg in osldir_list:
filelist.append(arg)
# expand vars
args_expanded = list()
for arg in filelist:
args_expanded.append(os.path.expandvars(arg))
# clear filelist and glob
filelist = list()
for arg in args_expanded:
filelist.extend([x for x in glob.iglob(arg)])
# split files from directories
dirlist = list()
dirlist = [x for x in filelist if os.path.isdir(x)]
filelist[:] = [x for x in filelist if isValidFile(x, filetypes)]
# travel directories and add shader files to filelist
for directory in dirlist:
if recursive:
for dirpath, dirnames, filenames in os.walk(directory):
for filename in filenames:
(head, tail) = os.path.splitext(filename)
if tail in filetypes:
filelist.append(os.path.join(dirpath, filename))
else:
dirpath, dirnames, filenames = next(os.walk(directory))
for filename in filenames:
(head, tail) = os.path.splitext(filename)
if tail in filetypes:
filelist.append(os.path.join(dirpath, filename))
# clear duplicate entries, do not care for order
filelist = list(set(filelist))
# if there are no files/paths quit
if len(filelist) < 1:
_fatalError("No files or directories found, exiting.")
return filelist
#----------------------------------------------------------
# Functions for parsing *.oso files
#----------------------------------------------------------
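# parseOslInfo runs 'oslinfo -v' on a compiled shader and parses its plain
# text output. Judging from the parsing logic below, the expected shape is
# roughly (illustrative sketch, not an exact oslinfo transcript):
#   surface "myshader"
#   "Kd" "float"
#   Default value: 0.5
#   metadata: string help = "diffuse weight"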
def parseOslInfo(compiledShader, osl_cfg):
oslpath = osl_cfg.get('settings', 'oslpath')
if os.path.isfile(oslpath):
cmd = str(oslpath) + ' -v %s' % compiledShader
else:
cmd = 'oslinfo -v %s' % compiledShader
cmd = cmd.split()
try:
fp = subprocess.check_output(cmd)
except subprocess.CalledProcessError as fp_ret:
_fatalError("Could not run oslinfo, exiting.\nReturncode: %s" % fp_ret.returncode)
# check if output of oslinfo is correct
# if false skip shader and write error message to console
# check_output returns bytes under Python 3, so decode before splitting
lines = fp.decode('utf-8').splitlines()
if not lines:
_error('Missing shader definition for %s' % compiledShader)
return False
count = 0
shaderDef = lines[count]
args = shaderDef.split()
# tempShader stores all the data
tempShader = dict()
# store the order in which oslinfo outputs its data
# and separate the parameters from general shader data
parmlist = list()
if args[0] not in _shaderTypes:
_error("Not a valid shader type: %s" % args[0])
return False
else:
tempShader['type'] = _formatVal(args[0])
tempShader['name'] = _formatVal(args[1])
tempShader['hasMetaData'] = False
tempShader['hasParmHelp'] = False
# parse the rest of the file to get parameters
# number of entries in lines
length = len(lines) - 1
# lines iterator
count = 1
while True:
line = lines[count]
if not line:
_error("No more lines to read, invalid shader %s?" % compiledShader)
break
args = line.split()
# find parameter name
if args[0] not in ["Default", "metadata:"]: # or args[0] == "export":
tempparm = dict()
if len(args) < 3:
tempparm['name'] = _formatVal(args[0])
tempparm['type'] = _formatVal(args[1])
else:
tempparm['output'] = True
tempparm['name'] = _formatVal(args[0])
tempparm['type'] = _formatVal(args[2])
condition = True
widget = str()
while condition:
# read next line
count += 1
if count > length:
break
line = lines[count]
parmargs = line.split()
if parmargs[0] == "Default":
tempparm['value'] = _formatVal(' '.join(parmargs[2:]))
elif parmargs[0] == "metadata:":
(key, value) = _getKeyValue(line)
value = _formatVal(value)
if key != 'widget':
tempparm[key] = value
else:
widget = value
else:
condition = False
# move one line back
count -= 1
if len(widget) > 0 and 'widget' not in tempparm:
tempparm['widget'] = widget
tempShader[tempparm['name']] = tempparm
parmlist.append(tempparm['name'])
if 'help' in tempparm:
tempShader['hasParmHelp'] = True
# we didn't find a parameter yet, so there must be some general stuff
else:
if args[0] == "metadata:":
(key, value) = _getKeyValue(line)
value = _formatVal(value)
# shader-level metadata belongs to the shader, not to the last parameter
tempShader[key] = value
tempShader['hasMetaData'] = True
if count > length:
break
else:
count += 1
# parsed all lines
tempShader['parmlist'] = parmlist
return tempShader
def parseShaderInfo(compiledShader, FileTypes, osl_cfg):
(name, extension) = os.path.splitext(compiledShader)
shaderUI = None
if extension == '.oso':
shaderUI = parseOslInfo(compiledShader, osl_cfg)
if not shaderUI:
_error("Could not process %s" % compiledShader)
return None
else:
compShader = dict()
compShader['name'] = shaderUI['name']
compShader['path'] = compiledShader
compShader['mtime'] = str(os.path.getmtime(compiledShader))
compShader['ctime'] = str(datetime.datetime.now())
compShader['language'] = FileTypes[extension]
# holds the output of parseOslInfo (the actual shader metadata/ui)
compShader['ui'] = shaderUI
return compShader
#----------------------------------------------------------
# Functions for handling the shader dictionary
#----------------------------------------------------------
def getNumberOfShaders(jsonFile):
return len(jsonFile['shaders'])
def cleanJsonShaders(jsonDict):
num_del = 0
for shaderpath in list(jsonDict.keys()):  # copy the keys, entries are deleted below
if not os.path.isfile(shaderpath):
del jsonDict[shaderpath]
num_del += 1
return (num_del, jsonDict)
def existsJsonShader(jsonFile, shaderName):
for shader in jsonFile['shaders']:
if shader['name'] == shaderName:
return True
else:
return False
def writeJsonHeader(filename, numElements):
headerDict = dict()
headerDict['creator'] = getpass.getuser()
headerDict['creation date'] = str(datetime.datetime.now())
headerDict['name'] = os.path.basename(filename)
headerDict['elements'] = numElements
headerDict['last update'] = str(datetime.datetime.now())
return headerDict
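# A header produced by writeJsonHeader looks roughly like (illustrative):
# {"creator": "alice", "creation date": "2017-01-01 12:00:00.000000",
#  "name": "shaders.json", "elements": 3, "last update": "..."}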
def updateJsonHeader(jsonFile, numElements):
headerDict = jsonFile
headerDict['last update'] = str(datetime.datetime.now())
headerDict['elements'] = numElements
return headerDict
def cli():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description='''
oslextractmetadata stores the user interface and metadata of a
compiled OSL (openshadinglanguage) shader(s) into a JSON file.
The JSON dictionary consists of a 'header' and a 'shader' part.
jsondict['shader'] will return a dictionary with all shaders. The
user interface of the shader is stored as a sub-dictionary, the
metadata can be retrieved using the 'ui' key on the elements, e.g.:
for x in jsondict['shaders'].values():
print(x['ui'])
''')
parser.add_argument('-i', '--input', nargs='+', action='store', dest='files', metavar='compiled shaders', help='List of file(s) to parse.')
parser.add_argument('-v', '--verbose', action='store_true', dest='verbosity', help='Increase output verbosity.')
parser.add_argument('-o', '--output', nargs=1, action='store', dest='output',
required=True, metavar='output file', help="Store shader UI in file.")
parser.add_argument('-f', '--file', nargs='+', action='store', dest='read_file', metavar='file', help="Read file paths from file(s).")
parser.add_argument('-U', '--update', action='store_true', dest='update', help="Update existing shader file.")
parser.add_argument('-O', '--overwrite', action='store_true', dest='overwrite', help="Overwrite existing files.")
parser.add_argument('-c', '--clean', action='store_true', dest='clean', help="Clean file, remove non existant shaders.")
parser.add_argument('-r', '--recursive', action='store_true', dest='recursive', help="Add directories recursively.")
args = parser.parse_args()
# user input checks
output = args.output[0]
existingFile = os.path.exists(output)
if not existingFile:
args.overwrite = False
args.update = False
args.clean = False
if args.overwrite:
args.update = False
args.clean = False
return (args, output, existingFile)
#----------------------------------------------------------
# Main body
#----------------------------------------------------------
def main():
(args, output, existingFile) = cli()
# read configuration file
cfg_defaults = {'oslpath': '/usr/bin/oslinfo', 'osldir': ''}
osl_cfg = ConfigParser(cfg_defaults)
osl_cfg.read('oslextractmeta.conf')
# make sure the 'settings' section exists so later osl_cfg.get('settings', ...)
# calls cannot raise NoSectionError when no configuration file is present
if not osl_cfg.has_section('settings'):
osl_cfg.add_section('settings')
# create list of files specified on cli or read from file
files = createFileList(FileTypes, osl_cfg, args.recursive, args.files, args.read_file)
# parse files for shader metadata
shaders = dict()
for shaderfile in files:
if args.verbosity:
print("Processing file %s" % shaderfile)
shaderUI = parseShaderInfo(shaderfile, FileTypes, osl_cfg)
if shaderUI:
shaders[shaderUI['path']] = shaderUI
jsonDict = dict()
# retrieve existing values in case of updating or cleaning
if existingFile and not args.overwrite:
with open(output, 'r') as fp:
try:
jsonDict = json.load(fp)
except ValueError:
_fatalError("JSON object could not be decoded.")
# create/update/clean json shader and header dictionaries
changes = 0
if args.clean:
(changes, jsonDict['shaders']) = cleanJsonShaders(jsonDict['shaders'])
if args.verbosity:
print("Removed %s shaders." % changes)
if args.update:
changes = len(shaders)
jsonDict['shaders'].update(shaders)
if args.verbosity:
print("%s shaders updated." % changes)
if args.overwrite:
changes = len(shaders)
jsonDict['header'] = writeJsonHeader(output, changes)
jsonDict['shaders'] = shaders
if args.verbosity:
print("%s shaders added to %s" % (changes, output))
# only adding new shaders
else:
temp_changes = changes
if 'shaders' in jsonDict:
existing_keys = jsonDict['shaders'].keys()
for key in shaders:
if key not in existing_keys:
jsonDict['shaders'][key] = shaders[key]
changes += 1
else:
jsonDict['shaders'] = shaders
changes = len(shaders)
if args.verbosity:
added_shaders = changes - temp_changes
print("Added %s shaders." % added_shaders)
# write to file shaders to file if changed
if existingFile and changes:
with open(output, 'w') as fp:
fp.seek(0)
fp.truncate()
jsonDict['header'] = updateJsonHeader(jsonDict['header'], len(jsonDict['shaders']))
json.dump(jsonDict, fp)
elif not existingFile and changes:
with open(output, 'w') as fp:
jsonDict['header'] = writeJsonHeader(output, len(shaders))
json.dump(jsonDict, fp)
elif args.verbosity:
print("No shaders found for adding to %s, exiting." % output)
return 0
# call main function
if __name__ == "__main__":
main()
|
gospodnetic/appleseed
|
scripts/oslextractmeta.py
|
Python
|
mit
| 16,387
|
[
"VisIt"
] |
4ee4216292c5248e3a4ee927c288f613a5f901999cdaff386bf4eaa19454b809
|
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2013 Brian Douglass bhdouglass@gmail.com
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
import os
website = "http://bhdouglass.com/remindor/indicator/"
website_qt = "http://bhdouglass.com/remindor/qt/"
blogsite = "http://blog.bhdouglass.com/"
rssfeed = "http://blog.bhdouglass.com/rss"
donatesite = "http://bhdouglass.com/remindor/contribute/"
bugsite = "https://bugs.launchpad.net/indicator-remindor"
bugsite_qt = "https://github.com/bhdouglass/remindor-qt/issues"
featuresite = "https://bugs.launchpad.net/indicator-remindor"
featuresite_qt = "https://github.com/bhdouglass/remindor-qt/issues"
translatesite = "https://translations.launchpad.net/indicator-remindor"
questionsite = "https://answers.launchpad.net/indicator-remindor"
questionsite_qt = "https://github.com/bhdouglass/remindor-qt/issues"
config_path = os.path.expanduser('~') + '/.config/indicator-remindor'
if os.name == 'nt':
config_path = os.getenv('APPDATA') + '/remindor'
database_file = config_path + '/indicator-remindor.db'
config_file = config_path + '/remindor.conf'
log_file = config_path + '/indicator-remindor.log'
scheduled_file = config_path + '/scheduled-reminders.json'
|
bhdouglass/remindor-common
|
remindor_common/constants.py
|
Python
|
gpl-3.0
| 1,839
|
[
"Brian"
] |
47e2aea5ca5e535d170fb4a7ccc820b8f92ea139f1f7ae99db5920c6f0b8e759
|
## This file is part of Invenio.
## Copyright (C) 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
BibWorkflow Unit tests - functions to test workflows
"""
from invenio.importutils import lazy_import
from invenio.testutils import make_test_suite, run_test_suite, \
InvenioTestCase
from invenio.sqlalchemyutils import db
from invenio.bibworkflow_config import CFG_OBJECT_VERSION
run = lazy_import('invenio.bibworkflow_api:run')
class TestWorkflowStart(InvenioTestCase):
"""Tests for BibWorkflow API."""
def setUp(self):
super(TestWorkflowStart, self).setUp()
self.test_data = {}
self.workflow_ids = []
self.recxml = """<?xml version="1.0" encoding="UTF-8"?>
<OAI-PMH xmlns="http://www.openarchives.org/OAI/2.0/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/ http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd">
<responseDate>2013-04-03T13:56:49Z</responseDate>
<request verb="ListRecords" from="2013-03-25" metadataPrefix="arXiv" set="physics:astro-ph">http://export.arxiv.org/oai2</request>
<ListRecords>
<record>
<header>
<identifier>oai:arXiv.org:0801.3931</identifier>
<datestamp>2013-03-26</datestamp>
<setSpec>physics:astro-ph</setSpec>
</header>
<metadata>
<arXiv xmlns="http://arxiv.org/OAI/arXiv/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://arxiv.org/OAI/arXiv/ http://arxiv.org/OAI/arXiv.xsd">
<id>0801.3931</id><created>2008-01-25</created><authors><author><keyname>Manos</keyname><forenames>T.</forenames></author><author><keyname>Athanassoula</keyname><forenames>E.</forenames></author></authors><title>Dynamical study of 2D and 3D barred galaxy models</title><categories>astro-ph</categories><comments>8 pages, 3 figures, to appear in the proceedings of the international
conference "Chaos in Astronomy", Athens, Greece (talk contribution)</comments><journal-ref>Chaos in Astronomy Astrophysics and Space Science Proceedings
2009, pp 115-122</journal-ref><doi>10.1007/978-3-540-75826-6_11</doi><abstract> We study the dynamics of 2D and 3D barred galaxy analytical models, focusing
on the distinction between regular and chaotic orbits with the help of the
Smaller ALigment Index (SALI), a very powerful tool for this kind of problems.
We present briefly the method and we calculate the fraction of chaotic and
regular orbits in several cases. In the 2D model, taking initial conditions on
a Poincar\'{e} $(y,p_y)$ surface of section, we determine the fraction of
regular and chaotic orbits. In the 3D model, choosing initial conditions on a
cartesian grid in a region of the $(x, z, p_y)$ space, which in coordinate
space covers the inner disc, we find how the fraction of regular orbits changes
as a function of the Jacobi constant. Finally, we outline that regions near the
$(x,y)$ plane are populated mainly by regular orbits. The same is true for
regions that lie either near to the galactic center, or at larger relatively
distances from it.
</abstract></arXiv>
</metadata>
</record>
</ListRecords>
</OAI-PMH>
"""
def tearDown(self):
""" Clean up created objects """
from invenio.bibworkflow_model import WfeObject, Workflow
for wid in self.workflow_ids:
WfeObject.query.filter(WfeObject.workflow_id == wid).delete()
Workflow.query.filter(Workflow.uuid == wid).delete()
db.session.commit()
super(TestWorkflowStart, self).tearDown()
def test_workflow_basic_run(self):
"""Tests running workflow with one data object"""
from invenio.bibworkflow_model import WfeObject
self.test_data = {'data': 20}
initial_data = self.test_data
final_data = {'data': 41}
workflow = run(wname="test_workflow",
data=[self.test_data],
task_queue=False)
# Keep id for cleanup after
self.workflow_ids.append(workflow.uuid)
# Get parent object of the workflow we just ran
# NOTE: ignore PEP8 here for None
objects = WfeObject.query.filter(WfeObject.workflow_id == workflow.uuid,
WfeObject.parent_id == None)
self._check_workflow_execution(objects,
initial_data, final_data)
def test_workflow_complex_run(self):
"""Tests running workflow with several data objects"""
from invenio.bibworkflow_model import WfeObject
self.test_data = [{"data": 1}, {"data": "wwww"}, {"data": 20}]
final_data = [{"data": 19}, {"data": "wwww"}, {"data": 38}]
workflow = run(wname="test_workflow_2",
data=self.test_data,
task_queue=False)
# Keep id for cleanup after
self.workflow_ids.append(workflow.uuid)
# Get parent objects of the workflow we just ran
# NOTE: ignore PEP8 here for None
objects = WfeObject.query.filter(WfeObject.workflow_id == workflow.uuid,
WfeObject.parent_id == None)
# Check that we actually found results; there should be exactly three objects
self.assertEqual(objects.count(), 3)
for obj in objects.all():
# The child object should have the final or halted version
self.assertTrue(obj.child_objects[0].version
in (CFG_OBJECT_VERSION.FINAL,
CFG_OBJECT_VERSION.HALTED))
# Making sure the final data is correct
self.assertTrue(obj.child_objects[0].data
in final_data)
def test_workflow_recordxml(self):
"""Tests runnning a record ingestion workflow"""
from invenio.bibworkflow_model import WfeObject
initial_data = {"data": self.recxml, 'type': "text/xml"}
workflow = run(wname="marcxml_workflow",
data=[{"data": self.recxml, 'type': "text/xml"}],
task_queue=False)
# Keep id for cleanup after
self.workflow_ids.append(workflow.uuid)
# Get parent object of the workflow we just ran
# NOTE: ignore PEP8 here for None
objects = WfeObject.query.filter(WfeObject.workflow_id == workflow.uuid,
WfeObject.parent_id == None)
self._check_workflow_execution(objects,
initial_data, None)
def _check_workflow_execution(self, objects,
initial_data, final_data):
# Check that we actually found a result; there should be exactly one object
self.assertEqual(objects.count(), 1)
parent_object = objects[0]
# The object should be the initial version
self.assertEqual(parent_object.version, CFG_OBJECT_VERSION.INITIAL)
# The object should have the initial data
self.assertEqual(parent_object.data, initial_data)
# Fetch final object which should exist
final_object = objects[0].child_objects[0]
self.assertTrue(final_object)
if final_data:
# Check that final data is correct
self.assertEqual(final_object.data, final_data)
TEST_SUITE = make_test_suite(TestWorkflowStart)
if __name__ == "__main__":
run_test_suite(TEST_SUITE)
|
labordoc/labordoc-next
|
modules/bibworkflow/lib/bibworkflow_unit_tests.py
|
Python
|
gpl-2.0
| 8,048
|
[
"Galaxy"
] |
0a1eae0acfb503b09609c700beb378eed03a00a6f74d34c406f22928d9b7cf8d
|
#!/usr/bin/env python3
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Verifies that all source files contain the necessary copyright boilerplate
# snippet.
import argparse
import datetime
import glob
import os
import re
import sys
AUTHORS = r"TestGrid|Kubernetes"
YEAR = r"YEAR"
def get_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"filenames", help="list of files to check, all files if unspecified", nargs='*')
rootdir = os.path.dirname(__file__) + "/../"
rootdir = os.path.abspath(rootdir)
parser.add_argument("--rootdir", default=rootdir,
help="root directory to examine")
default_boilerplate_dir = os.path.join(rootdir, "hack/boilerplate")
parser.add_argument("--boilerplate-dir", default=default_boilerplate_dir)
return parser.parse_args()
def get_refs():
refs = {}
for path in glob.glob(os.path.join(ARGS.boilerplate_dir, "boilerplate.*.txt")):
extension = os.path.basename(path).split(".")[1]
# Pass the encoding parameter to avoid ascii decode error for some
# platform.
ref_file = open(path, 'r', encoding='utf-8')
ref = ref_file.read().splitlines()
ref_file.close()
refs[extension] = ref
return refs
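# e.g. 'hack/boilerplate/boilerplate.py.txt' is stored under the key 'py';
# each value is the reference license header as a list of lines.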
GENERATED_GO_MARKERS = [
"// Code generated by client-gen. DO NOT EDIT.",
"// Code generated by deepcopy-gen. DO NOT EDIT.",
"// Code generated by informer-gen. DO NOT EDIT.",
"// Code generated by lister-gen. DO NOT EDIT.",
"// Code generated by protoc-gen-go. DO NOT EDIT.",
]
# given the file contents, return true if the file appears to be generated
def is_generated(data):
for marker in GENERATED_GO_MARKERS:
if marker in data:
return True
return False
def file_passes(filename, refs, regexs): # pylint: disable=too-many-locals
try:
# Pass the encoding parameter to avoid ascii decode error for some
# platform.
with open(filename, 'r', encoding='utf-8') as fp:
data = fp.read()
except IOError:
return False
basename = os.path.basename(filename)
extension = file_extension(filename)
if extension != "":
ref = refs[extension]
else:
ref = refs[basename]
# check for and skip generated files
if is_generated(data):
return True
# remove build tags from the top of Go files
if extension == "go":
con = regexs["go_build_constraints"]
(data, found) = con.subn("", data, 1)
# remove shebang from the top of shell files
if extension in ("sh", "py"):
she = regexs["shebang"]
(data, found) = she.subn("", data, 1)
data = data.splitlines()
# if our test file is smaller than the reference it surely fails!
if len(ref) > len(data):
return False
# trim our file to the same number of lines as the reference file
data = data[:len(ref)]
    # a file still containing the literal "YEAR" placeholder fails outright
    year = regexs["year"]
for datum in data:
if year.search(datum):
return False
# Replace all occurrences of the regex "2017|2016|2015|2014" with "YEAR"
when = regexs["date"]
for idx, datum in enumerate(data):
(data[idx], found) = when.subn("YEAR", datum)
if found != 0:
break
# Replace all occurrences of the regex "Testgrid|Kubernetes" with "AUTHOR"
author = regexs["author"]
for idx, datum in enumerate(data):
(data[idx], found) = author.subn("AUTHOR", datum)
if found != 0:
break
# if we don't match the reference at this point, fail
if ref != data:
return False
return True
def file_extension(filename):
return os.path.splitext(filename)[1].split(".")[-1].lower()
SKIPPED_DIRS = [
'external',
'.git',
'Godeps',
'_gopath',
'__init__.py',
'node_modules',
'_output',
'third_party',
'vendor',
]
# even when generated by bazel we will complain about some generated files
# not having the headers. since they're just generated, ignore them
IGNORE_HEADERS = [
'// Code generated by go-bindata.'
]
def has_ignored_header(pathname):
# Pass the encoding parameter to avoid ascii decode error for some
# platform.
with open(pathname, 'r', encoding='utf-8') as myfile:
data = myfile.read()
for header in IGNORE_HEADERS:
if data.startswith(header):
return True
return False
def normalize_files(files):
newfiles = []
for pathname in files:
if any(x in pathname for x in SKIPPED_DIRS):
continue
newfiles.append(pathname)
for idx, pathname in enumerate(newfiles):
if not os.path.isabs(pathname):
newfiles[idx] = os.path.join(ARGS.rootdir, pathname)
return newfiles
def get_files(extensions):
files = []
if ARGS.filenames:
files = ARGS.filenames
else:
for root, dirs, walkfiles in os.walk(ARGS.rootdir):
# don't visit certain dirs. This is just a performance improvement
# as we would prune these later in normalize_files(). But doing it
# cuts down the amount of filesystem walking we do and cuts down
# the size of the file list
for dpath in SKIPPED_DIRS:
if dpath in dirs:
dirs.remove(dpath)
for name in walkfiles:
pathname = os.path.join(root, name)
files.append(pathname)
files = normalize_files(files)
outfiles = []
for pathname in files:
basename = os.path.basename(pathname)
extension = file_extension(pathname)
if extension in extensions or basename in extensions:
if not has_ignored_header(pathname):
outfiles.append(pathname)
return outfiles
def get_dates():
    current_year = datetime.datetime.now().year
    return '(%s)' % '|'.join(str(year) for year in range(2014, current_year + 1))
def get_regexs():
regexs = {}
# Search for "YEAR" which exists in the boilerplate, but shouldn't in the real thing
regexs["year"] = re.compile(YEAR)
# Search for "AUTHOR" which exists in the boilerplate, but shouldn't in the real thing
regexs["author"] = re.compile(AUTHORS)
    # dates can be any year from 2014 through the current year; company
    # holder names can be anything
regexs["date"] = re.compile(get_dates())
# strip // +build \n\n build constraints
regexs["go_build_constraints"] = re.compile(
r"^(// \+build.*\n)+\n", re.MULTILINE)
# strip #!.* from shell/python scripts
regexs["shebang"] = re.compile(r"^(#!.*\n)\n*", re.MULTILINE)
return regexs
def main():
regexs = get_regexs()
refs = get_refs()
filenames = get_files(refs.keys())
nonconforming_files = []
for filename in filenames:
if not file_passes(filename, refs, regexs):
nonconforming_files.append(filename)
if nonconforming_files:
print('%d files have incorrect boilerplate headers:' %
len(nonconforming_files))
for filename in sorted(nonconforming_files):
print(os.path.relpath(filename, ARGS.rootdir))
sys.exit(1)
if __name__ == "__main__":
ARGS = get_args()
main()
|
GoogleCloudPlatform/testgrid
|
hack/verify_boilerplate.py
|
Python
|
apache-2.0
| 7,786
|
[
"VisIt"
] |
6d103b4dceb9855ba38f7e9fac53e637392338b5a28c52c27401ba8993e42c23
|
########################################################################
# $HeadURL $
# File: RemoveFile.py
# Author: Krzysztof.Ciba@NOSPAMgmail.com
# Date: 2013/03/25 07:44:19
########################################################################
""" :mod: RemoveFile
================
.. module: RemoveFile
:synopsis: removeFile operation handler
.. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
removeFile operation handler
"""
__RCSID__ = "$Id $"
# #
# @file RemoveFile.py
# @author Krzysztof.Ciba@NOSPAMgmail.com
# @date 2013/03/25 07:44:27
# @brief Definition of RemoveFile class.
# # imports
import os
import re
# # from DIRAC
from DIRAC import S_OK, S_ERROR, gMonitor
from DIRAC.RequestManagementSystem.private.OperationHandlerBase import OperationHandlerBase
########################################################################
class RemoveFile( OperationHandlerBase ):
"""
.. class:: RemoveFile
remove file operation handler
"""
def __init__( self, operation = None, csPath = None ):
"""c'tor
:param self: self reference
:param Operation operation: Operation to execute
:param str csPath: CS path for this handler
"""
# # call base class ctor
OperationHandlerBase.__init__( self, operation, csPath )
    # # gMonitor stuff goes here
gMonitor.registerActivity( "RemoveFileAtt", "File removals attempted",
"RequestExecutingAgent", "Files/min", gMonitor.OP_SUM )
gMonitor.registerActivity( "RemoveFileOK", "Successful file removals",
"RequestExecutingAgent", "Files/min", gMonitor.OP_SUM )
gMonitor.registerActivity( "RemoveFileFail", "Failed file removals",
"RequestExecutingAgent", "Files/min", gMonitor.OP_SUM )
# # re pattern for not existing files
self.reNotExisting = re.compile( r"(no|not) such file.*", re.IGNORECASE )
def __call__( self ):
""" action for 'removeFile' operation """
# # get waiting files
waitingFiles = self.getWaitingFilesList()
# # prepare waiting file dict
toRemoveDict = dict( [ ( opFile.LFN, opFile ) for opFile in waitingFiles ] )
gMonitor.addMark( "RemoveFileAtt", len( toRemoveDict ) )
# # 1st step - bulk removal
self.log.debug( "bulk removal of %s files" % len( toRemoveDict ) )
bulkRemoval = self.bulkRemoval( toRemoveDict )
if not bulkRemoval["OK"]:
self.log.error( "bulk removal failed: %s" % bulkRemoval["Message"] )
else:
gMonitor.addMark( "RemoveFileOK", len( toRemoveDict ) - len( bulkRemoval["Value"] ) )
toRemoveDict = bulkRemoval["Value"]
# # 2nd step - single file removal
for lfn, opFile in toRemoveDict.items():
self.log.info( "removing single file %s" % lfn )
singleRemoval = self.singleRemoval( opFile )
if not singleRemoval["OK"]:
self.log.error( 'Error removing single file', singleRemoval["Message"] )
gMonitor.addMark( "RemoveFileFail", 1 )
else:
self.log.info( "file %s has been removed" % lfn )
gMonitor.addMark( "RemoveFileOK", 1 )
# # set
failedFiles = [ ( lfn, opFile ) for ( lfn, opFile ) in toRemoveDict.items()
if opFile.Status in ( "Failed", "Waiting" ) ]
if failedFiles:
self.operation.Error = "failed to remove %d files" % len( failedFiles )
return S_OK()
def bulkRemoval( self, toRemoveDict ):
""" bulk removal using request owner DN
:param dict toRemoveDict: { lfn: opFile, ... }
:return: S_ERROR or S_OK( { lfn: opFile, ... } ) -- dict with files still waiting to be removed
"""
bulkRemoval = self.dm.removeFile( toRemoveDict.keys(), force = True )
if not bulkRemoval["OK"]:
error = bulkRemoval["Message"]
self.log.error( "unable to remove files: %s" % error )
self.operation.Error = error
for opFile in self.operation:
opFile.Error = error
return bulkRemoval
bulkRemoval = bulkRemoval["Value"]
# # filter results
for lfn, opFile in toRemoveDict.items():
if lfn in bulkRemoval["Successful"]:
opFile.Status = "Done"
elif lfn in bulkRemoval["Failed"]:
error = bulkRemoval["Failed"][lfn]
if type( error ) == dict:
error = ";".join( [ "%s-%s" % ( k, v ) for k, v in error.items() ] )
opFile.Error = error
if self.reNotExisting.search( opFile.Error ):
opFile.Status = "Done"
# # return files still waiting
toRemoveDict = dict( [ ( opFile.LFN, opFile ) for opFile in self.operation if opFile.Status == "Waiting" ] )
return S_OK( toRemoveDict )
def singleRemoval( self, opFile ):
""" remove single file
:param opFile: File instance
"""
# # try to remove with owner proxy
proxyFile = None
if "Write access not permitted for this credential" in opFile.Error:
if "DataManager" in self.shifter:
# # you're a data manager - get proxy for LFN and retry
saveProxy = os.environ["X509_USER_PROXY"]
try:
fileProxy = self.getProxyForLFN( opFile.LFN )
if not fileProxy["OK"]:
opFile.Error = "Error getting owner's proxy : %s" % fileProxy['Message']
else:
proxyFile = fileProxy["Value"]
self.log.info( "Trying to remove file with owner's proxy (file %s)" % proxyFile )
removeFile = self.dm.removeFile( opFile.LFN, force = True )
self.log.always( str( removeFile ) )
if not removeFile["OK"]:
opFile.Error = str( removeFile["Message"] )
if self.reNotExisting.search( str( removeFile["Message"] ).lower() ):
opFile.Status = "Done"
else:
removeFile = removeFile["Value"]
if opFile.LFN in removeFile["Failed"]:
error = removeFile["Failed"][opFile.LFN]
if type( error ) == dict:
error = ";".join( [ "%s-%s" % ( k, v ) for k, v in error.items() ] )
if self.reNotExisting.search( error ):
# This should never happen due to the "force" flag
opFile.Status = "Done"
else:
opFile.Error = error
else:
opFile.Status = "Done"
finally:
if proxyFile:
os.unlink( proxyFile )
# # put back request owner proxy to env
os.environ["X509_USER_PROXY"] = saveProxy
# # file removed? update its status to 'Done'
if opFile.Status == "Done":
return S_OK()
return S_ERROR( opFile.Error )
|
calancha/DIRAC
|
DataManagementSystem/Agent/RequestOperations/RemoveFile.py
|
Python
|
gpl-3.0
| 6,620
|
[
"DIRAC"
] |
7c3096b440b3623f503deaa7ddaa6d677a735790de06ece1493cb37fcb882280
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
#
#
from spack import *
class RAnnotationdbi(RPackage):
"""Provides user interface and database connection code for
annotation data packages using SQLite data storage."""
homepage = "https://www.bioconductor.org/packages/AnnotationDbi/"
url = "https://git.bioconductor.org/packages/AnnotationDbi"
list_url = homepage
version('1.38.2', git='https://git.bioconductor.org/packages/AnnotationDbi', commit='67d46facba8c15fa5f0eb47c4e39b53dbdc67c36')
depends_on('r@3.4.0:3.4.9', when='@1.38.2')
depends_on('r-biocgenerics', type=('build', 'run'))
depends_on('r-biobase', type=('build', 'run'))
depends_on('r-iranges', type=('build', 'run'))
depends_on('r-dbi', type=('build', 'run'))
depends_on('r-rsqlite', type=('build', 'run'))
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/r-annotationdbi/package.py
|
Python
|
lgpl-2.1
| 2,017
|
[
"Bioconductor"
] |
fea74a5a838d0a0a4d72e73ab775b77c4682a640daf70a59c91ed1407c4b848b
|
#!/usr/bin/env python
from __future__ import print_function
import vtk
import numpy
from icqsol.bem.icqBaseLaplaceSolver import BaseLaplaceSolver
class PoissonSolver(BaseLaplaceSolver):
def __init__(self, pdata, max_edge_length, order=5):
"""
Constructor
@param pdata instance of vtkPolyData
        @param max_edge_length maximum edge length, used to turn
               polygons into triangles
        @param order order parameter passed through to the base solver
        """
#super(BaseLaplaceSolver, self).__init__(pdata, max_edge_length, order)
BaseLaplaceSolver.__init__(self, pdata, max_edge_length, order)
self.responseName = 'v'
self.sourceName = 'charge'
def computeResponseField(self):
"""
Compute the response field, in this case the potential due to a charge source
@return response
"""
srcIndex = self.getSourceArrayIndex()
src = self.getSourceArray(srcIndex)
# Get the response matrix.
gMat = self.getGreenMatrix()
# Compute the response.
rsp = numpy.dot(gMat, src)
self.addResponseField(rsp)
return rsp
###############################################################################
def testSingleTriangle():
"Single triangle"
h = 0.1
# create set of points
points = vtk.vtkPoints()
points.SetNumberOfPoints(3)
points.SetPoint(0, [1., -1.*h/3., -1.*h/3.])
points.SetPoint(1, [1., 2.*h/3., -1.*h/3.])
points.SetPoint(2, [1., -1.*h/3., 2.*h/3.])
# create vtkPolyData object
pdata = vtk.vtkPolyData()
pdata.SetPoints(points)
ptIds = vtk.vtkIdList()
ptIds.SetNumberOfIds(3)
ptIds.SetId(0, 0)
ptIds.SetId(1, 1)
ptIds.SetId(2, 2)
pdata.Allocate(1, 1)
pdata.InsertNextCell(vtk.VTK_POLYGON, ptIds)
for order in range(1, 6):
        lslm = PoissonSolver(pdata, max_edge_length=1000., order=order)
print('order = ', order)
print('g matrix: ', lslm.getGreenMatrix())
def testTwoTrianglesCoplanar():
"Two triangles"
# create set of points
points = vtk.vtkPoints()
points.SetNumberOfPoints(4)
points.SetPoint(0, [0., 0., 0.])
points.SetPoint(1, [1., 0., 0.])
points.SetPoint(2, [0., 1., 0.])
points.SetPoint(3, [1., 1., 0.])
# create vtkPolyData object
pdata = vtk.vtkPolyData()
pdata.SetPoints(points)
pdata.Allocate(2, 1)
ptIds = vtk.vtkIdList()
ptIds.SetNumberOfIds(3)
ptIds.SetId(0, 0)
ptIds.SetId(1, 1)
ptIds.SetId(2, 2)
pdata.InsertNextCell(vtk.VTK_POLYGON, ptIds)
ptIds.SetId(0, 1)
ptIds.SetId(1, 3)
ptIds.SetId(2, 2)
pdata.InsertNextCell(vtk.VTK_POLYGON, ptIds)
for order in range(1, 6):
lslm = PoissonSolver(pdata,
max_edge_length=1000.,
order=order)
print('order = ', order)
print('g matrix: ', lslm.getGreenMatrix())
def testTwoTriangles():
"Two triangles"
# create set of points
points = vtk.vtkPoints()
points.SetNumberOfPoints(4)
points.SetPoint(0, [0., 0., 0.])
points.SetPoint(1, [1., 0., 0.])
points.SetPoint(2, [0., 1., 0.])
points.SetPoint(3, [0., 0., 1.])
# create vtkPolyData object
pdata = vtk.vtkPolyData()
pdata.SetPoints(points)
pdata.Allocate(2, 1)
ptIds = vtk.vtkIdList()
ptIds.SetNumberOfIds(3)
ptIds.SetId(0, 0)
ptIds.SetId(1, 1)
ptIds.SetId(2, 3)
pdata.InsertNextCell(vtk.VTK_POLYGON, ptIds)
ptIds.SetId(0, 0)
ptIds.SetId(1, 3)
ptIds.SetId(2, 2)
pdata.InsertNextCell(vtk.VTK_POLYGON, ptIds)
for order in range(1, 6):
lslm = PoissonSolver(pdata,
max_edge_length=1000.,
order=order)
print('order = ', order)
print('g matrix: ', lslm.getGreenMatrix())
if __name__ == '__main__':
testSingleTriangle()
testTwoTrianglesCoplanar()
testTwoTriangles()
|
gregvonkuster/icqsol
|
bem/icqPoissonSolver.py
|
Python
|
mit
| 3,963
|
[
"VTK"
] |
f6441f4cd8f2beb9a2a78ddd6503a77e0611224d4d41aa66c9013d0b4ff69063
|
# -*- coding: utf-8 -*-
"""
.. _disc-stats:
=====================
Statistical inference
=====================
Here we will briefly cover multiple concepts of inferential statistics in an
introductory manner, and demonstrate how to use some MNE statistical functions.
"""
# Authors: Eric Larson <larson.eric.d@gmail.com>
# License: BSD (3-clause)
from functools import partial
import numpy as np
from scipy import stats
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D # noqa, analysis:ignore
import mne
from mne.stats import (ttest_1samp_no_p, bonferroni_correction, fdr_correction,
permutation_t_test, permutation_cluster_1samp_test)
print(__doc__)
###############################################################################
# Hypothesis testing
# ------------------
# Null hypothesis
# ^^^^^^^^^^^^^^^
# From `Wikipedia <https://en.wikipedia.org/wiki/Null_hypothesis>`__:
#
# In inferential statistics, a general statement or default position that
# there is no relationship between two measured phenomena, or no
# association among groups.
#
# We typically want to reject a **null hypothesis** with
# some probability (e.g., p < 0.05). This probability is also called the
# significance level :math:`\alpha`.
# To think about what this means, let's follow the illustrative example from
# :footcite:`RidgwayEtAl2012` and construct a toy dataset consisting of a
# 40 x 40 square with a "signal" present in the center with white noise added
# and a Gaussian smoothing kernel applied.
width = 40
n_subjects = 10
signal_mean = 100
signal_sd = 100
noise_sd = 0.01
gaussian_sd = 5
sigma = 1e-3 # sigma for the "hat" method
n_permutations = 'all' # run an exact test
n_src = width * width
# For each "subject", make a smoothed noisy signal with a centered peak
rng = np.random.RandomState(2)
X = noise_sd * rng.randn(n_subjects, width, width)
# Add a signal at the center
X[:, width // 2, width // 2] = signal_mean + rng.randn(n_subjects) * signal_sd
# Spatially smooth with a 2D Gaussian kernel
size = width // 2 - 1
gaussian = np.exp(-(np.arange(-size, size + 1) ** 2 / float(gaussian_sd ** 2)))
for si in range(X.shape[0]):
for ri in range(X.shape[1]):
X[si, ri, :] = np.convolve(X[si, ri, :], gaussian, 'same')
for ci in range(X.shape[2]):
X[si, :, ci] = np.convolve(X[si, :, ci], gaussian, 'same')
###############################################################################
# The data averaged over all subjects looks like this:
fig, ax = plt.subplots()
ax.imshow(X.mean(0), cmap='inferno')
ax.set(xticks=[], yticks=[], title="Data averaged over subjects")
###############################################################################
# In this case, a null hypothesis we could test for each voxel is:
#
# There is no difference between the mean value and zero
# (:math:`H_0 \colon \mu = 0`).
#
# The alternative hypothesis, then, is that the voxel has a non-zero mean
# (:math:`H_1 \colon \mu \neq 0`).
# This is a *two-tailed* test because the mean could be less than
# or greater than zero, whereas a *one-tailed* test would test only one of
# these possibilities, i.e. :math:`H_1 \colon \mu \geq 0` or
# :math:`H_1 \colon \mu \leq 0`.
#
# .. note:: Here we will refer to each spatial location as a "voxel".
# In general, though, it could be any sort of data value,
# including cortical vertex at a specific time, pixel in a
# time-frequency decomposition, etc.
#
# Parametric tests
# ^^^^^^^^^^^^^^^^
# Let's start with a **paired t-test**, which is a standard test
# for differences in paired samples. Mathematically, it is equivalent
# to a 1-sample t-test on the difference between the samples in each condition.
# The paired t-test is **parametric**
# because it assumes that the underlying sample distribution is Gaussian, and
# is only valid in this case. This happens to be satisfied by our toy dataset,
# but is not always satisfied for neuroimaging data.
#
# In the context of our toy dataset, which has many voxels
# (:math:`40 \cdot 40 = 1600`), applying the paired t-test is called a
# *mass-univariate* approach as it treats each voxel independently.
titles = ['t']
out = stats.ttest_1samp(X, 0, axis=0)
ts = [out[0]]
ps = [out[1]]
mccs = [False] # these are not multiple-comparisons corrected
def plot_t_p(t, p, title, mcc, axes=None):
if axes is None:
fig = plt.figure(figsize=(6, 3))
axes = [fig.add_subplot(121, projection='3d'), fig.add_subplot(122)]
show = True
else:
show = False
p_lims = [0.1, 0.001]
t_lims = -stats.distributions.t.ppf(p_lims, n_subjects - 1)
p_lims = [-np.log10(p) for p in p_lims]
# t plot
x, y = np.mgrid[0:width, 0:width]
surf = axes[0].plot_surface(x, y, np.reshape(t, (width, width)),
rstride=1, cstride=1, linewidth=0,
vmin=t_lims[0], vmax=t_lims[1], cmap='viridis')
axes[0].set(xticks=[], yticks=[], zticks=[],
xlim=[0, width - 1], ylim=[0, width - 1])
axes[0].view_init(30, 15)
cbar = plt.colorbar(ax=axes[0], shrink=0.75, orientation='horizontal',
fraction=0.1, pad=0.025, mappable=surf)
cbar.set_ticks(t_lims)
cbar.set_ticklabels(['%0.1f' % t_lim for t_lim in t_lims])
cbar.set_label('t-value')
cbar.ax.get_xaxis().set_label_coords(0.5, -0.3)
if not show:
axes[0].set(title=title)
if mcc:
axes[0].title.set_weight('bold')
# p plot
use_p = -np.log10(np.reshape(np.maximum(p, 1e-5), (width, width)))
img = axes[1].imshow(use_p, cmap='inferno', vmin=p_lims[0], vmax=p_lims[1],
interpolation='nearest')
axes[1].set(xticks=[], yticks=[])
cbar = plt.colorbar(ax=axes[1], shrink=0.75, orientation='horizontal',
fraction=0.1, pad=0.025, mappable=img)
cbar.set_ticks(p_lims)
cbar.set_ticklabels(['%0.1f' % p_lim for p_lim in p_lims])
cbar.set_label(r'$-\log_{10}(p)$')
cbar.ax.get_xaxis().set_label_coords(0.5, -0.3)
if show:
text = fig.suptitle(title)
if mcc:
text.set_weight('bold')
plt.subplots_adjust(0, 0.05, 1, 0.9, wspace=0, hspace=0)
mne.viz.utils.plt_show()
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# "Hat" variance adjustment
# ~~~~~~~~~~~~~~~~~~~~~~~~~
# The "hat" technique regularizes the variance values used in the t-test
# calculation :footcite:`RidgwayEtAl2012` to compensate for implausibly small
# variances.
ts.append(ttest_1samp_no_p(X, sigma=sigma))
ps.append(stats.distributions.t.sf(np.abs(ts[-1]), len(X) - 1) * 2)
titles.append(r'$\mathrm{t_{hat}}$')
mccs.append(False)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# Non-parametric tests
# ^^^^^^^^^^^^^^^^^^^^
# Instead of assuming an underlying Gaussian distribution, we could instead
# use a **non-parametric resampling** method. In the case of a paired t-test
# between two conditions A and B, which is mathematically equivalent to a
# one-sample t-test between the difference in the conditions A-B, under the
# null hypothesis we have the principle of **exchangeability**. This means
# that, if the null is true, we can exchange conditions and not change
# the distribution of the test statistic.
#
# When using a paired t-test, exchangeability thus means that we can flip the
# signs of the difference between A and B. Therefore, we can construct the
# **null distribution** values for each voxel by taking random subsets of
# samples (subjects), flipping the sign of their difference, and recording the
# absolute value of the resulting statistic (we record the absolute value
# because we conduct a two-tailed test). The absolute value of the statistic
# evaluated on the veridical data can then be compared to this distribution,
# and the p-value is simply the proportion of null distribution values that
# are smaller.
#
# .. warning:: In the case of a true one-sample t-test, i.e. analyzing a single
# condition rather than the difference between two conditions,
# it is not clear where/how exchangeability applies; see
# `this FieldTrip discussion <ft_exch_>`_.
#
# In the case where ``n_permutations`` is large enough (or "all") so
# that the complete set of unique resampling exchanges can be done
# (which is :math:`2^{N_{samp}}-1` for a one-tailed and
# :math:`2^{N_{samp}-1}-1` for a two-tailed test, not counting the
# veridical distribution), instead of randomly exchanging conditions
# the null is formed from using all possible exchanges. This is known
# as a permutation test (or exact test).
# Here we have to do a bit of gymnastics to get our function to do
# a permutation test without correcting for multiple comparisons:
X.shape = (n_subjects, n_src) # flatten the array for simplicity
titles.append('Permutation')
ts.append(np.zeros(width * width))
ps.append(np.zeros(width * width))
mccs.append(False)
for ii in range(n_src):
ts[-1][ii], ps[-1][ii] = permutation_t_test(X[:, [ii]], verbose=False)[:2]
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
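###############################################################################
# For intuition, the sign-flipping null distribution for a single voxel can be
# built directly (a rough sketch; ``permutation_t_test`` above does this
# exactly, and for all voxels at once):
rng_perm = np.random.RandomState(0)
x = X[:, (width // 2) * width + width // 2]  # data at the center voxel
t_obs = stats.ttest_1samp(x, 0)[0]
null_t = np.array([np.abs(stats.ttest_1samp(rng_perm.choice([-1, 1], n_subjects) * x, 0)[0])
                   for _ in range(1000)])
print('sign-flip p-value at the center voxel: %0.4f'
      % ((1 + (null_t >= np.abs(t_obs)).sum()) / (1 + len(null_t))))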
###############################################################################
# Multiple comparisons
# --------------------
# So far, we have done no correction for multiple comparisons. This is
# potentially problematic for these data because there are
# :math:`40 \cdot 40 = 1600` tests being performed. If we use a threshold
# p < 0.05 for each individual test, we would expect many voxels to be declared
# significant even if there were no true effect. In other words, we would make
# many **type I errors** (adapted from `here <errors_>`_):
#
# .. rst-class:: skinnytable
#
#    +----------+--------+------------------+------------------+
#    |          |        | Null hypothesis                     |
#    |          |        +------------------+------------------+
#    |          |        | True             | False            |
#    +==========+========+==================+==================+
#    |          |        | Type I error     | Correct          |
#    |          | Yes    | False positive   | True positive    |
#    + Reject   +--------+------------------+------------------+
#    |          |        | Correct          | Type II error    |
#    |          | No     | True Negative    | False negative   |
#    +----------+--------+------------------+------------------+
#
# To see why, consider a standard :math:`\alpha = 0.05`.
# For a single test, our probability of making a type I error is 0.05.
# The probability of making at least one type I error in
# :math:`N_{\mathrm{test}}` independent tests is then given by
# :math:`1 - (1 - \alpha)^{N_{\mathrm{test}}}`:
N = np.arange(1, 80)
alpha = 0.05
p_type_I = 1 - (1 - alpha) ** N
fig, ax = plt.subplots(figsize=(4, 3))
ax.scatter(N, p_type_I, 3)
ax.set(xlim=N[[0, -1]], ylim=[0, 1], xlabel=r'$N_{\mathrm{test}}$',
ylabel=u'Probability of at least\none type I error')
ax.grid(True)
fig.tight_layout()
fig.show()
###############################################################################
# To combat this problem, several methods exist. Typically these
# provide control over either one of the following two measures:
#
# 1. `Familywise error rate (FWER) <fwer_>`_
# The probability of making one or more type I errors:
#
# .. math::
#       \mathrm{P}(N_{\mathrm{type\ I}} \geq 1 \mid H_0)
#
# 2. `False discovery rate (FDR) <fdr_>`_
# The expected proportion of rejected null hypotheses that are
# actually true:
#
# .. math::
# \mathrm{E}(\frac{N_{\mathrm{type\ I}}}{N_{\mathrm{reject}}}
# \mid N_{\mathrm{reject}} > 0) \cdot
# \mathrm{P}(N_{\mathrm{reject}} > 0 \mid H_0)
#
# We cover some techniques that control FWER and FDR below.
#
# Bonferroni correction
# ^^^^^^^^^^^^^^^^^^^^^
# Perhaps the simplest way to deal with multiple comparisons, `Bonferroni
# correction <https://en.wikipedia.org/wiki/Bonferroni_correction>`__
# conservatively multiplies the p-values by the number of comparisons to
# control the FWER.
titles.append('Bonferroni')
ts.append(ts[-1])
ps.append(bonferroni_correction(ps[0])[1])
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
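###############################################################################
# Equivalently, Bonferroni correction just multiplies each p-value by the
# number of tests performed and clips at 1 -- a one-line sketch:
p_bonf_manual = np.minimum(ps[0] * n_src, 1.)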
###############################################################################
# False discovery rate (FDR) correction
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# Typically FDR is performed with the Benjamini-Hochberg procedure, which
# is less restrictive than Bonferroni correction for large numbers of
# comparisons (fewer type II errors), but provides less strict control of type
# I errors.
titles.append('FDR')
ts.append(ts[-1])
ps.append(fdr_correction(ps[0])[1])
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
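###############################################################################
# For intuition, a minimal NumPy sketch of the Benjamini-Hochberg step-up
# adjustment, the procedure behind ``fdr_correction`` for independent or
# positively correlated tests (a sketch, not MNE's implementation):
def bh_adjust(pvals):
    """Return Benjamini-Hochberg adjusted p-values for an array of p-values."""
    pvals = np.asarray(pvals).ravel()
    m = len(pvals)
    order = np.argsort(pvals)
    scaled = pvals[order] * m / np.arange(1, m + 1)
    # enforce monotonicity from the largest p-value downwards and clip at 1
    adjusted = np.minimum.accumulate(scaled[::-1])[::-1].clip(max=1)
    out = np.empty(m)
    out[order] = adjusted
    return out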
###############################################################################
# Non-parametric resampling test with a maximum statistic
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# **Non-parametric resampling tests** can also be used to correct for multiple
# comparisons. In its simplest form, we again do permutations using
# exchangeability under the null hypothesis, but this time we take the
# *maximum statistic across all voxels* in each permutation to form the
# null distribution. The p-value for each voxel from the veridical data
# is then given by the proportion of null distribution values
# that were smaller.
#
# This method has two important features:
#
# 1. It controls FWER.
# 2. It is non-parametric. Even though our initial test statistic
# (here a 1-sample t-test) is parametric, the null
# distribution for the null hypothesis rejection (the mean value across
# subjects is indistinguishable from zero) is obtained by permutations.
# This means that it makes no assumptions of Gaussianity
# (which do hold for this example, but do not in general for some types
# of processed neuroimaging data).
titles.append(r'$\mathbf{Perm_{max}}$')
out = permutation_t_test(X, verbose=False)[:2]
ts.append(out[0])
ps.append(out[1])
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# Clustering
# ^^^^^^^^^^
# Each of the aforementioned multiple comparisons corrections have the
# disadvantage of not fully incorporating the correlation structure of the
# data, namely that points close to one another (e.g., in space or time) tend
# to be correlated. However, by defining the adjacency/neighbor
# structure in our data, we can use **clustering** to compensate.
#
# To use this, we need to rethink our null hypothesis. Instead
# of thinking about a null hypothesis about means per voxel (with one
# independent test per voxel), we consider a null hypothesis about sizes
# of clusters in our data, which could be stated like:
#
# The distribution of spatial cluster sizes observed in two experimental
# conditions are drawn from the same probability distribution.
#
# Here we only have a single condition and we contrast to zero, which can
# be thought of as:
#
# The distribution of spatial cluster sizes is independent of the sign
# of the data.
#
# In this case, we again do permutations with a maximum statistic, but, under
# each permutation, we:
#
# 1. Compute the test statistic for each voxel individually.
# 2. Threshold the test statistic values.
# 3. Cluster voxels that exceed this threshold (with the same sign) based on
# adjacency.
# 4. Retain the size of the largest cluster (measured, e.g., by a simple voxel
# count, or by the sum of voxel t-values within the cluster) to build the
# null distribution.
#
# After doing these permutations, the cluster sizes in our veridical data
# are compared to this null distribution. The p-value associated with each
# cluster is again given by the proportion of smaller null distribution
# values. This can then be subjected to a standard p-value threshold
# (e.g., p < 0.05) to reject the null hypothesis (i.e., find an effect of
# interest).
#
# This reframing to consider *cluster sizes* rather than *individual means*
# maintains the advantages of the standard non-parametric permutation
# test -- namely controlling FWER and making no assumptions of parametric
# data distribution.
# Critically, though, it also accounts for the correlation structure in the
# data -- which in this toy case is spatial but in general can be
# multidimensional (e.g., spatio-temporal) -- because the null distribution
# will be derived from data in a way that preserves these correlations.
#
# .. sidebar:: Effect size
#
# For a nice description of how to compute the effect size obtained
# in a cluster test, see this
# `FieldTrip mailing list discussion <ft_cluster_effect_size_>`_.
#
# However, there is a drawback. If a cluster significantly deviates from
# the null, no further inference on the cluster (e.g., peak location) can be
# made, as the entire cluster as a whole is used to reject the null.
# Moreover, because the test statistic concerns the full data, the null
# hypothesis (and our rejection of it) refers to the structure of the full
# data. For more information, see also the comprehensive
# `FieldTrip tutorial <ft_cluster_>`_.
#
# Defining the adjacency matrix
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# First we need to define our adjacency (sometimes called "neighbors") matrix.
# This is a square array (or sparse matrix) of shape ``(n_src, n_src)`` that
# contains zeros and ones to define which spatial points are neighbors, i.e.,
# which voxels are adjacent to each other. In our case this
# is quite simple, as our data are aligned on a rectangular grid.
#
# Let's pretend that our data were smaller -- a 3 x 3 grid. Thinking about
# each voxel as being connected to the other voxels it touches, we would
# need a 9 x 9 adjacency matrix. The first row of this matrix contains the
# voxels in the flattened data that the first voxel touches. Since it touches
# the second element in the first row and the first element in the second row
# (and is also a neighbor to itself), this would be::
#
# [1, 1, 0, 1, 0, 0, 0, 0, 0]
#
# :mod:`sklearn.feature_extraction` provides a convenient function for this:
from sklearn.feature_extraction.image import grid_to_graph # noqa: E402
mini_adjacency = grid_to_graph(3, 3).toarray()
assert mini_adjacency.shape == (9, 9)
print(mini_adjacency[0])
###############################################################################
# In general the adjacency between voxels can be more complex, such as
# those between sensors in 3D space, or time-varying activation at brain
# vertices on a cortical surface. MNE provides several convenience functions
# for computing adjacency matrices (see the
# :ref:`Statistics API <api_reference_statistics>`).
#
# Standard clustering
# ~~~~~~~~~~~~~~~~~~~
# Here, since our data are on a grid, we can use ``adjacency=None`` to
# trigger optimized grid-based code, and run the clustering algorithm.
titles.append('Clustering')
# Reshape data to what is equivalent to (n_samples, n_space, n_time)
X.shape = (n_subjects, width, width)
# Compute threshold from t distribution (this is also the default)
threshold = stats.distributions.t.ppf(1 - alpha, n_subjects - 1)
t_clust, clusters, p_values, H0 = permutation_cluster_1samp_test(
X, n_jobs=1, threshold=threshold, adjacency=None,
n_permutations=n_permutations, out_type='mask')
# Put the cluster data in a viewable format
p_clust = np.ones((width, width))
for cl, p in zip(clusters, p_values):
p_clust[cl] = p
ts.append(t_clust)
ps.append(p_clust)
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
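###############################################################################
# For intuition, a single permutation iteration of this cluster test can be
# sketched with :mod:`scipy.ndimage` (illustrative only; the MNE call above
# does this for many sign flips and both signs, recording the null
# distribution of maximal cluster sizes):
from scipy import ndimage  # noqa: E402
rng_perm = np.random.RandomState(0)
signs = rng_perm.choice([-1, 1], n_subjects)[:, np.newaxis, np.newaxis]
t_perm = stats.ttest_1samp(signs * X, 0, axis=0)[0]  # step 1: per-voxel stats
supra = t_perm > threshold  # step 2: threshold
labels, n_found = ndimage.label(supra)  # step 3: cluster by grid adjacency
max_size = max((np.sum(labels == ii) for ii in range(1, n_found + 1)), default=0)
print('largest suprathreshold cluster in this permutation: %d voxels' % max_size)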
###############################################################################
# "Hat" variance adjustment
# ~~~~~~~~~~~~~~~~~~~~~~~~~
# This method can also be used in this context to correct for small
# variances :footcite:`RidgwayEtAl2012`:
titles.append(r'$\mathbf{C_{hat}}$')
stat_fun_hat = partial(ttest_1samp_no_p, sigma=sigma)
t_hat, clusters, p_values, H0 = permutation_cluster_1samp_test(
X, n_jobs=1, threshold=threshold, adjacency=None, out_type='mask',
n_permutations=n_permutations, stat_fun=stat_fun_hat, buffer_size=None)
p_hat = np.ones((width, width))
for cl, p in zip(clusters, p_values):
p_hat[cl] = p
ts.append(t_hat)
ps.append(p_hat)
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# .. _tfce_example:
#
# Threshold-free cluster enhancement (TFCE)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# TFCE eliminates the free parameter initial ``threshold`` value that
# determines which points are included in clustering by approximating
# a continuous integration across possible threshold values with a standard
# `Riemann sum <https://en.wikipedia.org/wiki/Riemann_sum>`__
# :footcite:`SmithNichols2009`.
# This requires giving a starting threshold ``start`` and a step
# size ``step``, which in MNE is supplied as a dict.
# The smaller the ``step`` and closer to 0 the ``start`` value,
# the better the approximation, but the longer it takes.
#
# A significant advantage of TFCE is that, rather than modifying the
# statistical null hypothesis under test (from one about individual voxels
# to one about the distribution of clusters in the data), it modifies the *data
# under test* while still controlling for multiple comparisons.
# The statistical test is then done at the level of individual voxels rather
# than clusters. This allows for evaluation of each point
# independently for significance rather than only as cluster groups.
titles.append(r'$\mathbf{C_{TFCE}}$')
threshold_tfce = dict(start=0, step=0.2)
t_tfce, _, p_tfce, H0 = permutation_cluster_1samp_test(
X, n_jobs=1, threshold=threshold_tfce, adjacency=None,
n_permutations=n_permutations, out_type='mask')
ts.append(t_tfce)
ps.append(p_tfce)
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# We can also combine TFCE and the "hat" correction:
titles.append(r'$\mathbf{C_{hat,TFCE}}$')
t_tfce_hat, _, p_tfce_hat, H0 = permutation_cluster_1samp_test(
X, n_jobs=1, threshold=threshold_tfce, adjacency=None, out_type='mask',
n_permutations=n_permutations, stat_fun=stat_fun_hat, buffer_size=None)
ts.append(t_tfce_hat)
ps.append(p_tfce_hat)
mccs.append(True)
plot_t_p(ts[-1], ps[-1], titles[-1], mccs[-1])
###############################################################################
# Visualize and compare methods
# -----------------------------
# Let's take a look at these statistics. The top row shows each test statistic,
# and the bottom shows p-values for various statistical tests, with the ones
# with proper control over FWER or FDR with bold titles.
fig = plt.figure(facecolor='w', figsize=(14, 3))
assert len(ts) == len(titles) == len(ps)
for ii in range(len(ts)):
ax = [fig.add_subplot(2, 10, ii + 1, projection='3d'),
fig.add_subplot(2, 10, 11 + ii)]
plot_t_p(ts[ii], ps[ii], titles[ii], mccs[ii], ax)
fig.tight_layout(pad=0, w_pad=0.05, h_pad=0.1)
plt.show()
###############################################################################
# The first three columns show the parametric and non-parametric statistics
# that are not corrected for multiple comparisons:
#
# - Mass univariate **t-tests** result in jagged edges.
# - **"Hat" variance correction** of the t-tests produces less peaky edges,
# correcting for sharpness in the statistic driven by low-variance voxels.
# - **Non-parametric resampling tests** are very similar to t-tests. This is to
# be expected: the data are drawn from a Gaussian distribution, and thus
# satisfy parametric assumptions.
#
# The next three columns show multiple comparison corrections of the
# mass univariate tests (parametric and non-parametric). These
# too conservatively correct for multiple comparisons because neighboring
# voxels in our data are correlated:
#
# - **Bonferroni correction** eliminates any significant activity.
# - **FDR correction** is less conservative than Bonferroni.
# - A **permutation test with a maximum statistic** also eliminates any
# significant activity.
#
# The final four columns show the non-parametric cluster-based permutation
# tests with a maximum statistic:
#
# - **Standard clustering** identifies the correct region. However, the whole
# area must be declared significant, so no peak analysis can be done.
# Also, the peak is broad.
# - **Clustering with "hat" variance adjustment** tightens the estimate of
# significant activity.
# - **Clustering with TFCE** allows analyzing each significant point
# independently, but still has a broadened estimate.
# - **Clustering with TFCE and "hat" variance adjustment** tightens the area
# declared significant (again FWER corrected).
#
# Statistical functions in MNE
# ----------------------------
# The complete listing of statistical functions provided by MNE are in
# the :ref:`Statistics API list <api_reference_statistics>`, but we will give
# a brief overview here.
#
# MNE provides several convenience parametric testing functions that can be
# used in conjunction with the non-parametric clustering methods. However,
# the set of functions we provide is not meant to be exhaustive.
#
# If the univariate statistical contrast of interest is not listed here
# (e.g., interaction term in an unbalanced ANOVA), consider checking out the
# :mod:`statsmodels` package. It offers many functions for computing
# statistical contrasts, e.g., :func:`statsmodels.stats.anova.anova_lm`.
# To use these functions in clustering:
#
# 1. Determine which test statistic (e.g., t-value, F-value) you would use
# in a univariate context to compute your contrast of interest. In other
# words, if there were only a single output such as reaction times, what
# test statistic might you compute on the data?
# 2. Wrap the call to that function within a function that takes an input of
# the same shape that is expected by your clustering function,
# and returns an array of the same shape without the "samples" dimension
#    (e.g., :func:`mne.stats.permutation_cluster_1samp_test` takes an array
#    of shape ``(n_samples, p, q)`` and returns an array of shape ``(p, q)``;
#    see the sketch after this list).
# 3. Pass this wrapped function to the ``stat_fun`` argument to the clustering
# function.
# 4. Set an appropriate ``threshold`` value (float or dict) based on the
# values your statistical contrast function returns.
#
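# As a minimal sketch of steps 1-3 above (illustrative; any univariate
# statistic, e.g. one computed with :mod:`statsmodels`, could replace the
# t-test used here):
def my_stat_fun(data):
    """Map data of shape (n_samples, p, q) to statistics of shape (p, q)."""
    return ttest_1samp_no_p(data.reshape(len(data), -1)).reshape(data.shape[1:])
#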
# Parametric methods provided by MNE
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# - :func:`mne.stats.ttest_1samp_no_p`
# Paired t-test, optionally with hat adjustment.
# This is used by default for contrast enhancement in paired cluster tests.
#
# - :func:`mne.stats.f_oneway`
# One-way ANOVA for independent samples.
# This can be used to compute various F-contrasts. It is used by default
# for contrast enhancement in non-paired cluster tests.
#
# - :func:`mne.stats.f_mway_rm`
# M-way ANOVA for repeated measures and balanced designs.
# This returns F-statistics and p-values. The associated helper function
# :func:`mne.stats.f_threshold_mway_rm` can be used to determine the
# F-threshold at a given significance level.
#
# - :func:`mne.stats.linear_regression`
# Compute ordinary least square regressions on multiple targets, e.g.,
# sensors, time points across trials (samples).
# For each regressor it returns the beta value, t-statistic, and
# uncorrected p-value. While it can be used as a test, it is
# particularly useful to compute weighted averages or deal with
# continuous predictors.
#
# Non-parametric methods
# ^^^^^^^^^^^^^^^^^^^^^^
#
# - :func:`mne.stats.permutation_cluster_test`
# Unpaired contrasts with clustering.
#
# - :func:`mne.stats.spatio_temporal_cluster_test`
# Unpaired contrasts with spatio-temporal clustering.
#
# - :func:`mne.stats.permutation_t_test`
# Paired contrast with no clustering.
#
# - :func:`mne.stats.permutation_cluster_1samp_test`
# Paired contrasts with clustering.
#
# - :func:`mne.stats.spatio_temporal_cluster_1samp_test`
# Paired contrasts with spatio-temporal clustering.
#
# .. warning:: In most MNE functions, data has shape
# ``(..., n_space, n_time)``, where the spatial dimension can
# be e.g. sensors or source vertices. But for our spatio-temporal
# clustering functions, the spatial dimensions need to be **last**
# for computational efficiency reasons. For example, for
# :func:`mne.stats.spatio_temporal_cluster_1samp_test`, ``X``
# needs to be of shape ``(n_samples, n_time, n_space)``. You can
# use :func:`numpy.transpose` to transpose axes if necessary.
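#              For example, ``np.transpose(X, (0, 2, 1))`` turns an array of
#              shape ``(n_samples, n_space, n_time)`` into shape
#              ``(n_samples, n_time, n_space)``.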
#
# References
# ----------
# .. footbibliography::
#
# .. include:: ../../links.inc
|
rkmaddox/mne-python
|
tutorials/stats-sensor-space/10_background_stats.py
|
Python
|
bsd-3-clause
| 29,158
|
[
"Gaussian"
] |
d42014faffd82480cd92afd087024e5b41cfc97cc27b97455bfcec5d4a22c604
|
#!/usr/bin/env python3
# Copyright (C) 2012-2015 ASTRON (Netherlands Institute for Radio Astronomy)
# P.O. Box 2, 7990 AA Dwingeloo, The Netherlands
#
# This file is part of the LOFAR software suite.
# The LOFAR software suite is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The LOFAR software suite is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with the LOFAR software suite. If not, see <http://www.gnu.org/licenses/>.
# $Id: scraper.py 43211 2019-06-18 18:55:40Z klazema $
# TODO: add comments to methods
# TODO: code cleanup
# TODO: scraper should be able to process each directory more than once. Requires changes in store.py
import subprocess
import logging
import time
import datetime
import sys
import socket
import os
import os.path
import threading
import multiprocessing
from lofar.lta.ltastorageoverview import store
from lofar.common.util import humanreadablesize
from lofar.common.subprocess_utils import communicate_returning_strings
from random import random, randint
logger = logging.getLogger()
VISIT_INTERVAL = datetime.timedelta(days=7)
LEXAR_HOST = 'ingest@lexar004.control.lofar'
class FileInfo:
    '''Simple struct to hold filename, size and creation time'''
    def __init__(self, filename, size, created_at):
        '''
        Parameters
        ----------
        filename : string
        size : int
        created_at : datetime.datetime
        '''
self.filename = filename
self.size = size
self.created_at = created_at
def __str__(self):
return self.filename + " " + humanreadablesize(self.size) + " " + str(self.created_at)
class SrmlsException(Exception):
    '''Exception which is raised when an srmls command fails'''
def __init__(self, command, exitcode, stdout, stderr):
self.command = command
self.exitcode = exitcode
self.stdout = stdout
self.stderr = stderr
def __str__(self):
return "%s failed with code %d.\nstdout: %s\nstderr: %s" % \
(self.command, self.exitcode, self.stdout, self.stderr)
class ParseException(Exception):
'''Exception which is raised when parsing srmls results fails'''
def __init__(self, message):
self.message = message
def __str__(self):
return self.message
class Location:
'''A Location is a directory at a storage site which can be queried with getResult()'''
def __init__(self, srmurl, directory):
'''
Parameters
----------
srmurl : string
the srm url of the storage site. for example: srm://srm.grid.sara.nl:8443
        directory : string
a directory at the storage site. for example: /pnfs/grid.sara.nl/data/lofar/storage
'''
self.srmurl = srmurl.rstrip('/')
self.directory = directory.rstrip('/') if len(directory) > 1 else directory
if not self.srmurl.startswith('srm://'):
raise ValueError('malformed srm url: %s' % (self.srmurl,))
if not self.directory.startswith('/'):
raise ValueError('malformed directory path: "%s". should start with a /' % (self.directory,))
def path(self):
'''returns the full path srmurl + directory'''
return self.srmurl + self.directory
def isRoot(self):
'''is this a root directory?'''
return self.directory == '/'
def parentDir(self):
'''returns parent directory path'''
if self.isRoot():
return '/'
stripped = self.directory.rstrip('/')
ridx = stripped.rindex('/')
if ridx == 0:
return '/'
return stripped[:ridx]
def parentLocation(self):
'''returns a Location object for the parent directory'''
return Location(self.srmurl, self.parentDir())
def __str__(self):
'''returns the full path'''
return self.path()
def getResult(self, offset=0):
        '''Returns a LocationResult with the subdirectories and files at this location'''
foundFiles = []
foundDirectories = []
logger.info("Scanning %s with offset=%s", self.path(), offset)
# the core command: do an srmls call and parse the results
# srmls can only yield max 900 items in a result, hence we can recurse for the next 900 by using the offset
cmd = ['ssh', '-tt', '-n', '-x', '-q', LEXAR_HOST,
"srmls -l -count=900 -offset=%d %s%s" % (
offset,
self.srmurl,
self.directory) ]
logger.debug(' '.join(cmd))
p = subprocess.Popen(cmd, stdin=open('/dev/null'), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
logs = communicate_returning_strings(p)
# logger.debug('Shell command for %s exited with code %s' % (self.path(), p.returncode))
loglines = logs[0].split('\n')
        # parse logs from successful command
if p.returncode == 0 and len(loglines) > 1:
entries = []
entry = []
for line in loglines:
entry.append(line)
if 'Type:' in line:
entries.append(entry)
entry = []
for lines in entries:
if len(lines) < 2:
continue
pathLine = lines[0].strip()
pathLineItems = [x.strip() for x in pathLine.split()]
entryType = lines[-1].strip().split('Type:')[-1].strip()
if len(pathLineItems) < 2:
raise ParseException("path line shorter than expected: %s" % pathLine)
if entryType.lower() == 'directory':
dirname = pathLineItems[1]
if dirname.rstrip('/') == self.directory.rstrip('/'):
# skip current directory
continue
if len(dirname) < 1 or not dirname[0] == '/':
raise ParseException("Could not parse dirname from line: %s\nloglines:\n%s"
% (pathLineItems[1], logs[0]))
foundDirectories.append(Location(self.srmurl, dirname.rstrip('/')))
elif entryType.lower() == 'file':
try:
filesize = int(pathLineItems[0])
filename = pathLineItems[1]
timestamplines = [x for x in lines if 'ed at:' in x]
timestampline = None
                    for line in timestamplines:
                        # keep the last line as a fallback, but prefer a plausible
                        # creation time (a 1970 date indicates a missing value)
                        timestampline = line
                        if 'created' in line and '1970' not in line:
                            break
timestamppart = timestampline.split('at:')[1].strip()
timestamp = datetime.datetime.strptime(timestamppart + ' UTC', '%Y/%m/%d %H:%M:%S %Z')
foundFiles.append(FileInfo(filename, filesize, timestamp))
except Exception as e:
raise ParseException("Could not parse fileproperies:\n%s\nloglines:\n%s"
% (str(e), logs[0]))
else:
logger.error("Unknown type: %s" % entryType)
# recurse and ask for more files if we hit the 900 line limit
if len(entries) >= 900:
logger.debug('There are more than 900 lines in the results')
extraResult = self.getResult(offset + 900)
logger.debug('extraResult %s' % str(extraResult))
foundDirectories += extraResult.subDirectories
foundFiles += extraResult.files
else:
raise SrmlsException(' '.join(cmd), p.returncode, logs[0], logs[1])
return LocationResult(self, foundDirectories, foundFiles)
class LocationResult:
'''Holds the query result for a Location: a list of subDirectories and/or a list of files'''
def __init__(self, location, subDirectories = None, files = None):
'''
Parameters
----------
location : Location
For which location this result was generated. (i.e. it is the parent of the subdirectories)
subDirectories : [Location]
A list of subdirectories
files : [FileInfo]
A list of files in this location
'''
self.location = location
self.subDirectories = subDirectories if subDirectories else []
self.files = files if files else []
def __str__(self):
return "LocationResult: path=%s # subdirs=%d # files=%d totalFileSizeOfDir=%s" % (self.location.path(), self.nrOfSubDirs(), self.nrOfFiles(), humanreadablesize(self.totalFileSizeOfDir()))
def nrOfSubDirs(self):
return len(self.subDirectories)
def nrOfFiles(self):
return len(self.files)
def totalFileSizeOfDir(self):
return sum([fileinfo.size for fileinfo in self.files])
class ResultGetterThread(threading.Thread):
    '''Helper class to query a single Location asynchronously for results.
    Fetches the directory with the given dir_id from the database, scans its
    location via srmls, and stores new files and subdirectories in the database.'''
def __init__(self, dbcreds, dir_id):
threading.Thread.__init__(self)
self.daemon = True
self.dbcreds = dbcreds
self.dir_id = dir_id
def run(self):
        '''Fetches the directory for self.dir_id from the database, scans its
        location via srmls, and stores the resulting files and subdirectories.'''
try:
with store.LTAStorageDb(self.dbcreds) as db:
dir = db.directory(self.dir_id)
if not dir:
return
dir_id = dir['dir_id']
dir_name = dir['dir_name']
site_id = dir['site_id']
site = db.site(site_id)
srm_url = site['url']
location = Location(srm_url, dir_name)
try:
def rescheduleVisit():
for i in range(5):
try:
with store.LTAStorageDb(self.dbcreds) as db:
logger.info('Rescheduling %s for new visit.' % (location.path(),))
                            db.updateDirectoryLastVisitTime(self.dir_id, datetime.datetime.utcnow() - VISIT_INTERVAL + datetime.timedelta(minutes=1))
break
except:
time.sleep(1)
# get results... long blocking
result = location.getResult()
logger.info(result)
with store.LTAStorageDb(self.dbcreds) as db:
# convert the result.files list into a dict
#with (filename, dir_id) as key and a tuple with all file info as value
result_file_tuple_dict = {}
for file in result.files:
filename = file.filename.split('/')[-1]
key = (filename, dir_id)
file_tuple = (filename, int(file.size), file.created_at, dir_id)
result_file_tuple_dict[key] = file_tuple
# create a dict of all already known files from the db
known_file_dict = {}
for file in db.filesInDirectory(dir_id):
key = (str(file['name']), dir_id)
known_file_dict[key] = file
# now compare the result and known (filename, dir_id) sets
# and find out which a new, and which are known.
# compare only by (filename, dir_id) because for a given file the size and/or date might have changed,
# but that does not make it a new/unique file.
result_file_key_set = set(result_file_tuple_dict.keys())
known_file_key_set = set(known_file_dict.keys())
new_file_key_set = result_file_key_set - known_file_key_set
removed_file_key_set = known_file_key_set - result_file_key_set
logger.info("%s %s: %d out of %d files are new, and %d are already known", site['name'],
dir_name,
len(new_file_key_set),
len(result_file_key_set),
len(known_file_key_set))
if new_file_key_set:
new_file_tuple_set = [result_file_tuple_dict[key] for key in new_file_key_set]
file_ids = db.insertFileInfos(new_file_tuple_set)
if len(file_ids) != len(new_file_tuple_set):
rescheduleVisit()
if known_file_key_set:
for key, known_file in list(known_file_dict.items()):
if key in result_file_tuple_dict:
result_file_tuple = result_file_tuple_dict[key]
known_size = int(known_file['size'])
result_size = result_file_tuple[1]
if known_size != result_size:
logger.info("%s %s: updating %s (id=%d) size from %d to %d",
site['name'], dir_name, known_file['name'], known_file['id'],
known_size, result_size)
db.updateFileInfoSize(known_file['id'], result_size)
if removed_file_key_set:
for removed_file_key in removed_file_key_set:
db.deleteFileInfoFromDirectory(removed_file_key[0], removed_file_key[1])
# skip empty nikhef dirs
filteredSubDirectories = [loc for loc in result.subDirectories
if not ('nikhef' in loc.srmurl and 'generated' in loc.directory) ]
# skip sksp spectroscopy project
filteredSubDirectories = [loc for loc in filteredSubDirectories
if not ('sara' in loc.srmurl and 'sksp' in loc.directory and 'spectro' in loc.directory) ]
subDirectoryNames = [loc.directory for loc in filteredSubDirectories]
if subDirectoryNames:
#check for already known subdirectories in the db
known_subDirectoryNames_set = set(subdir['name'] for subdir in db.subDirectories(dir_id))
                            new_subdir_name_set = set(subDirectoryNames) - known_subDirectoryNames_set
logger.info("%s %s: %d out of %d subdirs are new, and %d are already known", site['name'], dir_name, len(new_subdir_name_set), len(subDirectoryNames), len(known_subDirectoryNames_set))
if new_subdir_name_set:
subdir_ids = db.insertSubDirectories(new_subdir_name_set, dir_id)
if len(subdir_ids) != len(new_subdir_name_set):
rescheduleVisit()
except (SrmlsException, ParseException) as e:
logger.error('Error while scanning %s\n%s' % (location.path(), str(e)))
if 'does not exist' in str(e):
with store.LTAStorageDb(self.dbcreds) as db:
db.deleteDirectory(self.dir_id)
else:
rescheduleVisit()
except Exception as e:
logger.exception(str(e))
with store.LTAStorageDb(self.dbcreds) as db:
logger.info('Rescheduling dir_id %d for new visit.' % (self.dir_id,))
db.updateDirectoryLastVisitTime(self.dir_id, datetime.datetime.utcnow() - VISIT_INTERVAL)
def populateDbWithLTASitesAndRootDirs(db):
"""
Helper method to fill empty database with (hardcoded) information about our LTA partners/sites/quotas
"""
if not db.sites():
#db.insertSite('nikhef', 'srm://tbn18.nikhef.nl:8446')
sara_id = db.insertSiteIfNotExists('sara', 'srm://srm.grid.sara.nl:8443')
juelich_id = db.insertSiteIfNotExists('juelich', 'srm://lofar-srm.fz-juelich.de:8443')
poznan_id = db.insertSiteIfNotExists('poznan', 'srm://lta-head.lofar.psnc.pl:8443')
# insert the LTA site root dir(s)
db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/software')
db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/ops')
db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/storage')
db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/eor')
db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/pulsar')
db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/cosmics')
db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/surveys')
db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/user')
db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/proc')
db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/trans')
db.insertRootDirectory('sara', '/pnfs/grid.sara.nl/data/lofar/lotest')
db.insertRootDirectory('juelich', '/pnfs/fz-juelich.de/data/lofar/ops')
db.insertRootDirectory('poznan', '/lofar/ops')
#db.insertRootDirectory('nikhef', '/dpm/nikhef.nl/home/lofar')
def end_of_year(year):
'''little helper function which returns a datetime timestamp for the end of the given year'''
return datetime.datetime(year, 12, 31, 23, 59, 59)
# insert quota as given by our LTA partners
db.insertSiteQuota(sara_id, 5e15, end_of_year(2012))
db.insertSiteQuota(sara_id, 8e15, end_of_year(2013))
db.insertSiteQuota(sara_id, 11e15, end_of_year(2014))
db.insertSiteQuota(sara_id, 14e15, end_of_year(2015))
db.insertSiteQuota(sara_id, 17e15, end_of_year(2016))
db.insertSiteQuota(sara_id, 20e15, end_of_year(2017))
db.insertSiteQuota(sara_id, 23e15, end_of_year(2018))
db.insertSiteQuota(juelich_id, 2.5e15, end_of_year(2013))
db.insertSiteQuota(juelich_id, 4.5e15, end_of_year(2014))
db.insertSiteQuota(juelich_id, 6.5e15, end_of_year(2015))
db.insertSiteQuota(juelich_id, 8.5e15, end_of_year(2016))
db.insertSiteQuota(juelich_id, 10.5e15, end_of_year(2017))
db.insertSiteQuota(juelich_id, 12.5e15, end_of_year(2018))
db.insertSiteQuota(poznan_id, 0.5e15, end_of_year(2016))
db.insertSiteQuota(poznan_id, 3.5e15, end_of_year(2017))
db.insertSiteQuota(poznan_id, 5.5e15, end_of_year(2018))
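# Illustrative usage (an added sketch, not part of the original module):
# given db credentials obtained as in main() below, an empty database can be
# primed directly with
#
# db = store.LTAStorageDb(dbcreds)
# populateDbWithLTASitesAndRootDirs(db)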
def main():
'''the main function scanning all locations and gathering the results'''
from optparse import OptionParser
from lofar.common import dbcredentials
from lofar.messaging import DEFAULT_BROKER, DEFAULT_BUSNAME
from lofar.lta.ltastorageoverview.ingesteventhandler import LTASOIngestEventHandler, IngestEventMesssageBusListener
# Check the invocation arguments
parser = OptionParser("%prog [options]", description='runs the lta scraper and stores results in the speficied database.')
parser.add_option('-j', '--parallel', dest='parallel', type='int', default=8, help='number of parallel srmls jobs to run, default: %default')
parser.add_option('-b', '--broker', dest='broker', type='string', default=DEFAULT_BROKER,
help='Address of the messaging broker, default: %default')
parser.add_option('-e', '--exchange', dest='exchange', type='string',
default=DEFAULT_BUSNAME,
help='Name of the bus exchange on the broker on which the ingest notifications are published, default: %default')
parser.add_option('-V', '--verbose', dest='verbose', action='store_true', help='verbose logging')
parser.add_option_group(dbcredentials.options_group(parser))
parser.set_defaults(dbcredentials="LTASO")
(options, args) = parser.parse_args()
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
level=logging.DEBUG if options.verbose else logging.INFO)
options.parallel = max(1, min(8*multiprocessing.cpu_count(), options.parallel))
logger.info("Using maximum number of parallel srmls jobs: %d" % options.parallel)
dbcreds = dbcredentials.parse_options(options)
logger.info("Using dbcreds: %s" % dbcreds.stringWithHiddenPassword())
db = store.LTAStorageDb(dbcreds)
populateDbWithLTASitesAndRootDirs(db)
# for each site we want one or more ResultGetterThreads
# so make a dict with a list per site based on the locations
getters = dict([(site['name'],[]) for site in db.sites()])
# some helper functions
def numLocationsInQueues():
'''returns the total number of locations in the queues'''
return db.numDirectoriesNotVisitedSince(datetime.datetime.utcnow() - VISIT_INTERVAL)
def totalNumGetters():
'''returns the total number of parallel running ResultGetterThreads'''
return sum([len(v) for v in list(getters.values())])
def cleanupFinishedGetters():
# get rid of old finished ResultGetterThreads
finishedGetters = dict([(site_name, [getter for getter in getterList if not getter.isAlive()]) for site_name, getterList in list(getters.items())])
for site_name,finishedGetterList in list(finishedGetters.items()):
for finishedGetter in finishedGetterList:
getters[site_name].remove(finishedGetter)
# the main loop
# loop over the locations and spawn ResultGetterThreads to get the results parallel
# use load balancing over the different sites and with respect to queue lengths
# do not overload this host system
with IngestEventMesssageBusListener(handler_type=LTASOIngestEventHandler,
handler_kwargs={'dbcreds': dbcreds},
exchange=options.exchange, broker=options.broker):
while True:
cleanupFinishedGetters()
# spawn new ResultGetterThreads
# do not overload this host system
num_waiting = numLocationsInQueues()
while (num_waiting > 0 and
totalNumGetters() < options.parallel and
os.getloadavg()[0] < 4*multiprocessing.cpu_count()):
sitesStats = db.visitStats(datetime.datetime.utcnow() - VISIT_INTERVAL)
for site_name, site_stats in list(sitesStats.items()):
numGetters = len(getters[site_name])
queue_length = site_stats['queue_length']
weight = float(queue_length) / float(20 * (numGetters + 1))
if numGetters == 0 and queue_length > 0:
weight = 1e6 # make getterless sites extra important, so each site keeps flowing
site_stats['# get'] = numGetters
site_stats['weight'] = weight
totalWeight = max(1.0, sum([site_stats['weight'] for site_stats in list(sitesStats.values())]))
logger.debug("siteStats:\n%s" % str('\n'.join([str((k, v)) for k, v in list(sitesStats.items())])))
# now pick a random site using the weights
chosen_site_name = None
cumul = 0.0
r = random()
for site_name,site_stats in list(sitesStats.items()):
ratio = site_stats['weight']/totalWeight
cumul += ratio
if r <= cumul and site_stats['queue_length'] > 0:
chosen_site_name = site_name
break
if not chosen_site_name:
break
chosen_dir_id = sitesStats[chosen_site_name]['least_recent_visited_dir_id']
db.updateDirectoryLastVisitTime(chosen_dir_id, datetime.datetime.utcnow())
logger.debug("chosen_site_name: %s chosen_dir_id: %s", chosen_site_name, chosen_dir_id)
# make and start a new ResultGetterThread for the location deque of the chosen site
newGetter = ResultGetterThread(dbcreds, chosen_dir_id)
newGetter.start()
getters[chosen_site_name].append(newGetter)
cleanupFinishedGetters()
# refresh num_waiting
num_waiting = numLocationsInQueues()
logger.info('numLocationsInQueues=%d totalNumGetters=%d siteQueueLengths: %s load_5min: %.1f' % (num_waiting,
totalNumGetters(),
' '.join(['%s:%d' % (name, stats['queue_length']) for name, stats in list(sitesStats.items())]),
os.getloadavg()[0]))
# sleep before main loop next iteration
# to wait for some results
# and some getters to finish
time.sleep(30 if num_waiting <= options.parallel else 0.25)
# all locations were processed
if __name__ == "__main__":
main()
|
kernsuite-debian/lofar
|
LTA/ltastorageoverview/lib/scraper.py
|
Python
|
gpl-3.0
| 26,376
|
[
"VisIt"
] |
af73c1c6284c3a809f406f3de8a7cc4f230256a6482cdbde00887606572c3162
|
#!/usr/bin/env python
# Dan Blankenberg
import sys
import bx.align.maf
from galaxy.tools.util import maf_utilities
assert sys.version_info[:2] >= ( 2, 4 )
def __main__():
output_name = sys.argv.pop(1)
input_name = sys.argv.pop(1)
species = sys.argv.pop(1)
out = open(output_name, 'w')
count = 0
# write interval header line
out.write( "#chrom\tstart\tend\tstrand\n" )
try:
for block in bx.align.maf.Reader( open( input_name, 'r' ) ):
for c in maf_utilities.iter_components_by_src_start( block, species ):
if c is not None:
out.write( "%s\t%i\t%i\t%s\n" % ( maf_utilities.src_split( c.src )[-1], c.get_forward_strand_start(), c.get_forward_strand_end(), c.strand ) )
count += 1
except Exception as e:
print >> sys.stderr, "There was a problem processing your input: %s" % e
out.close()
print "%i MAF blocks converted to Genomic Intervals for species %s." % ( count, species )
if __name__ == "__main__":
__main__()
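# Illustrative invocation (an added sketch; file names and the species label
# are hypothetical). The positional arguments are output file, input MAF file
# and species, matching the sys.argv.pop(1) order above:
#
# python maf_to_interval_converter.py regions.interval alignment.maf hg18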
|
icaoberg/cellorganizer-galaxy-tools
|
datatypes/converters/maf_to_interval_converter.py
|
Python
|
gpl-3.0
| 1,052
|
[
"Galaxy"
] |
c85a2c3830f673ed20393d14224dd6c7155ece636a958d5835d1f5c847cfd0d7
|
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2017 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
'''VASP POSCAR, CHGCAR and POTCAR file formats'''
import numpy as np
from horton.units import angstrom, electronvolt
from horton.periodic import periodic
from horton.cext import Cell
from horton.grid.cext import UniformGrid
__all__ = ['load_chgcar', 'load_locpot', 'load_poscar', 'dump_poscar']
def _unravel_counter(counter, shape):
result = []
for i in range(0, len(shape)):
result.append(counter % shape[i])
counter //= shape[i]
return result
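# Quick illustration (an added sketch, not in the original module) of how
# _unravel_counter walks a flat counter in Fortran order, i.e. the first
# axis varies fastest:
#
# >>> [_unravel_counter(c, [2, 3]) for c in range(6)]
# [[0, 0], [1, 0], [0, 1], [1, 1], [0, 2], [1, 2]]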
def _load_vasp_header(f):
'''Load the cell and atoms from a VASP file
File specification provided here:
http://cms.mpi.univie.ac.at/vasp/guide/node59.html
**Arguments:**
f
An open file object
**Returns:** ``title``, ``cell``, ``numbers``, ``coordinates``
'''
# read the title
title = next(f).strip()
# read the universal scaling factor
scaling = float(next(f).strip())
# read cell parameters in angstrom, without the universal scaling factor.
# each row is one cell vector
rvecs = []
for i in range(3):
rvecs.append([float(w) for w in next(f).split()])
rvecs = np.array(rvecs) * angstrom * scaling
# Convert to cell object
cell = Cell(rvecs)
# note that in older VASP versions the following line might be absent
vasp_numbers = [periodic[w].number for w in next(f).split()]
vasp_counts = [int(w) for w in next(f).split()]
numbers = []
for n, c in zip(vasp_numbers, vasp_counts):
numbers.extend([n] * c)
numbers = np.array(numbers)
line = next(f)
# the 7th line can optionally indicate selective dynamics
if line[0].lower() in ['s']:
line = next(f)
# parse direct/cartesian switch
cartesian = line[0].lower() in ['c', 'k']
# read the coordinates
coordinates = []
for line in f:
# check if all coordinates are read
if (len(line.strip()) == 0) or (len(coordinates) == numbers.shape[0]):
break
coordinates.append([float(w) for w in line.split()[:3]])
if cartesian:
coordinates = np.array(coordinates) * angstrom * scaling
else:
coordinates = np.dot(np.array(coordinates), rvecs)
return title, cell, numbers, coordinates
def _load_vasp_grid(filename):
'''Load a grid data file from VASP 5
**Arguments:**
filename
The VASP filename
**Returns:** a dictionary containing: ``title``, ``coordinates``,
``numbers``, ``cell``, ``grid``, ``cube_data``.
'''
with open(filename) as f:
# Load header
title, cell, numbers, coordinates = _load_vasp_header(f)
# read the shape of the data
shape = np.array([int(w) for w in next(f).split()])
# read data
cube_data = np.zeros(shape, float)
counter = 0
for line in f:
if counter >= cube_data.size:
break
for w in line.split():
i0, i1, i2 = _unravel_counter(counter, shape)
# Fill in the data with transposed indexes. In horton, X is
# the slowest index while Z is the fastest.
cube_data[i0, i1, i2] = float(w)
counter += 1
assert counter == cube_data.size
return {
'title': title,
'coordinates': coordinates,
'numbers': numbers,
'cell': cell,
'grid': UniformGrid(np.zeros(3), cell.rvecs / shape.reshape(-1, 1), shape, np.ones(3, int)),
'cube_data': cube_data,
}
def load_chgcar(filename):
'''Reads a vasp 5 chgcar file.
**Arguments:**
filename
The VASP filename
**Returns:** a dictionary containing: ``title``, ``coordinates``,
``numbers``, ``cell``, ``grid``, ``cube_data``.
'''
result = _load_vasp_grid(filename)
# renormalize electron density
result['cube_data'] /= result['cell'].volume
return result
def load_locpot(filename):
'''Reads a vasp 5 locpot file.
**Arguments:**
filename
The VASP filename
**Returns:** a dictionary containing: ``title``, ``coordinates``,
``numbers``, ``cell``, ``grid``, ``cube_data``.
'''
result = _load_vasp_grid(filename)
# convert locpot to atomic units
result['cube_data'] *= electronvolt
return result
def load_poscar(filename):
'''Reads a vasp 5 poscar file.
**Arguments:**
filename
The VASP filename
**Returns:** a dictionary containing: ``title``, ``coordinates``,
``numbers``, ``cell``.
'''
with open(filename) as f:
# Load header
title, cell, numbers, coordinates = _load_vasp_header(f)
return {
'title': title,
'coordinates': coordinates,
'numbers': numbers,
'cell': cell,
}
def dump_poscar(filename, data):
'''Write a file in VASP's POSCAR format
**Arguments:**
filename
The name of the file to be written. This is usually POSCAR.
data
An IOData instance. Must contain ``coordinates``, ``numbers``,
``cell``. May contain ``title``.
'''
with open(filename, 'w') as f:
print(getattr(data, 'title', 'Created with HORTON'), file=f)
print(' 1.00000000000000', file=f)
# Write cell vectors, each row is one vector in angstrom:
rvecs = data.cell.rvecs
for rvec in rvecs:
print(' % 21.16f % 21.16f % 21.16f' % tuple(rvec / angstrom), file=f)
# Construct list of elements to make sure the coordinates get written
# in this order. Heaviest elements are put first.
unumbers = sorted(np.unique(data.numbers))[::-1]
print(' '.join('%5s' % periodic[unumber].symbol for unumber in unumbers), file=f)
print(' '.join('%5i' % (data.numbers == unumber).sum() for unumber in unumbers), file=f)
print('Selective dynamics', file=f)
print('Direct', file=f)
# Write the coordinates
for unumber in unumbers:
indexes = (data.numbers == unumber).nonzero()[0]
for index in indexes:
row = data.cell.to_frac(data.coordinates[index])
print(' % 21.16f % 21.16f % 21.16f F F F' % tuple(row), file=f)
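# Usage sketch (illustrative, not part of the original module). Note the
# asymmetry documented above: load_poscar returns a plain dictionary, while
# dump_poscar expects an object exposing coordinates/numbers/cell as
# attributes (an IOData instance), so a direct round-trip requires wrapping
# the dictionary first. 'POSCAR' is an assumed file name.
#
# fields = load_poscar('POSCAR')
# print(fields['title'], fields['numbers'])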
|
theochem/horton
|
horton/io/vasp.py
|
Python
|
gpl-3.0
| 7,128
|
[
"VASP"
] |
edabaf2510db29cabae42160455e101f5f011912c8439ff52f63c52a4bc6eea3
|
import networkx as nx
import random
import time
import sys
import tqdm
import os
__author__ = "Giulio Rossetti"
__contact__ = "giulio.rossetti@isti.cnr.it"
__license__ = "BSD 2 Clause"
def timeit(method):
"""
Decorator: Compute the execution time of a function
:param method: the function
:return: the method runtime
"""
def timed(*arguments, **kw):
ts = time.time()
result = method(*arguments, **kw)
te = time.time()
sys.stdout.write('Time: %r %2.2f sec\n' % (method.__name__.strip("_"), te - ts))
sys.stdout.write('------------------------------------\n')
sys.stdout.flush()
return result
return timed
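# Example application of the decorator above (an added sketch, not part of
# the original file); note that __name__.strip("_") drops the underscores:
#
# @timeit
# def _demo():
#     time.sleep(1)  # on return prints roughly: Time: 'demo' 1.00 sec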
class Demon(object):
"""
Flat Merge version of Demon algorithm as described in:
Michele Coscia, Giulio Rossetti, Fosca Giannotti, Dino Pedreschi:
DEMON: a local-first discovery method for overlapping communities.
KDD 2012:615-623
"""
def __init__(self, graph=None, network_filename=None, epsilon=0.25, min_community_size=3, file_output=None):
"""
Constructor
:@param network_filename: the networkx filename
:@param epsilon: the tolerance required in order to merge communities
:@param min_community_size:min nodes needed to form a community
:@param file_output: True/False
"""
if graph is None:
self.g = nx.Graph()
if network_filename is not None:
self.__read_graph(network_filename)
else:
raise ImportError
else:
self.g = graph
self.epsilon = epsilon
self.min_community_size = min_community_size
self.file_output = file_output
self.base = os.getcwd()
@timeit
def __read_graph(self, network_filename):
"""
Read .ncol network file
:param network_filename: complete path for the .ncol file
:return: an undirected network
"""
self.g = nx.read_edgelist(network_filename, nodetype=int)
@timeit
def execute(self):
"""
Execute Demon algorithm
"""
for n in self.g.nodes():
self.g.nodes[n]['communities'] = [n]
all_communities = {}
for ego in tqdm.tqdm(nx.nodes(self.g), ncols=35, bar_format='Exec: {l_bar}{bar}'):
ego_minus_ego = nx.ego_graph(self.g, ego, 1, False)
community_to_nodes = self.__overlapping_label_propagation(ego_minus_ego, ego)
# merging phase
for c in community_to_nodes.keys():
if len(community_to_nodes[c]) > self.min_community_size:
actual_community = community_to_nodes[c]
all_communities = self.__merge_communities(all_communities, actual_community)
# write output on file
if self.file_output:
with open(self.file_output, "w") as out_file_com:
for idc, c in enumerate(all_communities.keys()):
out_file_com.write("%d\t%s\n" % (idc, str(sorted(c))))
return list(all_communities.keys())
@staticmethod
def __overlapping_label_propagation(ego_minus_ego, ego, max_iteration=10):
"""
:@param max_iteration: number of desired iteration for the label propagation
:@param ego_minus_ego: ego network minus its center
:@param ego: ego network center
"""
t = 0
old_node_to_coms = {}
while t <= max_iteration:
t += 1
node_to_coms = {}
nodes = list(nx.nodes(ego_minus_ego))
random.shuffle(nodes)
count = -len(nodes)
for n in nodes:
label_freq = {}
n_neighbors = list(nx.neighbors(ego_minus_ego, n))
if len(n_neighbors) < 1:
continue
# compute the frequency of the labels
for nn in n_neighbors:
communities_nn = [nn]
if nn in old_node_to_coms:
communities_nn = old_node_to_coms[nn]
for nn_c in communities_nn:
if nn_c in label_freq:
v = label_freq.get(nn_c)
label_freq[nn_c] = v + 1
else:
label_freq[nn_c] = 1
# first run, random community label initialization
if t == 1:
if not len(n_neighbors) == 0:
r_label = random.sample(list(label_freq.keys()), 1)
ego_minus_ego.nodes[n]['communities'] = r_label
old_node_to_coms[n] = r_label
count += 1
continue
# choosing the majority
else:
labels = []
max_freq = -1
for l, c in label_freq.items():
if c > max_freq:
max_freq = c
labels = [l]
elif c == max_freq:
labels.append(l)
node_to_coms[n] = labels
if n not in old_node_to_coms or not set(node_to_coms[n]) == set(old_node_to_coms[n]):
old_node_to_coms[n] = node_to_coms[n]
ego_minus_ego.nodes[n]['communities'] = labels
# build the communities reintroducing the ego
community_to_nodes = {}
for n in nx.nodes(ego_minus_ego):
if len(list(nx.neighbors(ego_minus_ego, n))) == 0:
ego_minus_ego.nodes[n]['communities'] = [n]
c_n = ego_minus_ego.nodes[n]['communities']
for c in c_n:
if c in community_to_nodes:
com = community_to_nodes.get(c)
com.append(n)
else:
nodes = [n, ego]
community_to_nodes[c] = nodes
return community_to_nodes
def __merge_communities(self, communities, actual_community):
"""
:param communities: dictionary of communities
:param actual_community: a community
"""
# if the community is already present return
if tuple(actual_community) in communities:
return communities
else:
# search a community to merge with
inserted = False
for test_community in communities.items():
union = self.__generalized_inclusion(actual_community, test_community[0])
# community to merge with identified!
# N.B. one-to-one merge with no predefined visit ordering: non-deterministic behaviours expected
if union is not None:
communities.pop(test_community[0])
communities[tuple(sorted(union))] = 0
inserted = True
break
# not merged: insert the original community
if not inserted:
communities[tuple(sorted(actual_community))] = 0
return communities
def __generalized_inclusion(self, c1, c2):
"""
:param c1: community
:param c2: community
"""
intersection = set(c2) & set(c1)
smaller_set = min(len(c1), len(c2))
if len(intersection) == 0:
return None
res = 0
if not smaller_set == 0:
res = float(len(intersection)) / float(smaller_set)
if res >= self.epsilon: # at least e% of similarity wrt the smallest set
union = set(c2) | set(c1)
return union
return None
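# Worked example for the merge criterion above (added illustration): with
# epsilon = 0.25, c1 = [1, 2, 3] and c2 = [2, 3, 4, 5] share two nodes and
# the smaller community has three, so res = 2/3 ~ 0.67 >= 0.25 and the
# union {1, 2, 3, 4, 5} is returned for merging.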
def main():
import argparse
sys.stdout.write("-------------------------------------\n")
sys.stdout.write(" {DEMON} \n")
sys.stdout.write(" Democratic Estimate of the \n")
sys.stdout.write(" Modular Organization of a Network \n")
sys.stdout.write("-------------------------------------\n")
sys.stdout.write("Author: " + __author__ + "\n")
sys.stdout.write("Email: " + __contact__ + "\n")
sys.stdout.write("------------------------------------\n")
parser = argparse.ArgumentParser()
parser.add_argument('network_file', type=str, help='network file (edge list format)')
parser.add_argument('epsilon', type=float, help='merging threshold')
parser.add_argument('-c', '--min_com_size', type=int, help='minimum community size', default=3)
args = parser.parse_args()
dm = Demon(graph=None, network_filename=args.network_file, epsilon=args.epsilon,
min_community_size=args.min_com_size, file_output="demon_communities.tsv")
dm.execute()
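# Library-style usage sketch (added, not in the original file): Demon can
# also be driven from an existing networkx graph instead of an edge list
# file. The example graph below is an assumption for illustration.
#
# import networkx as nx
# g = nx.karate_club_graph()
# communities = Demon(graph=g, epsilon=0.25, min_community_size=3).execute()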
|
GiulioRossetti/DEMON
|
demon/alg/Demon.py
|
Python
|
bsd-2-clause
| 8,828
|
[
"VisIt"
] |
1c10aa7d1f03adf44184e6d3ebcf29fbb7ee6d3feb42805f42df11a0aec7ba24
|
#!/usr/bin/env python
#JSON {"lot": "RHF/cc-pvtz",
#JSON "scf": "PlainSCFSolver",
#JSON "linalg": "DenseLinalgFactory",
#JSON "difficulty": 1,
#JSON "description": "Basic RHF example with dense matrices, includes export of Hamiltonian"}
from horton import *
import numpy as np
# Hartree-Fock calculation
# ------------------------
# Construct a molecule from scratch
bond_length = 1.098*angstrom
mol = IOData(title='dinitrogen')
mol.coordinates = np.array([[0.0, 0.0, 0.0], [0.0, 0.0, bond_length]])
mol.numbers = np.array([7, 7])
# Create a Gaussian basis set
obasis = get_gobasis(mol.coordinates, mol.numbers, 'cc-pvdz')
# Create a linalg factory
lf = DenseLinalgFactory(obasis.nbasis)
# Compute Gaussian integrals
olp = obasis.compute_overlap(lf)
kin = obasis.compute_kinetic(lf)
na = obasis.compute_nuclear_attraction(mol.coordinates, mol.pseudo_numbers, lf)
er = obasis.compute_electron_repulsion(lf)
# Create alpha orbitals
exp_alpha = lf.create_expansion()
# Initial guess
guess_core_hamiltonian(olp, kin, na, exp_alpha)
# Construct the restricted HF effective Hamiltonian
external = {'nn': compute_nucnuc(mol.coordinates, mol.pseudo_numbers)}
terms = [
RTwoIndexTerm(kin, 'kin'),
RDirectTerm(er, 'hartree'),
RExchangeTerm(er, 'x_hf'),
RTwoIndexTerm(na, 'ne'),
]
ham = REffHam(terms, external)
# Decide how to occupy the orbitals (7 alpha electrons)
occ_model = AufbauOccModel(7)
# Converge WFN with plain SCF
scf_solver = PlainSCFSolver(1e-6)
scf_solver(ham, lf, olp, occ_model, exp_alpha)
# Write SCF results to a file
# ---------------------------
# Assign results to the molecule object and write it to a file, e.g. for
# later analysis
mol.title = 'RHF computation on dinitrogen'
mol.energy = ham.cache['energy']
mol.obasis = obasis
mol.exp_alpha = exp_alpha
# useful for visualization:
mol.to_file('n2-scf.molden')
# useful for post-processing (results stored in double precision)
mol.to_file('n2-scf.h5')
# Export Hamiltonian in Hartree-Fock molecular orbital basis (all orbitals active)
# --------------------------------------------------------------------------------
# Transform orbitals
one = kin.copy()
one.iadd(na)
two = er
(one_mo,), (two_mo,) = transform_integrals(one, two, 'tensordot', mol.exp_alpha)
# Write files
mol_all_active = IOData(core_energy=external['nn'], one_mo=one_mo, two_mo=two_mo)
# useful for exchange with other codes
mol_all_active.to_file('n2.FCIDUMP')
# useful for exchange with other HORTON scripts
mol_all_active.to_file('n2-hamiltonian.h5')
# Export Hamiltonian in Hartree-Fock molecular orbital basis for CAS(8,8)
# -----------------------------------------------------------------------
# Transform orbitals
one_small, two_small, core_energy = split_core_active(one, er,
external['nn'], exp_alpha, ncore=2, nactive=8)
# Write files
mol_cas88 = IOData(core_energy=core_energy, one_mo=one_small, two_mo=two_small, nelec=8, ms2=0, lf=lf)
# useful for exchange with other codes
mol_cas88.to_file('n2-cas8-8.FCIDUMP')
# useful for exchange with other HORTON scripts
mol_cas88.to_file('n2-hamiltonian-cas8-8.h5')
|
crisely09/horton
|
data/examples/hf_dft/rhf_n2_dense.py
|
Python
|
gpl-3.0
| 3,100
|
[
"Gaussian"
] |
e04976c3a52f76893cebfaf4a12742f2ed364a0fba932753001ac8f21f20bc5a
|
#!/usr/bin/env python
########################################################################
# File : dirac-dms-get-file
# Author : Stuart Paterson
########################################################################
"""
Retrieve a single file or list of files from Grid storage to the current directory.
Example:
$ dirac-dms-get-file /formation/user/v/vhamar/Example.txt
{'Failed': {},
'Successful': {'/formation/user/v/vhamar/Example.txt': '/afs/in2p3.fr/home/h/hamar/Tests/DMS/Example.txt'}}
"""
import DIRAC
from DIRAC.Core.Base.Script import Script
@Script()
def main():
# Registering arguments will automatically add their description to the help menu
Script.registerArgument(["LFN: Logical File Name or file containing LFNs"])
Script.parseCommandLine(ignoreErrors=True)
lfns = Script.getPositionalArgs()
if len(lfns) < 1:
Script.showHelp()
from DIRAC.Interfaces.API.Dirac import Dirac
dirac = Dirac()
exitCode = 0
if len(lfns) == 1:
try:
with open(lfns[0], "r") as f:
lfns = f.read().splitlines()
except Exception:
pass
result = dirac.getFile(lfns, printOutput=True)
if not result["OK"]:
print("ERROR %s" % (result["Message"]))
exitCode = 2
DIRAC.exit(exitCode)
if __name__ == "__main__":
main()
|
DIRACGrid/DIRAC
|
src/DIRAC/Interfaces/scripts/dirac_dms_get_file.py
|
Python
|
gpl-3.0
| 1,368
|
[
"DIRAC"
] |
97320b323ac47e1796aea79e9d1b6fefecbd55b1f1ef4d2c4c63d9cd5931ff91
|
# -*- coding: utf-8 -*
"""
This file contains the Qudi logic class for optimizing scanner position.
Qudi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Qudi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Qudi. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""
from qtpy import QtCore
import numpy as np
import time
from logic.generic_logic import GenericLogic
from core.module import Connector, ConfigOption, StatusVar
from core.util.mutex import Mutex
class OptimizerLogic(GenericLogic):
"""This is the Logic class for optimizing scanner position on bright features.
"""
_modclass = 'optimizerlogic'
_modtype = 'logic'
# declare connectors
confocalscanner1 = Connector(interface='ConfocalScannerInterface')
fitlogic = Connector(interface='FitLogic')
# declare status vars
_clock_frequency = StatusVar('clock_frequency', 50)
return_slowness = StatusVar(default=20)
refocus_XY_size = StatusVar('xy_size', 0.6e-6)
optimizer_XY_res = StatusVar('xy_resolution', 10)
refocus_Z_size = StatusVar('z_size', 2e-6)
optimizer_Z_res = StatusVar('z_resolution', 30)
hw_settle_time = StatusVar('settle_time', 0.1)
optimization_sequence = StatusVar(default=['XY', 'Z'])
do_surface_subtraction = StatusVar('surface_subtraction', False)
surface_subtr_scan_offset = StatusVar('surface_subtraction_offset', 1e-6)
opt_channel = StatusVar('optimization_channel', 0)
# "private" signals to keep track of activities here in the optimizer logic
_sigScanNextXyLine = QtCore.Signal()
_sigScanZLine = QtCore.Signal()
_sigCompletedXyOptimizerScan = QtCore.Signal()
_sigDoNextOptimizationStep = QtCore.Signal()
_sigFinishedAllOptimizationSteps = QtCore.Signal()
# public signals
sigImageUpdated = QtCore.Signal()
sigRefocusStarted = QtCore.Signal(str)
sigRefocusXySizeChanged = QtCore.Signal()
sigRefocusZSizeChanged = QtCore.Signal()
sigRefocusFinished = QtCore.Signal(str, list)
sigClockFrequencyChanged = QtCore.Signal(int)
sigPositionChanged = QtCore.Signal(float, float, float)
def __init__(self, config, **kwargs):
super().__init__(config=config, **kwargs)
# locking for thread safety
self.threadlock = Mutex()
self.stopRequested = False
self.is_crosshair = True
# Keep track of who called the refocus
self._caller_tag = ''
def on_activate(self):
""" Initialisation performed during activation of the module.
@return int: error code (0:OK, -1:error)
"""
self._scanning_device = self.get_connector('confocalscanner1')
self._fit_logic = self.get_connector('fitlogic')
# Reads in the maximal scanning range. The unit of that scan range is micrometer!
self.x_range = self._scanning_device.get_position_range()[0]
self.y_range = self._scanning_device.get_position_range()[1]
self.z_range = self._scanning_device.get_position_range()[2]
self._initial_pos_x = 0.
self._initial_pos_y = 0.
self._initial_pos_z = 0.
self.optim_pos_x = self._initial_pos_x
self.optim_pos_y = self._initial_pos_y
self.optim_pos_z = self._initial_pos_z
self.optim_sigma_x = 0.
self.optim_sigma_y = 0.
self.optim_sigma_z = 0.
self._max_offset = 3.
# Sets the current position to the center of the maximal scanning range
self._current_x = (self.x_range[0] + self.x_range[1]) / 2
self._current_y = (self.y_range[0] + self.y_range[1]) / 2
self._current_z = (self.z_range[0] + self.z_range[1]) / 2
self._current_a = 0.0
###########################
# Fit Params and Settings #
model, params = self._fit_logic.make_gaussianlinearoffset_model()
self.z_params = params
self.use_custom_params = {name: False for name, param in params.items()}
# Initialization of internal counter for scanning
self._xy_scan_line_count = 0
# Initialization of optimization sequence step counter
self._optimization_step = 0
# Sets connections between signals and functions
self._sigScanNextXyLine.connect(self._refocus_xy_line, QtCore.Qt.QueuedConnection)
self._sigScanZLine.connect(self.do_z_optimization, QtCore.Qt.QueuedConnection)
self._sigCompletedXyOptimizerScan.connect(self._set_optimized_xy_from_fit, QtCore.Qt.QueuedConnection)
self._sigDoNextOptimizationStep.connect(self._do_next_optimization_step, QtCore.Qt.QueuedConnection)
self._sigFinishedAllOptimizationSteps.connect(self.finish_refocus)
self._initialize_xy_refocus_image()
self._initialize_z_refocus_image()
return 0
def on_deactivate(self):
""" Reverse steps of activation
@return int: error code (0:OK, -1:error)
"""
return 0
def check_optimization_sequence(self):
""" Check the sequence of scan events for the optimization.
"""
# Check the supplied optimization sequence only contains 'XY' and 'Z'
if len(set(self.optimization_sequence).difference({'XY', 'Z'})) > 0:
self.log.error('Requested optimization sequence contains unknown steps. Please provide '
'a sequence containing only \'XY\' and \'Z\' strings. '
'The default [\'XY\', \'Z\'] will be used.')
self.optimization_sequence = ['XY', 'Z']
def get_scanner_count_channels(self):
""" Get lis of counting channels from scanning device.
@return list(str): names of counter channels
"""
return self._scanning_device.get_scanner_count_channels()
def set_clock_frequency(self, clock_frequency):
"""Sets the frequency of the clock
@param int clock_frequency: desired frequency of the clock
@return int: error code (0:OK, -1:error)
"""
# checks if scanner is still running
if self.getState() == 'locked':
return -1
else:
self._clock_frequency = int(clock_frequency)
self.sigClockFrequencyChanged.emit(self._clock_frequency)
return 0
def set_refocus_XY_size(self, size):
""" Set the number of pixels in the refocus image for X and Y directions
@param int size: XY image size in pixels
"""
self.refocus_XY_size = size
self.sigRefocusXySizeChanged.emit()
def set_refocus_Z_size(self, size):
""" Set the number of values for Z refocus
@param int size: number of values for Z refocus
"""
self.refocus_Z_size = size
self.sigRefocusZSizeChanged.emit()
def start_refocus(self, initial_pos=None, caller_tag='unknown', tag='logic'):
""" Starts the optimization scan around initial_pos
@param list initial_pos: with the structure [float, float, float]
@param str caller_tag:
@param str tag:
"""
# checking if refocus corresponding to crosshair or corresponding to initial_pos
if isinstance(initial_pos, (np.ndarray,)) and initial_pos.size >= 3:
self._initial_pos_x, self._initial_pos_y, self._initial_pos_z = initial_pos[0:3]
elif isinstance(initial_pos, (list, tuple)) and len(initial_pos) >= 3:
self._initial_pos_x, self._initial_pos_y, self._initial_pos_z = initial_pos[0:3]
elif initial_pos is None:
scpos = self._scanning_device.get_scanner_position()[0:3]
self._initial_pos_x, self._initial_pos_y, self._initial_pos_z = scpos
else:
pass # TODO: throw error
# Keep track of where the start_refocus was initiated
self._caller_tag = caller_tag
# Set the optim_pos values to match the initial_pos values.
# This means we can use optim_pos in subsequent steps and ensure
# that we benefit from any completed optimization step.
self.optim_pos_x = self._initial_pos_x
self.optim_pos_y = self._initial_pos_y
self.optim_pos_z = self._initial_pos_z
self.optim_sigma_x = 0.
self.optim_sigma_y = 0.
self.optim_sigma_z = 0.
self._xy_scan_line_count = 0
self._optimization_step = 0
self.check_optimization_sequence()
scanner_status = self.start_scanner()
if scanner_status < 0:
self.sigRefocusFinished.emit(
self._caller_tag,
[self.optim_pos_x, self.optim_pos_y, self.optim_pos_z, 0])
return
self.sigRefocusStarted.emit(tag)
self._sigDoNextOptimizationStep.emit()
def stop_refocus(self):
"""Stops refocus."""
with self.threadlock:
self.stopRequested = True
def _initialize_xy_refocus_image(self):
"""Initialisation of the xy refocus image."""
self._xy_scan_line_count = 0
# Take optim pos as center of refocus image, to benefit from any previous
# optimization steps that have occurred.
x0 = self.optim_pos_x
y0 = self.optim_pos_y
# defining position intervals for refocus
xmin = np.clip(x0 - 0.5 * self.refocus_XY_size, self.x_range[0], self.x_range[1])
xmax = np.clip(x0 + 0.5 * self.refocus_XY_size, self.x_range[0], self.x_range[1])
ymin = np.clip(y0 - 0.5 * self.refocus_XY_size, self.y_range[0], self.y_range[1])
ymax = np.clip(y0 + 0.5 * self.refocus_XY_size, self.y_range[0], self.y_range[1])
self._X_values = np.linspace(xmin, xmax, num=self.optimizer_XY_res)
self._Y_values = np.linspace(ymin, ymax, num=self.optimizer_XY_res)
self._Z_values = self.optim_pos_z * np.ones(self._X_values.shape)
self._A_values = np.zeros(self._X_values.shape)
self._return_X_values = np.linspace(xmax, xmin, num=self.optimizer_XY_res)
self._return_A_values = np.zeros(self._return_X_values.shape)
self.xy_refocus_image = np.zeros((
len(self._Y_values),
len(self._X_values),
3 + len(self.get_scanner_count_channels())))
self.xy_refocus_image[:, :, 0] = np.full((len(self._Y_values), len(self._X_values)), self._X_values)
y_value_matrix = np.full((len(self._X_values), len(self._Y_values)), self._Y_values)
self.xy_refocus_image[:, :, 1] = y_value_matrix.transpose()
self.xy_refocus_image[:, :, 2] = self.optim_pos_z * np.ones((len(self._Y_values), len(self._X_values)))
def _initialize_z_refocus_image(self):
"""Initialisation of the z refocus image."""
self._xy_scan_line_count = 0
# Take optim pos as center of refocus image, to benefit from any previous
# optimization steps that have occurred.
z0 = self.optim_pos_z
zmin = np.clip(z0 - 0.5 * self.refocus_Z_size, self.z_range[0], self.z_range[1])
zmax = np.clip(z0 + 0.5 * self.refocus_Z_size, self.z_range[0], self.z_range[1])
self._zimage_Z_values = np.linspace(zmin, zmax, num=self.optimizer_Z_res)
self._fit_zimage_Z_values = np.linspace(zmin, zmax, num=self.optimizer_Z_res)
self._zimage_A_values = np.zeros(self._zimage_Z_values.shape)
self.z_refocus_line = np.zeros((
len(self._zimage_Z_values),
len(self.get_scanner_count_channels())))
self.z_fit_data = np.zeros(len(self._fit_zimage_Z_values))
def _move_to_start_pos(self, start_pos):
"""Moves the scanner from its current position to the start position of the optimizer scan.
@param start_pos float[]: 3-point vector giving x, y, z position to go to.
"""
n_ch = len(self._scanning_device.get_scanner_axes())
scanner_pos = self._scanning_device.get_scanner_position()
lsx = np.linspace(scanner_pos[0], start_pos[0], self.return_slowness)
lsy = np.linspace(scanner_pos[1], start_pos[1], self.return_slowness)
lsz = np.linspace(scanner_pos[2], start_pos[2], self.return_slowness)
if n_ch <= 3:
move_to_start_line = np.vstack((lsx, lsy, lsz)[0:n_ch])
else:
move_to_start_line = np.vstack((lsx, lsy, lsz, np.ones(lsx.shape) * scanner_pos[3]))
counts = self._scanning_device.scan_line(move_to_start_line)
if np.any(counts == -1):
return -1
time.sleep(self.hw_settle_time)
return 0
def _refocus_xy_line(self):
"""Scanning a line of the xy optimization image.
This method repeats itself using the _sigScanNextXyLine
until the xy optimization image is complete.
"""
n_ch = len(self._scanning_device.get_scanner_axes())
# stop scanning if instructed
if self.stopRequested:
with self.threadlock:
self.stopRequested = False
self.finish_refocus()
self.sigImageUpdated.emit()
self.sigRefocusFinished.emit(
self._caller_tag,
[self.optim_pos_x, self.optim_pos_y, self.optim_pos_z, 0][0:n_ch])
return
# move to the start of the first line
if self._xy_scan_line_count == 0:
status = self._move_to_start_pos([self.xy_refocus_image[0, 0, 0],
self.xy_refocus_image[0, 0, 1],
self.xy_refocus_image[0, 0, 2]])
if status < 0:
self.log.error('Error during move to starting point.')
self.stop_refocus()
self._sigScanNextXyLine.emit()
return
lsx = self.xy_refocus_image[self._xy_scan_line_count, :, 0]
lsy = self.xy_refocus_image[self._xy_scan_line_count, :, 1]
lsz = self.xy_refocus_image[self._xy_scan_line_count, :, 2]
# scan a line of the xy optimization image
if n_ch <= 3:
line = np.vstack((lsx, lsy, lsz)[0:n_ch])
else:
line = np.vstack((lsx, lsy, lsz, np.zeros(lsx.shape)))
line_counts = self._scanning_device.scan_line(line)
if np.any(line_counts == -1):
self.log.error('The scan went wrong, killing the scanner.')
self.stop_refocus()
self._sigScanNextXyLine.emit()
return
lsx = self._return_X_values
lsy = self.xy_refocus_image[self._xy_scan_line_count, 0, 1] * np.ones(lsx.shape)
lsz = self.xy_refocus_image[self._xy_scan_line_count, 0, 2] * np.ones(lsx.shape)
if n_ch <= 3:
return_line = np.vstack((lsx, lsy, lsz))
else:
return_line = np.vstack((lsx, lsy, lsz, np.zeros(lsx.shape)))
return_line_counts = self._scanning_device.scan_line(return_line)
if np.any(return_line_counts == -1):
self.log.error('The scan went wrong, killing the scanner.')
self.stop_refocus()
self._sigScanNextXyLine.emit()
return
s_ch = len(self.get_scanner_count_channels())
self.xy_refocus_image[self._xy_scan_line_count, :, 3:3 + s_ch] = line_counts
self.sigImageUpdated.emit()
self._xy_scan_line_count += 1
if self._xy_scan_line_count < np.size(self._Y_values):
self._sigScanNextXyLine.emit()
else:
self._sigCompletedXyOptimizerScan.emit()
def _set_optimized_xy_from_fit(self):
"""Fit the completed xy optimizer scan and set the optimized xy position."""
fit_x, fit_y = np.meshgrid(self._X_values, self._Y_values)
xy_fit_data = self.xy_refocus_image[:, :, 3].ravel()
axes = (fit_x.flatten(), fit_y.flatten())
result_2D_gaus = self._fit_logic.make_twoDgaussian_fit(
xy_axes=axes,
data=xy_fit_data,
estimator=self._fit_logic.estimate_twoDgaussian
)
# print(result_2D_gaus.fit_report())
if result_2D_gaus.success is False:
self.log.error('Error: 2D Gaussian fit was not successful!')
print('2D gaussian fit not successful')
self.optim_pos_x = self._initial_pos_x
self.optim_pos_y = self._initial_pos_y
self.optim_sigma_x = 0.
self.optim_sigma_y = 0.
# abort here
else:
# @reviewer: Do we need this? With constraints, none of these cases should be possible...
if abs(self._initial_pos_x - result_2D_gaus.best_values['center_x']) < self._max_offset and abs(self._initial_pos_y - result_2D_gaus.best_values['center_y']) < self._max_offset:
if result_2D_gaus.best_values['center_x'] >= self.x_range[0] and result_2D_gaus.best_values['center_x'] <= self.x_range[1]:
if result_2D_gaus.best_values['center_y'] >= self.y_range[0] and result_2D_gaus.best_values['center_y'] <= self.y_range[1]:
self.optim_pos_x = result_2D_gaus.best_values['center_x']
self.optim_pos_y = result_2D_gaus.best_values['center_y']
self.optim_sigma_x = result_2D_gaus.best_values['sigma_x']
self.optim_sigma_y = result_2D_gaus.best_values['sigma_y']
else:
self.optim_pos_x = self._initial_pos_x
self.optim_pos_y = self._initial_pos_y
self.optim_sigma_x = 0.
self.optim_sigma_y = 0.
# emit image updated signal so crosshair can be updated from this fit
self.sigImageUpdated.emit()
self._sigDoNextOptimizationStep.emit()
def do_z_optimization(self):
""" Do the z axis optimization."""
# z scanning
self._scan_z_line()
# z-fit
# If subtracting surface, then data can go negative and the gaussian fit offset constraints need to be adjusted
if self.do_surface_subtraction:
adjusted_param = {}
adjusted_param['offset'] = {
'value': 1e-12,
'min': -self.z_refocus_line[:, self.opt_channel].max(),
'max': self.z_refocus_line[:, self.opt_channel].max()
}
result = self._fit_logic.make_gausspeaklinearoffset_fit(
x_axis=self._zimage_Z_values,
data=self.z_refocus_line[:, self.opt_channel],
add_params=adjusted_param)
else:
if any(self.use_custom_params.values()):
result = self._fit_logic.make_gausspeaklinearoffset_fit(
x_axis=self._zimage_Z_values,
data=self.z_refocus_line[:, self.opt_channel],
# Todo: It is required that the changed parameters are given as a dictionary or parameter object
add_params=None)
else:
result = self._fit_logic.make_gaussianlinearoffset_fit(
x_axis=self._zimage_Z_values,
data=self.z_refocus_line[:, self.opt_channel],
units='m',
estimator=self._fit_logic.estimate_gaussianlinearoffset_peak
)
self.z_params = result.params
if result.success is False:
self.log.error('error in 1D Gaussian Fit.')
self.optim_pos_z = self._initial_pos_z
self.optim_sigma_z = 0.
# interrupt here?
else: # move to new position
# @reviewer: Do we need this? With constraints, none of these cases should be possible...
# checks if new pos is too far away
if abs(self._initial_pos_z - result.best_values['center']) < self._max_offset:
# checks if new pos is within the scanner range
if result.best_values['center'] >= self.z_range[0] and result.best_values['center'] <= self.z_range[1]:
self.optim_pos_z = result.best_values['center']
self.optim_sigma_z = result.best_values['sigma']
gauss, params = self._fit_logic.make_gaussianlinearoffset_model()
self.z_fit_data = gauss.eval(
x=self._fit_zimage_Z_values, params=result.params)
else: # new pos is too far away
# checks if new pos is too high
self.optim_sigma_z = 0.
if result.best_values['center'] > self._initial_pos_z:
if self._initial_pos_z + 0.5 * self.refocus_Z_size <= self.z_range[1]:
# moves to higher edge of scan range
self.optim_pos_z = self._initial_pos_z + 0.5 * self.refocus_Z_size
else:
self.optim_pos_z = self.z_range[1] # moves to highest possible value
else:
if self._initial_pos_z - 0.5 * self.refocus_Z_size >= self.z_range[0]:
# moves to lower edge of scan range
self.optim_pos_z = self._initial_pos_z - 0.5 * self.refocus_Z_size
else:
self.optim_pos_z = self.z_range[0] # moves to lowest possible value
self.sigImageUpdated.emit()
self._sigDoNextOptimizationStep.emit()
def finish_refocus(self):
""" Finishes up and releases hardware after the optimizer scans."""
self.kill_scanner()
self.log.info(
'Optimised from ({0:.3e},{1:.3e},{2:.3e}) to local '
'maximum at ({3:.3e},{4:.3e},{5:.3e}).'.format(
self._initial_pos_x,
self._initial_pos_y,
self._initial_pos_z,
self.optim_pos_x,
self.optim_pos_y,
self.optim_pos_z))
# Signal that the optimization has finished, and "return" the optimal position along with
# caller_tag
self.sigRefocusFinished.emit(
self._caller_tag,
[self.optim_pos_x, self.optim_pos_y, self.optim_pos_z, 0])
def _scan_z_line(self):
"""Scans the z line for refocus."""
# Moves to the start value of the z-scan
status = self._move_to_start_pos(
[self.optim_pos_x, self.optim_pos_y, self._zimage_Z_values[0]])
if status < 0:
self.log.error('Error during move to starting point.')
self.stop_refocus()
return
n_ch = len(self._scanning_device.get_scanner_axes())
# defining trace of positions for z-refocus
scan_z_line = self._zimage_Z_values
scan_x_line = self.optim_pos_x * np.ones(self._zimage_Z_values.shape)
scan_y_line = self.optim_pos_y * np.ones(self._zimage_Z_values.shape)
if n_ch <= 3:
line = np.vstack((scan_x_line, scan_y_line, scan_z_line)[0:n_ch])
else:
line = np.vstack((scan_x_line, scan_y_line, scan_z_line, np.zeros(scan_x_line.shape)))
# Perform scan
line_counts = self._scanning_device.scan_line(line)
if np.any(line_counts == -1):
self.log.error('Z scan went wrong, killing the scanner.')
self.stop_refocus()
return
# Set the data
self.z_refocus_line = line_counts
# If subtracting surface, perform a displaced depth line scan
if self.do_surface_subtraction:
# Move to start of z-scan
status = self._move_to_start_pos([
self.optim_pos_x + self.surface_subtr_scan_offset,
self.optim_pos_y,
self._zimage_Z_values[0]])
if status < 0:
self.log.error('Error during move to starting point.')
self.stop_refocus()
return
# define an offset line to measure "background"
if n_ch <= 3:
line_bg = np.vstack(
(scan_x_line + self.surface_subtr_scan_offset, scan_y_line, scan_z_line)[0:n_ch])
else:
line_bg = np.vstack(
(scan_x_line + self.surface_subtr_scan_offset,
scan_y_line,
scan_z_line,
np.zeros(scan_x_line.shape)))
line_bg_counts = self._scanning_device.scan_line(line_bg)
if np.any(line_bg_counts[0] == -1):
self.log.error('The scan went wrong, killing the scanner.')
self.stop_refocus()
return
# surface-subtracted line scan data is the difference
self.z_refocus_line = line_counts - line_bg_counts
def start_scanner(self):
"""Setting up the scanner device.
@return int: error code (0:OK, -1:error)
"""
self.lock()
clock_status = self._scanning_device.set_up_scanner_clock(
clock_frequency=self._clock_frequency)
if clock_status < 0:
self.unlock()
return -1
scanner_status = self._scanning_device.set_up_scanner()
if scanner_status < 0:
self._scanning_device.close_scanner_clock()
self.unlock()
return -1
return 0
def kill_scanner(self):
"""Closing the scanner device.
@return int: error code (0:OK, -1:error)
"""
try:
rv = self._scanning_device.close_scanner()
except:
self.log.exception('Closing refocus scanner failed.')
return -1
try:
rv2 = self._scanning_device.close_scanner_clock()
except:
self.log.exception('Closing refocus scanner clock failed.')
return -1
self.unlock()
return rv + rv2
def _do_next_optimization_step(self):
"""Handle the steps through the specified optimization sequence
"""
# At the end of the sequence, finish the optimization
if self._optimization_step == len(self.optimization_sequence):
self._sigFinishedAllOptimizationSteps.emit()
return
# Read the next step in the optimization sequence
this_step = self.optimization_sequence[self._optimization_step]
# Increment the step counter
self._optimization_step += 1
# Launch the next step
if this_step == 'XY':
self._initialize_xy_refocus_image()
self._sigScanNextXyLine.emit()
elif this_step == 'Z':
self._initialize_z_refocus_image()
self._sigScanZLine.emit()
def set_position(self, tag, x=None, y=None, z=None, a=None):
""" Set focus position.
@param str tag: string indicating who caused position change
@param float x: x axis position in m
@param float y: y axis position in m
@param float z: z axis position in m
@param float a: a axis position in m
"""
if x is not None:
self._current_x = x
if y is not None:
self._current_y = y
if z is not None:
self._current_z = z
self.sigPositionChanged.emit(self._current_x, self._current_y, self._current_z)
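# Usage sketch (illustrative; in practice Qudi instantiates this module from
# its configuration and wires the connectors). The instance and caller names
# below are assumptions:
#
# optimizerlogic.start_refocus(initial_pos=[1e-6, 2e-6, 0.5e-6],
#                              caller_tag='confocalgui')
#
# sigRefocusFinished then fires with the caller tag and the optimized position.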
|
childresslab/MicrocavityExp1
|
logic/optimizer_logic.py
|
Python
|
gpl-3.0
| 27,933
|
[
"Gaussian"
] |
c3ba11e51dceff89a30a18b16a3fdc0a0d38d0c95ed7c53a8e75ec205ffc8f70
|
###########################################################################
#
# This program is part of Zenoss Core, an open source monitoring platform.
# Copyright (C) 2008, Zenoss Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# For complete information please visit: http://www.zenoss.com/oss/
#
###########################################################################
__doc__="Define structure for the ServerAlive2 RPC call"
from pysamba.library import *
class COMVERSION(Structure):
_fields_ = [
('MajorVersion', uint16_t),
('MinorVersion', uint16_t),
]
class COMINFO(Structure):
_fields_ = [
('version', COMVERSION),
('unknown1', uint32_t),
]
class DUALSTRINGARRAY(Structure):
_fields_ = [
('stringbindings', c_void_p), # POINTER(POINTER(STRINGBINDING))),
('securitybindings', c_void_p), # POINTER(POINTER(SECURITYBINDING))),
]
uint_t = c_uint
class ServerAlive2_out(Structure):
_fields_ = [
('info', POINTER(COMINFO)),
('dualstring', POINTER(DUALSTRINGARRAY)),
('unknown2', uint8_t*3),
('result', WERROR),
]
class ServerAlive2(Structure):
_fields_ = [('out', ServerAlive2_out)]
|
racemidev/WMI_cmd
|
pysamba/rpc/oxidresolver.py
|
Python
|
gpl-2.0
| 1,376
|
[
"VisIt"
] |
adedf0b048491e6f203ad2b56a393eb73fe7d3464a664c82d0075feeb678b158
|
# -*- coding: utf-8 -*-
"""
Project: Parallel.Archive
Date: 3/3/17 11:27 AM
Author: Demian D. Gomez
"""
from os.path import getmtime
from pprint import pprint
import traceback
import warnings
import sys
import os
from time import time
from io import BytesIO
import base64
import logging
from logging import INFO, ERROR, WARNING, DEBUG, StreamHandler, Formatter
# deps
import numpy as np
from numpy import sin, cos, pi
from scipy.stats import chi2
import pg
import matplotlib
if not os.environ.get('DISPLAY', None):
matplotlib.use('Agg')
from matplotlib.widgets import Button
# app
import pyStationInfo
import pyDate
import pyEvents
from Utils import ct2lg, lg2ct, rotlg2ct, crc32, stationID
from pyBunch import Bunch
language = {
'eng': {
"station" : "Station",
"north" : "North",
"east" : "East",
"up" : "Up",
"table_title" : "Year Day Relx [mm] Mag",
"periodic" : "Periodic amp",
"velocity" : "Velocity",
"from_model" : "from model",
"acceleration" : "Acceleration",
"position" : "Ref. Position",
"completion" : "Completion",
"other" : "other polynomial terms",
"not_enough" : "Not enough solutions to fit an ETM.",
"table_too_long" : "Table too long to print!",
"frequency" : "Frequency",
"N residuals" : "N Residuals",
"E residuals" : "E Residuals",
"U residuals" : "U Residuals",
"histogram plot" : "Histogram",
"residual plot" : "Residual Plot"
},
'spa': {
"station" : "Estación",
"north" : "Norte",
"east" : "Este",
"up" : "Arriba",
"table_title" : "Año Día Relx [mm] Mag",
"periodic" : "Amp. Periódica",
"velocity" : "Velocidad",
"from_model" : "de modelo",
"acceleration" : "Aceleración",
"position" : "Posición de ref.",
"completion" : "Completitud",
"other" : "otros términos polinómicos",
"not_enough" : "No hay suficientes soluciones para ajustar trayectorias.",
"table_too_long" : "Tabla demasiado larga!",
"frequency" : "Frecuencia",
"N residuals" : "Residuos N",
"E residuals" : "Residuos E",
"U residuals" : "Residuos U",
"histogram plot" : "Histograma",
"residual plot" : "Gráfico de Residuos"
}}
if 'LANG' not in globals():
LANG = 'eng'
def LABEL(msg):
global LANG
return language[LANG][msg]
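# Quick illustration of the lookup above (added, not in the original module):
# with the default LANG = 'eng', LABEL('station') returns 'Station'; after
# setting LANG = 'spa' it returns 'Estación'.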
# logger information and setup
logger = logging.getLogger('pyETM')
stream = StreamHandler()
stream.setFormatter(Formatter(' -- %(message)s'))
logger.addHandler(stream)
def tic():
global tt
tt = time()
def toc(text):
global tt
print(text + ': ' + str(time() - tt))
LIMIT = 2.5
type_dict = {-1 : 'UNDETERMINED',
1 : 'GENERIC_JUMP',
2 : 'ANTENNA_CHANGE',
5 : 'REFERENCE_FRAME_JUMP',
10 : 'CO_SEISMIC_JUMP_DECAY',
15 : 'CO_SEISMIC_JUMP',
20 : 'CO_SEISMIC_DECAY'}
# unknown jump
UNDETERMINED = -1
# no effect: display purposes
GENERIC_JUMP = 1
# antenna change jump
ANTENNA_CHANGE = 2
# reference frame jump
REFERENCE_FRAME_JUMP = 5
# co-seismic jump and decay
CO_SEISMIC_JUMP_DECAY = 10
# co-seismic jump only, no decay
CO_SEISMIC_JUMP = 15
# co-seismic decay only
CO_SEISMIC_DECAY = 20
EQ_MIN_DAYS = 15
JP_MIN_DAYS = 5
DEFAULT_RELAXATION = np.array([0.5])
DEFAULT_POL_TERMS = 2
DEFAULT_FREQUENCIES = np.array((1/365.25, 1/(365.25/2))) # (1 yr, 6 months) expressed in 1/days (one year = 365.25)
SIGMA_FLOOR_H = 0.10
SIGMA_FLOOR_V = 0.15
ESTIMATION = 0
DATABASE = 1
VERSION = '1.2.1'
class pyETMException(Exception):
def __init__(self, value):
self.value = value
self.event = pyEvents.Event(Description=value, EventType='error')
def __str__(self):
return str(self.value)
class pyETMException_NoDesignMatrix(pyETMException):
pass
def distance(lon1, lat1, lon2, lat2):
"""
Calculate the great circle distance between two points
on the earth (specified in decimal degrees)
"""
# convert decimal degrees to radians
lon1 = lon1*pi/180
lat1 = lat1*pi/180
lon2 = lon2*pi/180
lat2 = lat2*pi/180
# haversine formula
dlon = lon2 - lon1
dlat = lat2 - lat1
a = np.sin(dlat/2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2)**2
c = 2 * np.arcsin(np.sqrt(a))
km = 6371 * c
return km
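# Sanity check for the haversine above (an added sketch): one degree of
# latitude along a meridian spans about 111 km for R = 6371 km.
#
# >>> round(distance(0.0, 0.0, 0.0, 1.0), 1)
# 111.2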
def to_postgres(dictionary):
if isinstance(dictionary, dict):
for key, val in list(dictionary.items()):
if isinstance(val, np.ndarray):
dictionary[key] = str(val.flatten().tolist()).replace('[', '{').replace(']', '}')
else:
dictionary = str(dictionary.flatten().tolist()).replace('[', '{').replace(']', '}')
return dictionary
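# Example of the conversion above (added illustration): numpy arrays become
# Postgres array literals.
#
# >>> to_postgres({'relaxation': np.array([0.5, 1.0])})
# {'relaxation': '{0.5, 1.0}'}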
def to_list(dictionary):
for key, val in list(dictionary.items()):
if isinstance(val, np.ndarray):
dictionary[key] = val.tolist()
elif isinstance(val, pyDate.datetime):
dictionary[key] = val.strftime('%Y-%m-%d %H:%M:%S')
return dictionary
class PppSoln:
""""class to extract the PPP solutions from the database"""
def __init__(self, cnn, NetworkCode, StationCode):
self.NetworkCode = NetworkCode
self.StationCode = StationCode
self.hash = 0
stn_id = stationID(self)
self.type = 'ppp'
self.stack_name = 'ppp'
# get the station from the stations table
stn = cnn.query('SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
% (NetworkCode, StationCode))
stn = stn.dictresult()[0]
if stn['lat'] is None:
raise pyETMException('Station %s has no valid metadata in the stations table.' % stn_id)
self.lat = np.array([float(stn['lat'])])
self.lon = np.array([float(stn['lon'])])
self.height = np.array([float(stn['height'])])
self.auto_x = np.array([float(stn['auto_x'])])
self.auto_y = np.array([float(stn['auto_y'])])
self.auto_z = np.array([float(stn['auto_z'])])
x = np.array([float(stn['auto_x'])])
y = np.array([float(stn['auto_y'])])
z = np.array([float(stn['auto_z'])])
if stn['max_dist'] is not None:
self.max_dist = stn['max_dist']
else:
self.max_dist = 20
# load all the PPP coordinates available for this station
# exclude ppp solutions in the exclude table and any solution that is more than 20 meters from the simple
# linear trend calculated above
self.excluded = cnn.query_float('SELECT "Year", "DOY" FROM ppp_soln_excl '
'WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
% (NetworkCode, StationCode))
self.table = cnn.query_float(
'SELECT "X", "Y", "Z", "Year", "DOY" FROM ppp_soln p1 '
'WHERE p1."NetworkCode" = \'%s\' AND p1."StationCode" = \'%s\' ORDER BY "Year", "DOY"'
% (NetworkCode, StationCode))
self.table = [item for item in self.table
if np.sqrt(np.square(item[0] - x) + np.square(item[1] - y) + np.square(item[2] - z)) <=
self.max_dist and item[3:] not in self.excluded]
self.blunders = [item for item in self.table
if np.sqrt(np.square(item[0] - x) + np.square(item[1] - y) + np.square(item[2] - z)) >
self.max_dist and item[3:] not in self.excluded]
self.solutions = len(self.table)
self.ts_blu = np.array([pyDate.Date(year=item[3], doy=item[4]).fyear for item in self.blunders])
if self.solutions >= 1:
a = np.array(self.table)
self.x = a[:, 0]
self.y = a[:, 1]
self.z = a[:, 2]
self.t = np.array([pyDate.Date(year=item[0], doy=item[1]).fyear for item in a[:, 3:5]])
self.mjd = np.array([pyDate.Date(year=item[0], doy=item[1]).mjd for item in a[:, 3:5]])
self.date = [pyDate.Date(year=item[0], doy=item[1]) for item in a[:, 3:5]]
# continuous time vector for plots
ts = np.arange(np.min(self.mjd), np.max(self.mjd) + 1, 1)
self.mjds = ts
self.ts = np.array([pyDate.Date(mjd=tts).fyear for tts in ts])
elif len(self.blunders) >= 1:
raise pyETMException('No viable PPP solutions available for %s (all blunders!)\n'
' -> min distance to station coordinate is %.1f meters'
% (stn_id, np.array([item[5]
for item in self.blunders]).min()))
else:
raise pyETMException('No PPP solutions available for %s' % stn_id)
# get a list of the epochs with files but no solutions.
# This will be shown in the outliers plot as a special marker
rnx = cnn.query(
'SELECT r."ObservationFYear" FROM rinex_proc as r '
'LEFT JOIN ppp_soln as p ON '
'r."NetworkCode" = p."NetworkCode" AND '
'r."StationCode" = p."StationCode" AND '
'r."ObservationYear" = p."Year" AND '
'r."ObservationDOY" = p."DOY"'
'WHERE r."NetworkCode" = \'%s\' AND r."StationCode" = \'%s\' AND '
'p."NetworkCode" IS NULL' % (NetworkCode, StationCode))
        self.rnx_no_ppp = rnx.getresult()
        # getresult() returns one-element tuples: keep just the fractional-year value
        self.ts_ns = np.array([float(item[0]) for item in self.rnx_no_ppp])
self.completion = 100. - float(len(self.ts_ns)) / float(len(self.ts_ns) + len(self.t)) * 100.
ppp_hash = cnn.query_float('SELECT sum(hash) FROM ppp_soln p1 '
'WHERE p1."NetworkCode" = \'%s\' AND p1."StationCode" = \'%s\''
% (NetworkCode, StationCode))
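        # the hash combines the solution count, the a priori coordinates, the time
        # span and the per-day PPP hashes, so any change in the underlying solutions
        # invalidates ETM parameters cached in the database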
self.hash = crc32(str(len(self.t) + len(self.blunders)) + ' ' +
str(self.auto_x) +
str(self.auto_y) +
str(self.auto_z) +
str(ts[0]) + ' ' +
str(ts[-1]) + ' ' +
str(ppp_hash[0][0]) +
VERSION)
class GamitSoln:
""""class to extract the GAMIT polyhedrons from the database"""
def __init__(self, cnn, polyhedrons, NetworkCode, StationCode, stack_name):
self.NetworkCode = NetworkCode
self.StationCode = StationCode
stn_id = stationID(self)
self.stack_name = stack_name
self.hash = 0
self.type = 'gamit'
# get the station from the stations table
stn = cnn.query_float('SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
% (NetworkCode, StationCode), as_dict=True)[0]
if stn['lat'] is not None:
self.lat = np.array([float(stn['lat'])])
self.lon = np.array([float(stn['lon'])])
self.height = np.array([stn['height']])
self.auto_x = np.array([float(stn['auto_x'])])
self.auto_y = np.array([float(stn['auto_y'])])
self.auto_z = np.array([float(stn['auto_z'])])
if stn['max_dist'] is not None:
self.max_dist = stn['max_dist']
else:
self.max_dist = 20
self.solutions = len(polyhedrons)
# blunders
self.blunders = []
self.ts_blu = np.array([])
if self.solutions >= 1:
a = np.array(polyhedrons, dtype=float)
                if np.sqrt(np.sum(np.square(a[0, 0:3]))) > 6.3e3:
                    # coordinates given in XYZ (the norm of an ECEF position is ~6.37e6 m)
                    nb = np.sqrt(np.sum(
                        np.square(a[:, 0:3] - np.array([stn['auto_x'], stn['auto_y'], stn['auto_z']])), axis=1)) \
                         <= self.max_dist
                else:
                    # coordinates are differences
                    nb = np.sqrt(np.sum(np.square(a[:, 0:3]), axis=1)) <= self.max_dist
if np.any(nb):
self.x = a[nb, 0]
self.y = a[nb, 1]
self.z = a[nb, 2]
self.t = np.array([pyDate.Date(year=item[0], doy=item[1]).fyear for item in a[nb, 3:5]])
self.mjd = np.array([pyDate.Date(year=item[0], doy=item[1]).mjd for item in a[nb, 3:5]])
self.date = [pyDate.Date(year=item[0], doy=item[1]) for item in a[nb, 3:5]]
# continuous time vector for plots
ts = np.arange(np.min(self.mjd), np.max(self.mjd) + 1, 1)
self.mjds = ts
self.ts = np.array([pyDate.Date(mjd=tts).fyear for tts in ts])
else:
                    dd = np.sqrt(np.sum(
                        np.square(a[:, 0:3] - np.array([stn['auto_x'], stn['auto_y'], stn['auto_z']])), axis=1))
raise pyETMException('No viable GAMIT solutions available for %s (all blunders!)\n'
' -> min distance to station coordinate is %.1f meters'
% (stn_id, dd.min()))
else:
raise pyETMException('No GAMIT polyhedrons vertices available for %s' % stn_id)
# get a list of the epochs with files but no solutions.
# This will be shown in the outliers plot as a special marker
rnx = cnn.query(
'SELECT r.* FROM rinex_proc as r '
'LEFT JOIN stacks as p ON '
'r."NetworkCode" = p."NetworkCode" AND '
'r."StationCode" = p."StationCode" AND '
'r."ObservationYear" = p."Year" AND '
'r."ObservationDOY" = p."DOY" AND '
'p."name" = \'%s\''
'WHERE r."NetworkCode" = \'%s\' AND r."StationCode" = \'%s\' AND '
'p."NetworkCode" IS NULL' % (stack_name, NetworkCode, StationCode))
self.rnx_no_ppp = rnx.dictresult()
self.ts_ns = np.array([float(item['ObservationFYear']) for item in self.rnx_no_ppp])
self.completion = 100. - float(len(self.ts_ns)) / float(len(self.ts_ns) + len(self.t)) * 100.
self.hash = crc32(str(len(self.t) + len(self.blunders)) + ' ' +
str(ts[0]) + ' ' +
str(ts[-1]) +
VERSION)
else:
raise pyETMException('Station %s has no valid metadata in the stations table.' % stn_id)
class ListSoln(GamitSoln):
""""class to extract the polyhedrons from a list"""
def __init__(self, cnn, polyhedrons, NetworkCode, StationCode, stack_name='file-unknown'):
super(ListSoln, self).__init__(cnn=cnn, polyhedrons=polyhedrons, NetworkCode=NetworkCode,
StationCode=StationCode, stack_name=stack_name)
self.rnx_no_ppp = []
class JumpTable:
def __init__(self, cnn, NetworkCode, StationCode, soln, t, FitEarthquakes=True, FitGenericJumps=True):
self.table = []
# get earthquakes for this station
self.earthquakes = Earthquakes(cnn, NetworkCode, StationCode, soln, t, FitEarthquakes)
self.generic_jumps = GenericJumps(cnn, NetworkCode, StationCode, soln, t, FitGenericJumps)
jumps = self.earthquakes.table + self.generic_jumps.table
jumps.sort()
# add the relevant jumps, make sure none are incompatible
for jump in jumps:
self.insert_jump(jump)
# verify last jump to make sure there's enough data
if len(self.table) > 0:
jump = None
# find last active jump
for j in self.table[-1::-1]:
# find the previous active jump
if j.fit:
jump = j
break
if jump:
dt = np.max(t[jump.design[:, -1] != 0]) - \
np.min(t[jump.design[:, -1] != 0])
# check for minimum data of coseismic jumps + decays
if (jump.p.jump_type == CO_SEISMIC_JUMP_DECAY and
(dt < 1 and np.count_nonzero(jump.design[:, -1]) / 365.25 < 0.5)):
# was a jump and decay, leave the jump
jump.p.jump_type = CO_SEISMIC_JUMP
jump.param_count -= jump.nr # subtract from param count the number of relaxations
jump.p.params = np.zeros((3, 1))
jump.p.sigmas = np.zeros((3, 1))
# reevaluate the design matrix!
jump.design = jump.eval(t)
jump.rehash()
# get the coseismic and coseismic decay jumps
jcs = [j for j in self.table if (j.p.jump_type == CO_SEISMIC_JUMP_DECAY
or j.p.jump_type == CO_SEISMIC_DECAY) and j.fit is True]
if len(jcs) > 1:
for j, i in zip(jcs[0:], jcs[1:]):
j.constrain_years = (i.min_date - j.min_date)
j.constrain_data_points = np.count_nonzero(t[np.logical_and(t > j.min_date, t < i.min_date)])
jcs[-1].constrain_years = t.max() - jcs[-1].min_date
jcs[-1].constrain_data_points = np.count_nonzero(t[np.logical_and(t > jcs[-1].min_date, t < t.max())])
elif len(jcs) == 1:
jcs[0].constrain_years = (t.max() - jcs[0].min_date)
jcs[0].constrain_data_points = np.count_nonzero(t[np.logical_and(t > jcs[0].min_date, t < t.max())])
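        # constrain_years / constrain_data_points record how much data follows each
        # co-seismic event; they can be used downstream to judge whether the decay
        # terms are well constrained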
self.constrains = np.array([])
def param_count(self):
return sum([jump.param_count for jump in self.table if jump.fit])
def insert_jump(self, jump):
if len(self.table) == 0:
self.table.append(jump)
else:
# take last jump and compare to adding jump
jj = None
for j in self.table[-1::-1]:
# find the previous active jump
if j.fit:
jj = j
break
if not jj:
# no active jumps in the table!
self.table.append(jump)
return
elif jump.fit:
# this operation determines if jumps are equivalent
# result is true if equivalent, decision is which one survives
result, decision = jj.__eq__(jump)
if result:
# jumps are equivalent
# decision branches:
# 1) decision == jump, remove previous; add jump
# 2) decision == jj , do not add jump (i.e. do nothing)
if decision is jump:
jj.remove_from_fit()
else:
jump.remove_from_fit()
self.table.append(jump)
def get_design_ts(self, t):
# if function call NOT for inversion, return the columns even if the design matrix is unstable
A = np.array([])
# get the design matrix for the jump table
for jump in self.table:
if jump.fit:
a = jump.eval(t)
if a.size:
if A.size:
# if A is not empty, verify that this jump will not make the matrix singular
tA = np.column_stack((A, a))
# getting the condition number might trigger divide_zero warning => turn off
np.seterr(divide='ignore', invalid='ignore')
if np.linalg.cond(tA) < 1e10:
# adding this jumps doesn't make the matrix singular
A = tA
else:
# if matrix becomes singular, remove from fit!
jump.remove_from_fit()
warnings.warn('%s had to be removed due to high condition number' % str(jump))
else:
A = a
return A
def load_parameters(self, params, sigmas):
for jump in self.table:
if jump.fit:
jump.load_parameters(params=params, sigmas=sigmas)
def print_parameters(self):
output_n = [LABEL('table_title')]
output_e = [LABEL('table_title')]
output_u = [LABEL('table_title')]
for jump in self.table:
# relaxation counter
rx = 0
m = ' -' if np.isnan(jump.magnitude) else jump.magnitude
if jump.fit:
for j, p in enumerate(np.arange(jump.param_count)):
psc = jump.p.params[:, p]
if j == 0 and jump.p.jump_type is not CO_SEISMIC_DECAY:
output_n.append('{} {:>7.1f} {} {}'.format(jump.date.yyyyddd(), psc[0] * 1000.0, m, jump.action))
output_e.append('{} {:>7.1f} {} {}'.format(jump.date.yyyyddd(), psc[1] * 1000.0, m, jump.action))
output_u.append('{} {:>7.1f} {} {}'.format(jump.date.yyyyddd(), psc[2] * 1000.0, m, jump.action))
else:
output_n.append('{} {:4.2f} {:>7.1f} {} {}'.format(jump.date.yyyyddd(), jump.p.relaxation[rx],
psc[0] * 1000.0, m, jump.action))
output_e.append('{} {:4.2f} {:>7.1f} {} {}'.format(jump.date.yyyyddd(), jump.p.relaxation[rx],
psc[1] * 1000.0, m, jump.action))
output_u.append('{} {:4.2f} {:>7.1f} {} {}'.format(jump.date.yyyyddd(), jump.p.relaxation[rx],
psc[2] * 1000.0, m, jump.action))
# relaxation counter
rx += 1
else:
for j, _ in enumerate(np.arange(jump.param_count)):
if j == 0 and jump.p.jump_type is not CO_SEISMIC_DECAY:
# the only type of jump that does not show the jump is a co-seismic decay
output_n.append('{} - {} {}'.format(jump.date.yyyyddd(), m, jump.action))
output_e.append('{} - {} {}'.format(jump.date.yyyyddd(), m, jump.action))
output_u.append('{} - {} {}'.format(jump.date.yyyyddd(), m, jump.action))
else:
output_n.append('{} {:4.2f} - {} {}'.format(jump.date.yyyyddd(), jump.p.relaxation[rx],
m, jump.action))
output_e.append('{} {:4.2f} - {} {}'.format(jump.date.yyyyddd(), jump.p.relaxation[rx],
m, jump.action))
output_u.append('{} {:4.2f} - {} {}'.format(jump.date.yyyyddd(), jump.p.relaxation[rx],
m, jump.action))
# relaxation counter
rx += 1
if len(output_n) > 22:
output_n = output_n[0:22] + [LABEL('table_too_long')]
output_e = output_e[0:22] + [LABEL('table_too_long')]
output_u = output_u[0:22] + [LABEL('table_too_long')]
return '\n'.join(output_n), '\n'.join(output_e), '\n'.join(output_u)
class EtmFunction:
def __init__(self, **kwargs):
self.p = Bunch()
self.p.NetworkCode = kwargs['NetworkCode']
self.p.StationCode = kwargs['StationCode']
self.p.soln = kwargs['soln'].type
self.p.stack = kwargs['soln'].stack_name
self.p.params = np.array([])
self.p.sigmas = np.array([])
self.p.object = ''
self.p.metadata = None
self.p.hash = 0
self.param_count = 0
self.column_index = np.array([])
self.format_str = ''
self.fit = True
def load_parameters(self, **kwargs):
params = kwargs['params']
sigmas = kwargs['sigmas']
if params.ndim == 1:
# parameters coming from the database, reshape
params = params.reshape((3, params.shape[0] // 3))
if sigmas.ndim == 1:
# parameters coming from the database, reshape
sigmas = sigmas.reshape((3, sigmas.shape[0] // 3))
# determine if parameters are coming from the X vector (LSQ) or from the database (solution for self only)
if params.shape[1] > self.param_count:
# X vector
self.p.params = params[:, self.column_index]
self.p.sigmas = sigmas[:, self.column_index]
else:
# database (solution for self only; no need for column_index)
self.p.params = params
self.p.sigmas = sigmas
class Jump(EtmFunction):
"""
generic jump (mechanic jump, frame change, etc) class
:argument NetworkCode
:argument StationCode
"""
def __init__(self, NetworkCode, StationCode, soln, t, date, metadata, dtype=GENERIC_JUMP, action='A', fit=True):
super(Jump, self).__init__(NetworkCode=NetworkCode, StationCode=StationCode, soln=soln)
# in the future, can load parameters from the db
self.p.object = 'jump'
# define initial state variables
self.date = date
self.p.jump_date = date.datetime()
self.p.metadata = metadata
self.p.jump_type = dtype
# new property to identify manually added (or removed) jumps
self.action = action
# new property indicating if jump should be adjusted or not
self.fit = fit
# add the magnitude property to allow transformation from CO_SEISMIC_JUMP_DECAY to CO_SEISMIC_JUMP and still
# print the magnitude of the event in the jump table
self.magnitude = np.nan
# the param count of a jump is one!
self.param_count = 1
if self.fit:
# evaluate only if the jump is not flagged as NO EFFECT
self.design = Jump.eval(self, t)
else:
self.design = np.array([])
if not np.any(self.design) or np.all(self.design):
# a valid jump only has some rows == 1 in the design matrix,
# not all rows (all rows produces a singular matrix)
self.design = np.array([])
self.fit = False
if dtype not in (CO_SEISMIC_JUMP,
CO_SEISMIC_DECAY,
CO_SEISMIC_JUMP_DECAY):
logger.info('Mechanical Jump -> Adding jump on %s type: %s; Action: %s; Fit: %s'
% (self.date.yyyyddd(), type_dict[dtype], action, str(self.fit)))
Jump.rehash(self)
def rehash(self):
self.p.hash = crc32(str(self.date) + str(self.fit) + VERSION)
def remove_from_fit(self):
# this method will make this jump type = NO_EFFECT and adjust its params
self.fit = False
self.design = np.array([])
self.rehash()
def eval(self, t):
# given a time vector t, return the design matrix column vector(s)
if not self.fit:
return np.array([])
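        # Heaviside step: zeros before the jump epoch, ones strictly after it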
ht = np.zeros((t.shape[0], 1))
ht[t > self.date.fyear] = 1.
return ht
def load_parameters(self, **kwargs):
if self.fit:
EtmFunction.load_parameters(self, **kwargs)
def __eq__(self, jump):
if not isinstance(jump, Jump):
            raise pyETMException('type: ' + str(type(jump)) + ' invalid. Can only compare two Jump objects')
if not self.fit and jump.fit:
# if comparing to a self that has NO_EFFECT, remove and keep jump
return True, jump
elif self.fit and not jump.fit:
# if comparing against a jump that has NO_EFFECT, remove jump keep self
return True, self
elif not self.fit and not jump.fit:
# no jump has an effect, return None. This will be interpreted as False (if not result)
return None, None
# if we got here, then both jumps have fit == True
# compare two jumps together and make sure they will not generate a singular (or near singular) system of eq
c = np.sum(np.logical_xor(self.design[:, 0], jump.design[:, 0]))
dt = jump.date - self.date
# print ' ', jump.date, self.date, dt, c
        if self.p.jump_type >= 10 and jump.p.jump_type >= 10:
            # jump type >= 10 => co-seismic jump
            # if self and the next jump are both co-seismic, they are only compatible when
            # enough epochs separate them (more than param_count shared epochs and, for
            # events less than a year apart, at least ~10% of a year of data in between);
            # otherwise, keep the event with the larger magnitude
            if c < self.param_count + 1 or (dt < 365 and c / 365.25 < 0.1):
if self.magnitude < jump.magnitude:
return True, jump
else:
return True, self
else:
return False, None
elif self.p.jump_type >= 10 and 0 < jump.p.jump_type < 10:
if c < self.param_count + 1 or (dt < 365 and c / 365.25 < 0.1):
                # cannot constrain both the co-seismic jump and the generic jump that
                # follows: remove the generic jump, keep the co-seismic one
return True, self
else:
return False, None
elif 0 < self.p.jump_type < 10:
if jump.p.jump_type >= 10:
if c < self.param_count + 1 or (dt < 365 and c / 365.25 < 0.1):
                    # a generic jump too close before a co-seismic jump cannot be
                    # constrained independently: the co-seismic jump prevails
return True, jump
else:
return False, None
elif 0 < jump.p.jump_type < 10:
# two generic jumps. As long as they can be constrained, we are fine
if c < self.param_count + 1 or (dt < 365 and c / 365.25 < 0.1):
return True, jump
else:
return False, None
# @todo possible bug when returning None here?
def __str__(self):
return 'date=' + str(self.date) + \
', type=' + type_dict[self.p.jump_type] + \
', metadata="' + self.p.metadata + \
'", action="' + str(self.action) + \
'", fit=' + str(self.fit)
def __repr__(self):
return 'pyPPPETM.Jump(%s)' % str(self)
def __check_cmp(self, jump):
if not isinstance(jump, Jump):
raise pyETMException('type: '+str(type(jump))+' invalid. Can only compare Jump objects')
def __lt__(self, jump):
self.__check_cmp(jump)
return self.date.fyear < jump.date.fyear
def __le__(self, jump):
self.__check_cmp(jump)
return self.date.fyear <= jump.date.fyear
def __gt__(self, jump):
self.__check_cmp(jump)
return self.date.fyear > jump.date.fyear
def __ge__(self, jump):
self.__check_cmp(jump)
return self.date.fyear >= jump.date.fyear
def __hash__(self):
# to make the object hashable
return hash(self.date.fyear)
class CoSeisJump(Jump):
def __init__(self, NetworkCode, StationCode, soln, t, date, relaxation, metadata,
dtype=CO_SEISMIC_JUMP_DECAY, magnitude=0., action='A', fit=True, postseismic=None):
# postseismic input is a dictionary with 'relaxation': [T0, T1, ...] and 'a': amplitude[T0, T1, ...][N, E, U]
# super-class initialization
Jump.__init__(self, NetworkCode, StationCode, soln, t, date, metadata, dtype, action, fit)
# if t.min() > date, change to CO_SEISMIC_DECAY
# if jump / decay manually deactivated, fit == False and it's not changed below
if date.fyear < t.min():
self.p.jump_type = CO_SEISMIC_DECAY
# save the minimum date validity
self.min_date = t.min()
else:
self.p.jump_type = dtype
# save the minimum date validity
self.min_date = date.fyear
# new feature informs the magnitude of the event in the plot
self.magnitude = magnitude
# constrain_years saves how many years of data constrains this jump
# filled by JumpTable
self.constrain_years = None
# if post-seismic component is passed, then subtract from the data
self.postseismic = np.zeros((3, t.shape[0]))
if not self.fit and fit:
# came back from init with empty design matrix (self.fit = false) and originally fit was True.
# Maybe a jump before t.min()
# assign just the decay
self.p.jump_type = CO_SEISMIC_DECAY
# put fit back to original state
self.fit = fit
# if T is an array, it contains the corresponding decays
# otherwise, it is a single decay
if not isinstance(relaxation, np.ndarray):
relaxation = np.array([relaxation])
self.param_count += relaxation.shape[0]
if self.p.jump_type == CO_SEISMIC_DECAY:
# if CO_SEISMIC_DECAY, subtract one from parameters
self.param_count -= 1
self.nr = relaxation.shape[0]
self.p.relaxation = relaxation
if self.fit:
self.design = self.eval(t)
else:
self.design = np.array([])
logger.info('Geophysical Jump -> Adding jump on %s type: %s; Mag: %.1f; Action: %s; Fit: %s'
% (self.date.yyyyddd(), type_dict[dtype], magnitude, action, str(self.fit)))
self.rehash()
def rehash(self):
# co-seismic jump already has the version hash value from Jump object
self.p.hash = crc32(str(self.date) + str(self.fit) + str(self.param_count) + str(self.p.jump_type) +
str(self.p.relaxation) + str(self.fit) + VERSION)
def eval(self, t):
ht = Jump.eval(self, t)
# if there is nothing in ht, then there is no expected output, return none
if not np.any(ht):
return np.array([])
# if it was determined that this is just a co-seismic jump (no decay), return ht
elif self.p.jump_type == CO_SEISMIC_JUMP:
return ht
# support more than one decay
hl = np.zeros((t.shape[0], self.nr))
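        # each relaxation time T contributes a logarithmic transient log10(1 + dt / T),
        # one design-matrix column per T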
for i, T in enumerate(self.p.relaxation):
hl[t > self.date.fyear, i] = np.log10(1. + (t[t > self.date.fyear] - self.date.fyear) / T)
# if it's both jump and decay, return ht + hl
if np.any(hl):
if self.p.jump_type == CO_SEISMIC_JUMP_DECAY:
return np.column_stack((ht, hl))
elif self.p.jump_type == CO_SEISMIC_DECAY:
# if decay only, return hl
return hl
# @todo possible bug returning None?
def __str__(self):
return Jump.__str__(self) + ', relax=' + str(self.p.relaxation)
def __repr__(self):
return 'pyPPPETM.CoSeisJump(%s)' % str(self)
class Earthquakes:
def __init__(self, cnn, NetworkCode, StationCode, soln, t, FitEarthquakes=True):
self.StationCode = StationCode
self.NetworkCode = NetworkCode
# station location
stn = cnn.query('SELECT * FROM stations WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\''
% (NetworkCode, StationCode))
stn = stn.dictresult()[0]
# load metadata
lat = float(stn['lat'])
lon = float(stn['lon'])
# establish the limit dates. Ignore jumps before 5 years from the earthquake
# sdate = pyDate.Date(fyear=t.min() - 5)
# DDG 30/04/2020: now do not treat the earthquakes before the start date
# the same as those happening after the start date
sdate = pyDate.Date(fyear=t.min())
edate = pyDate.Date(fyear=t.max())
# get the earthquakes based on Mike's expression
        # earthquakes before the start date: only magnitude 7+
jumps = cnn.query_float('SELECT * FROM earthquakes '
'WHERE date BETWEEN \'%s\' AND \'%s\' UNION '
'SELECT * FROM earthquakes '
'WHERE date BETWEEN \'%s\' AND \'%s\' AND mag >= 7 '
'ORDER BY date'
% (sdate.yyyymmdd(), edate.yyyymmdd(),
pyDate.Date(fyear=t.min() - 5).yyyymmdd(), sdate.yyyymmdd()), as_dict=True)
# check if data range returned any jumps
if jumps and FitEarthquakes:
eq = [[float(jump['lat']), float(jump['lon']), float(jump['mag']),
int(jump['date'].year), int(jump['date'].month), int(jump['date'].day),
int(jump['date'].hour), int(jump['date'].minute), int(jump['date'].second)]
for jump in jumps]
eq = np.array(list(eq))
dist = distance(lon, lat, eq[:, 1], eq[:, 0])
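            # empirical magnitude-distance screening (coefficients inherited as-is from
            # the expression referenced above): events with m > 0 are considered close
            # and large enough to displace the station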
m = -0.8717 * (np.log10(dist) - 2.25) + 0.4901 * (eq[:, 2] - 6.6928)
            # build the earthquake jump table
            # remove events that happened on the same day
eq_jumps = sorted({(float(eqs[2]), pyDate.Date(year=int(eqs[3]), month=int(eqs[4]), day=int(eqs[5]),
hour=int(eqs[6]), minute=int(eqs[7]), second=int(eqs[8])))
for eqs in eq[m > 0, :]},
key=lambda x: (x[1], -x[0]))
# open the jumps table
jp = cnn.query_float('SELECT * FROM etm_params WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
'AND soln = \'%s\' AND jump_type <> 0 AND object = \'jump\''
% (NetworkCode, StationCode, soln.type), as_dict=True)
# start by collapsing all earthquakes for the same day.
# Do not allow more than one earthquake on the same day
f_jumps = []
next_date = None
for mag, date in eq_jumps:
# jumps are analyzed in windows that are EQ_MIN_DAYS long
# a date should not be analyzed if it's < next_date
if next_date is not None:
if date < next_date:
continue
# obtain jumps in a EQ_MIN_DAYS window
jumps = [(m, d) for m, d in eq_jumps if date <= d < date + EQ_MIN_DAYS]
if len(jumps) > 1:
# if more than one jump, get the max magnitude
mmag = max(m for m, _ in jumps)
# only keep the earthquake with the largest magnitude
for m, d in jumps:
table = {j['action'] for j in jp if j['Year'] == d.year and j['DOY'] == d.doy}
# get a different relaxation for this date
relax = [j['relaxation'] for j in jp if j['Year'] == d.year and j['DOY'] == d.doy]
if relax and relax[0] is not None:
relaxation = np.array(relax[0])
else:
relaxation = DEFAULT_RELAXATION
                        # if present in jump table, with either + or -, don't use the default decay
if m == mmag and '-' not in table:
f_jumps.append(CoSeisJump(NetworkCode, StationCode, soln, t, d, relaxation,
'mag=%.1f' % m, magnitude=m, action='+' if '+' in table else 'A'))
# once the jump was added, exit for loop
break
elif '+' in table:
# add only if in jump list with a '+'
f_jumps.append(CoSeisJump(NetworkCode, StationCode, soln, t, d,
relaxation, 'mag=%.1f' % m, magnitude=m, action='+'))
# once the jump was added, exit for loop
break
else:
f_jumps.append(CoSeisJump(NetworkCode, StationCode, soln, t, d,
relaxation, 'mag=%.1f' % m, action='-', fit=False))
else:
# add, unless marked in table with '-'
table = {j['action'] for j in jp if j['Year'] == date.year and j['DOY'] == date.doy}
# get a different relaxation for this date
relax = [j['relaxation'] for j in jp if j['Year'] == date.year and j['DOY'] == date.doy]
if relax and relax[0] is not None:
relaxation = np.array(relax[0])
else:
relaxation = DEFAULT_RELAXATION
if '-' not in table:
f_jumps.append(CoSeisJump(NetworkCode, StationCode, soln, t, date,
relaxation, 'mag=%.1f' % mag, magnitude=mag,
action='+' if '+' in table else 'A'))
else:
# add it with NO_EFFECT for display purposes
f_jumps.append(CoSeisJump(NetworkCode, StationCode, soln, t, date,
relaxation, 'mag=%.1f' % mag, magnitude=mag, action='-', fit=False))
next_date = date + EQ_MIN_DAYS
# final jump table
self.table = f_jumps
else:
self.table = []
class GenericJumps:
def __init__(self, cnn, NetworkCode, StationCode, soln, t, FitGenericJumps=True):
self.solution_type = soln.type
self.table = []
if t.size >= 2:
# analyze if it is possible to add the jumps (based on the available data)
wt = np.sort(np.unique(t - np.fix(t)))
# analyze the gaps in the data
dt = np.diff(wt)
# max dt (internal)
dtmax = np.max(dt)
# dt wrapped around
dt_interyr = 1 - wt[-1] + wt[0]
if dt_interyr > dtmax:
dtmax = dt_interyr
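            # 0.2465 yr ~ 90 days: metadata jumps are only added when the largest
            # (wrapped) data gap is under about three months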
if dtmax <= 0.2465 and FitGenericJumps:
# put jumps in
self.add_metadata_jumps = True
else:
# no jumps
self.add_metadata_jumps = False
else:
self.add_metadata_jumps = False
# open the jumps table
jp = cnn.query('SELECT * FROM etm_params WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
'AND soln = \'%s\' AND jump_type = 0 AND object = \'jump\''
% (NetworkCode, StationCode, self.solution_type))
jp = jp.dictresult()
# get station information
self.stninfo = pyStationInfo.StationInfo(cnn, NetworkCode, StationCode)
for stninfo in self.stninfo.records[1:]:
date = stninfo['DateStart']
table = [j['action'] for j in jp if j['Year'] == date.year and j['DOY'] == date.doy]
# add to list only if:
# 1) add_meta = True AND there is no '-' OR
# 2) add_meta = False AND there is a '+'
self.table.append(Jump(NetworkCode, StationCode, soln, t, date,
'Ant-Rec: %s-%s' % (stninfo['AntennaCode'], stninfo['ReceiverCode']),
dtype = ANTENNA_CHANGE,
action = table[0] if table else 'A',
fit = ('+' in table or (self.add_metadata_jumps and '-' not in table))
))
# frame changes if ppp
if self.solution_type == 'ppp':
frames = cnn.query(
'SELECT distinct on ("ReferenceFrame") "ReferenceFrame", "Year", "DOY" from ppp_soln WHERE '
'"NetworkCode" = \'%s\' AND "StationCode" = \'%s\' order by "ReferenceFrame", "Year", "DOY"' %
(NetworkCode, StationCode))
frames = frames.dictresult()
if len(frames) > 1:
# more than one frame, add a jump
frames.sort(key=lambda k: k['Year'])
for frame in frames[1:]:
date = pyDate.Date(Year=frame['Year'], doy=frame['DOY'])
table = [j['action'] for j in jp if j['Year'] == date.year and j['DOY'] == date.doy]
self.table.append(Jump(NetworkCode, StationCode, soln, t, date,
'Frame Change: %s' % frame['ReferenceFrame'],
dtype = REFERENCE_FRAME_JUMP,
action = table[0] if table else 'A',
fit = ('-' not in table)))
# now check the jump table to add specific jumps
jp = cnn.query('SELECT * FROM etm_params WHERE "NetworkCode" = \'%s\' AND "StationCode" = \'%s\' '
'AND soln = \'%s\' AND jump_type = 0 AND object = \'jump\' '
'AND action = \'+\'' % (NetworkCode, StationCode, self.solution_type)).dictresult()
table = {j.date for j in self.table}
for j in jp:
date = pyDate.Date(Year=j['Year'], doy=j['DOY'])
if date not in table:
self.table.append(Jump(NetworkCode, StationCode, soln, t, date, 'mechanic-jump',
dtype = GENERIC_JUMP,
action = '+'))
class Periodic(EtmFunction):
""""class to determine the periodic terms to be included in the ETM"""
def __init__(self, cnn, NetworkCode, StationCode, soln, t, FitPeriodic=True):
super(Periodic, self).__init__(NetworkCode=NetworkCode, StationCode=StationCode, soln=soln)
try:
# load the frequencies from the database
etm_param = cnn.get('etm_params',
{'NetworkCode' : NetworkCode,
'StationCode' : StationCode,
'soln' : soln.type,
'object' : 'periodic'
},
['NetworkCode', 'StationCode', 'soln', 'object'])
self.p.frequencies = np.array([float(p) for p in etm_param['frequencies']])
except pg.DatabaseError:
self.p.frequencies = DEFAULT_FREQUENCIES
self.p.object = 'periodic'
if t.size > 1 and FitPeriodic:
# wrap around the solutions
wt = np.sort(np.unique(t - np.fix(t)))
# analyze the gaps in the data
dt = np.diff(wt)
# max dt (internal)
dtmax = np.max(dt)
# dt wrapped around
dt_interyr = 1 - wt[-1] + wt[0]
if dt_interyr > dtmax:
dtmax = dt_interyr
# save the value of the max wrapped delta time
self.dt_max = dtmax
# get the 50 % of Nyquist for each component (and convert to average fyear)
self.nyquist = ((1 / self.p.frequencies) / 2.) * 0.5 * 1 / 365.25
# frequency count
self.frequency_count = int(np.sum(self.dt_max <= self.nyquist))
# redefine the frequencies vector to accommodate only the frequencies that can be fit
self.p.frequencies = self.p.frequencies[self.dt_max <= self.nyquist]
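            # e.g. with the default annual/semi-annual frequencies, the annual term
            # survives wrapped gaps up to ~0.25 yr and the semi-annual up to ~0.125 yr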
else:
# no periodic terms
self.frequency_count = 0
self.p.frequencies = np.array([])
self.dt_max = 1 # one year of delta t
logger.info('Periodic -> Frequency count: %i; FitPeriodic: %s' % (self.frequency_count, str(FitPeriodic)))
        # build the metadata description for the json string
        # (join the components to avoid the dangling comma the += version produced)
        components = []
        for k in ('n', 'e', 'u'):
            meta = ['%s:%s(%.1f yr)' % (k, i, f)
                    for i in ('sin', 'cos')
                    for f in (1 / (self.p.frequencies * 365.25)).tolist()]
            components.append('[' + ','.join(meta) + ']')
        self.p.metadata = '[' + ','.join(components) + ']'
self.design = self.get_design_ts(t)
self.param_count = self.frequency_count * 2
# declare the location of the answer (to be filled by Design object)
self.column_index = np.array([])
self.format_str = LABEL('periodic') + ' (' + \
', '.join('%.1f yr' % i for i in (1 / (self.p.frequencies * 365.25)).tolist()) + \
') N: %s E: %s U: %s [mm]'
self.p.hash = crc32(str(self.p.frequencies) + VERSION)
def get_design_ts(self, ts):
# if dtmax < 3 months (90 days = 0.1232), then we can fit the annual
# if dtmax < 1.5 months (45 days = 0.24657), then we can fit the semi-annual too
if self.frequency_count > 0:
f = self.p.frequencies
f = np.tile(f, (ts.shape[0], 1))
As = np.array(sin(2 * pi * f * 365.25 * np.tile(ts[:, np.newaxis], (1, f.shape[1]))))
Ac = np.array(cos(2 * pi * f * 365.25 * np.tile(ts[:, np.newaxis], (1, f.shape[1]))))
A = np.column_stack((As, Ac))
else:
# no periodic terms
A = np.array([])
return A
def print_parameters(self):
n = np.array([])
e = np.array([])
u = np.array([])
for p in np.arange(self.param_count):
psc = self.p.params[:, p]
sn = psc[0]
se = psc[1]
su = psc[2]
n = np.append(n, sn)
e = np.append(e, se)
u = np.append(u, su)
shape = (2, self.param_count // 2)
n = n.reshape(shape)
e = e.reshape(shape)
u = u.reshape(shape)
# calculate the amplitude of the components
an = np.sqrt(np.square(n[0, :]) + np.square(n[1, :]))
ae = np.sqrt(np.square(e[0, :]) + np.square(e[1, :]))
au = np.sqrt(np.square(u[0, :]) + np.square(u[1, :]))
return self.format_str % (np.array_str(an * 1000.0, precision=1),
np.array_str(ae * 1000.0, precision=1),
np.array_str(au * 1000.0, precision=1))
class Polynomial(EtmFunction):
""""class to build the linear portion of the design matrix"""
def __init__(self, cnn, NetworkCode, StationCode, soln, t, t_ref=0, interseismic=None):
super(Polynomial, self).__init__(NetworkCode=NetworkCode, StationCode=StationCode, soln=soln)
# t ref (just the beginning of t vector)
if t_ref == 0:
t_ref = np.min(t)
self.p.object = 'polynomial'
self.p.t_ref = t_ref
self.interseismic = np.zeros((3, t.shape[0]))
if interseismic:
logger.info('Polynomial -> Interseismic velocity provided: removing velocity from fit')
# interseismic model provided, do not fit linear (remove trend)
tt = (t - t_ref)
            # convert to np if a list is given
            if type(interseismic) is list:
                interseismic = np.array(interseismic)
for i in range(3):
self.interseismic[i] = tt * interseismic[i]
self.terms = 1
self.format_str = LABEL('position') + ' (%.3f' % t_ref + \
') X: {:.3f} Y: {:.3f} Z: {:.3f} [m]\n' \
+ LABEL('velocity') + ' (' \
+ LABEL('from_model') + ')' + \
' N: {:.2f} E: {:.2f} U: {:.2f} [mm/yr]'.format(*(interseismic * 1000))
self.p.metadata = '[[n:pos, n:vel],[e:pos, e:vel],[u:pos, u:vel]]'
else:
try:
# load the number of terms from the database
etm_param = cnn.get('etm_params',
{'NetworkCode' : NetworkCode,
'StationCode' : StationCode,
'soln' : soln.type,
'object' : 'polynomial'},
['NetworkCode', 'StationCode', 'soln', 'object'])
self.terms = int(etm_param['terms'])
except pg.DatabaseError:
self.terms = DEFAULT_POL_TERMS
logger.info('Polynomial -> Fitting %i term(s)' % self.terms)
if self.terms == 1:
self.format_str = LABEL('position') + ' (%.3f' % t_ref + \
') X: {:.3f} Y: {:.3f} Z: {:.3f} [m]'
self.p.metadata = '[[n:pos],[e:pos],[u:pos]]'
elif self.terms == 2:
self.format_str = LABEL('position') + ' (%.3f' % t_ref + \
') X: {:.3f} Y: {:.3f} Z: {:.3f} [m]\n' \
+ LABEL('velocity') + ' N: {:.2f} E: {:.2f} U: {:.2f} [mm/yr]'
self.p.metadata = '[[n:pos, n:vel],[e:pos, e:vel],[u:pos, u:vel]]'
elif self.terms == 3:
self.format_str = LABEL('position')+ ' (%.3f' % t_ref + \
') X: {:.3f} Y: {:.3f} Z: {:.3f} [m]\n' \
+ LABEL('velocity') + ' N: {:.3f} E: {:.3f} U: {:.3f} [mm/yr]\n' \
+ LABEL('acceleration') + ' N: {:.2f} E: {:.2f} U: {:.2f} [mm/yr**2]'
self.p.metadata = '[[n:pos, n:vel, n:acc],[e:pos, e:vel, e:acc],[u:pos, u:vel, u:acc]]'
elif self.terms > 3:
self.format_str = LABEL('position') + ' (%.3f' % t_ref + \
') X: {:.3f} Y: {:.3f} Z: {:.3f} [m]\n' \
+ LABEL('velocity') + ' N: {:.3f} E: {:.3f} U: {:.3f} [mm/yr]\n' \
+ LABEL('acceleration') + ' N: {:.2f} E: {:.2f} U: {:.2f} [mm/yr**2] + ' \
+ '%i ' % (self.terms - 3) + LABEL('other')
self.p.metadata = '[[n:pos, n:vel, n:acc, n:tx...],' \
'[e:pos, e:vel, e:acc, e:tx...],' \
'[u:pos, u:vel, u:acc, u:tx...]]'
self.design = self.get_design_ts(t)
# always first in the list of A, index columns are fixed
self.column_index = np.arange(self.terms)
# param count is the same as terms
self.param_count = self.terms
# save the hash of the object
self.p.hash = crc32(str(self.terms) + VERSION)
def load_parameters(self, params, sigmas, t_ref):
super(Polynomial, self).load_parameters(params=params, sigmas=sigmas)
self.p.t_ref = t_ref
def print_parameters(self, ref_xyz, lat, lon):
params = np.zeros((3,))
for p in np.arange(self.terms):
if p == 0:
params[0], params[1], params[2] = lg2ct(self.p.params[0, 0],
self.p.params[1, 0],
self.p.params[2, 0], lat, lon)
params += ref_xyz.flatten()
elif p > 0:
n = self.p.params[0, p]
e = self.p.params[1, p]
u = self.p.params[2, p]
params = np.append(params, (n*1000, e*1000, u*1000))
return self.format_str.format(*params.tolist())
def get_design_ts(self, ts):
A = np.zeros((ts.size, self.terms))
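        # Vandermonde-style columns: (t - t_ref)**p for p = 0..terms-1
        # (position, velocity, acceleration, ...)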
for p in np.arange(self.terms):
A[:, p] = np.power(ts - self.p.t_ref, p)
return A
class Design(np.ndarray):
def __new__(subtype, Linear, Jumps, Periodic, dtype=float, buffer=None, offset=0, strides=None, order=None):
# Create the ndarray instance of our type, given the usual
# ndarray input arguments. This will call the standard
# ndarray constructor, but return an object of our type.
# It also triggers a call to InfoArray.__array_finalize__
shape = (Linear.design.shape[0], Linear.param_count + Jumps.param_count() + Periodic.param_count)
A = super(Design, subtype).__new__(subtype, shape, dtype, buffer, offset, strides, order)
A[:, Linear.column_index] = Linear.design
# determine the column_index for all objects
col_index = Linear.param_count
for jump in Jumps.table:
# save the column index
if jump.fit:
jump.column_index = np.arange(col_index, col_index + jump.param_count)
# assign the portion of the design matrix
A[:, jump.column_index] = jump.design
# increment the col_index
col_index += jump.param_count
Periodic.column_index = np.arange(col_index, col_index + Periodic.param_count)
A[:, Periodic.column_index] = Periodic.design
# save the object list
A.objects = (Linear, Jumps, Periodic)
# save the number of total parameters
A.linear_params = Linear.param_count
A.jump_params = Jumps.param_count()
A.periodic_params = Periodic.param_count
A.params = Linear.param_count + Jumps.param_count() + Periodic.param_count
# save the constrains matrix
A.constrains = Jumps.constrains
# Finally, we must return the newly created object:
return A
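    # calling a Design instance with a new time vector rebuilds the design matrix for
    # that vector; with constrains=True (and a non-empty constrains matrix) the
    # constraint rows are appended below the observation rows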
def __call__(self, ts=None, constrains=False):
if ts is None:
if constrains and self.constrains.size:
A = self.copy()
# resize matrix (use A.resize so that it fills with zeros)
A.resize((self.shape[0] + self.constrains.shape[0], self.shape[1]), refcheck=False)
# apply constrains
A[-self.constrains.shape[0]:, self.jump_params] = self.constrains
return A
else:
return self
else:
A = np.array([])
for obj in self.objects:
tA = obj.get_design_ts(ts)
if A.size:
A = np.column_stack((A, tA)) if tA.size else A
else:
A = tA
return A
def get_l(self, L, constrains=False):
if constrains and self.constrains.size:
tL = L.copy()
tL.resize((L.shape[0] + self.constrains.shape[0]), refcheck=False)
return tL
return L
def get_p(self, constrains=False):
# return a weight matrix full of ones with or without the extra elements for the constrains
return np.ones(self.shape[0] if not constrains else \
(self.shape[0] + self.constrains.shape[0]))
def remove_constrains(self, v):
# remove the constrains to whatever vector is passed
if self.constrains.size:
return v[0:-self.constrains.shape[0]]
else:
return v
class ETM:
def __init__(self, cnn, soln, no_model=False, FitEarthquakes=True, FitGenericJumps=True, FitPeriodic=True,
interseismic=None):
# to display more verbose warnings
# warnings.showwarning = self.warn_with_traceback
self.C = np.array([])
self.S = np.array([])
self.F = np.array([])
self.R = np.array([])
self.P = np.array([])
self.factor = np.array([])
self.covar = np.zeros((3, 3))
self.A = None
self.param_origin = ESTIMATION
self.soln = soln
self.no_model = no_model
self.FitEarthquakes = FitEarthquakes
self.FitGenericJumps = FitGenericJumps
self.FitPeriodic = FitPeriodic
self.NetworkCode = soln.NetworkCode
self.StationCode = soln.StationCode
stn_id = stationID(self)
logger.info('Creating ETM object for %s' % stn_id)
# save the function objects
self.Linear = Polynomial(cnn, soln.NetworkCode, soln.StationCode, self.soln, self.soln.t,
interseismic=interseismic)
self.Periodic = Periodic(cnn, soln.NetworkCode, soln.StationCode, self.soln, self.soln.t, FitPeriodic)
self.Jumps = JumpTable(cnn, soln.NetworkCode, soln.StationCode, self.soln, self.soln.t, FitEarthquakes,
FitGenericJumps)
# calculate the hash value for this station
# now hash also includes the timestamp of the last time pyETM was modified.
self.hash = soln.hash
        # anything with four or fewer solutions is not worth it
        if soln.solutions > 4 and not no_model:
# to obtain the parameters
self.A = Design(self.Linear, self.Jumps, self.Periodic)
# check if problem can be solved!
if self.A.shape[1] >= soln.solutions:
self.A = None
else:
self.As = self.A(soln.ts)
else:
            logger.info('Four or fewer solutions, cannot calculate ETM')
def run_adjustment(self, cnn, l, plotit=False, soln=None):
if self.A is not None:
# try to load the last ETM solution from the database
etm_objects = cnn.query_float('SELECT * FROM etms WHERE "NetworkCode" = \'%s\' '
'AND "StationCode" = \'%s\' AND soln = \'%s\' AND stack = \'%s\''
% (self.NetworkCode, self.StationCode, self.soln.type,
self.soln.stack_name), as_dict=True)
# DDG: Attention: it is not always possible to retrieve the parameters from the database using the hash
# strategy. The jump table is determined and their hash values calculated. The fit attribute goes into the
# hash value. When an unrealistic jump is detected, the jump is removed from the fit and the final
# parameters are saved without this jump. Thus, when loading the object, the jump will be added to fit but
# it will not be present in the database.
db_hash_sum = sum(obj['hash'] for obj in etm_objects)
jumps_hash = sum(o.p.hash for o in self.Jumps.table if o.fit)
ob_hash_sum = self.Periodic.p.hash + self.Linear.p.hash + self.hash + jumps_hash
cn_object_sum = len([o.p.hash for o in self.Jumps.table if o.fit]) + 2
# -1 to account for the var_factor entry
if len(etm_objects) - 1 == cn_object_sum and db_hash_sum == ob_hash_sum:
logger.info('ETM -> Loading parameters from database (db hash %i; ob hash %i)'
% (db_hash_sum, ob_hash_sum))
                # load the parameters from the db
self.load_parameters(etm_objects, l)
# signal the outside world that the parameters were loaded from the database (no need to save them)
self.param_origin = DATABASE
else:
logger.info('ETM -> Estimating parameters (db hash %i; ob hash %i)'
% (db_hash_sum, ob_hash_sum))
                # signal the outside world that the parameters were estimated (and need to be saved)
self.param_origin = ESTIMATION
# purge table and recompute
cnn.query('DELETE FROM etms WHERE "NetworkCode" = \'%s\' AND '
'"StationCode" = \'%s\' AND soln = \'%s\' AND stack = \'%s\''
% (self.NetworkCode, self.StationCode, self.soln.type, self.soln.stack_name))
if self.soln.type == 'dra':
# if the solution is of type 'dra', delete the excluded solutions
cnn.query('DELETE FROM gamit_soln_excl WHERE "NetworkCode" = \'%s\' AND '
'"StationCode" = \'%s\'' % (self.NetworkCode, self.StationCode))
# use the default parameters from the objects
t_ref = self.Linear.p.t_ref
j = 0
                while j < 10:
                    # reset the flag on each pass so the loop exits once no further
                    # unrealistic jumps are detected
                    do_again = False
                    c = []
f = []
s = []
r = []
p = []
factor = []
for i in range(3):
x, sigma, index, residuals, fact, w = self.adjust_lsq(self.A, l[i])
c.append(x)
s.append(sigma)
f.append(index)
r.append(residuals)
factor.append(fact)
p.append(w)
self.C = np.array(c)
self.S = np.array(s)
self.F = np.array(f)
self.R = np.array(r)
self.factor = np.array(factor)
self.P = np.array(p)
# load_parameters to the objects
self.Linear .load_parameters(self.C, self.S, t_ref)
self.Jumps .load_parameters(self.C, self.S)
self.Periodic.load_parameters(params=self.C, sigmas=self.S)
# determine if any jumps are unrealistic
# DDG Feb-7-2022: to determine if a jump is unrealistic, we check that the postseismic deformation
# is > 1 meter in amplitude. This value is a priori and a study should be done to determine a better
# estimate of what this value should be.
for jump in self.Jumps.table:
if jump.fit and \
jump.p.jump_type in (CO_SEISMIC_JUMP_DECAY,
CO_SEISMIC_DECAY) and \
np.any(np.abs(jump.p.params[:, -jump.nr:]) > 1):
# unrealistic, remove
jump.remove_from_fit()
do_again = True
logger.info('ETM -> Unrealistic jump detected (%s : %s), removing and redoing fit'
% (np.array_str(jump.p.params[:, -jump.nr:].flatten(), precision=1),
type_dict[jump.p.jump_type]))
if not do_again:
break
else:
self.A = Design(self.Linear, self.Jumps, self.Periodic)
if soln:
self.As = self.A(soln.ts)
j += 1
# load the covariances using the correlations
self.process_covariance()
if plotit:
self.plot()
else:
logger.info('ETM -> Empty design matrix')
def process_covariance(self):
cov = np.zeros((3, 1))
# save the covariance between N-E, E-U, N-U
f = self.F[0] * self.F[1] * self.F[2]
# load the covariances using the correlations
cov[0] = np.corrcoef(self.R[0][f], self.R[1][f])[0, 1] * self.factor[0] * self.factor[1]
cov[1] = np.corrcoef(self.R[1][f], self.R[2][f])[0, 1] * self.factor[1] * self.factor[2]
cov[2] = np.corrcoef(self.R[0][f], self.R[2][f])[0, 1] * self.factor[0] * self.factor[2]
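        # cov(a, b) = corr(a, b) * sigma_a * sigma_b, evaluated only on epochs that
        # passed the outlier filter in all three components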
# build a variance-covariance matrix
self.covar = np.diag(np.square(self.factor))
self.covar[0, 1] = cov[0]
self.covar[1, 0] = cov[0]
self.covar[2, 1] = cov[1]
self.covar[1, 2] = cov[1]
self.covar[0, 2] = cov[2]
self.covar[2, 0] = cov[2]
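        # isPD / nearestPD (defined elsewhere in this class) project the matrix onto
        # the nearest positive-definite one if numerical noise made it indefinite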
if not self.isPD(self.covar):
self.covar = self.nearestPD(self.covar)
def save_excluded_soln(self, cnn):
# only save if something to save
if self.F.size > 0:
for date, f, r in zip(self.soln.date,
np.logical_and(np.logical_and(self.F[0], self.F[1]), self.F[2]),
np.sqrt(np.sum(np.square(self.R), axis=0))):
if not cnn.query_float('SELECT * FROM gamit_soln_excl WHERE "NetworkCode" = \'%s\' AND '
'"StationCode" = \'%s\' AND "Project" = \'%s\' AND "Year" = %i AND "DOY" = %i'
% (self.NetworkCode, self.StationCode, self.soln.stack_name,
date.year, date.doy)) \
and not f:
cnn.query('INSERT INTO gamit_soln_excl ("NetworkCode", "StationCode", "Project", "Year", "DOY", '
'residual) VALUES (\'%s\', \'%s\', \'%s\', %i ,%i, %.4f)'
% (self.NetworkCode, self.StationCode, self.soln.stack_name, date.year, date.doy, r))
def save_parameters(self, cnn):
# only save the parameters when they've been estimated, not when loaded from database
if self.param_origin == ESTIMATION:
# insert linear parameters
cnn.insert('etms', row=to_postgres(self.Linear.p.toDict()))
# insert jumps
for jump in self.Jumps.table:
if jump.fit:
cnn.insert('etms', row=to_postgres(jump.p.toDict()))
# insert periodic params
cnn.insert('etms', row=to_postgres(self.Periodic.p.toDict()))
# save the variance factors
cnn.query('INSERT INTO etms ("NetworkCode", "StationCode", soln, object, params, hash, stack) VALUES '
'(\'%s\', \'%s\', \'%s\', \'var_factor\', \'%s\', %i, \'%s\')'
% (self.NetworkCode, self.StationCode, self.soln.type, to_postgres(self.factor),
self.hash, self.soln.stack_name))
def plot(self, pngfile=None, t_win=None, residuals=False, plot_missing=True,
ecef=False, plot_outliers=True, fileio=None):
import matplotlib.pyplot as plt
L = self.l * 1000
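        # self.l (the 3 x n NEU observation array) is expected to be provided by the
        # ETM subclasses defined elsewhere in this module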
# definitions
m = []
if ecef:
labels = ('X [mm]', 'Y [mm]', 'Z [mm]')
else:
labels = (LABEL('north') + ' [mm]',
LABEL('east') + ' [mm]',
LABEL('up') + ' [mm]')
# get filtered observations
if self.A is not None:
filt = self.F[0] * self.F[1] * self.F[2]
m = [(np.dot(self.As, self.C[i])) * 1000
for i in range(3)]
else:
filt = np.ones(self.soln.x.shape[0], dtype=bool)
        # if ECEF output was requested, rotate the NEU observations to XYZ
if ecef:
lneu = self.rotate_2xyz(L)
else:
lneu = L
# determine the window of the plot, if requested
if t_win is not None:
if type(t_win) is tuple:
# data range, with possibly a final value
if len(t_win) == 1:
t_win = (t_win[0], self.soln.t.max())
else:
# approximate a day in fyear
t_win = (self.soln.t.max() - t_win/365.25, self.soln.t.max())
# new behaviour: plots the time series even if there is no ETM fit
if self.A is not None:
# create the axis
if plot_outliers:
f, axis = plt.subplots(nrows=3, ncols=2, sharex=True, figsize=(15, 10)) # type: plt.subplots
axis_vect = (axis[0][0], axis[1][0], axis[2][0])
else:
f, axis = plt.subplots(nrows=3, ncols=1, sharex=True, figsize=(15, 10)) # type: plt.subplots
axis_vect = (axis[0], axis[1], axis[2])
# rotate modeled ts
if not ecef:
mneu = m
rneu = self.R
fneu = self.factor * 1000
else:
mneu = self.rotate_2xyz(m)
# rotate residuals
rneu = self.rotate_2xyz(self.R)
fneu = np.sqrt(np.diag(self.rotate_sig_cov(covar=self.covar))) * 1000
# ################# FILTERED PLOT #################
f.suptitle(LABEL('station') + ' %s (%s %.2f%%) lat: %.5f lon: %.5f\n'
'%s\n%s\n'
'NEU wrms [mm]: %5.2f %5.2f %5.2f' %
(stationID(self),
self.soln.stack_name.upper(),
self.soln.completion,
self.soln.lat,
self.soln.lon,
self.Linear.print_parameters(np.array([self.soln.auto_x, self.soln.auto_y, self.soln.auto_z]),
self.soln.lat, self.soln.lon),
self.Periodic.print_parameters(),
fneu[0],
fneu[1],
fneu[2]),
fontsize=9, family='monospace')
table_n, table_e, table_u = self.Jumps.print_parameters()
tables = (table_n, table_e, table_u)
for i, ax in enumerate(axis_vect):
# plot filtered time series
if not residuals:
ax.plot(self.soln.t[filt], lneu[i][filt], 'ob', markersize=2)
ax.plot(self.soln.ts, mneu[i], 'r')
# error bars
ax.plot(self.soln.ts, mneu[i] - fneu[i] * LIMIT, 'b', alpha=0.1)
ax.plot(self.soln.ts, mneu[i] + fneu[i] * LIMIT, 'b', alpha=0.1)
ax.fill_between(self.soln.ts, mneu[i] - fneu[i] * LIMIT, mneu[i] + fneu[i] * LIMIT,
antialiased=True, alpha=0.2)
else:
ax.plot(self.soln.t[filt], rneu[i][filt]*1000, 'ob', markersize=2)
# error bars
ax.plot(self.soln.ts, - np.repeat(fneu[i], self.soln.ts.shape[0]) * LIMIT, 'b', alpha=0.1)
ax.plot(self.soln.ts, np.repeat(fneu[i], self.soln.ts.shape[0]) * LIMIT, 'b', alpha=0.1)
ax.fill_between(self.soln.ts, -fneu[i] * LIMIT, fneu[i] * LIMIT, antialiased=True, alpha=0.2)
ax.grid(True)
# labels
ax.set_ylabel(labels[i])
p = ax.get_position()
f.text(0.005, p.y0, tables[i], fontsize=8, family='monospace')
# window data
self.set_lims(t_win, plt, ax)
# plot jumps
self.plot_jumps(ax)
# ################# OUTLIERS PLOT #################
if plot_outliers:
for i, ax in enumerate((axis[0][1], axis[1][1], axis[2][1])):
ax.plot(self.soln.t, lneu[i], 'oc', markersize=2)
ax.plot(self.soln.t[filt], lneu[i][filt], 'ob', markersize=2)
ax.plot(self.soln.ts, mneu[i], 'r')
# error bars
ax.plot(self.soln.ts, mneu[i] - fneu[i] * LIMIT, 'b', alpha=0.1)
ax.plot(self.soln.ts, mneu[i] + fneu[i] * LIMIT, 'b', alpha=0.1)
ax.fill_between(self.soln.ts, mneu[i] - fneu[i]*LIMIT, mneu[i] + fneu[i]*LIMIT,
antialiased=True, alpha=0.2)
self.set_lims(t_win, plt, ax)
ax.set_ylabel(labels[i])
ax.grid(True)
if plot_missing:
self.plot_missing_soln(ax)
f.subplots_adjust(left=0.18)
else:
f, axis = plt.subplots(nrows=3, ncols=1, sharex=True, figsize=(15, 10)) # type: plt.subplots
f.suptitle(LABEL('station') + ' %s (%s %.2f%%) lat: %.5f lon: %.5f'
% (stationID(self), self.soln.type.upper(), self.soln.completion,
self.soln.lat, self.soln.lon) +
'\n' + LABEL('not_enough'), fontsize=9, family='monospace')
for i, ax in enumerate((axis[0], axis[1], axis[2])):
ax.plot(self.soln.t, lneu[i], 'ob', markersize=2)
ax.set_ylabel(labels[i])
ax.grid(True)
self.set_lims(t_win, plt, ax)
self.plot_jumps(ax)
if plot_missing:
self.plot_missing_soln(ax)
# save / show plot
if pngfile is not None:
plt.savefig(pngfile)
plt.close()
elif fileio is not None:
plt.savefig(fileio, format='png')
# plt.show()
fileio.seek(0) # rewind to beginning of file
plt.close()
return base64.b64encode(fileio.getvalue()).decode()
else:
self.f = f
self.picking = False
self.plt = plt
axprev = plt.axes([0.85, 0.01, 0.08, 0.055])
bcut = Button(axprev, 'Add jump', color='red', hovercolor='green')
bcut.on_clicked(self.enable_picking)
plt.show()
plt.close()
def onpick(self, event):
import dbConnection
self.f.canvas.mpl_disconnect(self.cid)
self.picking = False
print('Epoch: %s' % pyDate.Date(fyear=event.xdata).yyyyddd())
        jtype = int(input(' -- Enter type of jump (0 = mechanic; 1 = geophysical): '))
if jtype == 1:
relx = eval(input(' -- Enter relaxation (e.g. 0.5, 0.5,0.01): '))
operation = str(input(' -- Enter operation (+, -): '))
print(' >> Jump inserted')
# now insert the jump into the db
cnn = dbConnection.Cnn('gnss_data.cfg')
self.plt.close()
# reinitialize ETM
# wait for 'keep' or 'undo' command
def enable_picking(self, event):
if not self.picking:
print('Entering picking mode')
self.picking = True
self.cid = self.f.canvas.mpl_connect('button_press_event', self.onpick)
else:
print('Disabling picking mode')
self.picking = False
self.f.canvas.mpl_disconnect(self.cid)
def plot_hist(self, pngfile=None, fileio=None):
import matplotlib.pyplot as plt
import matplotlib.mlab as mlab
from scipy.stats import norm
from matplotlib.patches import Ellipse
labels = (LABEL('north') + ' [mm]',
LABEL('east') + ' [mm]',
LABEL('up') + ' [mm]')
if self.A is not None:
filt = self.F[0] * self.F[1] * self.F[2]
f, axis = plt.subplots(nrows=2, ncols=2, figsize=(15, 10)) # type: plt.subplots
f.suptitle(LABEL('station') + ' %s (%s %.2f%%) lat: %.5f lon: %.5f\n'
'VAR (N E U) : %s\n'
'COV (N-E N-U E-U): %s'
% (stationID(self),
self.soln.type.upper(), self.soln.completion,
self.soln.lat, self.soln.lon,
' '.join('%10.3e' % i for i in np.diag(self.covar)),
' '.join('%10.3e' % i for i in [self.covar[0, 1], self.covar[0, 2], self.covar[1, 2]])),
fontsize=9, family='monospace')
n = np.sqrt(np.sum(self.R ** 2, axis=0))
N = self.R[0][n <= 0.05] * 1000
E = self.R[1][n <= 0.05] * 1000
U = self.R[2][n <= 0.05] * 1000
# N-E residuals and error ellipse
ax = axis[0][0]
ax.plot(E, N, 'ob', markersize=2)
# ax.plot(E[filt], N[filt], 'ob', markersize=2)
# ax.plot(E[np.logical_not(filt)], N[np.logical_not(filt)], 'oc', markersize=2)
            # process the covariance matrix: work on a copy so the swap below does not
            # mutate self.covar; swap the N and E variances to match the (E, N) plot axes
            c = self.covar[0:2, 0:2].copy()
            c[1, 1], c[0, 0] = c[0, 0], c[1, 1]
            w, v = np.linalg.eigh(c)
order = w.argsort()[::-1]
w, v = w[order], v[:, order]
theta = np.degrees(np.arctan2(*v[:, 0][::-1]))
            # center the error ellipse at the mean of the plotted (E, N) residuals [mm]
            ellipse = Ellipse((np.mean(E), np.mean(N)),
width = 2. * np.sqrt(w[0]) * 2.5 * 1000,
height = 2. * np.sqrt(w[1]) * 2.5 * 1000,
angle=theta,
facecolor='none',
edgecolor='red',
zorder=3,
label=r'$2.5\sigma$')
ax.add_patch(ellipse)
ax.grid(True)
ax.set_ylabel(labels[0])
ax.set_xlabel(labels[1])
ax.set_title("%s %s-%s" % (LABEL('residual plot'), LABEL('north'), LABEL('east')))
ax.axis('equal')
f.canvas.draw()
ax.legend()
nn = ax.get_ylim()
ee = ax.get_xlim()
# N histogram
ax = axis[0][1]
# (mu, sigma) = norm.fit(N)
n, bins, patches = ax.hist(N, 200, alpha=0.75, facecolor='blue', orientation='horizontal')
# y = mlab.normpdf(bins, mu, sigma)
# ax.plot(y, bins, 'r--', linewidth=2)
ax.grid(True)
ax.set_xlabel(LABEL('frequency'))
ax.set_ylabel(LABEL('N residuals') + ' [mm]')
ax.set_title(LABEL('histogram plot') + ' ' + LABEL('north'))
ax.set_ylim(nn)
# E histogram
ax = axis[1][0]
# (mu, sigma) = norm.fit(E)
n, bins, patches = ax.hist(E, 200, alpha=0.75, facecolor='blue')
# y = mlab.normpdf(bins, mu, sigma)
# ax.plot(bins, y, 'r--', linewidth=2)
ax.grid(True)
ax.set_ylabel(LABEL('frequency'))
ax.set_xlabel(LABEL('E residuals') + ' [mm]')
ax.set_title(LABEL('histogram plot') + ' ' + LABEL('east'))
ax.set_xlim(ee)
# Up histogram
ax = axis[1][1]
# (mu, sigma) = norm.fit(U)
n, bins, patches = ax.hist(U, 200, alpha=0.75, facecolor='blue')
# y = mlab.normpdf(bins, mu, sigma)
# ax.plot(bins, y, 'r--', linewidth=2)
ax.grid(True)
ax.set_ylabel(LABEL('frequency'))
ax.set_xlabel(LABEL('U residuals') + ' [mm]')
ax.set_title(LABEL('histogram plot') + ' ' + LABEL('up'))
#residuals = np.sqrt(np.square(L[0]) + np.square(L[1]) + np.square(L[2])) - \
# np.sqrt(np.square(np.dot(self.A, self.C[0])) + np.square(np.dot(self.A, self.C[1])) +
# np.square(np.dot(self.A, self.C[2])))
#(mu, sigma) = norm.fit(residuals)
#n, bins, patches = plt.hist(residuals, 200, normed=1, alpha=0.75, facecolor='blue')
#y = mlab.normpdf(bins, mu, sigma)
#plt.plot(bins, y, 'r--', linewidth=2)
#plt.title(r'$\mathrm{Histogram\ of\ residuals (mm):}\ \mu=%.3f,\ \sigma=%.3f$' % (mu*1000, sigma*1000))
#plt.grid(True)
if pngfile is not None:
plt.savefig(pngfile)
plt.close()
elif fileio is not None:
plt.savefig(fileio, format='png')
# plt.show()
fileio.seek(0) # rewind to beginning of file
plt.close()
            return base64.b64encode(fileio.getvalue()).decode()
else:
plt.show()
plt.close()
@staticmethod
def autoscale_y(ax, margin=0.1):
"""This function rescales the y-axis based on the data that is visible given the current xlim of the axis.
ax -- a matplotlib axes object
margin -- the fraction of the total height of the y-data to pad the upper and lower ylims"""
def get_bottom_top(line):
xd = line.get_xdata()
yd = line.get_ydata()
lo, hi = ax.get_xlim()
            y_displayed = yd[((xd > lo) & (xd < hi))]
            if y_displayed.size == 0:
                # nothing visible in the current x-window: do not constrain the limits
                return np.inf, -np.inf
            h = np.max(y_displayed) - np.min(y_displayed)
bot = np.min(y_displayed) - margin * h
top = np.max(y_displayed) + margin * h
return bot, top
lines = ax.get_lines()
bot, top = np.inf, -np.inf
for line in lines:
new_bot, new_top = get_bottom_top(line)
if new_bot < bot:
bot = new_bot
if new_top > top:
top = new_top
if bot == top:
ax.autoscale(enable=True, axis='y', tight=False)
ax.autoscale(enable=False, axis='y', tight=False)
else:
ax.set_ylim(bot, top)
def set_lims(self, t_win, plt, ax):
if t_win is None:
# turn on to adjust the limits, then turn off to plot jumps
ax.autoscale(enable=True, axis='x', tight=False)
ax.autoscale(enable=False, axis='x', tight=False)
ax.autoscale(enable=True, axis='y', tight=False)
ax.autoscale(enable=False, axis='y', tight=False)
else:
            if t_win[0] == t_win[1]:
                # t_win may be an immutable tuple: build a new window instead of
                # assigning to its elements
                t_win = (t_win[0] - 1. / 365.25,
                         t_win[1] + 1. / 365.25)
            plt.xlim(t_win)
self.autoscale_y(ax)
def plot_missing_soln(self, ax):
# plot missing solutions
for missing in self.soln.ts_ns:
ax.plot((missing, missing), ax.get_ylim(), color=(1, 0, 1, 0.2), linewidth=1)
# plot the position of the outliers
for blunder in self.soln.ts_blu:
ax.quiver((blunder, blunder), ax.get_ylim(), (0, 0), (-0.01, 0.01), scale_units='height',
units='height', pivot='tip', width=0.008, edgecolors='r')
def plot_jumps(self, ax):
for jump in self.Jumps.table:
if jump.date < self.soln.date[0] or jump.date > self.soln.date[-1]:
continue
c = ':'
color = None
if not jump.fit:
color = 'tab:gray'
elif jump.p.jump_type == GENERIC_JUMP:
c = 'c:'
elif jump.p.jump_type == ANTENNA_CHANGE:
c = 'b:'
elif jump.p.jump_type == REFERENCE_FRAME_JUMP:
color = 'tab:green'
elif jump.p.jump_type == CO_SEISMIC_JUMP_DECAY:
c = 'r:'
elif jump.p.jump_type == CO_SEISMIC_JUMP:
color = 'tab:purple'
else:
continue
ax.plot((jump.date.fyear, jump.date.fyear), ax.get_ylim(),
c, **({'color':color} if color else {}))
def todictionary(self, time_series=False, model=False):
# convert the ETM adjustment into a dictionary
# optionally, output the whole time series and evaluated model as well
L = self.l
# start with the parameters
etm = {
'Network' : self.NetworkCode,
'Station' : self.StationCode,
'lat' : self.soln.lat[0],
'lon' : self.soln.lon[0],
'ref_x' : self.soln.auto_x[0],
'ref_y' : self.soln.auto_y[0],
'ref_z' : self.soln.auto_z[0],
'Jumps' : [to_list(jump.p.toDict()) for jump in self.Jumps.table]
}
if self.A is not None:
etm['Polynomial'] = to_list(self.Linear.p.toDict())
etm['Periodic'] = to_list(self.Periodic.p.toDict())
etm['wrms'] = {'n': self.factor[0],
'e': self.factor[1],
'u': self.factor[2]}
etm['xyz_covariance'] = self.rotate_sig_cov(covar=self.covar).tolist()
etm['neu_covariance'] = self.covar.tolist()
if time_series:
etm['time_series'] = {
't' : np.array([self.soln.t.tolist(), self.soln.mjd.tolist()]).transpose().tolist(),
'mjd' : self.soln.mjd.tolist(),
'x' : self.soln.x.tolist(),
'y' : self.soln.y.tolist(),
'z' : self.soln.z.tolist(),
'n' : L[0].tolist(),
'e' : L[1].tolist(),
'u' : L[2].tolist(),
'residuals' : self.R.tolist(),
'weights' : self.P.transpose().tolist(),
'model_neu' : [] if self.A is None or not model else \
[(np.dot(self.As, self.C[i]).tolist()) for i in range(3)],
'filter' : [] if self.A is None else \
np.logical_and(np.logical_and(self.F[0], self.F[1]), self.F[2]).tolist()
}
return etm
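# Minimal usage sketch (assumes an already-fitted ETM object named `etm`):
#   import json
#   meta = etm.todictionary(time_series=True, model=True)
#   print(json.dumps(meta['wrms']))   # per-component weighted RMS in N, E, U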
def get_xyz_s(self, year, doy, jmp=None, sigma_h=SIGMA_FLOOR_H, sigma_v=SIGMA_FLOOR_V, force_model=False):
# this function find the requested epochs and returns an X Y Z and sigmas
# jmp = 'pre' returns the coordinate immediately before a jump
# jmp = 'post' returns the coordinate immediately after a jump
# jmp = None returns either the coordinate before or after, depending on the time of the jump.
# find this epoch in the t vector
date = pyDate.Date(year=year, doy=doy)
window = None
for jump in self.Jumps.table:
if jump.date == date and \
jump.p.jump_type in (GENERIC_JUMP, CO_SEISMIC_JUMP_DECAY, ANTENNA_CHANGE, CO_SEISMIC_JUMP) and \
jump.fit and \
np.sqrt(np.sum(np.square(jump.p.params[:, 0]))) > 0.02:
window = jump.date
# if no pre or post specified, then determine using the time of the jump
if jmp is None:
if (jump.date.datetime().hour + jump.date.datetime().minute / 60.0) < 12:
jmp = 'post'
else:
jmp = 'pre'
# use the previous or next date to get the APR
# if jmp == 'pre':
# date -= 1
# else:
# date += 1
index = np.where(self.soln.mjd == date.mjd)[0]
neu = np.zeros((3, 1))
L = self.L
ref_pos = np.array([self.soln.auto_x,
self.soln.auto_y,
self.soln.auto_z])
if index.size and self.A is not None:
# found a valid epoch in the t vector
# now see if this epoch was filtered
if np.all(self.F[:, index]) and force_model is False:
# the coordinate is good
xyz = L[:, index]
sig = self.R[:, index]
source = self.soln.stack_name.upper() + ' with ETM solution: good'
else:
# the coordinate is marked as bad
# get the requested epoch from the ETM
idt = np.argmin(np.abs(self.soln.ts - date.fyear))
for i in range(3):
neu[i] = np.dot(self.As[idt, :], self.C[i])
xyz = self.rotate_2xyz(neu) + ref_pos
# Use the deviation from the ETM multiplied by 2.5 to estimate the error
sig = 2.5 * self.R[:, index]
source = self.soln.stack_name.upper() + ' with ETM solution: filtered'
elif not index.size and self.A is not None:
# the coordinate doesn't exist, get it from the ETM
idt = np.argmin(np.abs(self.soln.ts - date.fyear))
source = 'No ' + self.soln.stack_name.upper() + ' solution: ETM'
for i in range(3):
neu[i] = np.dot(self.As[idt, :], self.C[i])
xyz = self.rotate_2xyz(neu) + ref_pos
# since there is no way to estimate the error,
# use the nominal sigma multiplied by 2.5
sig = 2.5 * self.factor[:, np.newaxis]
elif index.size and self.A is None:
# no ETM (too few points), but we have a solution for the requested day
xyz = L[:, index]
# set the uncertainties in NEU by hand
sig = np.array([[9.99], [9.99], [9.99]])
source = self.soln.stack_name.upper() + ' solution, no ETM'
else:
# no ETM (too few points) and no solution for this day, get average
source = 'No ' + self.soln.stack_name.upper() + ' solution, no ETM: mean coordinate'
xyz = np.mean(L, axis=1)[:, np.newaxis]
# set the uncertainties in NEU by hand
sig = np.array([[9.99], [9.99], [9.99]])
if self.A is not None:
# get the velocity of the site
if np.sqrt(np.square(self.Linear.p.params[0, 1]) +
np.square(self.Linear.p.params[1, 1]) +
np.square(self.Linear.p.params[2, 1])) > 0.2:
# fast moving station! bump up the sigma floor
sigma_h = 99.9
sigma_v = 99.9
source += '. fast moving station, bumping up sigmas'
# apply floor sigmas
sig = np.sqrt(np.square(sig) + np.square(np.array([[sigma_h], [sigma_h], [sigma_v]])))
return xyz, sig, window, source
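# e.g. (hypothetical call) xyz, sig, window, source = etm.get_xyz_s(2015, 123)
# yields a 3x1 ECEF coordinate, its sigmas with the floors applied, the date
# of any significant co-located jump, and a string describing the source.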
def rotate_2neu(self, ecef):
return np.array(ct2lg(ecef[0], ecef[1], ecef[2], self.soln.lat, self.soln.lon))
def rotate_2xyz(self, neu):
return np.array(lg2ct(neu[0], neu[1], neu[2], self.soln.lat, self.soln.lon))
def rotate_sig_cov(self, sigmas=None, covar=None):
if sigmas is None and covar is None:
raise pyETMException('Error in rotate_sig_cov: must provide either sigmas or covariance matrix')
R = rotlg2ct(self.soln.lat, self.soln.lon)
if sigmas is not None:
# build a covariance matrix based on sigmas
sd = np.diagflat(np.square(sigmas))
sd[0, 1] = self.covar[0, 1]
sd[1, 0] = self.covar[1, 0]
sd[2, 1] = self.covar[2, 1]
sd[1, 2] = self.covar[1, 2]
sd[0, 2] = self.covar[0, 2]
sd[2, 0] = self.covar[2, 0]
# check that resulting matrix is PSD:
if not self.isPD(sd):
sd = self.nearestPD(sd)
sneu = np.dot(np.dot(R[:, :, 0], sd), R[:, :, 0].transpose())
dneu = np.sqrt(np.diag(sneu))
else:
# full covariance matrix given: rotate it and return the rotated covariance
dneu = np.dot(np.dot(R[:, :, 0], covar), R[:, :, 0].transpose())
return dneu
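# Note the asymmetric return (sketch of both call forms, names hypothetical):
#   dneu = etm.rotate_sig_cov(sigmas=np.array([sx, sy, sz]))  # NEU std devs
#   cneu = etm.rotate_sig_cov(covar=cov_xyz)  # full rotated 3x3 covariance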
def nearestPD(self, A):
"""Find the nearest positive-definite matrix to input
A Python/Numpy port of John D'Errico's `nearestSPD` MATLAB code [1], which
credits [2].
[1] https://www.mathworks.com/matlabcentral/fileexchange/42885-nearestspd
[2] N.J. Higham, "Computing a nearest symmetric positive semidefinite
matrix" (1988): https://doi.org/10.1016/0024-3795(88)90223-6
"""
B = (A + A.T) / 2
_, s, V = np.linalg.svd(B)
H = np.dot(V.T, np.dot(np.diag(s), V))
A2 = (B + H) / 2
A3 = (A2 + A2.T) / 2
if self.isPD(A3):
return A3
spacing = np.spacing(np.linalg.norm(A))
# The above is different from [1]. It appears that MATLAB's `chol` Cholesky
# decomposition will accept matrices with exactly 0-eigenvalue, whereas
# Numpy's will not. So where [1] uses `eps(mineig)` (where `eps` is Matlab
# for `np.spacing`), we use the above definition. CAVEAT: our `spacing`
# will be much larger than [1]'s `eps(mineig)`, since `mineig` is usually on
# the order of 1e-16, and `eps(1e-16)` is on the order of 1e-34, whereas
# `spacing` will, for Gaussian random matrices of small dimension, be on
# the order of 1e-16. In practice, both ways converge, as the unit test
# below suggests.
I = np.eye(A.shape[0])
k = 1
while not self.isPD(A3):
mineig = np.min(np.real(np.linalg.eigvals(A3)))
A3 += I * (-mineig * k ** 2 + spacing)
k += 1
return A3
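# Hedged sketch of the intended use (matrix values hypothetical): a
# covariance matrix that fails the Cholesky test is projected onto the
# nearest positive-definite matrix before use:
#   cov = np.array([[1.0, 0.9, 0.7],
#                   [0.9, 1.0, 0.3],
#                   [0.7, 0.3, 1.0]])   # det < 0, so not PD
#   if not self.isPD(cov):
#       cov = self.nearestPD(cov)   # now np.linalg.cholesky(cov) succeeds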
@staticmethod
def isPD(B):
"""Returns true when input is positive-definite, via Cholesky"""
try:
_ = np.linalg.cholesky(B)
return True
except np.linalg.LinAlgError:
return False
def load_parameters(self, params, l):
factor = 1
index = []
residuals = []
p = []
for param in params:
par = np.array(param['params'])
sig = np.array(param['sigmas'])
o = param['object']
if 'polynomial' == o:
self.Linear.load_parameters(par, sig, param['t_ref'])
elif 'periodic' == o:
self.Periodic.load_parameters(params=par, sigmas=sig)
elif 'jump' == o:
for jump in self.Jumps.table:
if jump.p.hash == param['hash']:
jump.load_parameters(params=par, sigmas=sig)
elif 'var_factor' == o:
# already a vector in the db
factor = par
x = self.Linear.p.params
s = self.Linear.p.sigmas
for jump in self.Jumps.table:
if jump.fit:
x = np.append(x, jump.p.params, axis=1)
s = np.append(s, jump.p.sigmas, axis=1)
x = np.append(x, self.Periodic.p.params, axis=1)
s = np.append(s, self.Periodic.p.sigmas, axis=1)
for i in range(3):
residuals.append(l[i] - np.dot(self.A(constrains=False), x[i, :]))
ss = np.abs(np.divide(residuals[i], factor[i]))
index.append(ss <= LIMIT)
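# reconstruct the down-weights (10**(LIMIT - s), floored at machine eps)
# so the loaded solution reproduces the weight matrix used during adjustment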
f = np.ones((l.shape[1],))
sw = np.power(10, LIMIT - ss[ss > LIMIT])
sw[sw < np.finfo(np.float).eps] = np.finfo(np.float).eps
f[ss > LIMIT] = sw
p.append(np.square(np.divide(f, factor[i])))
self.C = x
self.S = s
self.F = np.array(index)
self.R = np.array(residuals)
self.factor = factor
self.P = np.array(p)
def adjust_lsq(self, Ai, Li):
A = Ai(constrains=True)
L = Ai.get_l(Li, constrains=True)
factor = 1
So = 1
dof = (Ai.shape[0] - Ai.shape[1])
X1 = chi2.ppf(1 - 0.05 / 2, dof)
X2 = chi2.ppf(0.05 / 2, dof)
s = np.array([])
v = np.array([])
C = np.array([])
P = Ai.get_p(constrains=True)
for _ in range(11):
W = np.sqrt(P)
Aw = np.multiply(W[:, None], A)
Lw = np.multiply(W, L)
C = np.linalg.lstsq(Aw, Lw, rcond=-1)[0]
v = L - np.dot(A, C)
# unit variance
So = np.sqrt(np.dot(v, np.multiply(P, v)) / dof)
x = np.power(So, 2) * dof
# obtain the overall uncertainty predicted by lsq
factor = factor * So
# calculate the normalized sigmas
s = np.abs(np.divide(v, factor))
if x < X2 or x > X1:
# if it falls in here it's because it didn't pass the Chi2 test
# reweigh by Mike's method of equal weight until 2 sigma
f = np.ones((v.shape[0], ))
# f[s > LIMIT] = 1. / (np.power(10, LIMIT - s[s > LIMIT]))
# do not allow sigmas > 100 m, which is basically not putting
# the observation in. Otherwise, due to a model problem
# (missing jump, etc) you end up with very unstable inversions
# f[f > 500] = 500
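# down-weight flagged observations: weights decay as 10**(LIMIT - s) and
# are floored at machine epsilon to keep the normal matrix non-singular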
sw = np.power(10, LIMIT - s[s > LIMIT])
sw[sw < np.finfo(np.float).eps] = np.finfo(np.float).eps
f[s > LIMIT] = sw
P = np.square(np.divide(f, factor))
else:
break # cst_pass = True
# make sure there are no values below eps. Otherwise matrix becomes singular
P[P < np.finfo(np.float).eps] = 1e-6
# some statistics
SS = np.linalg.inv(np.dot(A.transpose(), np.multiply(P[:, None], A)))
sigma = So*np.sqrt(np.diag(SS))
# mark observations with sigma <= LIMIT
index = Ai.remove_constrains(s <= LIMIT)
v = Ai.remove_constrains(v)
return C, sigma, index, v, factor, P
@staticmethod
def chi2inv(chi, df):
"""Return prob(chisq >= chi, with df degrees of
freedom).
df must be even.
"""
assert df & 1 == 0
# XXX If chi is very large, exp(-m) will underflow to 0.
m = chi / 2.0
sum = term = np.exp(-m)
for i in range(1, df // 2):
term *= m / i
sum += term
# With small chi and large df, accumulated roundoff error, plus error in
# the platform exp(), can cause this to spill a few ULP above 1.0. For
# example, chi2P(100, 300) on my box has sum == 1.0 + 2.0**-52 at this
# point. Returning a value even a teensy bit over 1.0 is no good, so
# clamp the result at 1.0.
return min(sum, 1.0)
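# Worked check: for df == 2 the series reduces to exp(-chi/2), so
# ETM.chi2inv(4.0, 2) == np.exp(-2.0) ≈ 0.1353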
@staticmethod
def warn_with_traceback(message, category, filename, lineno, file=None, line=None):
log = file if hasattr(file, 'write') else sys.stderr
traceback.print_stack(file=log)
log.write(warnings.formatwarning(message, category, filename, lineno, line))
def get_outliers_list(self):
"""
Function to obtain the outliers based on the ETMs sigma
:return: a list containing the network code, station code and dates of the outliers in the time series
"""
filt = self.F[0] * self.F[1] * self.F[2]
return [(self.NetworkCode, self.StationCode, pyDate.Date(mjd=mjd))
for mjd in self.soln.mjd[~filt]]
class PPPETM(ETM):
def __init__(self, cnn, NetworkCode, StationCode, plotit=False, no_model=False, interseismic=None):
# load all the PPP coordinates available for this station
# exclude ppp solutions in the exclude table and any solution that is more than 100 meters from the auto coord
self.ppp_soln = PppSoln(cnn, NetworkCode, StationCode)
ETM.__init__(self, cnn, self.ppp_soln, no_model)
# no offset applied
self.L = np.array([self.soln.x,
self.soln.y,
self.soln.z])
# reduced to local NEU offsets relative to the station's reference (auto) coordinate
self.l = self.rotate_2neu(np.array([self.ppp_soln.x - self.ppp_soln.auto_x,
self.ppp_soln.y - self.ppp_soln.auto_y,
self.ppp_soln.z - self.ppp_soln.auto_z]))
self.run_adjustment(cnn, self.l, plotit, self.ppp_soln)
# save the parameters to the db
# always save for PPP
if self.A is not None:
self.save_parameters(cnn)
class GamitETM(ETM):
def __init__(self, cnn, NetworkCode, StationCode, plotit=False,
no_model=False, gamit_soln=None, stack_name=None, interseismic=None):
if gamit_soln is None:
self.polyhedrons = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY" FROM stacks '
'WHERE "name" = \'%s\' AND "NetworkCode" = \'%s\' AND '
'"StationCode" = \'%s\' '
'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
% (stack_name, NetworkCode, StationCode))
self.gamit_soln = GamitSoln(cnn, self.polyhedrons, NetworkCode, StationCode, stack_name)
else:
# load the GAMIT polyhedrons
self.gamit_soln = gamit_soln
ETM.__init__(self, cnn, self.gamit_soln, no_model, interseismic=interseismic)
# no offset applied
self.L = np.array([self.gamit_soln.x,
self.gamit_soln.y,
self.gamit_soln.z])
# reduced to local NEU offsets relative to the station's reference (auto) coordinate
self.l = self.rotate_2neu(np.array([self.gamit_soln.x - self.gamit_soln.auto_x,
self.gamit_soln.y - self.gamit_soln.auto_y,
self.gamit_soln.z - self.gamit_soln.auto_z]))
if interseismic:
self.l -= self.Linear.interseismic
self.run_adjustment(cnn, self.l, plotit, self.gamit_soln)
# save parameters to db
# the object will also save parameters if the list object is invoked
if self.A is not None:
self.save_parameters(cnn)
def get_etm_soln_list(self, use_ppp_model=False, cnn=None):
# this function return the values of the ETM ONLY
stn_id = stationID(self)
if self.A is None:
raise pyETMException_NoDesignMatrix('No design matrix available for %s' % stn_id)
elif not use_ppp_model:
# get residuals from GAMIT solutions to GAMIT model
neu = [np.dot(self.A, self.C[i])
for i in range(3)]
else:
# get residuals from GAMIT solutions to PPP model
etm = PPPETM(cnn, self.NetworkCode, self.StationCode)
# DDG: 20-SEP-2018 compare using MJD not FYEAR to avoid round off errors
index = np.isin(etm.soln.mjds, self.soln.mjd)
# use the etm object to obtain the design matrix that matches the dimensions of self.soln.t
neu = [np.dot(etm.As[index, :], etm.C[i])
for i in range(3)]
del etm
rxyz = self.rotate_2xyz(np.array(neu)) + np.array([self.soln.auto_x,
self.soln.auto_y,
self.soln.auto_z])
return [(stn_id, x, y, z, date.year, date.doy, date.fyear)
for x, y, z, date in
zip(rxyz[0],
rxyz[1],
rxyz[2],
self.gamit_soln.date)]
class DailyRep(ETM):
def __init__(self, cnn, NetworkCode, StationCode, plotit=False,
no_model=False, gamit_soln=None, project=None):
if gamit_soln is None:
self.polyhedrons = cnn.query_float('SELECT "X", "Y", "Z", "Year", "DOY" FROM gamit_soln '
'WHERE "Project" = \'%s\' AND "NetworkCode" = \'%s\' AND '
'"StationCode" = \'%s\' '
'ORDER BY "Year", "DOY", "NetworkCode", "StationCode"'
% (project, NetworkCode, StationCode))
self.gamit_soln = GamitSoln(cnn, self.polyhedrons, NetworkCode, StationCode, project)
else:
# load the GAMIT polyhedrons
self.gamit_soln = gamit_soln
ETM.__init__(self, cnn, self.gamit_soln, no_model, False, False, False)
# set the solution type to dra
self.soln.type = 'dra'
# for daily repeatabilities, use the vector of differences
self.l = self.rotate_2neu(np.array([self.gamit_soln.x,
self.gamit_soln.y,
self.gamit_soln.z]))
# for daily repeatabilities, the same vector serves as both L and l
self.L = self.l
self.run_adjustment(cnn, self.l, plotit, self.gamit_soln)
# only save the excluded solutions in this module (DailyRep)
if self.A is not None:
self.save_excluded_soln(cnn)
def get_residuals_dict(self):
# this function return the values of the ETM ONLY
if self.A is None:
raise pyETMException_NoDesignMatrix('No design matrix available for %s' % stationID(self))
neu = [np.dot(self.A, self.C[i])
for i in range(3)]
xyz = self.rotate_2xyz(np.array(neu)) + \
np.array([self.soln.auto_x, self.soln.auto_y, self.soln.auto_z])
rxyz = xyz - self.L
px = np.ones(self.P[0].shape)
py = np.ones(self.P[1].shape)
pz = np.ones(self.P[2].shape)
return [(self.NetworkCode, self.StationCode, x, y, z, sigx, sigy, sigz, date.year, date.doy)
for x, y, z, sigx, sigy, sigz, date in
zip(rxyz[0],
rxyz[1],
rxyz[2],
px,
py,
pz,
self.gamit_soln.date)]
class FileETM(ETM):
def __init__(self, cnn, poly_list=None, plotit=False, no_model=False):
ETM.__init__(self, cnn, poly_list, no_model)
self.soln.type = 'file'
# no offset applied
self.L = np.array([self.soln.x,
self.soln.y,
self.soln.z])
# reduced to local NEU offsets relative to the station's reference (auto) coordinate
self.l = self.rotate_2neu(np.array([self.soln.x - self.soln.auto_x,
self.soln.y - self.soln.auto_y,
self.soln.z - self.soln.auto_z]))
self.run_adjustment(cnn, self.l, plotit, poly_list)
|
demiangomez/Parallel.GAMIT
|
classes/pyETM.py
|
Python
|
gpl-3.0
| 110,316
|
[
"Gaussian"
] |
b18c67febea5e0fc4e1dedf7b920ab9547e6d22686a8d6ab89822a1d2127af10
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# pofix - perform string fixups on incoming .po files.
#
# The purpose of this script is to save translators from having to
# apply various string fixes needed before stable release by hand. It is
# intended to be run on each incoming .po file as the Lord of
# Translations receives it. However, translators may run it on their
# own .po files to be sure, as a second application will harmlessly do
# nothing.
#
# To use this script, give it one or more paths to .po files as
# command-line arguments. Each file will be tweaked as needed.
# It should work on Windows and MacOS X as well as Linux, provided
# you have Python installed.
#
# This script will emit a report line for each file it modifies,
# and save a backup copy of the original with extension "-bak".
#
# This script will tell you when it is obsolete. Run it against all .po
# files in the main Wesnoth tree; when it says none are older than this script,
# it can be discarded (assuming that it has in fact been used to transform
# all incoming .po files in the meantime).
#
# Example usage:
# utils/pofix.py po/wesnoth*/*.po*
# find data/campaigns/ -name '*.cfg' -print0 | xargs -0 utils/pofix.py
#
# To make use of >1 CPU core, you have to rely on xargs. In this sample 10 files
# are handed over to 4 instances of pofix.py:
# ls po/wesnoth*/*.po* | xargs -P 4 -n 10 ./utils/pofix.py
#
#
# Please do make sure to add a comment before any new blocks of conversions
# that states when it was added (current version number is enough) so that
# the file can be cleaned up more easily every now and then.
# Example:
# # conversion added in 1.9.5+svn
# ("foo addwd bar", "foo added bar"),
# # conversion added in 1.9.8+svn
# ("fooba foo", "foobar foo"),
stringfixes = {
"wesnoth" : (
# Convert hyphen-minuses that are being used as minus signs
# to the Unicode minus sign
# conversion added in 1.9.0-svn
(" -25%", " −25%"),
(" -1%", " −1%"),
(" -100%", " −100%"),
# Fix screw up
(" —100%", " −100%"),
# Convert makeshift dashes/straight apostrophes:
# conversion added in 1.9.0-svn
("however - from these", "however — from these"),
("campaign first - click", "campaign first — click"),
("unit type -", "unit type —"),
("experience - 4 for", "experience — 4 for"),
("both worlds - for", "both worlds — for"),
("respected - or simply", "respected — or simply"),
("feared - leader", "feared — leader"),
("- usually in blood - although", "— usually in blood — although"),
("position - although", "position — although"),
("as advisors -", "as advisors —"),
("don't have to - let it", "don't have to — let it"),
("your attacks - they will", "your attacks — they will"),
("upload statistics - Help", "upload statistics — Help"),
("(A) - admin command", "(A) — admin command"),
("(D) - debug only, (N) - network only, (A) - admin only", "(D) — debug only, (N) — network only, (A) — admin only"),
("not empty - duplicate", "not empty — duplicate"),
("Player Info -", "Player Info —"),
("About to upload statistics - Help us make Wesnoth better for you!", "About to upload statistics — Help us make Wesnoth better for you!"),
#the following rule applies to wesnoth/*.po* and to wesnoth-manual/*.po*
("victory objectives - getting", "victory objectives — getting"),
# Straight apostrophes and quotes to curly ones
# conversion added in 1.9.0-svn
("Ga'ash", "Ga’ash"),
("Gart'lo", "Gart’lo"),
("Mar'Ildian", "Mar’Ildian"),
("Marra Di'lek", "Marra Di’lek"),
("Bzz'Kza", "Bzz’Kza"),
("unit's", "unit’s"),
("side's", "side’s"),
("man's", "man’s"),
("player's", "player’s"),
("elf's", "elf’s"),
("turn's", "turn’s"),
("it's best to click the", "it’s best to click the"),
("Don't send", "Don’t send"),
("RACV's", "RACV’s"),
("If you don't", "If you don’t"),
("you don't already own", "you don’t already own"),
("aren't quite as tough", "aren’t quite as tough"),
("units don't incur", "units don’t incur"),
("plague doesn't", "plague doesn’t"),
("mechanical units don't", "mechanical units don’t"),
("I'm Ready", "I’m Ready"),
# Fix capitalization
# conversion added in 1.9.0-svn
("Icelandic translation", "Icelandic Translation"),
("Miscellaneous contributors", "Miscellaneous Contributors"),
# 'Can not' -> 'cannot'
("directory name and can not be installed", "directory name and cannot be installed"),
# More straight to curly conversions
# conversion added in 1.9.0+svn
("Don't make a wish.", "Don’t make a wish."),
("$name's", "$name|’s"),
("$road's", "$road|’s"),
("$login's", "$login|’s"),
("$name|'s turn", "$name|’s turn"),
("if it hasn't moved this", "if it hasn’t moved this"),
("to activate 'delay shroud updates' in the", "to activate ‘delay shroud updates’ in the"),
("update via 'update shroud now' (or the", "update via ‘update shroud now’ (or the"),
("with a 5-4 attack may", "with a 5–4 attack may"),
("with a 9-2 attack can", "with a 9–2 attack can"),
("with a 5-4 attack does", "with a 5–4 attack does"),
("the unit they're fighting. If", "the unit they’re fighting. If"),
("have many 'maximum level'", "have many ‘maximum level’"),
("it is 'on', set an", "it is ‘on’, set an"),
("it is 'off' set a", "it is ‘off’ set a"),
("they won't be kicked", "they won’t be kicked"),
("doesn't work on Undead", "doesn’t work on Undead"),
("within a drake's body enables", "within a drake’s body enables"),
("than the Elves'", "than the Elves’"),
("it's most commonly", "it’s most commonly"),
("it's master. Only", "it’s master. Only"),
("Don't neglect to", "Don’t neglect to"),
("the enemy can't reach your", "the enemy can’t reach your"),
("what you're facing. In", "what you’re facing. In"),
("remain at death's door until", "remain at death’s door until"),
("into an enemy's zone of", "into an enemy’s zone of"),
("Don't move an", "Don’t move an"),
("if you don't have to", "if you don’t have to"),
("unit, but don't attack or", "unit, but don’t attack or"),
("word in Wesnoth's playing screen", "word in Wesnoth’s playing screen"),
("You don't have a", "You don’t have a"),
("Don't ask me", "Don’t ask me"),
("Don't show again", "Don’t show again"),
("with the 'cures'", "with the ‘cures’"),
("You don't have enough", "You don’t have enough"),
("players that don't", "players that don’t"),
("it's -1.", "it’s −1."),
# Correct spelling
# conversion added in 1.9.0+svn
("has a tool-tip", "has a tooltip"),
# correct spelling
# conversion added in 1.9.3+svn
("threadments", "treatments"),
("text='edd'", "text='egg'"),
# Kill ugly hyphenated version
("Re-initialize fonts", "Reinitialize fonts"),
# add the font for Old English
("DejaVuSans.ttf,Andagii.ttf,wqy-zenhei.ttc", "DejaVuSans.ttf,Andagii.ttf,wqy-zenhei.ttc,Junicode-Regular.ttf"),
("DejaVu Sans,Andagii,WenQuanYi Zen Hei", "DejaVu Sans,Andagii,WenQuanYi Zen Hei,Junicode"),
# switch fonts for CJK translations
# added in 1.9.13+svn
("wqy-zenhei.ttc","DroidSansJapanese.ttf,DroidSansFallbackFull.ttf"),
("WenQuanYi Zen Hei","Droid Sans Japanese,Droid Sans Fallback"),
# Fix apostrophes
("SHOW ALL allies'", "SHOW ALL allies’"),
("HIDE ALL allies'", "HIDE ALL allies’"),
# Fix capitalization
("Joystick: number of the cursor x-axis joystick", "Joystick: number of the cursor X-axis joystick"),
# Fix quotation marks
("The nickname '$nick'", "The nickname ‘$nick’"),
),
"wesnoth-anl" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("4p - A New Land", "4p — A New Land"),
("some underground mushroom mines nearby -", "some underground mushroom mines nearby —"),
("A New Land - Help", "A New Land — Help"),
("Our talks are complete -", "Our talks are complete —"),
("some spider's nests in", "some spiders’ nests in"),
("the spider's web. Maybe", "the spider’s web. Maybe"),
("our chances wouldn't nearly be", "our chances wouldn’t nearly be"),
("reduce the enemy's income by", "reduce the enemy’s income by"),
("of the 'Elvish' style villages", "of the ‘Elvish’ style villages"),
("Let's cut you free!", "Let’s cut you free!"),
("'s farms now produce", "’s farms now produce"),
("'s mines now produce", "’s mines now produce"),
),
"wesnoth-aoi" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("was easy to follow - a wide", "was easy to follow — a wide"),
("unmistakable - tree stumps", "unmistakable — tree stumps"),
("question remained - would he", "question remained — would he"),
("this scenario - you must", "this scenario — you must"),
("worse - an orcish", "worse — an orcish"),
# Straight apostrophes and quotes to curly ones
# conversion added in 1.9.0-svn
("Wesmere's furthermost-faring scouts.", "Wesmere’s furthermost-faring scouts."),
("Two days' travel later, the forward scouts", "Two days’ travel later, the forward scouts"),
("Use Elven Scouts and Linaera's power of teleportation", "Use Elven Scouts and Linaera’s power of teleportation"),
("It's of human design...but we", "It’s of human design...but we"),
("Do it. We'll have a guest soon.", "Do it. We’ll have a guest soon."),
("Lord... I'm... I am filled with grief.", "Lord... I’m... I am filled with grief."),
("else we'll never have peace again.", "else we’ll never have peace again."),
("We will come in numbers... (*cough*) You can't imagine..", "We will come in numbers... (*cough*) You can’t imagine.."),
("I'll be waiting... Among the dead...", "I’ll be waiting... Among the dead..."),
("It's done, lord. No-one escaped. No-one", "It’s done, lord. No-one escaped. No-one"),
("we won't find much forage on the march.", "we won’t find much forage on the march."),
("We can't carry on Lord, the men are to tired.", "We can’t carry on Lord, the men are to tired."),
("we'll try again when reinforcements arrive.", "we’ll try again when reinforcements arrive."),
("planning an invasion, I'm sure of it.", "planning an invasion, I’m sure of it."),
("The sun's fully over the horizon.", "The sun’s fully over the horizon."),
("We'll rest a bit on the other side;", "We’ll rest a bit on the other side;"),
("they won't take long to rally.", "they won’t take long to rally."),
("No! This can't be!", "No! This can’t be!"),
("We can't get through, my Lord.", "We can’t get through, my Lord."),
("Never in my life did I dream I'd be bested by mere trolls.", "Never in my life did I dream I’d be bested by mere trolls."),
("We'll wait for reinforcements.", "We’ll wait for reinforcements."),
("Haldric's", "Haldric’s"),
("believe it's a", "believe it’s a"),
("since then they've been", "since then they’ve been"),
("It's hopeless; we've tried everything, and they're still coming back.", "It’s hopeless; we’ve tried everything, and they’re still coming back."),
("There's", "There’s"),
("we're", "we’re"),
("Lord Erlornas didn't drive", "Lord Erlornas didn’t drive"),
("I've been bested, but the combat wasn't fair", "I’ve been bested, but the combat wasn’t fair"),
("I'll have some answers", "I’ll have some answers"),
("let's focus on the task at hand", "let’s focus on the task at hand"),
("We don't want any more undesirables", "We don’t want any more undesirables"),
("Lord... I'm... I am filled with grief", "Lord... I’m... I am filled with grief"),
("else we'll never have peace again", "else we’ll never have peace again"),
("You can't imagine", "You can’t imagine"),
("I'll be waiting", "I’ll be waiting"),
("It's done, lord. No-one escaped. No-one tried to escape. I'm... disturbed", "It’s done, lord. No-one escaped. No-one tried to escape. I’m... disturbed"),
("we'll move out at dawn", "we’ll move out at dawn"),
("we won't find much forage", "we won’t find much forage"),
("We can't carry on Lord", "We can’t carry on Lord"),
("we'll try again when reinforcements arrive", "we’ll try again when reinforcements arrive"),
("I'm sure of it", "I’m sure of it"),
("The sun's fully over the horizon", "The sun’s fully over the horizon"),
("We'll rest a bit on the other side", "We’ll rest a bit on the other side"),
("they won't take long to rally", "they won’t take long to rally"),
("No! This can't be!", "No! This can’t be!"),
("We can't get through", "We can’t get through"),
("I dream I'd be bested", "I dream I’d be bested"),
("We'll wait for reinforcements", "We’ll wait for reinforcements"),
("not frequented even by Wesmere's", "not frequented even by Wesmere’s"),
("Two days' travel later", "Two days’ travel later"),
("Linaera's power", "Linaera’s power"),
("It's of human design", "It’s of human design"),
("We'll have a guest soon", "We’ll have a guest soon"),
("Without Linaera's help", "Without Linaera’s help"),
("The Ka'lian has deliberated", "The Ka’lian has deliberated"),
("they're tired and afraid", "they’re tired and afraid"),
("I'm... disturbed", "I’m... disturbed"),
# Fixed spelling
("no a chance to issue", "not a chance to issue"),
("the men are to tired", "the men are too tired"),
# Added a missing "a"
("but temporary solution", "but a temporary solution"),
# Added a missing comma
("Soon after Erlornas died the elven party", "Soon after Erlornas died, the elven party"),
),
"wesnoth-did" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("A Small Favor -", "A Small Favor —"),
("running away - my horsemen", "running away — my horsemen"),
# Convert straight apostrophes/quotation marks
# conversion added in 1.9.0-svn
("Kreg'a'shar", "Kreg’a’shar"),
("Parthyn's", "Parthyn’s"),
("orcs'", "orcs’"),
("'Allow me to introduce", "“Allow me to introduce"),
("town for a few days.'", "town for a few days.”"),
("'Surely you know that", "“Surely you know that"),
("only in dark magic.'", "only in dark magic.”"),
("You won't truly banish", "You won’t truly banish"),
("I've no wish to", "I’ve no wish to"),
("you've come", "you’ve come"),
("I won't spare", "I won’t spare"),
("Three days' travel", "Three days’ travel"),
("T'shar", "T’shar"),
("Don't say", "Don’t say"),
("it's ridiculous", "it’s ridiculous"),
("I don't see any. Maybe it's", "I don’t see any. Maybe it’s"),
("'zed'", "‘zee’"), # Use the American spelling; philosopher's quotes are being used here
("So, I've finally", "So, I’ve finally"),
("he's threatening", "he’s threatening"),
("It's time he learned", "It’s time he learned"),
("I've been itching", "I’ve been itching"),
("I'm ready", "I’m ready"),
("transformation they'll begin", "transformation they’ll begin"),
("I won't go down", "I won’t go down"),
("I won't see them", "I won’t see them"),
("orc's", "orc’s"),
("'The spells of necromancy", "“The spells of necromancy"),
("spirit world.'", "spirit world.”"),
("'To become a lich, one must first die.'", "“To become a lich, one must first die.”"),
("Malin's", "Malin’s"),
("I've just got", "I’ve just got"),
("We'll see", "We’ll see"),
("when they didn't", "when they didn’t"),
("You can't", "You can’t"),
("What's in it", "What’s in it"),
("Karres's", "Karres’s"),
("Let's get", "Let’s get"),
("bats won't stand", "bats won’t stand"),
("I'm eager to", "I’m eager to"),
("if ye dinna' want tae be a walking pile o'", "if ye dinna’ want tae be a walking pile o’"),
("they don't understand", "they don’t understand"),
("I've got the rest", "I’ve got the rest"),
("Gron'r Hronk", "Gron’r Hronk"),
("K'rrlar Oban", "K’rrlar Oban"),
("doesn't look very", "doesn’t look very"),
("lake's", "lake’s"),
("'They are quite useful in battle,'", "“They are quite useful in battle,”"),
("'but none of them have even a tenth of your potential power.'", "“but none of them have even a tenth of your potential power.”"),
("P'Gareth", "P’Gareth"),
("K'Vark", "K’Vark"),
("he's escaping", "he’s escaping"),
("Drogan's", "Drogan’s"),
("'A life curse goes beyond a joke,'", "“A life curse goes beyond a joke,”"),
("'Poor judgment,'", "“Poor judgment,”"),
("I'll have my", "I’ll have my"),
("'For your final test", "“For your final test"),
("retrieving a book,'", "retrieving a book,”"),
("'The book was", "“The book was"),
("it from me.'", "it from me.”"),
("'They are no", "“They are no"),
("twice now.'", "twice now.”"),
("'Excellent. We travel", "“Excellent. We travel"),
("book inside.'", "book inside.”"),
("Mage Lord's", "Mage Lord’s"),
("mage lord's", "mage lord’s"),
("won't hold back", "won’t hold back"),
("We've got", "We’ve got"),
("you aren't leaving", "you aren’t leaving"),
("now you've given", "now you’ve given"),
("you've got", "you’ve got"),
("humankind's", "humankind’s"),
("I'm not ready to die", "I’m not ready to die"),
# Fix screw up
# conversion added in 1.9.0+svn
("‘The book was", "“The book was"),
("DID", "DiD"),
),
"wesnoth-dm" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("warn you - a party", "warn you — a party"),
("each other - and you'll", "each other — and you’ll"),
("Night is falling - that's", "Night is falling — that’s"),
("work by now - I did not", "work by now — I did not"),
("seeking you - you see", "seeking you — you see"),
("Of course - do you", "Of course — do you"),
("Knalga - the rumor", "Knalga — the rumor"),
("Worse news - the", "Worse news — the"),
("been to the west - will the", "been to the west — will the"),
("the dead - should", "the dead — should"),
("Illuven - lesser", "Illuven — lesser"),
("need protection - cost", "need protection — cost"),
("No thanks - we'll manage by ourselves...", "No thanks — we’ll manage by ourselves..."),
("Let's move on - the less", "Let’s move on — the less"),
("We should camp for the night now - we", "We should camp for the night now — we"),
("Those standing stones - they summon", "Those standing stones — they summon"),
("possible - I want to get us", "possible — I want to get us"),
("they are woses - tree guardians", "they are woses — tree guardians"),
("no alternative - we must get", "no alternative — we must get"),
("things in Wesnoth - we must fight", "things in Wesnoth — we must fight"),
("stirred on the island -", "stirred on the island —"),
("see my greatest achievement - an", "see my greatest achievement — an"),
("must be informed immediately -", "must be informed immediately —"),
("This forest looks quiet - too quiet.", "This forest looks quiet — too quiet."),
("No - you can't be dead!", "No — you can’t be dead!"),
("of our help too - this", "of our help too — this"),
# Fix screw up
# conversion added in 1.9.0-svn
("each other — and you'll", "each other — and you’ll"),
("Night is falling — that's", "Night is falling — that’s"),
("No thanks — we'll manage by ourselves...", "No thanks — we’ll manage by ourselves..."),
("Let's move on — the less", "Let’s move on — the less"),
("No — you can't be dead!", "No — you can’t be dead!"),
# Correct capitalization
# conversion added in 1.9.0-svn
("Clash at the manor", "Clash at the Manor"),
("Shadows in the dark", "Shadows in the Dark"),
("Face of the enemy", "Face of the Enemy"),
# Straight apostrophes and quotes to curly ones
# conversion added in 1.9.0-svn
("Delfador's Memoirs", "Delfador’s Memoirs"),
("'The Great'", "“The Great”"),
("Don't die!", "Don’t die!"),
("Methor's", "Methor’s"),
("I don't like", "I don’t like"),
("I've told you", "I’ve told you"),
("father's", "father’s"),
("After a night's rest", "After a night’s rest"),
("And if it's archers you need", "And if it’s archers you need"),
("Leollyn's", "Leollyn’s"),
("king's", "king’s"),
("Lionel's", "Lionel’s"),
("I'm honored that", "I’m honored that"),
("Here's", "Here’s"),
("It's been a pleasure", "It’s been a pleasure"),
("You'll", "You’ll"),
("I think that's all", "I think that’s all"),
("a reward for Delfador's bravery, I am", "a reward for Delfador’s bravery, I am"),
("a trace of Iliah-Malal's way between", "a trace of Iliah-Malal’s way between"),
("A wizard's staff of power.", "A wizard’s staff of power."),
("am Ulrek, chieftain o' the clan of", "am Ulrek, chieftain o’ the clan of"),
("aware of the King's", "aware of the King’s"),
("barbarian for you. We're", "barbarian for you. We’re"),
("be left in Iliah-Malal's hands.", "be left in Iliah-Malal’s hands."),
("been sent at Asheviere's", "been sent at Asheviere’s"),
("book's help, it will", "book’s help, it will"),
("But as Delfador's meditation deepened, and", "But as Delfador’s meditation deepened, and"),
("But the elder mage's health was taxed", "But the elder mage’s health was taxed"),
("called 'Heir to the Throne'. In it is", "called ‘Heir to the Throne’. In it is"),
("comrades' screams as they", "comrades’ screams as they"),
("Delfador became Garard II's", "Delfador became Garard II’s"),
("Delfador's head. Had that", "Delfador’s head. Had that"),
("Delfador's last great quest,", "Delfador’s last great quest,"),
("Delfador's troop, hurrying west", "Delfador’s troop, hurrying west"),
("Delfador's wandering time with", "Delfador’s wandering time with"),
("enemy. I'm sure they'll be no match", "enemy. I’m sure they’ll be no match"),
("evil in the reader's", "evil in the reader’s"),
("final blow, for Iliah-Malal's un-", "final blow, for Iliah-Malal’s un-"),
("final blow, for Iliah-Malal's un-life must", "final blow, for Iliah-Malal’s un-life must"),
("Following the alliance's victory and the", "Following the alliance’s victory and the"),
("Garard that way! 'Suffer our", "Garard that way! “Suffer our"),
("go against the King's", "go against the King’s"),
("had already in Delfador's time been a", "had already in Delfador’s time been a"),
("his portal before it's too late, and", "his portal before it’s too late, and"),
("hospitality? I'm lost, and freezing", "hospitality? I’m lost, and freezing"),
("how to close Iliah-Malal's portal to the", "how to close Iliah-Malal’s portal to the"),
("Human, ye ha' fought well. I", "Human, ye ha’ fought well. I"),
("Iliah-Malal's forces. But the", "Iliah-Malal’s forces. But the"),
("in having the King's ear; there was", "in having the King’s ear; there was"),
("It's a long story...", "It’s a long story..."),
("It's hopeless, I've lost all track", "It’s hopeless, I’ve lost all track"),
("It's the only way!", "It’s the only way!"),
("Lionel, the King's most trusted general,", "Lionel, the King’s most trusted general,"),
("Malal's army and Weldyn....", "Malal’s army and Weldyn..."),
("Malal's portal! All is", "Malal’s portal! All is"),
("necromancer's footsteps in a", "necromancer’s footsteps in a"),
("night like that! Let's leave this evil", "night like that! Let’s leave this evil"),
("not obey the King's order.", "not obey the King’s order."),
("one of the garrison's message-riders to him,", "one of the garrison’s message-riders to him,"),
("portal. But with Iliah-Malal's army at large", "portal. But with Iliah-Malal’s army at large"),
("rooted in the land's", "rooted in the land’s"),
("say my kin ha' been ungrateful for", "say my kin ha’ been ungrateful for"),
("some of the Book's least dangerous secrets", "some of the Book’s least dangerous secrets"),
("speak with the King's voice!", "speak with the King’s voice!"),
("still have the son's ear, though being", "still have the son’s ear, though being"),
("Sythan's village.", "Sythan’s village."),
("tale called the 'Legend of", "tale called the ‘Legend of"),
("than I thought. Iliah-Malal's offensive has begun.", "than I thought. Iliah-Malal’s offensive has begun."),
("thank you. Now let's", "thank you. Now let’s"),
("The blow interrupted Delfador's meditation. He began", "The blow interrupted Delfador’s meditation. He began"),
("The book's curse is already", "The book’s curse is already"),
("the road...especially where you'll be", "the road... especially where you’ll be"),
("think so. But...Asheviere, Garard's queen and Eldred's mother,", "think so. But... Asheviere, Garard’s queen and Eldred’s mother,"),
("to the north ye'd need to have ta'en, along the", "to the north ye’d need to have ta’en, along the"),
("to the northeast; I'll lead you there.", "to the northeast; I’ll lead you there."),
("to visit her family's demesne and her", "to visit her family’s demesne and her"),
("unseen they reached Asheviere's family demesne.", "unseen they reached Asheviere’s family demesne."),
("wanna cross it, you'll", "wanna cross it, you’ll"),
("We ha' seen those undead", "We ha’ seen those undead"),
("Wesmere'. Prolonged in life", "Wesmere’. Prolonged in life"),
("with orcs now. We'll take", "with orcs now. We’ll take"),
("wrath' indeed... I'll show him wrath!", "wrath” indeed... I’ll show him wrath!"),
("you, but I don't feel very fated. In fact, I've", "you, but I don’t feel very fated. In fact, I’ve"),
),
"wesnoth-dw" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("hearten the folk. And -", "hearten the folk. And —"),
("if you will permit - you", "if you will permit — you"),
("a week ago - wanted", "a week ago — wanted"),
("disturbing that a Kai - and", "disturbing that a Kai — and"),
("- would run here", "— would run here"),
("to be survive.", "to survive."),
# Straight quotes to curly quotes
# conversion added in 1.9.0+svn
("owner's", "owner’s"),
("I'll try on the ring.", "I’ll try on the ring."),
("I'll take this ring", "I’ll take this ring"),
("were many more on the way. I'm", "were many more on the way. I’m"),
("kai's", "kai’s"),
("Laudiss's", "Laudiss’s"),
("'Lord Ravanal'.", "“Lord Ravanal”."),
("Krellis'", "Krellis’"),
("Mal-Ravanal's Forces", "Mal-Ravanal’s Forces"),
("I've never even met", "I’ve never even met"),
("it's licking my hand", "it’s licking my hand"),
("It's kind of cute", "It’s kind of cute"),
("Aren't you going to kill", "Aren’t you going to kill"),
("Tyegëa's", "Tyegëa’s"),
("Let's hope those", "Let’s hope those"),
("It's a storm", "It’s a storm"),
("I'll take it", "I’ll take it"),
("I'll just leave", "I’ll just leave"),
("Don't let him go too", "Don’t let him go too"),
("I wouldn't do it", "I wouldn’t do it"),
("you're helping me", "you’re helping me"),
("He's one", "He’s one"),
("We don't want", "We don’t want"),
("we'll cut", "we’ll cut"),
("You're still squatting", "You’re still squatting"),
("Now you're in", "Now you’re in"),
("here's a yummy", "here’s a yummy"),
("Don't kill me", "Don’t kill me"),
("I've been so lonely", "I’ve been so lonely"),
("But he's tame", "But he’s tame"),
("He'll do what you", "He’ll do what you"),
("He's a really nice", "He’s a really nice"),
("however, and the residents didn't", "however, and the residents didn’t"),
("Cylanna's", "Cylanna’s"),
("Maudin's", "Maudin’s"),
("Imirna's", "Imirna’s"),
("Tyegëa's", "Tyegëa’s"),
("I think something's", "I think something’s"),
("aren't any more", "aren’t any more"),
("to FIND it isn't the", "to FIND it isn’t the"),
("he's dead of course", "he’s dead of course"),
("Caladon's", "Caladon’s"),
("I'VE COME FOR YOUR", "I’VE COME FOR YOUR"),
("if it doesn't want", "if it doesn’t want"),
("I don't know if that", "I don’t know if that"),
("It's MINE!", "It’s MINE!"),
("it's true that you helped", "it’s true that you helped"),
("I won't kill you.", "I won’t kill you."),
("I don't trust you,", "I don’t trust you,"),
("You haven't left yet? My patience", "You haven’t left yet? My patience"),
("I'll carry", "I’ll carry"),
("This doesn't look too bad.", "This doesn’t look too bad."),
("Bring 'em on!", "Bring ’em on!"),
("pendant on it. I'm sure the shape", "pendant on it. I’m sure the shape"),
("be useful. I'll take it.", "be useful. I’ll take it."),
("destroying Mal-Ravanal's henchmen", "destroying Mal-Ravanal’s henchmen"),
("I can't swim, so", "I can’t swim, so"),
("don't like fish", "don’t like fish"),
("And don't touch the ships", "And don’t touch the ships"),
("I'll give you my", "I’ll give you my"),
("She has a reason. Don't", "She has a reason. Don’t"),
("but he hasn't really cared", "but he hasn’t really cared"),
("I'm sure many townsfolk would", "I’m sure many townsfolk would"),
("It doesn't seem to have helped its previous owner. I don't want it.", "It doesn’t seem to have helped its previous owner. I don’t want it."),
("Let's hope those wild wolves", "Let’s hope those wild wolves"),
("don't get to the feast", "don’t get to the feast"),
# Spelling fix in 1.9.0+svn @ 46436
("truely formidible", "truly formidable"),
# Convert fake em dash
("A king who is skilled at combat commands great respect--especially", "A king who is skilled at combat commands great respect — especially"),
# Convert straight apostrophe
("I'll join you right now!", "I’ll join you right now!"),
# Add missing diaeresis
("Teeloa returned to his home", "Teelöa returned to his home"),
# Fix spelling
("I beleive it is undead", "I believe it is undead"),
# Convert straight apostrophe
("But it isn't attacking <i>me</i>.", "But it isn’t attacking <i>me</i>."),
),
"wesnoth-ei" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("Wait, before we go anywhere - who", "Wait, before we go anywhere — who"),
("This adept is weak - we may", "This adept is weak — we may"),
("onward - we must defeat", "onward — we must defeat"),
("playing a sick game - whenever", "playing a sick game — whenever"),
("to take care of - we must", "to take care of — we must"),
("to help us - but the orcs", "to help us — but the orcs"),
("It looks repairable - we", "It looks repairable — we"),
("Run for your - what the", "Run for your — what the"),
# ... and aint -> ain't
# conversion added in 1.9.0-svn
("I aint charging gold -", "I ain’t charging gold —"),
("'T'aint safe", "’T’ain’t safe"),
# Make it unspaced...
# conversion added in 1.9.0-svn
("may be able to help us in - ", "may be able to help us in—"),
("Wait - what just happened?", "Wait — what just happened?"),
# Fix screw up
# conversion added in 1.9.0-svn
("I ain't charging gold —", "I ain’t charging gold —"),
("'T'ain't safe", "’T’ain’t safe"),
("author of Wesnoth's troubles.", "author of Wesnoth’s troubles."),
("Aye, we'll help ye, for", "Aye, we’ll help ye, for"),
("by the orcs fa' sure!", "by the orcs fa’ sure!"),
("c'n stop runnin'...", "c’n stop runnin’..."),
("can get to Weld'n an' all so I", "can get to Weld’n an’ all so I"),
("can't get across. It'll help you to", "can’t get across. It’ll help you to"),
("caves for centuries. We'll no' be scattered now", "caves for centuries. We’ll no’ be scattered now"),
("Commander of the King's Horse. And take", "Commander of the King’s Horse. And take"),
("Deal. I c'n blow'er up once I", "Deal. I c’n blow’er up once I"),
("go north-east. I'll go north-west.", "go north-east. I’ll go north-west."),
("Gweddry's men retreated swiftly", "Gweddry’s men retreated swiftly"),
("Gweddry, I don't think you should", "Gweddry, I don’t think you should"),
("have crossed. Now let's see if we", "have crossed. Now let’s see if we"),
("He says: 'Well done; you have", "He says: “Well done; you have"),
("He will reach Mal-Ravanal's capital, and I", "He will reach Mal-Ravanal’s capital, and I"),
("his terms? I won't accept anything obviously", "his terms? I won’t accept anything obviously"),
("I'll be followin' yah from now", "I’ll be followin’ yah from now"),
("I'm an enginea'. I s'pect you'll have a need", "I’m an enginea’. I s’pect you’ll have a need"),
("It doesn't matter anyway. The", "It doesn’t matter anyway. The"),
("It doesn't matter. They are", "It doesn’t matter. They are"),
("look impassable. I don't think we will", "look impassable. I don’t think we will"),
("lord's hordes at dawn", "lord’s hordes at dawn"),
("may know the undead's weakness.", "may know the undead’s weakness."),
("My clan ha' lived in these", "My clan ha’ lived in these"),
("my eq'pment is. But ev'ryone not ov'r theah on that", "my eq’pment is. But ev’ryone not ov’r theah on that"),
("no refuge for Gweddry's weary men, and", "no refuge for Gweddry’s weary men, and"),
("No, I don't think so. I", "No, I don’t think so. I"),
("not defeat my Lord's minions, it was", "not defeat my Lord’s minions, it was"),
("point north of Glyn's Forest. If we", "point north of Glyn’s Forest. If we"),
("seems, well merited. Mal-Ravanal's", "seems, well merited. Mal-Ravanal’s"),
("serve in my Master's undead hordes!", "serve in my Master’s undead hordes!"),
("services. I bet you're", "services. I bet you’re"),
("several weeks, and Gweddry's men started to", "several weeks, and Gweddry’s men started to"),
("The Bitter Swamp's ill reputation is,", "The Bitter Swamp’s ill reputation is,"),
("the fate of Gweddry's band, but all", "the fate of Gweddry’s band, but all"),
("the men of Gweddry's troop that they", "the men of Gweddry’s troop that they"),
("These people won't listen to reason,", "These people won’t listen to reason,"),
("they can't escape until our", "they can’t escape until our"),
("this river. Now let's go!", "this river. Now let’s go!"),
("those orcs are chasin' you, and if", "those orcs are chasin’ you, and if"),
("to keep the Crown's", "to keep the Crown’s"),
("to know what you're talking about... but,", "to know what you’re talking about... but,"),
("to that signpost ov'r dere. That’s where", "to that signpost ov’r dere. That’s where"),
("undead. But we can't help you until", "undead. But we can’t help you until"),
("undead. ’T’ain’t safe 'round 'ere!", "undead. ’T’ain’t safe ’round ’ere!"),
("up that bridge ov'r theah.", "up that bridge ov’r theah."),
("Very well, we'll protect you from", "Very well, we’ll protect you from"),
("We didn't capture enough of", "We didn’t capture enough of"),
("We're in the land", "We’re in the land"),
("well once the King's forces arrived. There", "well once the King’s forces arrived. There"),
("Well, we didn't have to blow", "Well, we didn’t have to blow"),
("when it blows w'll", "when it blows w’ll"),
("Why don't we see if", "Why don’t we see if"),
("won't be able to", "won’t be able to"),
("work yet complete. Mal-Ravanal's main host has", "work yet complete. Mal-Ravanal’s main host has"),
("year of Konrad II's reign, and there", "year of Konrad II’s reign, and there"),
("your name.'", "your name.”"),
),
"wesnoth-httt" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("and the support of my men - from", "and the support of my men — from"),
("NE - Dan'Tonk", "NE — Dan’Tonk"),
("SE - Fort Tahn", "SE — Fort Tahn"),
("The Valley of Death - The Princess's Revenge", "The Valley of Death — The Princess’s Revenge"),
("the hills - there are undead about!", "the hills — there are undead about!"),
("those gryphon eggs - they", "those gryphon eggs — they"),
("- Delfador's insistence", "— Delfador’s insistence"),
("Look - orcs are", "Look — orcs are"),
("A frail human - or worse, an elf -", "A frail human — or worse, an elf —"),
("out to the heir - I", "out to the heir — I"),
("gruesome sight - a fleet", "gruesome sight — a fleet"),
("introduce myself - I", "introduce myself — I"),
("my warning - prepare", "my warning — prepare"),
("princess - the heiress", "princess — the heiress"),
("don't try to fight us - you", "don’t try to fight us — you"),
("Princess Li'sar - here?", "Princess Li’sar — here?"),
("Look - you can", "Look — you can"),
("century - a generation", "century — a generation"),
("vast human army - his", "vast human army — his"),
#("mother's", "mother’s"),
#("'Fight no one great", "“Fight no one great"),
#("from his shoulders.'", "from his shoulders.”"),
#("You'd", "You’d"),
# Fix screw up
# conversion added in 1.9.0-svn
("NE — Dan'Tonk", "NE — Dan’Tonk"),
("The Valley of Death — The Princess's Revenge", "The Valley of Death — The Princess’s Revenge"),
("— Delfador's insistence", "— Delfador’s insistence"),
("don't try to fight us — you", "don’t try to fight us — you"),
("Princess Li'sar — here?", "Princess Li’sar — here?"),
# Convert straight apostrophes and quotation marks
# conversion added in 1.9.0+svn
("Don't die, Delfador!", "Don’t die, Delfador!"),
("city's finest defenders", "city’s finest defenders"),
("Usadar Q'kai", "Usadar Q’kai"),
("Can't we strike", "Can’t we strike"),
("Managa'Gwin", "Managa’Gwin"),
("up, worm! I'm the boss!", "up, worm! I’m the boss!"),
("'em!", "’em!"),
("I hope he's safe!", "I hope he’s safe!"),
("you, Delfador? You're coming with", "you, Delfador? You’re coming with"),
("Delfador! I can't do it", "Delfador! I can’t do it"),
("days' travel if", "days’ travel if"),
("with me, aren't you, Delfador?", "with me, aren’t you, Delfador?"),
("only six days' march up", "only six days’ march up"),
("Muff Malal's Peninsula", "Muff Malal’s Peninsula"),
("Let's hope these", "Let’s hope these"),
("It's a shame", "It’s a shame"),
("the Elensefar Thieves' Guild. We", "the Elensefar Thieves’ Guild. We"),
("if you didn't trust us,", "if you didn’t trust us,"),
("across from Elensefar's docks.", "across from Elensefar’s docks."),
("from the city's northern gate.", "from the city’s northern gate."),
("flag if it's dark?", "flag if it’s dark?"),
("Let's expel these", "Let’s expel these"),
("banks of Elensefar's port district,", "banks of Elensefar’s port district,"),
("To Konrad's surprise, they", "To Konrad’s surprise, they"),
("across the river's mouth.", "across the river’s mouth."),
("with the Ka'lian, the Great", "with the Ka’lian, the Great"),
("but surely Asheviere's men will", "but surely Asheviere’s men will"),
("of Fire? What's that?", "of Fire? What’s that?"),
("Wesmere, for Asheviere's orcs have", "Wesmere, for Asheviere’s orcs have"),
("The queen's agents were", "The queen’s agents were"),
("Elensefar's rescue had", "Elensefar’s rescue had"),
("it was, Konrad's army avoided", "it was, Konrad’s army avoided"),
("important to Asheviere's", "important to Asheviere’s"),
("Dan'Tonk, we are", "Dan’Tonk, we are"),
("Force Li'sar's surrender (reduce", "Force Li’sar’s surrender (reduce"),
("Elmar's Crossing", "Elmar’s Crossing"),
("\"Li'sar\"", "\"Li’sar\""),
("the princess, Li'sar. The daughter", "the princess, Li’sar. The daughter"),
("I'm no impostor.", "I’m no impostor."),
("I surrender! Don't hurt me,", "I surrender! Don’t hurt me,"),
("said before I'm not an", "said before I’m not an"),
("near, impostor! I'll not be", "near, impostor! I’ll not be"),
("I'll seize their", "I’ll seize their"),
("You've discovered my", "You’ve discovered my"),
("ploy! It doesn't matter, I'll still rout", "ploy! It doesn’t matter, I’ll still rout"),
("foiled, but I'll still flush", "foiled, but I’ll still flush"),
("Well, let's start climbing", "Well, let’s start climbing"),
("the mother Gryphon's eggs!", "the mother Gryphon’s eggs!"),
("circling the mountain's peak. Several", "circling the mountain’s peak. Several"),
("glared in Konrad's direction, their", "glared in Konrad’s direction, their"),
("by the river's edge!", "by the river’s edge!"),
("Isn't there somewhere", "Isn’t there somewhere"),
("Wesnoth. We can't let them", "Wesnoth. We can’t let them"),
("tore at Konrad's army, signaling", "tore at Konrad’s army, signaling"),
("Halgar Du'nar", "Halgar Du’nar"),
("Eventually Delfador's insistence that", "Eventually Delfador’s insistence that"),
("Who... who's here?", "Who... who’s here?"),
("been? I haven't seen you", "been? I haven’t seen you"),
("We can't get in!", "We can’t get in!"),
("But Konrad's party was", "But Konrad’s party was"),
("It's so dark", "It’s so dark"),
("'Never done us any harm?' Why, I", "‘Never done us any harm?’ Why, I"),
("haven't had such", "haven’t had such"),
("What's this? Gryphons", "What’s this? Gryphons"),
("your anger. Relgorn's manner may", "your anger. Relgorn’s manner may"),
("'Guest quarters'.", "‘Guest quarters’."),
("My uncle's family drowned", "My uncle’s family drowned"),
("What's this, more", "What’s this, more"),
("Some sort o' monster's appendage set", "Some sort o’ monster’s appendage set"),
("I was brawlin' with these", "I was brawlin’ with these"),
("stink, and I'm a", "stink, and I’m a"),
("my drift. Canna' swim past", "my drift. Canna’ swim past"),
("no' for lack o' tryin'.", "no’ for lack o’ tryin’."),
("around here. Let's go.", "around here. Let’s go."),
("last outpost o' civilization in", "last outpost o’ civilization in"),
("sway. You’ll no'", "sway. You’ll no’"),
("Northeast o' my keep,", "Northeast o’ my keep,"),
("Princess. Now let's continue", "Princess. Now let’s continue"),
("I can't believe it", "I can’t believe it"),
("Konrad or Li'sar", "Konrad or Li’sar"),
("An earthquake! We'll be trapped!", "An earthquake! We’ll be trapped!"),
("from the ruby's inner fire.", "from the ruby’s inner fire."),
("old wizard's normally inscrutable", "old wizard’s normally inscrutable"),
("Unan-Ka'tall", "Unan-Ka’tall"),
("thinking... I don't really want", "thinking... I don’t really want"),
("in my people's ancient tongue.", "in my people’s ancient tongue."),
("'The River of Bones'. Great and", "‘The River of Bones’. Great and"),
("come under Konrad's leadership, and", "come under Konrad’s leadership, and"),
("on Queen Asheviere's ill-", "on Queen Asheviere’s ill-"),
("belonged to Ila'alion, a mighty", "belonged to Ila’alion, a mighty"),
("Ila'alion's aid as", "Ila’alion’s aid as"),
("do to one's soul? What", "do to one’s soul? What"),
("became my mother's most trusted", "became my mother’s most trusted"),
("mother's rule, but", "mother’s rule, but"),
("my brothers. Asheviere's masterwork of", "my brothers. Asheviere’s masterwork of"),
("of the Lich's once-mortal", "of the Lich’s once-mortal"),
("of the Lich's", "of the Lich’s"),
("Aimucasur's lordship over", "Aimucasur’s lordship over"),
("does to one's soul? What", "does to one’s soul? What"),
("weeks past. We've found no", "weeks past. We’ve found no"),
("Haven't you found", "Haven’t you found"),
("I don't recommend it.", "I don’t recommend it."),
("am Princess Li'sar, the only", "am Princess Li’sar, the only"),
("Konrad and Li'sar)", "Konrad and Li’sar)"),
("Now when we've", "Now when we’ve"),
("I don't know. Maybe", "I don’t know. Maybe"),
("We drakes don't", "We drakes don’t"),
("go further we'll be forced", "go further we’ll be forced"),
("no choice. We'll fight our", "no choice. We’ll fight our"),
("We've found $random", "We’ve found $random"),
("Hurrah! We've killed their", "Hurrah! We’ve killed their"),
("the drakes haven't finished him", "the drakes haven’t finished him"),
("Princess Li'sar, do I", "Princess Li’sar, do I"),
("help him, I'll do my", "help him, I’ll do my"),
("am Princess Li'sar, and we", "am Princess Li’sar, and we"),
("Truly, I don't recommend entering", "Truly, I don’t recommend entering"),
("I've reached the", "I’ve reached the"),
("El'rien", "El’rien"),
("again! We don't know what", "again! We don’t know what"),
("was afraid that's what you", "was afraid that’s what you"),
("you, Lord El'rien. The hospitality", "you, Lord El’rien. The hospitality"),
("El'rien, we have", "El’rien, we have"),
("Elf, but I'm afraid I", "Elf, but I’m afraid I"),
("is rightfully Konrad's. You have", "is rightfully Konrad’s. You have"),
("Delfador! Li'sar has become", "Delfador! Li’sar has become"),
("friend. I don't want to", "friend. I don’t want to"),
("are mistaken. Li'sar is the", "are mistaken. Li’sar is the"),
("Li'sar should be", "Li’sar should be"),
("go with Li'sar.", "go with Li’sar."),
("Tell me, Li'sar, are you", "Tell me, Li’sar, are you"),
("been with Li'sar in the", "been with Li’sar in the"),
("dangerous! I didn't even think", "dangerous! I didn’t even think"),
("Kalenz's champion courtiers", "Kalenz’s champion courtiers"),
("Konrad's small band", "Konrad’s small band"),
("Konrad's heart leapt", "Konrad’s heart leapt"),
("the traitor Li'sar, with the", "the traitor Li’sar, with the"),
("Delfador, don't you think", "Delfador, don’t you think"),
("but it isn't to be.", "but it isn’t to be."),
("Konrad and Li'sar, to", "Konrad and Li’sar, to"),
("their forces! Let's see if", "their forces! Let’s see if"),
("knew that Asheviere's wrath would", "knew that Asheviere’s wrath would"),
("little prince's broken body,", "little prince’s broken body,"),
("if it weren't for her", "if it weren’t for her"),
("all of Asheviere's evil,", "all of Asheviere’s evil,"),
("this Li'sar is fit", "this Li’sar is fit"),
("mother. The land's blood is", "mother. The land’s blood is"),
("Li'sar strikes a", "Li’sar strikes a"),
("at last! Li'sar! You will", "at last! Li’sar! You will"),
("but I won't ever forget", "but I won’t ever forget"),
("was ended. Li'sar, daughter of", "was ended. Li’sar, daughter of"),
("Delfador became Li'sar's High Counselor,", "Delfador became Li’sar’s High Counselor,"),
("noble in Li'sar's court. He married Li'sar, and together", "noble in Li’sar’s court. He married Li’sar, and together"),
("is sleeping! You'd better not", "is sleeping! You’d better not"),
("reach it, Li'sar. I hope", "reach it, Li’sar. I hope"),
("know what I'm doing. Come,", "know what I’m doing. Come,"),
("of battle, Eldred's men turned", "of battle, Eldred’s men turned"),
("Garard's queen, Asheviere,", "Garard’s queen, Asheviere,"),
("than her husband's had.", "than her husband’s had."),
("But Garard's arch-mage, Delfador,", "But Garard’s arch-mage, Delfador,"),
("with his mother's advice", "with his mother’s advice"),
("'Fight no one", "“Fight no one"),
("from his shoulders.'", "from his shoulders.”"),
("the end Delfador's men were", "the end Delfador’s men were"),
("son's lifeless body,", "son’s lifeless body,"),
("the great mage's staff.", "the great mage’s staff."),
("news of Asheviere's orders reached", "news of Asheviere’s orders reached"),
("youngest of Garard's nephews, thereby", "youngest of Garard’s nephews, thereby"),
("sadly as Asheviere's reign of", "sadly as Asheviere’s reign of"),
# fix added in 1.10.0+svn
("Second, who you most", "Second, whom you most"),
),
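    # A minimal sketch of how these (old, new) pairs are consumed (the
    # helper and dict names below are hypothetical; the actual driver
    # lives elsewhere in this script):
    #
    #   def apply_stringfixes(content, pairs):
    #       # Pairs are applied in file order, which is why the later
    #       # "Fix screw up" entries can repair strings left
    #       # half-converted by earlier passes.
    #       for old, new in pairs:
    #           content = content.replace(old, new)
    #       return content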
"wesnoth-l" : (
# Typo fix at r44124
("devestating", "devastating"),
# Convert makeshift dashes and other stuff:
# conversion added in 1.9.0-svn
("are on the hunt - and", "are on the hunt — and"),
("and ruthlessness - and their", "and ruthlessness — and their"),
("of death - would only", "of death — would only"),
("my father - your grandfather - brought", "my father — your grandfather — brought"),
("catacombs - cover me.", "catacombs — cover me."),
("Liberty - Epilogue", "Liberty — Epilogue"),
("'If you wish to find us, head southwest. When you reach the land's end, just keep going. See you in the Three Sisters, old friend. - Relnan'", "“If you wish to find us, head southwest. When you reach the land’s end, just keep going. See you in the Three Sisters, old friend. ― Relnan”"),
# Fix screw up
# conversion added in 1.9.0-svn
("'If you wish to find us, head southwest. When you reach the land's end, just keep going. See you in the Three Sisters, old friend. — Relnan'", "“If you wish to find us, head southwest. When you reach the land’s end, just keep going. See you in the Three Sisters, old friend. ― Relnan”"),
# Straight apostrophes and quotes to curly ones
# conversion added in 1.9.0-svn
("By the sound of the wolves, the scent trail of Fal Khag's killers ends here.", "By the sound of the wolves, the scent trail of Fal Khag’s killers ends here."),
("But when the effects fall on mere peasants, the wrong of it may not seem so obvious from a noble's chair.", "But when the effects fall on mere peasants, the wrong of it may not seem so obvious from a noble’s chair."),
("Then they'll be back in force.", "Then they’ll be back in force."),
("Well, it won't be long until they report back to the local garrison with the details of your encounter.", "Well, it won’t be long until they report back to the local garrison with the details of your encounter."),
("In your many years as Delwyn's magistrate I have never known your judgment to err.", "In your many years as Delwyn’s magistrate I have never known your judgment to err."),
("there would be no help for the villagers, ground beneath the wheels of Asheviere's wrath.", "there would be no help for the villagers, ground beneath the wheels of Asheviere’s wrath."),
("After more than a week of fierce fighting, the main body of Asheviere's", "After more than a week of fierce fighting, the main body of Asheviere’s"),
("I think it's working!", "I think it’s working!"),
("It's done.", "It’s done."),
("I'm inside the tower! I'm going", "I’m inside the tower! I’m going"),
("I think... I think they're trying to storm Halstead itself... the fools!", "I think... I think they’re trying to storm Halstead itself... the fools!"),
("Baldras, Gwydion is Lord Maddock's son. This battle isn't just about Annuvin anymore.", "Baldras, Gwydion is Lord Maddock’s son. This battle isn’t just about Annuvin anymore."),
("They're in for a surprise.", "They’re in for a surprise."),
("They must think Wesnoth's army is spread out right now.", "They must think Wesnoth’s army is spread out right now."),
("I hope you're right.", "I hope you’re right."),
("I don't see how we can bring it down.", "I don’t see how we can bring it down."),
("the base of Halstead's frozen waves of stone.", "the base of Halstead’s frozen waves of stone."),
("from the earth's living rock.", "from the earth’s living rock."),
("338 years before Asheviere's betrayal,", "338 years before Asheviere’s betrayal,"),
("I'm the crazy one. Let's finish this folly.", "I’m the crazy one. Let’s finish this folly."),
("I haven't run this much in years.", "I haven’t run this much in years."),
("They've seen us, RUN!", "They’ve seen us, RUN!"),
("we're all in big trouble...", "we’re all in big trouble..."),
("so if we're careful enough we", "so if we’re careful enough we"),
("They're looking for us.", "They’re looking for us."),
("I'm not sure where we're supposed to go now.", "I’m not sure where we’re supposed to go now."),
("from sleep... we're not exactly sure.", "from sleep... we’re not exactly sure."),
("We don't. You have sought us out.", "We don’t. You have sought us out."),
("The former King's magic ministry", "The former King’s magic ministry"),
("It's not noble work, but our pursuits require plenty of gold.", "It’s not noble work, but our pursuits require plenty of gold."),
("Rest well tonight, because tomorrow's battle", "Rest well tonight, because tomorrow’s battle"),
("It's an ambush!", "It’s an ambush!"),
("I am seen so far from my Lord's borders.", "I am seen so far from my Lord’s borders."),
("It's this or nothing.", "It’s this or nothing."),
("You just said we can't beat their entire army!", "You just said we can’t beat their entire army!"),
("They mustn't take one step without", "They mustn’t take one step without"),
("What's worse is that she appears to", "What’s worse is that she appears to"),
("'We fled like criminals in the night, but we made sure that no one would harass us anymore.'", "“We fled like criminals in the night, but we made sure that no one would harass us anymore.”"),
("'Baldras, You would have been proud. We gave 'em hell. But in the end, it wasn't enough.'", "“Baldras, You would have been proud. We gave ’em hell. But in the end, it wasn’t enough.”"),
("with bitter humor he realized that Lord Maddock's men", "with bitter humor he realized that Lord Maddock’s men"),
("convinced Asheviere's second in command", "convinced Asheviere’s second in command"),
("As the main body of Asheviere's army", "As the main body of Asheviere’s army"),
("The spectacle of Halstead's destruction stunned them into", "The spectacle of Halstead’s destruction stunned them into"),
("Indeed, Asheviere's armies", "Indeed, Asheviere’s armies"),
("a second time, we're going to", "a second time, we’re going to"),
("I don't understand.", "I don’t understand."),
("Unless you want me to round up the city's", "Unless you want me to round up the city’s"),
("the King's son and betrayer.", "the King’s son and betrayer."),
("Queen's", "Queen’s"),
("the King's rule", "the King’s rule"),
("But we'll need to chase them all down if we're going to stop them.", "But we’ll need to chase them all down if we’re going to stop them."),
("And Harper... don't get yourself killed. I'm responsible for you now that your father is gone, and I won't dishonor his memory by breaking my promise to keep you safe.", "And Harper... don’t get yourself killed. I’m responsible for you now that your father is gone, and I won’t dishonor his memory by breaking my promise to keep you safe."),
("All right now, let's", "All right now, let’s"),
("They're getting closer", "They’re getting closer"),
("They'll come out when they see those riders, or us, approaching, but there aren't many of them... I wouldn't count on them being able to protect the village alone for long.", "They’ll come out when they see those riders, or us, approaching, but there aren’t many of them... I wouldn’t count on them being able to protect the village alone for long."),
("What's happening here?", "What’s happening here?"),
("Some mages are thrown out of the mage's", "Some mages are thrown out of the mage’s"),
("In the process they pick up a good deal of the Knight's", "In the process they pick up a good deal of the Knight’s"),
("take responsibility for the community's", "take responsibility for the community’s"),
("the village's wisest and most", "the village’s wisest and most"),
("but as Asheviere's grip", "but as Asheviere’s grip"),
("I don't want to touch it.", "I don’t want to touch it."),
("With bitter humor he realized that Lord Maddock's", "With bitter humor he realized that Lord Maddock’s"),
# Insert a diaeresis
("I am not so naive", "I am not so naïve"),
),
"wesnoth-low" : (
# Spelling fixes required at r44124
("engaged wit the defenders", "engaged with the defenders"),
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("follow you, Kalenz - but", "follow you, Kalenz — but"),
("Kalenz - lead us", "Kalenz — lead us"),
("them aid - it's clear", "them aid — it’s clear"),
# Fix screw up
# conversion added in 1.9.0-svn
("them aid — it's clear", "them aid — it’s clear"),
# Correct capitalization
# conversion added in 1.9.0-svn
("Breaking the siege", "Breaking the Siege"),
("Council ruling", "Council Ruling"),
("The Chief must die", "The Chief Must Die"),
("News from the front", "News from the Front"),
("Battle of the book", "Battle of the Book"),
("Council of hard choices", "Council of Hard Choices"),
("Ka'lian under attack", "Ka’lian under Attack"), # Also convert apostrophe to its curly form
("Hostile mountains", "Hostile Mountains"),
("Let's storm one of the orcs", "Let’s storm one of the orcs"),
("Let's see if ever the payment", "Let’s see if ever the payment"),
("Hasn't enough blood been", "Hasn’t enough blood been"),
("Don't dare to hurt her!", "Don’t dare to hurt her!"),
# Experimental conversion - apostrophes and em dashes
# Legend of Wesmere only
# Simons Mith, 23/08/2010, build 45968
("\"T'baran\"", "\"T’baran\""),
("Absolutely. Let's do it!", "Absolutely. Let’s do it!"),
("After the council's decision, Kalenz", "After the council’s decision, Kalenz"),
("and trolls dinna' go there", "and trolls dinna’ go there"),
("are but children's tales...still, be", "are but children’s tales...still, be"),
("are fresher; I'll go.", "are fresher; I’ll go."),
("at the Ka'lian took an", "at the Ka’lian took an"),
("behind me; I'll guard you", "behind me; I’ll guard you"),
("Believing that Wesmere's seeming safety", "Believing that Wesmere’s seeming safety"),
("both their mother's healing gifts", "both their mother’s healing gifts"),
("But Kalenz's story was", "But Kalenz’s story was"),
("But the Ka'lian's gold is the Ka'lian's. Would you", "But the Ka’lian’s gold is the Ka’lian’s. Would you"),
("by the council's passivity. But", "by the council’s passivity. But"),
("carry out Crelanu's dangerous but", "carry out Crelanu’s dangerous but"),
("Chieftain? But he's guarded better", "Chieftain? But he’s guarded better"),
("Cleodil's doubt is", "Cleodil’s doubt is"),
("council in Ka'lian and enlist", "council in Ka’lian and enlist"),
("Crelanu's book ...", "Crelanu’s book ..."),
("Crelanu's philter affecting", "Crelanu’s philter affecting"),
("Crelanu's place should", "Crelanu’s place should"),
("Dancer's Green", "Dancer’s Green"),
("different matter; I'm sure I", "different matter; I’m sure I"),
("do. I ha' heard tale", "do. I ha’ heard tale"),
("Don't flee. We", "Don’t flee. We"),
("Don't worry overmuch", "Don’t worry overmuch"),
("dose of Crelanu's invisibility philter. I'll go back", "dose of Crelanu’s invisibility philter. I’ll go back"),
("elves and I'll get da", "elves and I’ll get da"),
("Elves' Last Stand", "Elves’ Last Stand"),
("eventually, Landar's increasingly harsh", "eventually, Landar’s increasingly harsh"),
("find some o' my people", "find some o’ my people"),
("for Landar's revolt. But", "for Landar’s revolt. But"),
("from the Ka'lian.", "from the Ka’lian."),
("from the wolves-", "from the wolves —"),
("Gentlemen, don't squabble. We", "Gentlemen, don’t squabble. We"),
("get it. I'll catch up", "get it. I’ll catch up"),
("go, Kalenz, I'll guard the Ka'lian till our", "go, Kalenz, I’ll guard the Ka’lian till our"),
("gold and we'll get", "gold and we’ll get"),
("hate in Landar's heart. Something", "hate in Landar’s heart. Something"),
("heads! They ha' been too", "heads! They ha’ been too"),
("horse-pokers!! Run, let's get out", "horse-pokers!! Run, let’s get out"),
("How? Don't tell me", "How? Don’t tell me"),
("humans fall, we'll be", "humans fall, we’ll be"),
("I agree. What's done is", "I agree. What’s done is"),
("I dinna' think it", "I dinna’ think it"),
("If it weren't for Kalenz", "If it weren’t for Kalenz"),
("If the Ka'lian is too", "If the Ka’lian is too"),
("is the council's decision.", "is the council’s decision."),
("It willna' be easy.", "It willna’ be easy."),
("It's an ancient", "It’s an ancient"),
("It's not a dastone, it's the stone:", "It’s not a dastone, it’s the stone:"),
("It's not much", "It’s not much"),
("It's Olurf! He", "It’s Olurf! He"),
("It's the source", "It’s the source"),
("It's...it's monstrous!", "It’s...it’s monstrous!"),
("I'm sorry I", "I’m sorry I"),
("Kalenz. The Ka'lian council should", "Kalenz. The Ka’lian council should"),
("Kalenz's long childhood", "Kalenz’s long childhood"),
("Kalenz's veterans joined", "Kalenz’s veterans joined"),
("Ka'lian do...How do", "Ka’lian do...How do"),
("Landar! You're back! All", "Landar! You’re back! All"),
("Landar's remaining followers' lives were", "Landar’s remaining followers’ lives were"),
("learn of Kalenz's last and", "learn of Kalenz’s last and"),
("left the Ka'lian he had", "left the Ka’lian he had"),
("life by Crelanu's philter, he", "life by Crelanu’s philter, he"),
("make a dwarf's hackles rise;", "make a dwarf’s hackles rise;"),
("Maybe they don't", "Maybe they don’t"),
("minus expenses that's about even.", "minus expenses that’s about even."),
("my kin willna' be happy", "my kin willna’ be happy"),
("my men ha' all kept", "my men ha’ all kept"),
("No, don't leave your", "No, don’t leave your"),
("Now that we've retrieved the", "Now that we’ve retrieved the"),
("of the Kalenz's elves!", "of the Kalenz’s elves!"),
("of the Ka'lian, for it", "of the Ka’lian, for it"),
("on the Ka'lian! But if", "on the Ka’lian! But if"),
("one of Aquagar's creatures. Who", "one of Aquagar’s creatures. Who"),
("orcs alone! It's all over!", "orcs alone! It’s all over!"),
("orcs are everybody's enemy. When", "orcs are everybody’s enemy. When"),
("Our wolves' teeth will", "Our wolves’ teeth will"),
("o' this river", "o’ this river"),
("Perhaps the dwarves' intransigence can", "Perhaps the dwarves’ intransigence can"),
("poorly. The Ka'lian will need", "poorly. The Ka’lian will need"),
("raid on Kalenz's home, it", "raid on Kalenz’s home, it"),
("remnant evil Crelanu's philter had", "remnant evil Crelanu’s philter had"),
("responded to Cleodil's", "responded to Cleodil’s"),
("stinking trolls ha' stepped on", "stinking trolls ha’ stepped on"),
("take back what's ours!", "take back what’s ours!"),
("take the Ka'lian, all is", "take the Ka’lian, all is"),
("the bright moon's face again!", "the bright moon’s face again!"),
("the fun wi' the orcs", "the fun wi’ the orcs"),
("the fun. I'd ha' brought a", "the fun. I’d ha’ brought a"),
("The orcs ha' been defeated.", "The orcs ha’ been defeated."),
("the rest o'", "the rest o’"),
("their Great Chief's place. You", "their Great Chief’s place. You"),
("there! Now, let's", "there! Now, let’s"),
("They are no' fighting for gold, they're here for", "They are no’ fighting for gold, they’re here for"),
("they seem. Landar's", "they seem. Landar’s"),
("to the Ka'lian was already", "to the Ka’lian was already"),
("to the Ka'lian!", "to the Ka’lian!"),
("to the Ka'lian. Thanks to", "to the Ka’lian. Thanks to"),
("to the Ka'lian. There is", "to the Ka’lian. There is"),
("to the Ka'lian...", "to the Ka’lian..."),
("to the saurians' treasury was", "to the saurians’ treasury was"),
("Too late, we've taken all", "Too late, we’ve taken all"),
("treasury's garrison before", "treasury’s garrison before"),
("us. We ha' been forced", "us. We ha’ been forced"),
("Very soon, Landar's army appeared...", "Very soon, Landar’s army appeared..."),
("we have what's left of", "we have what’s left of"),
("We'll crush those", "We’ll crush those"),
("We'll need to", "We’ll need to"),
("What 'stone', foul and", "What ‘stone’, foul and"),
("What's more, King", "What’s more, King"),
("What's that, and", "What’s that, and"),
("What's this? It", "What’s this? It"),
("Where there's a contract,", "Where there’s a contract,"),
("WHo murdered El'Isomithir!", "WHo murdered El’Isomithir!"),
("With Crelanu's book in", "With Crelanu’s book in"),
("with Kalenz's army.", "with Kalenz’s army."),
("won! The Ka'lian is safe!", "won! The Ka’lian is safe!"),
("ye think we'd let you", "ye think we’d let you"),
("Yesss! It's the elves", "Yesss! It’s the elves"),
("yet over. Aquagar's curse was", "yet over. Aquagar’s curse was"),
("you may. They'll be back.", "you may. They’ll be back."),
("you mean, 'diverted their attention'?", "you mean, ‘diverted their attention’?"),
("you our army's vanguard? Hurry,", "you our army’s vanguard? Hurry,"),
("You won't get very", "You won’t get very"),
("'dastone'. I am", "‘dastone’. I am"),
# Fix capitalization
("\"LOW\"", "\"LoW\""),
# Fix apostrophes...
("while the foes'", "while the foes’"),
("Don't let them lure", "Don’t let them lure"),
# Spelling fix added in 1.9.9
("brake the treaty!", "break the treaty!"),
),
"wesnoth-multiplayer" : (
# Spelling fix required at r44124.
("helps alot", "helps a lot"),
# Convert makeshift dashes
# conversion added in 1.9.0-svn
("2p -", "2p —"),
("3p -", "3p —"),
("4p -", "4p —"),
("5p -", "5p —"),
("6p -", "6p —"),
("8p -", "8p —"),
("9p -", "9p —"),
("- - Seven Stones and Eleven", "— Seven Stones and Eleven"),
("- - Seven Stones - and the Elven", "— Seven Stones — and the Elven"),
("Bramwythl was left behind - in their haste, no one had remembered to find", "Bramwythl was left behind — in their haste, no one had remembered to find"),
("treasure that had been lost in these watery caves- a spear whose head was", "treasure that had been lost in these watery caves— a spear whose head was"),
("Single player mode - uses the reduced strength spawns", "Single player mode — uses the reduced strength spawns"),
("Two player mode - uses the full strength spawns.", "Two player mode — uses the full strength spawns."),
("Dark Forecast - a random survival scenario", "Dark Forecast — a random survival scenario"),
("They appear along the north, south and west map edges - though they are most", "They appear along the north, south and west map edges — though they are most"),
("- map design, spawn groups and scenario concept", "— map design, spawn groups and scenario concept"),
("- WML implementation", "— WML implementation"),
("- So this is the accursed valley - a land ours for the taking.", "So this is the accursed valley — a land ours for the taking."),
("watery caves- a spear whose", "watery caves— a spear whose"),
("-- map design, spawn groups and scenario concept", "— map design, spawn groups and scenario concept"),
("- WML implementation", "— WML implementation"),
("lair of Chak'kso Ney'yks, a legendary", "lair of Chak’kso Ney’yks, a legendary"),
("steal the Basilisk's gold.", "steal the Basilisk’s gold."),
("the Basilisk's cave,", "the Basilisk’s cave,"),
("(translation: ''The Hunts-Foes-In-", "(translation: “The Hunts-Foes-In-"),
("Boiling-Rivers Clan'').", "Boiling-Rivers Clan”)."),
("text: ''After he was", "text: “After he was"),
("Upper World, Chak'kso's enraged", "Upper World, Chak’kso’s enraged"),
("through ''pathways of stone''", "through ‘pathways of stone’"),
("of Chak'kso's hatred.'' Confused,", "of Chak’kso’s hatred.” Confused,"),
("was entitled ''Ney'yks of", "was entitled “Ney’yks of"),
("Gaze,'' and told", "Gaze,” and told"),
("as the ''Chak'kso Ney'yks,'' a name", "as the “Chak’kso Ney’yks,” a name"),
("tongue, meant ''Eldest Child of Chak'kso.''", "tongue, meant “Eldest Child of Chak’kso.”"),
("Chak'kso was believed", "Chak’kso was believed"),
("as a ''great demon of the earth'' in the", "as a ‘great demon of the earth’ in the"),
("histories, Chak'kso had once", "histories, Chak’kso had once"),
("''The Ney'yks,'' or ''The Eldest Child,'' in the", "“The Ney’yks,” or “The Eldest Child,” in the"),
("'mongst the Statues here was slain", "’mongst the Statues here was slain"),
("kept the area's goblins alive", "kept the area’s goblins alive"),
("wouldn't move. As the", "wouldn’t move. As the"),
("as ''The Dragonbane.'' It was", "as ‘The Dragonbane’. It was"),
("initiated a ''counterattack''", "initiated a ‘counterattack’"),
("lake, no other's, and though", "lake, no other’s, and though"),
("the Serpent's eyes. And the", "the Serpent’s eyes. And the"),
("hero of Wesnoth's Golden Age,", "hero of Wesnoth’s Golden Age,"),
("in the sun's glory.", "in the sun’s glory."),
("warmed by the sun's rays,", "warmed by the sun’s rays,"),
("Aethec Corryn: ''Talael Ryndoc!", "Aethec Corryn: “Talael Ryndoc!"),
("the death I'd have given you!''", "the death I’d have given you!”"),
("tooth of Chak'kso Ney'yks, an Elder", "tooth of Chak’kso Ney’yks, an Elder"),
("very-much-alive Chak'kso Ney'yks, who was, oddly", "very-much-alive Chak’kso Ney’yks, who was, oddly"),
("she hasn't moved her eyes", "she hasn’t moved her eyes"),
("Lose all your team's leaders", "Lose all your team’s leaders"),
("2p — Sulla's Ruins", "2p — Sulla’s Ruins"),
("many of Wesnoth's greatest", "many of Wesnoth’s greatest"),
("4p — Isar's Cross", "4p — Isar’s Cross"),
("6p — Crusaders' Field", "6p — Crusaders’ Field"),
("Frankly, I haven't got a", "Frankly, I haven’t got a"),
	# Fix spelling and punctuation
("enemy spawnpoints and types", "enemy spawn points and types"),
("dog, are yer legs made of stone?!?", "dog, are yer legs made of stone?!"),
),
"wesnoth-nr" : (
# Correct some English usage at revision 44124
("fortifications and siege them", "fortifications and besiege them"),
("Form, up men!", "Form up, men!"),
("the sunlit word", "the sunlit world"),
("bows and a cudgels", "bows and cudgels"),
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("afar -- hence they kept their weapons sharp, and reckoned themselves well ", "afar — hence they kept their weapons sharp, and reckoned themselves well "),
("forgetting that their ancestors had been free - and might have completely ", "forgetting that their ancestors had been free — and might have completely "),
("lives of the people of Dwarven Doors - forever.", "lives of the people of Dwarven Doors — forever."),
("It was an early spring day like any other; the humans - joyless beneath the ", "It was an early spring day like any other; the humans — joyless beneath the "),
("orcish whip - were dispiritedly planting the yearly crop. All at once, the ", "orcish whip — were dispiritedly planting the yearly crop. All at once, the "),
("The orcs have gotten careless - look how easily I stole these weapons and ", "The orcs have gotten careless — look how easily I stole these weapons and "),
("help, unlooked for, arrived in the form of a small band of woodsrunners -- ", "help, unlooked for, arrived in the form of a small band of woodsrunners — "),
("of them to hell! Oh, and just between me and you, it is actually good fun - ", "of them to hell! Oh, and just between me and you, it is actually good fun — "),
("Tallin, this situation is hopeless - there are endless monsters swarming ", "Tallin, this situation is hopeless — there are endless monsters swarming "),
("We are in a dire situation indeed - but just see - the trolls and the ", "We are in a dire situation indeed — but just see — the trolls and the "),
("replacement - whereas for every monster we kill, it seems that two more come ", "replacement — whereas for every monster we kill, it seems that two more come "),
("creatures and they know their caves backwards and forwards - so I am sure at ", "creatures and they know their caves backwards and forwards — so I am sure at "),
("Och, the dwarves of Knalga are themselves in desperate straits - but we ", "Och, the dwarves of Knalga are themselves in desperate straits — but we "),
("this sorry state - where are they now? From what I have seen, it is mostly ", "this sorry state — where are they now? From what I have seen, it is mostly "),
("not before we surprised and slew their leader - the cursed Khazg Black-Tusk. ", "not before we surprised and slew their leader — the cursed Khazg Black-Tusk. "),
("after we were trapped here - by ones and twos in the beginning, and now by ", "after we were trapped here — by ones and twos in the beginning, and now by "),
("our axes. So, it is not their existence I fear - for we have faced far worse ", "our axes. So, it is not their existence I fear — for we have faced far worse "),
("trials - but their rising numbers gives cause for worry...", "trials — but their rising numbers gives cause for worry..."),
("the mines! Let the guardsmen stay behind along with the noncombatants - for ", "the mines! Let the guardsmen stay behind along with the noncombatants — for "),
("But my my, what do we have here - Tallin.", "But my my, what do we have here — Tallin."),
("minions - gorge on the flesh of these scurrying little rats!", "minions — gorge on the flesh of these scurrying little rats!"),
("Tallin. He's lucky, he is - or he makes his own luck.", "Tallin. He's lucky, he is — or he makes his own luck."),
("Black-Tusk, but we survived the orcs and trolls -- only to be captured by ", "Black-Tusk, but we survived the orcs and trolls — only to be captured by "),
("What was that? Oh, woe - two big slabs of rock cutting off our retreat!", "What was that? Oh, woe — two big slabs of rock cutting off our retreat!"),
("Well, if you don't mind me saying - that certainly isn't the state of ", "Well, if you don’t mind me saying — that certainly isn’t the state of "),
("Interesting. I wonder who - or what - could have created such a powerful ", "Interesting. I wonder who — or what — could have created such a powerful "),
("Thus, compelled by some strange and irresistible force, Tallin - eyes glazed ", "Thus, compelled by some strange and irresistible force, Tallin — eyes glazed "),
("and mind unthinking - did the will of his new master.", "and mind unthinking — did the will of his new master."),
("dwarves - it's because of you that we ha' made a start rebuilding Knalga in ", "dwarves — it’s because of you that we ha’ made a start rebuilding Knalga in "),
("I am here simply finishing the job Khazg Black-Tusk started years ago - the ", "I am here simply finishing the job Khazg Black-Tusk started years ago — the "),
("And I am grateful - but what should we do from here? The number of humans ", "And I am grateful — but what should we do from here? The number of humans "),
("Orcs are fickle; if we negotiated a treaty with them - assuming they would ", "Orcs are fickle; if we negotiated a treaty with them — assuming they would "),
("accept - it would just be broken as soon as the next warlord arose among ", "accept — it would just be broken as soon as the next warlord arose among "),
("I have heard all about him from my good friend Stalrag. He is - or was ", "I have heard all about him from my good friend Stalrag. He is — or was "),
("anyway, haven't heard from him in years - the chief o' the villages that lay ", "anyway, haven’t heard from him in years — the chief o’ the villages that lay "),
("As Tallin sat staring blankly at the body of his former friend - loathing ", "As Tallin sat staring blankly at the body of his former friend — loathing "),
("and hating what he had become - he received an urgent summons from his ", "and hating what he had become — he received an urgent summons from his "),
("Pew! Nick of time too - I almost bought it there!", "Pew! Nick of time too — I almost bought it there!"),
("elves - for the sorceress was, in fact, a princess of the highest rank.", "elves — for the sorceress was, in fact, a princess of the highest rank."),
("our help in rescuing their Princess? You heard their message - those elves ", "our help in rescuing their Princess? You heard their message — those elves "),
("Well, suppose we do join up with the elves - assuming they will let us - and ", "Well, suppose we do join up with the elves — assuming they will let us — and "),
("Just our luck to be here when they're mustering a field force - probably to ", "Just our luck to be here when they’re mustering a field force — probably to "),
("Later, princess, first let's get outta -- uh, let us take our leave of this ", "Later, princess, first let’s get outta — uh, let us take our leave of this "),
("him, and so he fled. As he did so, visions of his dying friends - friends ", "him, and so he fled. As he did so, visions of his dying friends — friends "),
("that he had slain - flashed before his eyes, while their voices chided him ", "that he had slain — flashed before his eyes, while their voices chided him "),
("under his control, I have done unthinkable deeds - I have studied the most ", "under his control, I have done unthinkable deeds — I have studied the most "),
("never would have risked their lives - nay, if I were not there they wouldn't ", "never would have risked their lives — nay, if I were not there they wouldn’t "),
("Ruler of Men to ensure peace, harmony and above all - justice. For hundreds ", "Ruler of Men to ensure peace, harmony and above all — justice. For hundreds "),
("Tallin, accompanied by the ghost of the king - whose name he soon discovered ", "Tallin, accompanied by the ghost of the king — whose name he soon discovered "),
("to be Abhai - set off down the tunnels in search of the Rod of Justice. ", "to be Abhai — set off down the tunnels in search of the Rod of Justice. "),
("The Amulet of Strength, how fascinating. However - though it may be a useful ", "The Amulet of Strength, how fascinating. However — though it may be a useful "),
("place - nothing there but a bunch of bats.", "place — nothing there but a bunch of bats."),
("I found a way to the surface - that is if you feel like climbing straight up ", "I found a way to the surface — that is if you feel like climbing straight up "),
("We do not wish to fight you, Great One, we simply seek the Rod of Justice - ", "We do not wish to fight you, Great One, we simply seek the Rod of Justice — "),
("countless foes and raised countless generations of drakes - and now I am ", "countless foes and raised countless generations of drakes — and now I am "),
("The only problem - or the major one, anyway - is these blasted orcs. They ", "The only problem — or the major one, anyway — is these blasted orcs. They "),
("determined to make Knalga into an orcish stronghold - enslaving or killing ", "determined to make Knalga into an orcish stronghold — enslaving or killing "),
("There is a large elvish force not far from here who - we believe anyway - ", "There is a large elvish force not far from here who — we believe anyway — "),
("hesitated and found himself unable to proceed. Abhai came forth - his ", "hesitated and found himself unable to proceed. Abhai came forth — his "),
("defenders - and fearlessly lifted the Rod in his ghostly hands. He paused ", "defenders — and fearlessly lifted the Rod in his ghostly hands. He paused "),
("Tallin raised his eyes to look into Abhai's and Abhai met his gaze - ", "Tallin raised his eyes to look into Abhai’s and Abhai met his gaze — "),
("your doubts at rest.' Tallin held the gaze for one more long moment - and ", "your doubts at rest.” Tallin held the gaze for one more long moment — and "),
("Yes, Tallin - it is I.", "Yes, Tallin — it is I."),
("How did you get here - I thought you you were dead... and for that matter, ", "How did you get here — I thought you you were dead... and for that matter, "),
("of unimaginable splendor, glory and transcendence - the very realm of the ", "of unimaginable splendor, glory and transcendence — the very realm of the "),
("They are all here, princess! Hand picked by your father - the finest and ", "They are all here, princess! Hand picked by your father — the finest and "),
("They are all here, princess! Hand picked by your father - the finest and ", "They are all here, princess! Hand picked by your father — the finest and "),
("and leave them a small guard. Then they pressed onward to rescue Hidel - and ", "and leave them a small guard. Then they pressed onward to rescue Hidel — and "),
("Cheer up -- you won't have to live with your failure for long...*snicker*", "Cheer up — you won’t have to live with your failure for long...*snicker*"),
("have a few scores to settle with you. Take them, troops - I want no orc left ", "have a few scores to settle with you. Take them, troops — I want no orc left "),
("been crushed. This relieves a blight on our land - but if we do not address ", "been crushed. This relieves a blight on our land — but if we do not address "),
("what race they come from - even orcs.", "what race they come from — even orcs."),
("Tallin, as you know, I have been separated from my race and kinsmen - well, ", "Tallin, as you know, I have been separated from my race and kinsmen — well, "),
("except Eryssa - for many years now. I wish to go back to the elvish forests ", "except Eryssa — for many years now. I wish to go back to the elvish forests "),
("Lords of Light - or Darkness - guide you on your path. For those of you who ", "Lords of Light — or Darkness — guide you on your path. For those of you who "),
("are staying - come, we have much to do.", "are staying — come, we have much to do."),
("- and unknown - world to trade with the Dwarves of Knalga.", "— and unknown — world to trade with the Dwarves of Knalga."),
("Thus, from a small, enslaved community, the people of Dwarven Doors - by ", "Thus, from a small, enslaved community, the people of Dwarven Doors — by "),
("their fortitude, valor, and wisdom - brought the Northlands out of the ", "their fortitude, valor, and wisdom — brought the Northlands out of the "),
# Fix screw up
# conversion added in 1.9.0-svn
("Cheer up — you won't have to live with your failure for long...*snicker*", "Cheer up — you won’t have to live with your failure for long...*snicker*"),
("your doubts at rest.' Tallin held the gaze for one more long moment — and ", "your doubts at rest.” Tallin held the gaze for one more long moment — and "),
("Tallin raised his eyes to look into Abhai's and Abhai met his gaze — ", "Tallin raised his eyes to look into Abhai’s and Abhai met his gaze — "),
("never would have risked their lives — nay, if I were not there they wouldn't ", "never would have risked their lives — nay, if I were not there they wouldn’t "),
("Later, princess, first let's get outta — uh, let us take our leave of this ", "Later, princess, first let’s get outta — uh, let us take our leave of this "),
("Just our luck to be here when they're mustering a field force — probably to ", "Just our luck to be here when they’re mustering a field force — probably to "),
("anyway, haven't heard from him in years — the chief o' the villages that lay ", "anyway, haven’t heard from him in years — the chief o’ the villages that lay "),
("dwarves — it's because of you that we ha' made a start rebuilding Knalga in ", "dwarves — it’s because of you that we ha’ made a start rebuilding Knalga in "),
("Well, if you don't mind me saying — that certainly isn't the state of ", "Well, if you don’t mind me saying — that certainly isn’t the state of "),
("...Don't tax yourself... princess....", "... Don’t tax yourself... princess..."),
("Abhai: I don't think death will", "Abhai: I don’t think death will"),
("Abhai: You haven't? It was an", "Abhai: You haven’t? It was an"),
("about Tallin's will grew stronger.", "about Tallin’s will grew stronger."),
("accomplished, men! Now let's crush the rest", "accomplished, men! Now let’s crush the rest"),
("again took up Rakshas's trail. They soon", "again took up Rakshas’s trail. They soon"),
("again, Morvin? Well, I'll", "again, Morvin? Well, I’ll"),
("all of a necromancer's creations, I must", "all of a necromancer’s creations, I must"),
("all, most people can't hold the Rod", "all, most people can’t hold the Rod"),
("and for all. Let's give", "and for all. Let’s give"),
("and he moved awa' up here to", "and he moved awa’ up here to"),
("And if you don't keep your mouth", "And if you don’t keep your mouth"),
("and looked to Hidel's ward as their", "and looked to Hidel’s ward as their"),
("and return here, we'll ha' made a", "and return here, we’ll ha’ made a"),
("and started down Rakshas's", "and started down Rakshas’s"),
("and the mage Ro'Arthian and his brother,", "and the mage Ro’Arthian and his brother,"),
("apprehension. 'I have faith in you, my boy,' Abhai said, 'Take this, and put", "apprehension. “I have faith in you, my boy,” Abhai said, “Take this, and put"),
("are bonny fighters wi' a sword as", "are bonny fighters wi’ a sword as"),
("are heavy-footed creatures; Rakshas's trail was readily", "are heavy-footed creatures; Rakshas’s trail was readily"),
("aren't ready to give", "aren’t ready to give"),
("as other parts o' Knalga.", "as other parts o’ Knalga."),
("At Hidel's call Tallin approached", "At Hidel’s call Tallin approached"),
("attacking her won't make the situation", "attacking her won’t make the situation"),
("away, Tallin's former friends had attacked Malifor's study with overwhelming", "away, Tallin’s former friends had attacked Malifor’s study with overwhelming"),
("Awww, aren't they the most", "Awww, aren’t they the most"),
("Aye! We'll have to lay", "Aye! We’ll have to lay"),
("Aye, it's true we do", "Aye, it’s true we do"),
("Aye. And we dinna' just want to", "Aye. And we dinna’ just want to"),
("Aye. Word ha' spread, and dwarves", "Aye. Word ha’ spread, and dwarves"),
("back to back, don't let them penetrate", "back to back, don’t let them penetrate"),
("bargain. If you don't agree to", "bargain. If you don’t agree to"),
("become legends! 'The two terrible mages,", "become legends! “The two terrible mages,"),
("body and I don't know how to", "body and I don’t know how to"),
("boys, that weakling Al'Tar can't even keep his", "boys, that weakling Al’Tar can’t even keep his"),
("brain has rotted awa' entire. Think on", "brain has rotted awa’ entire. Think on"),
("buddy, and now it's", "buddy, and now it’s"),
("But I'm sore vexed. We", "But I’m sore vexed. We"),
("but if you don't hurry up and", "but if you don’t hurry up and"),
("by himself. We don't know", "by himself. We don’t know"),
("can hear me, Ro'Arthian. Please, the fate", "can hear me, Ro’Arthian. Please, the fate"),
("certainty, those skeletons won't like us getting", "certainty, those skeletons won’t like us getting"),
("Come on boys, let's chop them to", "Come on boys, let’s chop them to"),
("Come on men, let's go", "Come on men, let’s go"),
("Come on Thera, let's destroy that old", "Come on Thera, let’s destroy that old"),
("Come on, Ro'Sothian, we really don't have to be", "Come on, Ro’Sothian, we really don’t have to be"),
("Come what may, we'll handle them. Come on now, let's find those", "Come what may, we’ll handle them. Come on now, let’s find those"),
("couldna' take it and", "couldna’ take it and"),
("Dang it! They're gone, and the", "Dang it! They’re gone, and the"),
("desecrated my life's work, now I", "desecrated my life’s work, now I"),
("destroy its arms we'll be relatively safe", "destroy its arms we’ll be relatively safe"),
("DISGUSTING LITTLE VERMIN! DON'T YOU KNOW HOW", "DISGUSTING LITTLE VERMIN! DON’T YOU KNOW HOW"),
("do business here: 'I do this for", "do business here: “I do this for"),
("do I want? I'll tell you what", "do I want? I’ll tell you what"),
("do this for me.' No, we seek", "do this for me.” No, we seek"),
("do to survive. We'll get more food", "do to survive. We’ll get more food"),
("Don't bother heading down", "Don’t bother heading down"),
("don't get it treated", "don’t get it treated"),
("Don't give in to", "Don’t give in to"),
("don't say anything to", "don’t say anything to"),
("Don't worry, Thera, you", "Don’t worry, Thera, you"),
("Don't! Please, my friend,", "Don’t! Please, my friend,"),
("down here, but ye'll not likely see", "down here, but ye’ll not likely see"),
("dragon. If you don't hand it over", "dragon. If you don’t hand it over"),
("enough. They taste unco' foul but that's how", "enough. They taste unco’ foul but that’s how"),
("Even if we don't catch them then", "Even if we don’t catch them then"),
("Even if we don't catch them, we", "Even if we don’t catch them, we"),
("everywhere, we simply didna'", "everywhere, we simply didna’"),
("extremely rich nation. Malifor's treasury is hardly", "extremely rich nation. Malifor’s treasury is hardly"),
("fact I think it's best that we", "fact I think it’s best that we"),
("flank, and now it's your", "flank, and now it’s your"),
("Fools! Don't think it's so easy to", "Fools! Don’t think it’s so easy to"),
("from Malifor's treasury and we", "from Malifor’s treasury and we"),
("from them. Now they're killing each other.", "from them. Now they’re killing each other."),
("Gee, what's with all these", "Gee, what’s with all these"),
("Geez, what a fool's quest. Everyone in", "Geez, what a fool’s quest. Everyone in"),
("go to his Master's aid. When he", "go to his Master’s aid. When he"),
("gold cannot. Now let's run down Rakshas", "gold cannot. Now let’s run down Rakshas"),
("got from my guards' boastings and foul", "got from my guards’ boastings and foul"),
("Great Chamber eh? Doesn't look like there", "Great Chamber eh? Doesn’t look like there"),
("Great. Now let's get back to", "Great. Now let’s get back to"),
("Grrr, Ha'Tuil has never failed", "Grrr, Ha’Tuil has never failed"),
("ha' been as much", "ha’ been as much"),
("ha' the manpower.", "ha’ the manpower."),
("Hamel's held open their", "Hamel’s held open their"),
("have died.... a warrior's death....", "have died... a warrior’s death..."),
("havena' forgotten the old", "havena’ forgotten the old"),
("He didn't keep us alive", "He didn’t keep us alive"),
("he disappeared again. Let's find him and", "he disappeared again. Let’s find him and"),
("help, I guess we'll nae have any", "help, I guess we’ll nae have any"),
("her was a fool's quest to begin", "her was a fool’s quest to begin"),
("Hey look, it's an ancient door.", "Hey look, it’s an ancient door."),
("Hey, that's not right. The good guys aren't supposed to die. Oh well, I'll", "Hey, that’s not right. The good guys aren’t supposed to die. Oh well, I’ll"),
("Hidel's death was a", "Hidel’s death was a"),
("Hiera'Shirsha?", "Hiera’Shirsha?"),
("his prisoners? He didn't seem to be", "his prisoners? He didn’t seem to be"),
("his prisoners? He doesn't really", "his prisoners? He doesn’t really"),
("Hmmm, you aren't as bad as", "Hmmm, you aren’t as bad as"),
("Hmmm...I think you've the right of", "Hmmm... I think you’ve the right of"),
("Hold! We aren't friends of Malifor", "Hold! We aren’t friends of Malifor"),
("host. But we can't", "host. But we can’t"),
("I am sorry, Ro'Arthian. I should have", "I am sorry, Ro’Arthian. I should have"),
("I didn't know that you", "I didn’t know that you"),
("I don't know what it", "I don’t know what it"),
("I don't think so, my", "I don’t think so, my"),
("I don't think so, you", "I don’t think so, you"),
("I don't think there is", "I don’t think there is"),
("I know you're impressive wi' a pitchfork, and", "I know you’re impressive wi’ a pitchfork, and"),
("I sense the Rod's power coming from", "I sense the Rod’s power coming from"),
("if the elves dinna' fight and pay", "if the elves dinna’ fight and pay"),
("If you don't mind me asking,", "If you don’t mind me asking,"),
("if you know what's good for you.", "if you know what’s good for you."),
("in disrepair. It doesn't look like", "in disrepair. It doesn’t look like"),
("in line. I don't know why you", "in line. I don’t know why you"),
("in mission, but Al'tar dead by peasant", "in mission, but Al’tar dead by peasant"),
("in the meantime, I'm leaving!", "in the meantime, I’m leaving!"),
("in the way. Let's give them a", "in the way. Let’s give them a"),
("In time, Knalga's caverns were refurbished,", "In time, Knalga’s caverns were refurbished,"),
("In truth, we ha' no idea. They", "In truth, we ha’ no idea. They"),
("is a risk we'll have to take.", "is a risk we’ll have to take."),
("is great...but we dinna' ha' much of weapons", "is great... but we dinna’ ha’ much of weapons"),
("is so, then it's time to end", "is so, then it’s time to end"),
("is up, men, let's hurry up and", "is up, men, let’s hurry up and"),
("it shall be. You've my leave and", "it shall be. You’ve my leave and"),
("it to 'em!", "it to ’em!"),
("It's an escape tunnel. The hole wasn't big enough for", "It’s an escape tunnel. The hole wasn’t big enough for"),
("It's too late, I", "It’s too late, I"),
("It's very easy, Tallin,", "It’s very easy, Tallin,"),
("it, Father! We don't help no-one. Not", "it, Father! We don’t help no-one. Not"),
("Knalga. And d'ye ken those two", "Knalga. And d’ye ken those two"),
("lay amidst the Dragon's hoarded", "lay amidst the Dragon’s hoarded"),
("let's get back at", "let’s get back at"),
("like someone really doesn't want us going", "like someone really doesn’t want us going"),
("little vermin and I'll be right over.", "little vermin and I’ll be right over."),
("looks of things, they've busked themselves for", "looks of things, they’ve busked themselves for"),
("Lord Hamel, some o' us would like", "Lord Hamel, some o’ us would like"),
("lost and don't come back, unless you're tired of life!", "lost and don’t come back, unless you’re tired of life!"),
("made easier by Malifor's ending.", "made easier by Malifor’s ending."),
("Malifor's evil summons, but", "Malifor’s evil summons, but"),
("me, lich, and you'll find yourself dangling", "me, lich, and you’ll find yourself dangling"),
("money they don't have.", "money they don’t have."),
("more then two days' march from here.", "more then two days’ march from here."),
("must do both. I'm thinking it might", "must do both. I’m thinking it might"),
("my friend. You won't believe what a", "my friend. You won’t believe what a"),
("nae! Ye'll need yon gold", "nae! Ye’ll need yon gold"),
("Never! If you ha' been sent by", "Never! If you ha’ been sent by"),
("none of any dwarf's or human’s business,", "none of any dwarf’s or human’s business,"),
("not so sure it's that simple Tallin.", "not so sure it’s that simple Tallin."),
("not suppress the people's joy in their", "not suppress the people’s joy in their"),
("now, anyway. Now let's get to the", "now, anyway. Now let’s get to the"),
("Now, let's get back to", "Now, let’s get back to"),
("Of course we won't do that. As", "Of course we won’t do that. As"),
("of Darkness, I haven't seen you in", "of Darkness, I haven’t seen you in"),
("of Light! Elenia! It's you! It has", "of Light! Elenia! It’s you! It has"),
("of orcs, belike they'd kill the princess,", "of orcs, belike they’d kill the princess,"),
("off many of it's", "off many of it’s"),
("Oh, dinna' mind him, that'd be Camerin. He", "Oh, dinna’ mind him, that’d be Camerin. He"),
("On Father Morvin's advice, the Council", "On Father Morvin’s advice, the Council"),
("only make the Master's punishment for you", "only make the Master’s punishment for you"),
("or at least, what's left o'", "or at least, what’s left o’"),
("orcish stronghold. We can't let that happen!", "orcish stronghold. We can’t let that happen!"),
("orcs then, princess? Don't you wish to", "orcs then, princess? Don’t you wish to"),
("our gold. Now let's run", "our gold. Now let’s run"),
("our side. Now let's plan our way", "our side. Now let’s plan our way"),
("ourselves, Tallin. We ha' failed the trust", "ourselves, Tallin. We ha’ failed the trust"),
("ourselves. We're but a remnant", "ourselves. We’re but a remnant"),
("out of his mother's arms, when Konrad", "out of his mother’s arms, when Konrad"),
("passed, but eventually Malifor's", "passed, but eventually Malifor’s"),
("ploy. Nae sooner ha' we thinned the", "ploy. Nae sooner ha’ we thinned the"),
("powerful magician called Ro'Arthian and his brother Ro'Sothian.", "powerful magician called Ro’Arthian and his brother Ro’Sothian."),
("Price?!? Don't insult us, Tallin.", "Price?! Don’t insult us, Tallin."),
("problem lad, we ha' been stranded in", "problem lad, we ha’ been stranded in"),
("quickly cut off it's arms we may", "quickly cut off it’s arms we may"),
("regenerate over time; it's doubtful we can", "regenerate over time; it’s doubtful we can"),
("Remember, Ro'Arthian, we need willing", "Remember, Ro’Arthian, we need willing"),
("Ro'Arthian and Ro'Sothian, you two have", "Ro’Arthian and Ro’Sothian, you two have"),
("Ro'Arthian's brother, Ro'Sothian went south with", "Ro’Arthian’s brother, Ro’Sothian went south with"),
("Ro'Arthian, I am not", "Ro’Arthian, I am not"),
("Ro'Arthian, listen to me. We don't want to fight", "Ro’Arthian, listen to me. We don’t want to fight"),
("Ro'Sothian! How are you", "Ro’Sothian! How are you"),
("said that I wasn't going to repay", "said that I wasn’t going to repay"),
("saying something about 'The Master'.", "saying something about ‘The Master’."),
("seems the rescue ha' any chance of", "seems the rescue ha’ any chance of"),
("Seriously Hamel, I don't know if we’re", "Seriously Hamel, I don’t know if we’re"),
("shall be, Tallin, I'll hold the caves", "shall be, Tallin, I’ll hold the caves"),
("simply ha' not the manpower.", "simply ha’ not the manpower."),
("small amount of Al'Tar's gold. He fled", "small amount of Al’Tar’s gold. He fled"),
("snot! If it wasn't for you we wouldn't have lost our", "snot! If it wasn’t for you we wouldn’t have lost our"),
("Sobbing against Tallin's chest, Eryssa told", "Sobbing against Tallin’s chest, Eryssa told"),
("Some monster that Malifor's minions greatly fear", "Some monster that Malifor’s minions greatly fear"),
("some room and you'll", "some room and you’ll"),
("south and we haven't found Malifor yet...", "south and we haven’t found Malifor yet..."),
("swords. We couldna' claim to be expert weaponsmiths wi'out kenning how to", "swords. We couldna’ claim to be expert weaponsmiths wi’out kenning how to"),
("Tallin's proposal, the endless", "Tallin’s proposal, the endless"),
("Tallin. He's lucky, he is", "Tallin. He’s lucky, he is"),
("thanks, Stalrag and Ro'Arthian. However, I have", "thanks, Stalrag and Ro’Arthian. However, I have"),
("that Hidel's elves had been", "that Hidel’s elves had been"),
("that way... We don't like it, but if that's what", "that way... We don’t like it, but if that’s what"),
("the air they canna' see over them.", "the air they canna’ see over them."),
("the dead. I don't", "the dead. I don’t"),
("the door to Malifor's study. Are we", "the door to Malifor’s study. Are we"),
("the elf! We can't let that happen!", "the elf! We can’t let that happen!"),
("the first place. Dinna' ye speak of", "the first place. Dinna’ ye speak of"),
("the orcish hordes!'", "the orcish hordes!”"),
("The orcs still haven't given up their", "The orcs still haven’t given up their"),
("the orcs. It's time to give", "the orcs. It’s time to give"),
("the rest here. We'll store it in", "the rest here. We’ll store it in"),
("the time you've laid him to", "the time you’ve laid him to"),
("the tunnels into dawn's early light.", "the tunnels into dawn’s early light."),
("The war council's thoughts quickly turned", "The war council’s thoughts quickly turned"),
("them if you can't.", "them if you can’t."),
("them: 'Hold fast your hope,", "them: “Hold fast your hope,"),
("Then one day Al'Tar, the current 'master' of Dwarven Doors,", "Then one day Al’Tar, the current ‘master’ of Dwarven Doors,"),
("then that Princess Li'sar was in hot", "then that Princess Li’sar was in hot"),
("then we thought, let's go", "then we thought, let’s go"),
("then ye could ha' done in", "then ye could ha’ done in"),
("Thera, don't you think that this isn't really a good", "Thera, don’t you think that this isn’t really a good"),
("there are, we dinna' yet ken of", "there are, we dinna’ yet ken of"),
("there skills beyond swingin' a weapon", "there skills beyond swingin’ a weapon"),
("these orcs! IN HIDEL'S", "these orcs! IN HIDEL’S"),
("these waters we can't destroy the creature.", "these waters we can’t destroy the creature."),
("Think of it, Ro'Arthian, it is a", "Think of it, Ro’Arthian, it is a"),
("This isn't looking good. Not", "This isn’t looking good. Not"),
("this stuff. But that's probably why they", "this stuff. But that’s probably why they"),
("tight. I wonder what's behind it.", "tight. I wonder what’s behind it."),
("to cut off it's arms fast enough.", "to cut off it’s arms fast enough."),
("to help us. What's more, if Stalrag", "to help us. What’s more, if Stalrag"),
("to him. Nope, we'll never hurt you.", "to him. Nope, we’ll never hurt you."),
("Together we'll rip them to", "Together we’ll rip them to"),
("training of you hasna' been for naught.", "training of you hasna’ been for naught."),
("trolls. But it ha' been", "trolls. But it ha’ been"),
("Tusk's troops, but we", "Tusk’s troops, but we"),
("uncoordinated and erratic, wi' the orcs as", "uncoordinated and erratic, wi’ the orcs as"),
("underwater creature. They'll try to pummel", "underwater creature. They’ll try to pummel"),
("up positions outside Malifor's study door.", "up positions outside Malifor’s study door."),
("up, father, he isn't answering. I am", "up, father, he isn’t answering. I am"),
("us you WON'T come back.", "us you <i>won’t</i> come back."), # New <i> tags
("wants to take 'em and fight with", "wants to take ’em and fight with"),
("was a piece o' good work, lad,", "was a piece o’ good work, lad,"),
("was decided that Tallin's men should sally", "was decided that Tallin’s men should sally"),
("was led by Ro'Arthian and was composed", "was led by Ro’Arthian and was composed"),
("water. And there doesn't seem to be", "water. And there doesn’t seem to be"),
("we are to ha' anything", "we are to ha’ anything"),
("we ha' survived all these", "we ha’ survived all these"),
("we rose against Al'Tar and defeated his", "we rose against Al’Tar and defeated his"),
("we would gain o' great tactical", "we would gain o’ great tactical"),
("We'll have to kill", "We’ll have to kill"),
("Well, that certainly isn't the state of", "Well, that certainly isn’t the state of"),
("well. Why, it can't have", "well. Why, it can’t have"),
("What's more, some of", "What’s more, some of"),
("What's with this whole 'Master' business? It's starting to make", "What’s with this whole ‘Master’ business? It’s starting to make"),
("When Tallin's party had shaken", "When Tallin’s party had shaken"),
("where we couldna' reach while the", "where we couldna’ reach while the"),
("while you and Stalrag's", "while you and Stalrag’s"),
("Whoa! Maybe he isn't so friendly after", "Whoa! Maybe he isn’t so friendly after"),
("with Hidel's head in her", "with Hidel’s head in her"),
("With Malifor's death, the magical controls over Tallin's will vanished.", "With Malifor’s death, the magical controls over Tallin’s will vanished."),
("with the orcs I'll send you a", "with the orcs I’ll send you a"),
("with you. I wouldn't even think", "with you. I wouldn’t even think"),
("wood-knot, I'd lay odds on", "wood-knot, I’d lay odds on"),
("would emerge and we'd find oursel' back in the", "would emerge and we’d find oursel’ back in the"),
("would we could ha' done more.", "would we could ha’ done more."),
("Wouldn't miss it. Maybe", "Wouldn’t miss it. Maybe"),
("wouldna' ally themselves with the likes o' us.", "wouldna’ ally themselves with the likes o’ us."),
("Wow, that's a long way", "Wow, that’s a long way"),
("wrong with pitchforks, I'll give these orcish", "wrong with pitchforks, I’ll give these orcish"),
("ye'll need and leave", "ye’ll need and leave"),
("Ye'll need to travel", "Ye’ll need to travel"),
("years, however, I ha' noticed that there", "years, however, I ha’ noticed that there"),
("you shall be free!' Then he was", "you shall be free!” Then he was"),
("your kind that won't leave US in", "your kind that won’t leave <i>us</i> in"),
# Minor usage fixes added in 1.9.9
("one you little guys you might fit", "one of you little guys might fit"),
("yours and your brothers", "your and your brother’s"),
("What in the world gave you that idea.", "What in the world gave you that idea?"),
# Converted a period to a question mark
("into disarray once more.", "into disarray once more?"),
),
"wesnoth-thot" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("then living -- all", "then living — all"),
("the Hammer -- dropped", "the Hammer — dropped"),
("to my eyes - an", "to my eyes — an"),
("captain -- but", "captain — but"),
("prisoners - which", "prisoners — which"),
("seen again - I began", "seen again — I began"),
("tightly locked - and, I", "tightly locked — and, I"),
("'Our' valley, says Master", "‘Our’ valley, says Master"),
("<i>is</i> Karrag? We can't have been more", "<i>is</i> Karrag? We can’t have been more"),
("A book. Thursagan's book, in a", "A book. Thursagan’s book, in a"),
("a teleportation device. What's more, the", "a teleportation device. What’s more, the"),
("Aiglondur, what ye ha' found is disturbing,", "Aiglondur, what ye ha’ found is disturbing,"),
("am Ratheln. If you're traveling down the", "am Ratheln. If you’re traveling down the"),
("an instructor, and they're expecting me.", "an instructor, and they’re expecting me."),
("and your precious 'Alliance'.", "and your precious ‘Alliance’."),
("Aye. And I'd not leave anyone", "Aye. And I’d not leave anyone"),
("Aye. But I've a grim feeling", "Aye. But I’ve a grim feeling"),
("Aye. Ye'll recall that in", "Aye. Ye’ll recall that in"),
("Aye? Well, you'd know such things", "Aye? Well, you’d know such things"),
("be of the Alliance's making, right enough,", "be of the Alliance’s making, right enough,"),
("be the Northern Alliance's arm today, and", "be the Northern Alliance’s arm today, and"),
("cliffs near the valley's end. They were", "cliffs near the valley’s end. They were"),
("close to the Alliance's border? That", "close to the Alliance’s border? That"),
("Don't you wonder what", "Don’t you wonder what"),
("east. I believe we've found", "east. I believe we’ve found"),
("eyes of the dwarves' history, a deep", "eyes of the dwarves’ history, a deep"),
("Fellow dwarves, hail! We're looking for a", "Fellow dwarves, hail! We’re looking for a"),
("hear it with one's own ears? It", "hear it with one’s own ears? It"),
("hills, from whom we've heard nothing since", "hills, from whom we’ve heard nothing since"),
("his victims. The 'dirtgrubbers' he seems to", "his victims. The ‘dirtgrubbers’ he seems to"),
("I thought Karrag's fight was against", "I thought Karrag’s fight was against"),
("I was told I'd find Aiglondur of", "I was told I’d find Aiglondur of"),
("I'll grant that was", "I’ll grant that was"),
("I'm surprised myself that", "I’m surprised myself that"),
("In case you haven't noticed, there is", "In case you haven’t noticed, there is"),
("It's been...it's been years since I've been down here.", "It’s been... it’s been years since I’ve been down here."),
("Karrag's will, and his", "Karrag’s will, and his"),
("Let's take him with", "Let’s take him with"),
("Nae sign o' bandits I can", "Nae sign o’ bandits I can"),
("none such since Thursagan's day. And for", "none such since Thursagan’s day. And for"),
("now and we wouldn't know it", "now and we wouldn’t know it"),
("of the Northern Alliance's lands.", "of the Northern Alliance’s lands."),
("of the pass. They've an opening", "of the pass. They’ve an opening"),
("of this shack. I've had nothing to", "of this shack. I’ve had nothing to"),
("on the dais, they're all", "on the dais, they’re all"),
("presence of Master Perrin's journeymen gives you", "presence of Master Perrin’s journeymen gives you"),
("see to my troop's care. Food will", "see to my troop’s care. Food will"),
("something doesn't feel right here.", "something doesn’t feel right here."),
("The Alliance's road ended at", "The Alliance’s road ended at"),
("the Alliance's tribes if we", "the Alliance’s tribes if we"),
("the Kal Karthans we'll have to", "the Kal Karthans we’ll have to"),
("the rank, and ye've shown the wits", "the rank, and ye’ve shown the wits"),
("the so called 'dirtgrubbers'. Why would", "the so called ‘dirtgrubbers’. Why would"),
("their journey. I'll return with your", "their journey. I’ll return with your"),
("to it. It won't do to have", "to it. It won’t do to have"),
("to the east; we've had few reports", "to the east; we’ve had few reports"),
("to the power o' the Hammer —", "to the power o’ the Hammer —"),
("us that the Hammer's power will smash", "us that the Hammer’s power will smash"),
("users from Master Perrin's academy in your", "users from Master Perrin’s academy in your"),
("was in my grandsire's time.", "was in my grandsire’s time."),
("we will regret Perrin's passing. There is", "we will regret Perrin’s passing. There is"),
("We're grateful for your assistance. And I'd like to send", "We’re grateful for your assistance. And I’d like to send"),
("We're snowed in. Our", "We’re snowed in. Our"),
("What's this? Large orcish", "What’s this? Large orcish"),
("Ye're young and not", "Ye’re young and not"),
# Fixed capitalization
("At The East Gate", "At the East Gate"),
("Reclaiming The Past", "Reclaiming the Past"),
),
"wesnoth-trow" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("Nay! Off with your hea- - -", "Nay! Off with your hea—"),
("is time - but I'll", "is time — but I’ll"),
("SE - The River Road.", "SE — The River Road."),
("SW - The", "SW — The"),
("SW - Southbay.", "SW — Southbay."),
("Hold - I see", "Hold — I see"),
("The River Road -", "The River Road —"),
("I'm a tinkin- -", "I’m a tinkin—"),
("NW - Southbay", "NW — Southbay"),
# Also, "Like" -> "like"
("More Like NW - Every orc", "More like NW — Every orc"),
("Rarlg - argh", "Rarlg — argh"),
("Sewer - Danger", "Sewer — Danger"),
("Legend has it- -", "Legend has it—"),
("your fate you- -", "your fate you—"),
("Compan- - What?", "Compan— What?"),
("your services again- -", "your services again—"),
("people - to ally", "people — to ally"),
("meet your son -", "meet your son —"),
("- As we agreed.", "— As we agreed."),
("and your people -", "and your people —"),
("their aid at all -", "their aid at all —"),
("me - I'll be dead", "me — I’ll be dead"),
("to say it - but", "to say it — but"),
("is ridiculous! I -", "is ridiculous! I —"),
("all I need - prepare", "all I need — prepare"),
("much -- from both", "much — from both"),
("a Lord -- to join", "a Lord — to join"),
("best of times - so", "best of times — so"),
# Convert another makeshift dash
# conversion added in 1.9.0+svn
("I- I die?", "I— I die?"),
("We come in peace. May our peoples-", "We come in peace. May our peoples—"),
("By the right of treaty, ancient but still true-", "By the right of treaty, ancient but still true—"),
("That tree, it speaks! No- No- We are tree-friends, really!", "That tree, it speaks! No— No— We are tree-friends, really!"),
# Fix screw up
# conversion added in 1.9.0-svn
("is time — but I'll", "is time — but I’ll"),
("I'm a tinkin—", "I’m a tinkin—"),
("me — I'll be dead", "me — I’ll be dead"),
("'Bring these Wesfolk too?", "“Bring these Wesfolk too?"),
("a century ago. I've never found my", "a century ago. I’ve never found my"),
("a fight, sir. They'll stand to the", "a fight, sir. They’ll stand to the"),
("a terrible price. Let's put Commander Aethyr", "a terrible price. Let’s put Commander Aethyr"),
("After a week's steady sailing eastward,", "After a week’s steady sailing eastward,"),
("against my people! Shek'kahan,", "against my people! Shek’kahan,"),
("Ahh, it's great to be home! I'm not much for", "Ahh, it’s great to be home! I’m not much for"),
("always think they're so clever!", "always think they’re so clever!"),
("amusement. Oh, and what's this?", "amusement. Oh, and what’s this?"),
("And now, I'm as much a", "And now, I’m as much a"),
("And why east? We've", "And why east? We’ve"),
("are, but you can't continue down this", "are, but you can’t continue down this"),
("aren't you setting us", "aren’t you setting us"),
("arrives on the elves' cursed Isle of", "arrives on the elves’ cursed Isle of"),
("arriving, and we haven't secured the frontier! We're dead!", "arriving, and we haven’t secured the frontier! We’re dead!"),
("at least we don't have to contend", "at least we don’t have to contend"),
("at the moment. I'm", "at the moment. I’m"),
("axe. Heh, heh. It's all I need", "axe. Heh, heh. It’s all I need"),
("Bah! I'm not dying today!", "Bah! I’m not dying today!"),
("be rich, and I'm not the first", "be rich, and I’m not the first"),
("bearing news. Some 'old friends’ have decided", "bearing news. Some ‘old friends’ have decided"),
("being invaded? Okay, we'll sort out our", "being invaded? Okay, we’ll sort out our"),
("Besides, I'd rather enjoy meeting", "Besides, I’d rather enjoy meeting"),
("best if you didn't mention our little", "best if you didn’t mention our little"),
("big bosses said we'd be in da", "big bosses said we’d be in da"),
("blocked. I guess we'll have to", "blocked. I guess we’ll have to"),
("blocked... I guess we'll have", "blocked... I guess we’ll have"),
("building a true 'Kingdom of Wesnoth'. I..I think a", "building a true ‘Kingdom of Wesnoth’. I..I think a"),
("but the elves aren't accustomed to war.", "but the elves aren’t accustomed to war."),
("but who knows what's going on", "but who knows what’s going on"),
("can not pass. You've already stumbled on", "can not pass. You’ve already stumbled on"),
("careful, these islands aren't safe. We're in Naga territory.", "careful, these islands aren’t safe. We’re in Naga territory."),
("Caror's Book of Fire", "Caror’s Book of Fire"),
("chatty! Get them, it's a long boat", "chatty! Get them, it’s a long boat"),
("comes behind us. You'd be slaves, or you'd be dead.", "comes behind us. You’d be slaves, or you’d be dead."),
("compromise with the Dwarves.'", "compromise with the Dwarves.”"),
("conflict is over I'll have to suffer", "conflict is over I’ll have to suffer"),
("crossing humans! Ahh, I've been with you", "crossing humans! Ahh, I’ve been with you"),
("days. I guess that's it for", "days. I guess that’s it for"),
("death'?", "death’?"),
("Don't forget about some", "Don’t forget about some"),
("down here. This isn't an old maid's drain pipe, this", "down here. This isn’t an old maid’s drain pipe, this"),
("earned both. We can't make this work", "earned both. We can’t make this work"),
("elves. But that won't", "elves. But that won’t"),
("emerged from the Lich-Lord's tomb, with the", "emerged from the Lich-Lord’s tomb, with the"),
("Enough of this, let's go get some", "Enough of this, let’s go get some"),
("far south. I don't know what's going on here. I'll stick", "far south. I don’t know what’s going on here. I’ll stick"),
("far. Who'd have thought, a", "far. Who’d have thought, a"),
("fighting. It's not stopping. We're trapped.", "fighting. It’s not stopping. We’re trapped."),
("Fire so we wouldn't have to bear", "Fire so we wouldn’t have to bear"),
("fleet is starving, we'll never pacify this", "fleet is starving, we’ll never pacify this"),
("fleet' has departed for", "fleet’ has departed for"),
("Fool of a boy's just gonna burn", "Fool of a boy’s just gonna burn"),
("for a while. It'll make a good", "for a while. It’ll make a good"),
("Free! I'm free at last!", "Free! I’m free at last!"),
("from me there'll be nothing but", "from me there’ll be nothing but"),
("go on ahead, I'll catch", "go on ahead, I’ll catch"),
("Haldric! I won't do it... I won't help. What is", "Haldric! I won’t do it... I won’t help. What is"),
("Haldric, it's bad. The elves", "Haldric, it’s bad. The elves"),
("Haldric, that's devious. I kind", "Haldric, that’s devious. I kind"),
("has translated Lich-Lord Caror's Book of Fire", "has translated Lich-Lord Caror’s Book of Fire"),
("hasn't been done in", "hasn’t been done in"),
("have been the Isle's largest city save", "have been the Isle’s largest city save"),
("have no idea what's going on outside", "have no idea what’s going on outside"),
("He's a large part", "He’s a large part"),
("here on purpose. You'd all still be", "here on purpose. You’d all still be"),
("hills, but I can't promise that my", "hills, but I can’t promise that my"),
("his merry men! We'll teach you a", "his merry men! We’ll teach you a"),
("Hmm. 'Wesnoth'. I like it. Yes, 'Wesnoth' would be fine.", "Hmm. ‘Wesnoth’. I like it. Yes, ‘Wesnoth’ would be fine."),
("Hmmmm- my kind were", "Hmmmm— my kind were"),
("homes! Heh...Tan-Vragar, I 'm", "homes! Heh... Tan-Vragar, I’m"),
("huts if it wasn't for my people.", "huts if it wasn’t for my people."),
("I can smell 'em!", "I can smell ’em!"),
("I decree a 'Pact of Mutual Aid' between our peoples.", "I decree a ‘Pact of Mutual Aid’ between our peoples."),
("I don't know who you", "I don’t know who you"),
("I go! I won't be denied!", "I go! I won’t be denied!"),
("I suspect you won't have to worry", "I suspect you won’t have to worry"),
("I were paranoid I'd say", "I were paranoid I’d say"),
("I wonder what they'll do when it", "I wonder what they’ll do when it"),
("I'd drop the sentiment", "I’d drop the sentiment"),
("I'll be dead. I mean we can't just say that", "I’ll be dead. I mean we can’t just say that"),
("I'll be glad when", "I’ll be glad when"),
("I'll begin my translation.", "I’ll begin my translation."),
("I'll return to the", "I’ll return to the"),
("I'm a dwarf, you", "I’m a dwarf, you"),
("I'm back, things went", "I’m back, things went"),
("I'm glad that's over! We have", "I’m glad that’s over! We have"),
("I'm glad that's over. The elves certainly aren't taking it easy on us. It's", "I’m glad that’s over. The elves certainly aren’t taking it easy on us. It’s"),
("I'm going to face", "I’m going to face"),
("I'm Haldric, my father", "I’m Haldric, my father"),
("I'm Haldric; my father", "I’m Haldric; my father"),
("I'm Prince Haldric, we", "I’m Prince Haldric, we"),
("I'm sure they'll eventually evacuate. Just", "I’m sure they’ll eventually evacuate. Just"),
("if Prince Haldric isn't the one to", "if Prince Haldric isn’t the one to"),
("in my pack. Let's work out what", "in my pack. Let’s work out what"),
("in other lands. It's a big world", "in other lands. It’s a big world"),
("In truth, I don't know if the", "In truth, I don’t know if the"),
("is no way I'm letting you go", "is no way I’m letting you go"),
("Is that snow?... We're doomed!", "Is that snow?... We’re doomed!"),
("is the Elder Prince's island all right.", "is the Elder Prince’s island all right."),
("is the last I'll see of my", "is the last I’ll see of my"),
("It isn't long before Haldric", "It isn’t long before Haldric"),
("it looks like they've rearmed!", "it looks like they’ve rearmed!"),
("it to Southbay. I'll", "it to Southbay. I’ll"),
("It wasn't long after he", "It wasn’t long after he"),
("It would be...highly irregular.' Bah! I'll show", "It would be... highly irregular.” Bah! I’ll show"),
("it — but that's not the", "it — but that’s not the"),
("It's better than the swamp. I'm under 5 feet", "It’s better than the swamp. I’m under 5 feet"),
("It's Commander Aethyr, of", "It’s Commander Aethyr, of"),
("It's funny that the lich-lord didn't have this on", "It’s funny that the lich-lord didn’t have this on"),
("It's not very Heroic", "It’s not very Heroic"),
("It's the Haldric! Jevyan,", "It’s the Haldric! Jevyan,"),
("Jessene, if you won't accept crown or", "Jessene, if you won’t accept crown or"),
("Jevyan himself. We can't have an illiterate", "Jevyan himself. We can’t have an illiterate"),
("Jevyan- I spit upon", "Jevyan— I spit upon"),
("just outside of Southbay-", "just outside of Southbay—"),
("kingdom be called 'Wesnoth',", "kingdom be called ‘Wesnoth’,"),
("leads to the Isle's second largest city,", "leads to the Isle’s second largest city,"),
("left for me. I'll stay. By the", "left for me. I’ll stay. By the"),
("lich is free! Let's bash him and", "lich is free! Let’s bash him and"),
("Lord Aryad said, 'If", "Lord Aryad said, “If"),
("Lords will follow. It's because of that", "Lords will follow. It’s because of that"),
("love, my family, I'll be there", "love, my family, I’ll be there"),
("mangled corpse. Then, you're going to destroy", "mangled corpse. Then, you’re going to destroy"),
("matters little now. What's this? You bring", "matters little now. What’s this? You bring"),
("maybe I won't even come to", "maybe I won’t even come to"),
("me. I'm no wizard. All", "me. I’m no wizard. All"),
("meet again, I won't be so", "meet again, I won’t be so"),
("more isolated villages, we'll need the gold!", "more isolated villages, we’ll need the gold!"),
("More of the trolls' kin are arriving", "More of the trolls’ kin are arriving"),
("my power. What! What's this? A treaty.", "my power. What! What’s this? A treaty."),
("my wrath, Jevyan, I'm coming for you!", "my wrath, Jevyan, I’m coming for you!"),
("my young prince. You've done", "my young prince. You’ve done"),
("nice thing is, they'll go back to", "nice thing is, they’ll go back to"),
("No! I don't believe it. Only", "No! I don’t believe it. Only"),
("No, Sir! I'm the sole survivor", "No, Sir! I’m the sole survivor"),
("of orcs. Apparently they're bogged down in", "of orcs. Apparently they’re bogged down in"),
("of the dragon I'm sure, who are", "of the dragon I’m sure, who are"),
("of these tree-folk we'll be trapped in", "of these tree-folk we’ll be trapped in"),
("Okay. Let's un-petrify that lich", "Okay. Let’s un-petrify that lich"),
("on convincing Jevyan, it's the only way", "on convincing Jevyan, it’s the only way"),
("on the Green Isle- we will return", "on the Green Isle— we will return"),
("orc thing, it wasn't such a bad", "orc thing, it wasn’t such a bad"),
("orcish scum, I can't bring myself to", "orcish scum, I can’t bring myself to"),
("Orcs! It's been a long", "Orcs! It’s been a long"),
("orcs. The ships aren't ready yet. We", "orcs. The ships aren’t ready yet. We"),
("our dear old 'tree-friend' might have known", "our dear old ‘tree-friend’ might have known"),
("our Naga friendsss- You’ll pay! We", "our Naga friendsss— You’ll pay! We"),
("out of time... We'll be trapped in", "out of time... We’ll be trapped in"),
("outside of Southbay, we'll be trapped. Of", "outside of Southbay, we’ll be trapped. Of"),
("person. Since I don't", "person. Since I don’t"),
("pile of dust, let's get out of", "pile of dust, let’s get out of"),
("point. Jessene, you're going to forge", "point. Jessene, you’re going to forge"),
("points and fishhooks. It's difficult to be", "points and fishhooks. It’s difficult to be"),
("power are true he's going to", "power are true he’s going to"),
("Prince Haldric, I'll concentrate on holding", "Prince Haldric, I’ll concentrate on holding"),
("Push 'em into the sea! They're in our winter", "Push ’em into the sea! They’re in our winter"),
("quickly-", "quickly—"),
("refugees, you'll be slaughtered.", "refugees, you’ll be slaughtered."),
("road follows the Isle's greatest river to", "road follows the Isle’s greatest river to"),
("rushing to a fool's death? Who'll lead these people?", "rushing to a fool’s death? Who’ll lead these people?"),
("said they would 'deal with the survivors'.", "said they would ‘deal with the survivors’."),
("seem rather intelligent. They're certainly belligerent and", "seem rather intelligent. They’re certainly belligerent and"),
("She's clever! Maybe I", "She’s clever! Maybe I"),
("ships rescued me. I'll not be", "ships rescued me. I’ll not be"),
("should 'deal' with the survivors,", "should ‘deal’ with the survivors,"),
("so are you. Aren't you the king", "so are you. Aren’t you the king"),
("so long I don't imagine he's too happy.", "so long I don’t imagine he’s too happy."),
("soldier there. The city's sewer", "soldier there. The city’s sewer"),
("Soldier, aren't you on the", "Soldier, aren’t you on the"),
("something! I think I'll save it for", "something! I think I’ll save it for"),
("soo-", "soo—"),
("Southbay won't fall without one", "Southbay won’t fall without one"),
("Southbay. We clearly can't go that way.", "Southbay. We clearly can’t go that way."),
("Southbay. We've been busy evacuating", "Southbay. We’ve been busy evacuating"),
("Southbay. You clearly can't go that way.", "Southbay. You clearly can’t go that way."),
("SWAMP! I'm under 5 feet", "<i>Swamp</i>! I’m under 5 feet"), # New <i> tags
("tall, and I don't float! Argh, have", "tall, and I don’t float! Argh, have"),
("tall, and I don't float!", "tall, and I don’t float!"),
("than any army I've ever seen just", "than any army I’ve ever seen just"),
("that bat. He won't let such a", "that bat. He won’t let such a"),
("that the elves 'have' the Ruby of", "that the elves ‘have’ the Ruby of"),
("that the elves wouldn't have sent us", "that the elves wouldn’t have sent us"),
("that the your 'third of a", "that the your ‘third of a"),
("that we've failed. We're trapped.", "that we’ve failed. We’re trapped."),
("The elves don't have the Ruby", "The elves don’t have the Ruby"),
("the Lich-Lords' dark pact with", "the Lich-Lords’ dark pact with"),
("the orcs haven't caught up with", "the orcs haven’t caught up with"),
("The orcs that didn't escape in the", "The orcs that didn’t escape in the"),
("the politics, but it's great to", "the politics, but it’s great to"),
("themselves before the Ka'lian, or the", "themselves before the Ka’lian, or the"),
("then we should 'deal' with the survivors,", "then we should ‘deal’ with the survivors,"),
("there. Failing that, there's always", "there. Failing that, there’s always"),
("they haven't met the main", "they haven’t met the main"),
("They look like they've been damaged by", "They look like they’ve been damaged by"),
("they won't be screaming at", "they won’t be screaming at"),
("they'll go looking for", "they’ll go looking for"),
("They're already holding the", "They’re already holding the"),
("They're Wesfolk, reavers and", "They’re Wesfolk, reavers and"),
("They've undoubtedly spotted the", "They’ve undoubtedly spotted the"),
("This isn't so bad. Haldric’s", "This isn’t so bad. Haldric’s"),
("this thing does, I'll just put it", "this thing does, I’ll just put it"),
("to dust. If you're", "to dust. If you’re"),
("to their aid. We'll have to weigh", "to their aid. We’ll have to weigh"),
("tongue. It means 'People of the West'. The", "tongue. It means ‘People of the West’. The"),
("trolls? I think it's", "trolls? I think it’s"),
("Trust me, you can't continue down this", "Trust me, you can’t continue down this"),
("Umm, we'll talk about it later. I'm just a little", "Umm, we’ll talk about it later. I’m just a little"),
("until the end. You've been a good", "until the end. You’ve been a good"),
("up sees that there's still", "up sees that there’s still"),
("war, but you're the real monster!", "war, but you’re the real monster!"),
("We can't all stay here", "We can’t all stay here"),
("We don't need some magic", "We don’t need some magic"),
("we fail here they'll doubtless be slaughtered.", "we fail here they’ll doubtless be slaughtered."),
("we groom the forest.'", "we groom the forest.”"),
("we prevailed. Now, I'm having a problem.", "we prevailed. Now, I’m having a problem."),
("We retrieved the Lich-Lord's book, and I", "We retrieved the Lich-Lord’s book, and I"),
("We'll have to be", "We’ll have to be"),
("We'll have to go", "We’ll have to go"),
("We're going to convince", "We’re going to convince"),
("We're only holding the", "We’re only holding the"),
("We're trapped in the", "We’re trapped in the"),
("We've defeated Jevyan's forces. The ships", "We’ve defeated Jevyan’s forces. The ships"),
("We've defeated their vanguard.", "We’ve defeated their vanguard."),
("We've escaped from the", "We’ve escaped from the"),
("We've escaped the pass.", "We’ve escaped the pass."),
("Well, I certainly don't mind the thought", "Well, I certainly don’t mind the thought"),
("well, if you can't be deterred. Remember,", "well, if you can’t be deterred. Remember,"),
("Well, we can't go back, and", "Well, we can’t go back, and"),
("What is your kind's infatuation with rushing to an 'honorable", "What is your kind’s infatuation with rushing to an ‘honorable"),
("What's that! No! The", "What’s that! No! The"),
("where it is! We've paid our friends", "where it is! We’ve paid our friends"),
("Who knows, maybe they'll be of some", "Who knows, maybe they’ll be of some"),
("winter stores. They've gone to Southbay,", "winter stores. They’ve gone to Southbay,"),
("with the elves. It's not like you", "with the elves. It’s not like you"),
("Without the Commander's noble sacrifice we", "Without the Commander’s noble sacrifice we"),
("would be 'Wes Noth'. So I suggest", "would be ‘Wes Noth’. So I suggest"),
("would sing the 'The Breaking of the Pact', an epic", "would sing the ‘The Breaking of the Pact’, an epic"),
("You can’t defeat me- Hmm, what's this? You've learned to", "You can’t defeat me— Hmm, what’s this? You’ve learned to"),
("you know what you're doing.", "you know what you’re doing."),
("You monster! You've betrayed our people", "You monster! You’ve betrayed our people"),
("You namby-pamby elves: 'We don't cut the trees,", "You namby-pamby elves: “We don’t cut the trees,"),
("You said we wouldn't see any action", "You said we wouldn’t see any action"),
("You should go 'greet' them.", "You should go ‘greet’ them."),
("you'll be joining him", "you’ll be joining him"),
("You're a fool, Jevyan.", "You’re a fool, Jevyan."),
("You're showing initiative, son! I'm proud of you!", "You’re showing initiative, son! I’m proud of you!"),
("You've been on this", "You’ve been on this"),
("You've denied me honorable", "You’ve denied me honorable"),
("your information? I don't even know your", "your information? I don’t even know your"),
("your people...and I don't think", "your people... and I don’t think"),
("your son's navigator and companion.", "your son’s navigator and companion."),
# Fix another screw up
("monolith was erected by me, ——", "monolith was erected by me, ―"),
# Eliminated a Britishism
("a fight in their favour", "a fight in their favor")
),
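# How these pair tables are consumed (a minimal sketch, assuming each
# (old, new) pair is applied as a plain substring replacement to every
# catalog line; the helper name "apply_fixes" is illustrative only, not
# part of this file):
#
#   def apply_fixes(line, pairs):
#       # Pairs are tried in order; lines that match nothing pass
#       # through unchanged, so the table is safe to re-run.
#       for old, new in pairs:
#           line = line.replace(old, new)
#       return line
#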
"wesnoth-tsg" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("My lord! The dawn is breaking - now is the time for us to attack and drive ", "My lord! The dawn is breaking — now is the time for us to attack and drive "),
("reinforcements - Did Sir Loris send you from Westin?", "reinforcements — Did Sir Loris send you from Westin?"),
("We will do our best to hold the city - you lead your men across the river ", "We will do our best to hold the city — you lead your men across the river "),
("Westin has fallen! This is a problem too great for me to handle - I must ", "Westin has fallen! This is a problem too great for me to handle — I must "),
("I have lost two brothers to Deoran - I shall not allow him to reach the ", "I have lost two brothers to Deoran — I shall not allow him to reach the "),
("My mount will not help me in these rocky paths - I will leave him here at ", "My mount will not help me in these rocky paths — I will leave him here at "),
("Also, soldiers with the -quick- trait will be useful in the dark.", "Also, soldiers with the — quick — trait will be useful in the dark."),
("We have escaped the great forest, but look! The undead are chasing us - we ", "We have escaped the great forest, but look! The undead are chasing us — we "),
("I too will make my stand here - I owe you my life and must atone for my ", "I too will make my stand here — I owe you my life and must atone for my "),
("Minister Hylas, ride with me to Westin - you must summon the Council of ", "Minister Hylas, ride with me to Westin — you must summon the Council of "),
("Far from home I fall - but not in vain! Guard our people, Deoran!", "Far from home I fall — but not in vain! Guard our people, Deoran!"),
("when they have an ally directly behind the unit they are attacking - they'll ", "when they have an ally directly behind the unit they are attacking — they’ll "),
("magic. They are slow and weak - any of your units is more than a match for ", "magic. They are slow and weak — any of your units is more than a match for "),
("Finally Ethiliel told the men to halt and pitch camp - they had reached the ", "Finally Ethiliel told the men to halt and pitch camp — they had reached the "),
# Fix screw up
# conversion added in 1.9.0-svn
("when they have an ally directly behind the unit they are attacking — they'll ", "when they have an ally directly behind the unit they are attacking — they’ll "),
# Convert apostrophes and quotation marks
# conversion added in 1.9.0-svn
("I've seen your", "I’ve seen your"),
("We'll surely all", "We’ll surely all"),
("Ithelden's", "Ithelden’s"),
("We won't be able", "We won’t be able"),
("Mal M'Brin", "Mal M’Brin"),
("Brin's", "Brin’s"),
("Gerrick's", "Gerrick’s"),
("men's", "men’s"),
("I prepare the city's defenses", "I prepare the city’s defenses"),
("works if it's the last", "works if it’s the last"),
("I've broken through", "I’ve broken through"),
("I'm lighting it", "I’m lighting it"),
("lich's", "lich’s"),
("we won't kill you", "we won’t kill you"),
("shouldn't", "shouldn’t"),
("I wouldn't have", "I wouldn’t have"),
("I've seen many", "I’ve seen many"),
("you've earned", "you’ve earned"),
("We're almost there", "We’re almost there"),
("I don't think they'll", "I don’t think they’ll"),
("Mebrin's", "Mebrin’s"),
("I'll kill you", "I’ll kill you"),
("they don't look very friendly", "they don’t look very friendly"),
("She won't listen", "She won’t listen"),
("I didn't have a choice", "I didn’t have a choice"),
("if we hadn't fallen", "if we hadn’t fallen"),
("I'm afraid it is not", "I’m afraid it is not"),
("it'd", "it’d"),
("undead aren't so tough", "undead aren’t so tough"),
("You're alive", "You’re alive"),
("You're too late", "You’re too late"),
("Mal A'kai", "Mal A’kai"),
("I hope that's the last", "I hope that’s the last"),
("Hylas's", "Hylas’s"),
("We're too late", "We’re too late"),
("They'll feel the deadly", "They’ll feel the deadly"),
("It's me", "It’s me"),
("they're in trouble", "they’re in trouble"),
("Mathin's", "Mathin’s"),
("While you're in your camp", "While you’re in your camp"),
("citadel's", "citadel’s"),
("'my kind'?", "“my kind”?"),
("I've seen your", "I’ve seen your"),
("Deoran's", "Deoran’s"),
("the capital of Wesnoth's", "the capital of Wesnoth’s"),
("Haldiel's", "Haldiel’s"),
("'Stop! Enough blood has", "“Stop! Enough blood has"),
("your service also was worthy.'", "your service also was worthy.”"),
("'chaotic', which means that", "<i>chaotic</i>, which means that"), # new <i> tags
("'Civilian' difficulty level is", "‘Civilian’ difficulty level is"),
("and I'll defend them to", "and I’ll defend them to"),
("Named 'Eyestalks' for obvious reasons,", "Named ‘Eyestalks’ for obvious reasons,"),
("your units are 'lawful'. This means that", "your units are <i>lawful</i>. This means that"), # new <i> tags
# Fix capitalization
("Return To Kerlath", "Return to Kerlath"),
("Into The Depths", "Into the Depths"),
("Proven By The Sword", "Proven by the Sword"),
),
"wesnoth-sof" : (
# Typo fixes required at r44124.
("going to back to the mines", "going back to the mines"),
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("So now I tell from whence it came -", "So now I tell from whence it came —"),
("The Fire-sceptre great -", "The Fire-sceptre great —"),
("Uh, no, wait! Lets talk business - how much will you pay us to do this for ", "Uh, no, wait! Lets talk business — how much will you pay us to do this for "),
("Fine then - ten thousand... now, what exactly do you want us to make the ", "Fine then — ten thousand... now, what exactly do you want us to make the "),
("Hey! You can't do that - this road is an elvish road. We're not letting you ", "Hey! You can’t do that — this road is an elvish road. We’re not letting you "),
("Ah, I see - you are one of the rebels. Our treaty with the elves does not ", "Ah, I see — you are one of the rebels. Our treaty with the elves does not "),
("We will see about that - if you ever get underground, which I doubt. Ha!", "We will see about that — if you ever get underground, which I doubt. Ha!"),
("Here - I'll go warn the council. You stay here and fight.", "Here — I’ll go warn the council. You stay here and fight."),
("dwarf-made stonecraft. We refuse - now let's close these gates!", "dwarf-made stonecraft. We refuse — now let’s close these gates!"),
("Just watch. The gates wi' close very soon. Then the elves outside - and, ", "Just watch. The gates wi’ close very soon. Then the elves outside — and, "),
("unfortunately, our dwarves who are still out there - wi' become irrelevant.", "unfortunately, our dwarves who are still out there — wi’ become irrelevant."),
("for the runesmith named Thursagan - the sage of fire.", "for the runesmith named Thursagan — the sage of fire."),
("members of their party - one expected, and one not.", "members of their party — one expected, and one not."),
("In the treasury. And leave it there until you're ready to work with it - I ", "In the treasury. And leave it there until you’re ready to work with it — I "),
("the finest coal, and many of the finest jewels in the land - we'll need ", "the finest coal, and many of the finest jewels in the land — we’ll need "),
("to fight them; also be prepared to spend quite some time here - mining can ", "to fight them; also be prepared to spend quite some time here — mining can "),
("Yes, although we will have to hire the miners - they don't work for free. ", "Yes, although we will have to hire the miners — they don’t work for free. "),
("were others - many others. Thus I present Theganli, the jeweler. His role is ", "were others — many others. Thus I present Theganli, the jeweler. His role is "),
("were no exception. They were crafters of crafters - they made tools. The ", "were no exception. They were crafters of crafters — they made tools. The "),
("How about this - I help you defeat them, and you let me keep the ruby when ", "How about this — I help you defeat them, and you let me keep the ruby when "),
("Well, back to the battle - we are all in the caves, but there are still ", "Well, back to the battle — we are all in the caves, but there are still "),
("Well, back to the battle - we need everyone to get into the Shorbear caves. ", "Well, back to the battle — we need everyone to get into the Shorbear caves. "),
("If ye'll permit me to say so, sir, ye're wrong. We could - ", "If ye’ll permit me to say so, sir, ye’re wrong. We could — "),
("If you'll permit me to say so, sir, you're wrong. We could - ", "If you’ll permit me to say so, sir, you’re wrong. We could — "),
("went south - back to the Wesnoth border.", "went south — back to the Wesnoth border."),
("And thus Rugnur died - a glorious death, in the eyes of the dwarven sages. ", "And thus Rugnur died — a glorious death, in the eyes of the dwarven sages. "),
("And Krawg - well, Krawg followed him. I know not how. But Krawg made his way ", "And Krawg — well, Krawg followed him. I know not how. But Krawg made his way "),
("But before it was found, legends grew up around it, and around its makers - ", "But before it was found, legends grew up around it, and around its makers — "),
# Straight apostrophes and quotes to curly ones
# conversion added in 1.9.0-svn
# (NOT YET)
#("The land of Wesnoth's banner bold", "The land of Wesnoth’s banner bold"),
#("Made by a runesmith's hand.", "Made by a runesmith’s hand."),
# Fix screw up:
# conversion added in 1.9.0-svn
("Yes, although we will have to hire the miners v they don't work for free. ", "Yes, although we will have to hire the miners — they don’t work for free. "),
("Hey! You can't do that — this road is an elvish road. We're not letting you ", "Hey! You can’t do that — this road is an elvish road. We’re not letting you "),
("If you'll permit me to say so, sir, you're wrong. We could — ", "If you’ll permit me to say so, sir, you’re wrong. We could — "),
("If ye'll permit me to say so, sir, ye're wrong. We could — ", "If ye’ll permit me to say so, sir, ye’re wrong. We could — "),
("the finest coal, and many of the finest jewels in the land — we'll need ", "the finest coal, and many of the finest jewels in the land — we’ll need "),
("In the treasury. And leave it there until you're ready to work with it — I ", "In the treasury. And leave it there until you’re ready to work with it — I "),
("unfortunately, our dwarves who are still out there — wi' become irrelevant.", "unfortunately, our dwarves who are still out there — wi’ become irrelevant."),
("Just watch. The gates wi' close very soon. Then the elves outside — and, ", "Just watch. The gates wi’ close very soon. Then the elves outside — and, "),
("dwarf-made stonecraft. We refuse — now let's close these gates!", "dwarf-made stonecraft. We refuse — now let’s close these gates!"),
("Here — I'll go warn the council. You stay here and fight.", "Here — I’ll go warn the council. You stay here and fight."),
("'Ruby of Fire', and told us", "‘Ruby of Fire’, and told us"),
("a conclusion. He can't make the sceptre", "a conclusion. He can’t make the sceptre"),
("a deal, but I'm not sure. I'll have to ask", "a deal, but I’m not sure. I’ll have to ask"),
("Aha! I've spent years looking", "Aha! I’ve spent years looking"),
("And I'll be doing the", "And I’ll be doing the"),
("And if we don't like what you", "And if we don’t like what you"),
("apparently. I think we'll have to", "apparently. I think we’ll have to"),
("Argh! I'm a retired warrior...", "Argh! I’m a retired warrior..."),
("artifact, not a child's playtoy!", "artifact, not a child’s playtoy!"),
("away, eh? I dinna' like that, but it seems it's our only", "away, eh? I dinna’ like that, but it seems it’s our only"),
("Baglur's. Krawg's done more for", "Baglur’s. Krawg’s done more for"),
("can tell I don't need the money", "can tell I don’t need the money"),
("can't run as fast", "can’t run as fast"),
("Can't you tell a", "Can’t you tell a"),
("captured! If I can't", "captured! If I can’t"),
("castle here... I don't much like fighting.", "castle here... I don’t much like fighting."),
("caves to learn Rugnur's fate.", "caves to learn Rugnur’s fate."),
("caves, but I can't! What am I", "caves, but I can’t! What am I"),
("caves, so they can't escape when it", "caves, so they can’t escape when it"),
("city and report. I'm", "city and report. I’m"),
("constitutes treason, wouldn't you?", "constitutes treason, wouldn’t you?"),
("craftsmen, and he wouldn't have gone to", "craftsmen, and he wouldn’t have gone to"),
("Don't be a fool! We can't take the elves,", "Don’t be a fool! We can’t take the elves,"),
("don't stop me if", "don’t stop me if"),
("down, or we'll force you.", "down, or we’ll force you."),
("Durstorn's rash action led", "Durstorn’s rash action led"),
("Durstorn. And I wouldn't give the Sceptre", "Durstorn. And I wouldn’t give the Sceptre"),
("dwarves! Now, if you'll excuse me, I think I'll be", "dwarves! Now, if you’ll excuse me, I think I’ll be"),
("dwarves, but now I've found you!", "dwarves, but now I’ve found you!"),
("eastern mines. Me, I'll stay here. No", "eastern mines. Me, I’ll stay here. No"),
("enough! Or, if they're not 'good enough'", "enough! Or, if they’re not ‘good enough’"),
("even if you hadn't, you've seen enough combat.", "even if you hadn’t, you’ve seen enough combat."),
("Even the elves won't challenge us here.", "Even the elves won’t challenge us here."),
("evil things. We can't let it", "evil things. We can’t let it"),
("for those, though. I'll also need to", "for those, though. I’ll also need to"),
("for us, for they'll go as fast", "for us, for they’ll go as fast"),
("Getting this cut isn't worth that much; what with Rugnur's mistake", "Getting this cut isn’t worth that much; what with Rugnur’s mistake"),
("gold he needs. Shouldn't we leave now?", "gold he needs. Shouldn’t we leave now?"),
("Good idea, Baglur! He'll need some help,", "Good idea, Baglur! He’ll need some help,"),
("Good. Now, let's get out of", "Good. Now, let’s get out of"),
("Ha! There hasn't been a border", "Ha! There hasn’t been a border"),
("He doesn't need to come here, I'll take the jewel", "He doesn’t need to come here, I’ll take the jewel"),
("He doesn't want to talk", "He doesn’t want to talk"),
("his sceptre intact he'd best send troops", "his sceptre intact he’d best send troops"),
("I don't know if we", "I don’t know if we"),
("I don't know what is", "I don’t know what is"),
("I think you'd better let me", "I think you’d better let me"),
("I'd rather face one", "I’d rather face one"),
("I'll see what I", "I’ll see what I"),
("I'm a dragoon, with Haldric II's personal bodyguard. I", "I’m a dragoon, with Haldric II’s personal bodyguard. I"),
("I'm Lord Durstorn, king", "I’m Lord Durstorn, king"),
("I'm not going to", "I’m not going to"),
("If he fails, it's on his head.", "If he fails, it’s on his head."),
("If that's what you're thinking, you're not fit to", "If that’s what you’re thinking, you’re not fit to"),
("if you did. It's not yours to", "if you did. It’s not yours to"),
("if you won't give the Sceptre", "if you won’t give the Sceptre"),
("in advance, and he'll give us five", "in advance, and he’ll give us five"),
("in any case, I've been with you", "in any case, I’ve been with you"),
("it looks like I'll ha' to come out", "it looks like I’ll ha’ to come out"),
("it seems... it can't be", "it seems... it can’t be"),
("it's not like we", "it’s not like we"),
("it, but they didn't offer", "it, but they didn’t offer"),
("it, eh? Well I'm not going", "it, eh? Well I’m not going"),
("its mine, and Rugnur's, and", "its mine, and Rugnur’s, and"),
("killed, but it isn't", "killed, but it isn’t"),
("Look, I've found something here.", "Look, I’ve found something here."),
("Made by a runesmith's hand.", "Made by a runesmith’s hand."),
("me, but I won't die with you!", "me, but I won’t die with you!"),
("miners — they don't work for free.", "miners — they don’t work for free."),
("occupied, the gates wi' close. Then the", "occupied, the gates wi’ close. Then the"),
("Och, well that's the difficulty. It's a magic gate.", "Och, well that’s the difficulty. It’s a magic gate."),
("of the trolls' territory.", "of the trolls’ territory."),
("offer. If you won't", "offer. If you won’t"),
("out, we'll make our way", "out, we’ll make our way"),
("payment, but it can't be helped. Our", "payment, but it can’t be helped. Our"),
("period of time. We'll only be", "period of time. We’ll only be"),
("pieces of silver. I'm going with you", "pieces of silver. I’m going with you"),
("property secure, the deal's off.", "property secure, the deal’s off."),
("ruby, then what? They'll probably kill us", "ruby, then what? They’ll probably kill us"),
("runes infuse the runesmith's blows with power", "runes infuse the runesmith’s blows with power"),
("so the lava doesn't kill us.", "so the lava doesn’t kill us."),
("something or I can't make the sceptre!", "something or I can’t make the sceptre!"),
("still don't trust you.", "still don’t trust you."),
("sure... well, it doesn't matter why.", "sure... well, it doesn’t matter why."),
("take their property. We'll have to buy,", "take their property. We’ll have to buy,"),
("than a thousand we'll", "than a thousand we’ll"),
("than life! So, I'm ordering", "than life! So, I’m ordering"),
("that was not Rugnur's.", "that was not Rugnur’s."),
("the Arkan-thoria. Trolls ha' overrun them, but", "the Arkan-thoria. Trolls ha’ overrun them, but"),
("the elves will no' be able to", "the elves will no’ be able to"),
("the far northlands. They're", "the far northlands. They’re"),
("The land of Wesnoth's banner bold", "The land of Wesnoth’s banner bold"),
("the last chance we'll get to do", "the last chance we’ll get to do"),
("the sceptre. Why don't we send him", "the sceptre. Why don’t we send him"),
("the stone's power, but contain", "the stone’s power, but contain"),
("the surface world. He's late!", "the surface world. He’s late!"),
("the Wesnothian army; I'd prefer to", "the Wesnothian army; I’d prefer to"),
("then go north. He's", "then go north. He’s"),
("Then maybe we'll walk out of", "Then maybe we’ll walk out of"),
("Then no deal. I'll gather my things", "Then no deal. I’ll gather my things"),
("these dwarves too? They're attacking my clan", "these dwarves too? They’re attacking my clan"),
("This plan, it isn't worse than staying", "This plan, it isn’t worse than staying"),
("those Surghan mercenaries. I'll bet those elves", "those Surghan mercenaries. I’ll bet those elves"),
("though, and we can't stay here forever.", "though, and we can’t stay here forever."),
("time! Haldric killed Shek'kahan my", "time! Haldric killed Shek’kahan my"),
("to do? I don't see how this", "to do? I don’t see how this"),
("to go. Alanin, you've served in the", "to go. Alanin, you’ve served in the"),
("to his shop. He'll need", "to his shop. He’ll need"),
("to plan the sceptre's design... what in", "to plan the sceptre’s design... what in"),
("to the elves, I'll take it from", "to the elves, I’ll take it from"),
("Uh, technically it's Haldric’s, and I", "Uh, technically it’s Haldric’s, and I"),
("up, Baglur! You don't know what you're talking about. I've analyzed the", "up, Baglur! You don’t know what you’re talking about. I’ve analyzed the"),
("up, Rugnur! You don't know what you're talking about. I've analyzed the", "up, Rugnur! You don’t know what you’re talking about. I’ve analyzed the"),
("Wait! Rugnur, dinna' they tell you", "Wait! Rugnur, dinna’ they tell you"),
("Wait, what's going on? The", "Wait, what’s going on? The"),
("We can't just go in", "We can’t just go in"),
("we wanted to. We're surrounded!", "we wanted to. We’re surrounded!"),
("Well, I think we've gotten a good", "Well, I think we’ve gotten a good"),
("Well, Thursagan, we've reached the eastern", "Well, Thursagan, we’ve reached the eastern"),
("Well, we've succeeded in stopping", "Well, we’ve succeeded in stopping"),
("Wesnothian army. Why don't you rejoin it?", "Wesnothian army. Why don’t you rejoin it?"),
("we’re here, we'll be able to", "we’re here, we’ll be able to"),
("whatever it is can't be worse than", "whatever it is can’t be worse than"),
("where I can't help!", "where I can’t help!"),
("Where's that fool Rugnur gotten to? It's time for him", "Where’s that fool Rugnur gotten to? It’s time for him"),
("with us. He's somewhere up here.", "with us. He’s somewhere up here."),
("would stop the stone's", "would stop the stone’s"),
("year. Oh well, what's done is done. We'll have to work", "year. Oh well, what’s done is done. We’ll have to work"),
("yes, but I don't know where it", "yes, but I don’t know where it"),
("Yes, of course, let's just all be", "Yes, of course, let’s just all be"),
("You don't dictate what I", "You don’t dictate what I"),
("You don't have authority over", "You don’t have authority over"),
("You don't want to make", "You don’t want to make"),
("You know yours aren't acceptable, but you", "You know yours aren’t acceptable, but you"),
("you missed. Now, I'd say that attacking", "you missed. Now, I’d say that attacking"),
("You're a king?! Must", "You’re a king?! Must"),
("You're not my ally. You're more against me", "You’re not my ally. You’re more against me"),
("you, else we'll be overrun by", "you, else we’ll be overrun by"),
# Convert apostrophe
("Don't forget to recruit the miners.", "Don’t forget to recruit the miners."),
# Usage fixes in 1.9.9
("Och, its some o’ them cave-dwarves.", "Och, it’s some o’ them cave-dwarves."),
("I guess you would to plan", "I guess you would want to plan"),
),
"wesnoth-sotbe" :(
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("the offer. However, on the way to the city of Dwarven Doors - the ", "the offer. However, on the way to the city of Dwarven Doors — the "),
("headquarters of the Northern Alliance - Karun and his small group of ", "headquarters of the Northern Alliance — Karun and his small group of "),
("The peace treaty still held however, partially because the treaty - although ", "The peace treaty still held however, partially because the treaty — although "),
("arranged by the Northern Alliance - was between the local earls and the ", "arranged by the Northern Alliance — was between the local earls and the "),
("Alliance. The other - and perhaps the main - reason why the treaty held was ", "Alliance. The other — and perhaps the main — reason why the treaty held was "),
("followed were relatively peaceful and prosperous - at least, for the humans.", "followed were relatively peaceful and prosperous — at least, for the humans."),
("In the thirteenth year of the Lord Protectorship of Howgarth III - the ", "In the thirteenth year of the Lord Protectorship of Howgarth III — the "),
("successor of Rahul I - tension began to rise between orcish tribes and human ", "successor of Rahul I — tension began to rise between orcish tribes and human "),
("on their lands. Then, Earl Lanbec'h - the most powerful human warlord of the ", "on their lands. Then, Earl Lanbec’h — the most powerful human warlord of the "),
("North - determined to abolish the orcish menace raised an army and conferred ", "North — determined to abolish the orcish menace raised an army and conferred "),
("Who is this unlicked whelp? Grunts - kill him and bring me his head!", "Who is this unlicked whelp? Grunts — kill him and bring me his head!"),
("Wise decision, Kapou'e. By the size of that army, this is no mere raid - it ", "Wise decision, Kapou’e. By the size of that army, this is no mere raid — it "),
("And this is that stupid human who dares march against Kapou'e - Son of the ", "And this is that stupid human who dares march against Kapou’e — Son of the "),
("Hey, Chief, I was thinking - dwarves are pretty slow; why don't we just ", "Hey, Chief, I was thinking — dwarves are pretty slow; why don’t we just "),
("As soon as the sandstorm died down, Kapou'e and his men - eager to escape ", "As soon as the sandstorm died down, Kapou’e and his men — eager to escape "),
("the searing heat - left the oasis and continued on their trek.", "the searing heat — left the oasis and continued on their trek."),
("and all the natural sounds of the forest died away - leaving everything ", "and all the natural sounds of the forest died away — leaving everything "),
("Leave your people here Kapou'e - they are safe for the moment - and go ", "Leave your people here Kapou’e — they are safe for the moment — and go "),
("friends - get them!", "friends — get them!"),
("After a fair amount of squabbling - for some of the older warlords were ", "After a fair amount of squabbling — for some of the older warlords were "),
("reluctant to let this young upstart lead them - and a few consequent ", "reluctant to let this young upstart lead them — and a few consequent "),
("Kapou'e placed one force under the leadership of Shan Taum the Smug, who - ", "Kapou’e placed one force under the leadership of Shan Taum the Smug, who — "),
("despite his obnoxious nature - was a fierce and capable leader. He was sent ", "despite his obnoxious nature — was a fierce and capable leader. He was sent "),
("He sent the shamans, Pirk, Gork and Vraurk back to Borstep - a city just ", "He sent the shamans, Pirk, Gork and Vraurk back to Borstep — a city just "),
("north of the Mourned Hills - to organize any remaining orcish forces as well ", "north of the Mourned Hills — to organize any remaining orcish forces as well "),
("Kapou'e himself - desiring to settle this business once and for all - led ", "Kapou’e himself — desiring to settle this business once and for all — led "),
("The orcs are making headway. The city must not fall - call the reserves!", "The orcs are making headway. The city must not fall — call the reserves!"),
("fall. A few days after the first snowfall a goblin rider - half dead from ", "fall. A few days after the first snowfall a goblin rider — half dead from "),
("exhaustion - raced into Dorset with the news that Borstep was under siege by ", "exhaustion — raced into Dorset with the news that Borstep was under siege by "),
("give him a good chance to clobber the orcs together again - the old ", "give him a good chance to clobber the orcs together again — the old "),
("Kapou'e's exploits had been widely circulated - from his rescue of the ", "Kapou’e’s exploits had been widely circulated — from his rescue of the "),
("horde was dispersed he appointed three more shamans - with the consent of ", "horde was dispersed he appointed three more shamans — with the consent of "),
("all - to make the Great Council complete again.", "all — to make the Great Council complete again."),
# Fix screw up
# conversion added in 1.9.0-svn
("Kapou'e's exploits had been widely circulated — from his rescue of the ", "Kapou’e’s exploits had been widely circulated — from his rescue of the "),
("Kapou'e himself — desiring to settle this business once and for all — led ", "Kapou’e himself — desiring to settle this business once and for all — led "),
("Kapou'e placed one force under the leadership of Shan Taum the Smug, who — ", "Kapou’e placed one force under the leadership of Shan Taum the Smug, who — "),
("Leave your people here Kapou'e — they are safe for the moment — and go ", "Leave your people here Kapou’e — they are safe for the moment — and go "),
("As soon as the sandstorm died down, Kapou'e and his men — eager to escape ", "As soon as the sandstorm died down, Kapou’e and his men — eager to escape "),
("Hey, Chief, I was thinking — dwarves are pretty slow; why don't we just ", "Hey, Chief, I was thinking — dwarves are pretty slow; why don’t we just "),
("And this is that stupid human who dares march against Kapou'e — Son of the ", "And this is that stupid human who dares march against Kapou’e — Son of the "),
("Wise decision, Kapou'e. By the size of that army, this is no mere raid — it ", "Wise decision, Kapou’e. By the size of that army, this is no mere raid — it "),
("on their lands. Then, Earl Lanbec'h — the most powerful human warlord of the ", "on their lands. Then, Earl Lanbec’h — the most powerful human warlord of the "),
("A human-worm's dog. Animal meat", "A human-worm’s dog. Animal meat"),
("a shock to Kapou'e to", "a shock to Kapou’e to"),
("after Kapou'e's arrival, riders caught", "after Kapou’e’s arrival, riders caught"),
("after this desert, we'll have to cross", "after this desert, we’ll have to cross"),
("Ahhh, it's 'the Prince' from the Silent", "Ahhh, it’s ‘the Prince’ from the Silent"),
("all the shamans 'suck up' to him just", "all the shamans ‘suck up’ to him just"),
("an invitation to Kapou'e to join", "an invitation to Kapou’e to join"),
("And if that isn't", "And if that isn’t"),
("and if we aren't", "and if we aren’t"),
("and occupying Dorset, Kapou'e and his men", "and occupying Dorset, Kapou’e and his men"),
("and your people won't be safe. You", "and your people won’t be safe. You"),
("apprehension and rage, Kapou'e sent his riders", "apprehension and rage, Kapou’e sent his riders"),
("as his son, it's up to you", "as his son, it’s up to you"),
("As Kapou'e expected, by the", "As Kapou’e expected, by the"),
("As Kapou'e saw the hardened", "As Kapou’e saw the hardened"),
("at Barag Gór, Kapou'e, accompanied by the", "at Barag Gór, Kapou’e, accompanied by the"),
("at last. We don't have much time,", "at last. We don’t have much time,"),
("automatically assume that Kapou'e tricked and killed", "automatically assume that Kapou’e tricked and killed"),
("back in the tribe's homeland.", "back in the tribe’s homeland."),
("battle, my grunts! We'll show these humans", "battle, my grunts! We’ll show these humans"),
("been led by Kapou'e this far had", "been led by Kapou’e this far had"),
("before. Flar'Tar was sent directly", "before. Flar’Tar was sent directly"),
("Black-Eye will never 'flee'. But making a 'strategic", "Black-Eye will never ‘flee’. But making a ‘strategic"),
("Black-Eye. If you hadn't arrived when you did, I don't", "Black-Eye. If you hadn’t arrived when you did, I don’t"),
("But, Father, we don't know what they", "But, Father, we don’t know what they"),
("came from or you'll find your head", "came from or you’ll find your head"),
("Catch up when you're done.", "Catch up when you’re done."),
("cave or something. We'll be through here", "cave or something. We’ll be through here"),
("Cease your lies, Lanbec'h. We all know", "Cease your lies, Lanbec’h. We all know"),
("Chief, Inarix hasn't yet made it", "Chief, Inarix hasn’t yet made it"),
("count on the assassins' guild to help", "count on the assassins’ guild to help"),
("crossed the mountains. We're close to the", "crossed the mountains. We’re close to the"),
("Don't be afraid. If", "Don’t be afraid. If"),
("Don't be so cowardly.", "Don’t be so cowardly."),
("Earl Lanbec'h, you slimy coward,", "Earl Lanbec’h, you slimy coward,"),
("Earl's forces and withdrew", "Earl’s forces and withdrew"),
("eastern Mourned Hills. Kapou'e himself made a", "eastern Mourned Hills. Kapou’e himself made a"),
("eighth year of Kapou'e's reign, and then", "eighth year of Kapou’e’s reign, and then"),
("Exasperated, Kapou'e launched an attack", "Exasperated, Kapou’e launched an attack"),
("Finally we've done it! Now let's wait out this", "Finally we’ve done it! Now let’s wait out this"),
("For his part Kapou'e", "For his part Kapou’e"),
("force if necessary. Al'Brock was sent west,", "force if necessary. Al’Brock was sent west,"),
("Forest. Unfortunately, I don't think", "Forest. Unfortunately, I don’t think"),
("formally bestowed upon Kapou'e.", "formally bestowed upon Kapou’e."),
("Fortunately, they didn't encounter any giant", "Fortunately, they didn’t encounter any giant"),
("found his generals, Al'Brock and Flar'Tar had already arrived", "found his generals, Al’Brock and Flar’Tar had already arrived"),
("friends! Those orcs don't imagine they are", "friends! Those orcs don’t imagine they are"),
("giant scorpion yet. I'm sure it would", "giant scorpion yet. I’m sure it would"),
("Good work, men, we've done it! Now we don't have to worry", "Good work, men, we’ve done it! Now we don’t have to worry"),
("green hills! We've reached the lands", "green hills! We’ve reached the lands"),
("had dealings with Kapou'e's", "had dealings with Kapou’e’s"),
("having been settled, Kapou'e began to inquire", "having been settled, Kapou’e began to inquire"),
("hell. Your beloved Kapou'e will be", "hell. Your beloved Kapou’e will be"),
("Hey Chief, it's pretty odd. Shan Taum doesn't seem to have", "Hey Chief, it’s pretty odd. Shan Taum doesn’t seem to have"),
("his word, once Lanbec'h had been slain,", "his word, once Lanbec’h had been slain,"),
("Hold, Earl Lanbec'h. I have reached", "Hold, Earl Lanbec’h. I have reached"),
("how to dissemble. I'll bet he sent", "how to dissemble. I’ll bet he sent"),
("Hurry-hurry, friends. Lanbec'h and his kind", "Hurry-hurry, friends. Lanbec’h and his kind"),
("I am Kapou'e, son of the", "I am Kapou’e, son of the"),
("I can't leave these renegades", "I can’t leave these renegades"),
("I don't know if you", "I don’t know if you"),
("I don't know. Orcs have", "I don’t know. Orcs have"),
("I don't really know, one", "I don’t really know, one"),
("I hope we won't find one! They", "I hope we won’t find one! They"),
("I wouldn't worry about the", "I wouldn’t worry about the"),
("I'm hungry and we", "I’m hungry and we"),
("I'm not that sure.", "I’m not that sure."),
("I've been told the", "I’ve been told the"),
("I've never pushed so", "I’ve never pushed so"),
("III demanded that Kapou'e give Dorset", "III demanded that Kapou’e give Dorset"),
("implications of the shamans' deaths begin to", "implications of the shamans’ deaths begin to"),
("in this fight. I'm sorry for the", "in this fight. I’m sorry for the"),
("is done then Kapou'e will give you", "is done then Kapou’e will give you"),
("is going well, let's surround them on", "is going well, let’s surround them on"),
("It's been a chilly", "It’s been a chilly"),
("It's dawn!", "It’s dawn!"),
("it's time to show", "it’s time to show"),
("its own weakness. Prestim's walls are", "its own weakness. Prestim’s walls are"),
("Kapou'e agreed to meet", "Kapou’e agreed to meet"),
("Kapou'e and his followers", "Kapou’e and his followers"),
("Kapou'e and the shamans", "Kapou’e and the shamans"),
("Kapou'e had cowed into", "Kapou’e had cowed into"),
("Kapou'e refused to do", "Kapou’e refused to do"),
("Kapou'e ruled for many", "Kapou’e ruled for many"),
("Kapou'e suspected some sort", "Kapou’e suspected some sort"),
("Kapou'e vowed that this", "Kapou’e vowed that this"),
("Kapou'e, both attacks were", "Kapou’e, both attacks were"),
("Kapou'e, but he decided", "Kapou’e, but he decided"),
("Kapou'e, if Prestim falls,", "Kapou’e, if Prestim falls,"),
("Kapou'e, it is essential", "Kapou’e, it is essential"),
("Kapou'e, rumors tell of", "Kapou’e, rumors tell of"),
("kid for me. I'll be in my", "kid for me. I’ll be in my"),
("killing the shamans, Kapou'e was requested to", "killing the shamans, Kapou’e was requested to"),
("Lanbec'h flatly refused to", "Lanbec’h flatly refused to"),
("Lanbec'h led the army", "Lanbec’h led the army"),
("Lanbec'h was no doubt", "Lanbec’h was no doubt"),
("Let's slay them and", "Let’s slay them and"),
("made his plans, Kapou'e set out for", "made his plans, Kapou’e set out for"),
("messenger brought Rahul Karun's reply.", "messenger brought Rahul Karun’s reply."),
("miles away. They won't be", "miles away. They won’t be"),
("mountain pass. Kapou'e began to recognize", "mountain pass. Kapou’e began to recognize"),
("my good graces. I'll", "my good graces. I’ll"),
("My son, you're old enough to", "My son, you’re old enough to"),
("no choice ... I've never tasted scorpions", "no choice ... I’ve never tasted scorpions"),
("No really, it's true. He is", "No really, it’s true. He is"),
("of the Black-Eye. I'm Jetto, master of", "of the Black-Eye. I’m Jetto, master of"),
("of turmoil, we don't", "of turmoil, we don’t"),
("Oh, so it's another group o' stinkin' orcs, eh. Begone,", "Oh, so it’s another group o’ stinkin’ orcs, eh. Begone,"),
("on us! They can't be more then", "on us! They can’t be more then"),
("on your victory, Kapou'e. You do honor", "on your victory, Kapou’e. You do honor"),
("orcish leader Kapou'e, the son of", "orcish leader Kapou’e, the son of"),
("orcish tribes and didn't have any direct", "orcish tribes and didn’t have any direct"),
("Orcs, bring me Kapou'e's head. I must", "Orcs, bring me Kapou’e’s head. I must"),
("out of the traitor's skull, Kapou'e gathered his men", "out of the traitor’s skull, Kapou’e gathered his men"),
("over three months, Kapou'e and his exhausted", "over three months, Kapou’e and his exhausted"),
("pathetic humans come, we'll deal with", "pathetic humans come, we’ll deal with"),
("pint-sized idiot, we aren't out to kill", "pint-sized idiot, we aren’t out to kill"),
("sight of Earl Lanbec'h's army.", "sight of Earl Lanbec’h’s army."),
("snows melted and Kapou'e readied his defenses.", "snows melted and Kapou’e readied his defenses."),
("So the 'great and mighty' elves are now", "So the ‘great and mighty’ elves are now"),
("that Grüü, you don't look so good.", "that Grüü, you don’t look so good."),
("that, if Earl Lanbec'h continued in", "that, if Earl Lanbec’h continued in"),
("that? I've not forgotten what", "that? I’ve not forgotten what"),
("That’s right! Now let's drive those mincing", "That’s right! Now let’s drive those mincing"),
("The hell I don't, Earl Lanbec'h, the Northern Alliance", "The hell I don’t, Earl Lanbec’h, the Northern Alliance"),
("the hour. If you're not ready by", "the hour. If you’re not ready by"),
("the human however, Kapou'e", "the human however, Kapou’e"),
("the humans, pfff. You're one to talk.", "the humans, pfff. You’re one to talk."),
("the land well. Flar'Tar and Al'Brock, both of you", "the land well. Flar’Tar and Al’Brock, both of you"),
("the leadership of Kapou'e, the", "the leadership of Kapou’e, the"),
("the Northern Alliance. Kapou'e declined. Instead he", "the Northern Alliance. Kapou’e declined. Instead he"),
("the route that Kapou'e had used to", "the route that Kapou’e had used to"),
("the shamans. It won't work", "the shamans. It won’t work"),
("the Silent Forest, Kapou'e and his party", "the Silent Forest, Kapou’e and his party"),
("then never. Now it's time to kill!", "then never. Now it’s time to kill!"),
("Then, Kapou'e launched his desperate", "Then, Kapou’e launched his desperate"),
("there, old chap. You're deemed too insignificant", "there, old chap. You’re deemed too insignificant"),
("Throughout this nightmare Kapou'e drove himself and", "Throughout this nightmare Kapou’e drove himself and"),
("to a minimum, Kapou'e also", "to a minimum, Kapou’e also"),
("to counterattack. We can't defeat them by", "to counterattack. We can’t defeat them by"),
("to deal with. He's left that pleasure", "to deal with. He’s left that pleasure"),
("to him. Delighted, Kapou'e sent detachments off", "to him. Delighted, Kapou’e sent detachments off"),
("to know why you're here,", "to know why you’re here,"),
("to our one. We'll just have", "to our one. We’ll just have"),
("to steal from Kapou'e, Son of the", "to steal from Kapou’e, Son of the"),
("told us that that's where the shamans", "told us that that’s where the shamans"),
("troll, so you don't know. These lands", "troll, so you don’t know. These lands"),
("us if we don't have to worry", "us if we don’t have to worry"),
("wasn't joking about making", "wasn’t joking about making"),
("we destroy Earl Lanbec'h. Smite, stab, slay!", "we destroy Earl Lanbec’h. Smite, stab, slay!"),
("we meet any, we'll cut them to", "we meet any, we’ll cut them to"),
("we'll discuss it.", "we’ll discuss it."),
("We'll head for Barag", "We’ll head for Barag"),
("We've received messengers from", "We’ve received messengers from"),
("We've succeeded! We've passed through the", "We’ve succeeded! We’ve passed through the"),
("well known than Kapou'e.", "well known than Kapou’e."),
("Well let's see, first he", "Well let’s see, first he"),
("which Earl Lanbec'h had retreated.", "which Earl Lanbec’h had retreated."),
("Who is that? Kapou'e! What are you", "Who is that? Kapou’e! What are you"),
("who weren't killed in combat", "who weren’t killed in combat"),
("withdrawal' is another matter.", "withdrawal’ is another matter."),
("won't meet them.", "won’t meet them."),
("yield to fear, Kapou'e rallied his warriors", "yield to fear, Kapou’e rallied his warriors"),
("You don't understand. Orcs have", "You don’t understand. Orcs have"),
("you miserable coward?! I'll make", "you miserable coward?! I’ll make"),
("you so why don't you just", "you so why don’t you just"),
("You were right, Kapou'e, the", "You were right, Kapou’e, the"),
("You're right. I can", "You’re right. I can"),
("young upstart called Kapou'e. He", "young upstart called Kapou’e. He"),
("eaten that Grüü,", "eaten that, Grüü"),
# Fix capitalization
("\"Shan Taum The Smug\"", "\"Shan Taum the Smug\""),
("The Siege Of Barag Gór", "The Siege of Barag Gór"),
("End Of Peace", "End of Peace"),
# Convert apostrophes
("must know they can't break through", "must know they can’t break through"),
("Great Horde. We can't", "Great Horde. We can’t"),
("Defeat Earl Lanbec'h", "Defeat Earl Lanbec’h"),
("the other tribes won't think", "the other tribes won’t think"),
("What's this? This weakling", "What’s this? This weakling"),
("Bah! I'm the only", "Bah! I’m the only"),
("amongst themselves! Now's our", "amongst themselves! Now’s our"),
),
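# Each textdomain below maps to a tuple of (old, new) literal pairs,
# presumably applied in list order by the surrounding script; an earlier,
# shorter pattern can therefore shadow a later one whose left-hand side
# still contains unconverted text.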
"wesnoth-tb" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("A Tale of Two Brothers - Epilogue", "A Tale of Two Brothers — Epilogue"),
# Convert straight apostrophes and quotation marks
# conversion added in 1.9.0-svn
("The village's mage Bjarn", "The village’s mage Bjarn"),
("'Fear and obey Mordak the Mage!'", "“Fear and obey Mordak the Mage!”"),
("Mordak's", "Mordak’s"),
("more than two days' ride", "more than two days’ ride"),
("brother's", "brother’s"),
("We're chasing after", "We’re chasing after"),
("master's", "master’s"),
("let's catch those", "let’s catch those"),
("We think there's", "We think there’s"),
("They've captured my brother", "They’ve captured my brother"),
("Arne's", "Arne’s"),
("let's kill some orcs", "let’s kill some orcs"),
("you're supposed", "you’re supposed"),
("isn't it", "isn’t it"),
("aren't our relief", "aren’t our relief"),
("they're holding Bjarn", "they’re holding Bjarn"),
("day's", "day’s"),
("I don't think we can rescue", "I don’t think we can rescue"),
# Fix capitalization
# conversion added in 1.9.0+svn
("Rooting Out A Mage", "Rooting Out a Mage"),
# Punctuation error.
# fixed in 1.9.1+svn
("brother still And", "brother still. And"),
),
"wesnoth-units" : (
# Spelling fixes required at r44124
("diminuitive", "diminutive"),
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("allowed - grudgingly - to", "allowed—grudgingly—to"),
("horseback - in fact", "horseback — in fact"),
("is quite justified -", "is quite justified —"),
("their own race - this power", "their own race — this power"),
("archer - though the heft", "archer — though the heft"),
("dangerous enough - the deadliness", "dangerous enough — the deadliness"),
("in battle - if only", "in battle — if only"),
("accompany it - it is a sin", "accompany it — it is a sin"),
("rarely seen - standing", "rarely seen — standing"),
# Fix spacing of old results
# conversion added in 1.9.0+svn
("allowed — grudgingly — to", "allowed—grudgingly—to"),
# Convert straight quotes to curly quotes
# conversion added in 1.9.0+svn
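# Straight double quotes inside a pattern must be backslash-escaped;
# otherwise adjacent string literals concatenate and the quotes are
# silently dropped from the match text.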
("particularly good at ""resolving"" territorial", "particularly good at “resolving” territorial"),
("the power they've learned to focus", "the power they’ve learned to focus"),
("challenge his tribe's leader for", "challenge his tribe’s leader for"),
("although it's most commonly", "although it’s most commonly"),
("one's own goals", "one’s own goals"),
("commands of it's master", "commands of it’s master"),
("This doesn't work", "This doesn’t work"),
("target's", "target’s"),
("enemies'", "enemies’"),
("known as 'Armageddon Drakes' are", "known as ‘Armageddon Drakes’ are"),
("protect their fellows' honor, and", "protect their fellows’ honor, and"),
("to the drake's traditional metal", "to the drake’s traditional metal"),
("good at \"resolving\" territorial disputes", "good at “resolving” territorial disputes"),
("challenge his tribe's", "challenge his tribe’s"),
("enemy where it's least", "enemy where it’s least"),
("supplement their bodies' momentum, adding", "supplement their bodies’ momentum, adding"),
("tribe's army. No", "tribe’s army. No"),
("of these 'thundersticks' is terrifying", "of these ‘thundersticks’ is terrifying"),
("from these 'thundersticks' can take", "from these ‘thundersticks’ can take"),
("any of humanity's fresh", "any of humanity’s fresh"),
("the elvish 'Avengers' comes from", "the elvish ‘Avengers’ comes from"),
("titled as 'sorceresses' by other", "titled as ‘sorceresses’ by other"),
("'Dire wolves' differ from", "‘Dire wolves’ differ from"),
("from these beasts' paws into", "from these beasts’ paws into"),
("disrupt the enemy's battle", "disrupt the enemy’s battle"),
("The Goblin 'Knights' have little", "The Goblin ‘Knights’ have little"),
("Nonetheless, a rouser's frenzied goading", "Nonetheless, a rouser’s frenzied goading"),
("are called 'Goblins' and are", "are called ‘Goblins’ and are"),
("for the paladins' loyalty is", "for the paladins’ loyalty is"),
("liege's apparent virtue.", "liege’s apparent virtue."),
("as the 'Grand Knights'", "as the ‘Grand Knights’"),
("custom of Wesnoth's nobility that", "custom of Wesnoth’s nobility that"),
("Magi as 'Red Magi'. Likewise, the", "Magi as ‘Red Magi’. Likewise, the"),
("signify the mage's ability to", "signify the mage’s ability to"),
("warriors, the 'Red Magi' have a", "warriors, the ‘Red Magi’ have a"),
("service, former 'footpads' rise up", "service, former ‘footpads’ rise up"),
("victim's belongings. Masters", "victim’s belongings. Masters"),
("of outlaw, they're not comfortable", "of outlaw, they’re not comfortable"),
("derisively called 'footpads' by their", "derisively called ‘footpads’ by their"),
("against their employer's enemies, often", "against their employer’s enemies, often"),
("known as 'Ruffians'. With enough", "known as ‘Ruffians’. With enough"),
("the various 'guilds' of", "the various ‘guilds’ of"),
("in a 'fair fight'.", "in a ‘fair fight’."),
("However, if you're hurling peasants", "However, if you’re hurling peasants"),
("your foes, you're clearly out", "your foes, you’re clearly out"),
("nets, mermen can't give any", "nets, mermen can’t give any"),
("are called 'Slayers' by their", "are called ‘Slayers’ by their"),
("arbalest or 'slurbow'; a", "arbalest or ‘slurbow’; a"),
("whatever 'societal' function they", "whatever ‘societal’ function they"),
("as large 'sticks and stones' serve them", "as large ‘sticks and stones’ serve them"),
("necrophage, or 'devourer of the dead', is a", "necrophage, or ‘devourer of the dead’, is a"),
("- a process", "— a process"),
("sight of one's", "sight of one’s"),
("immortality from nature's grasp, the", "immortality from nature’s grasp, the"),
("of black magic's first goal:", "of black magic’s first goal:"),
("is considered 'black magic' is the", "is considered ‘black magic’ is the"),
("alone caused humanity's condemnation of", "alone caused humanity’s condemnation of"),
("a dark sorcerer's creations, some", "a dark sorcerer’s creations, some"),
("dubbed simply 'Bone-Shooters' by their", "dubbed simply ‘Bone-Shooters’ by their"),
("known as 'Deathblades' were masters", "known as ‘Deathblades’ were masters"),
("from the 'skeletons' which often", "from the ‘skeletons’ which often"),
("In their masters' hands, these", "In their masters’ hands, these"),
("masters' powers. They", "masters’ powers. They"),
("taking a foe's head clean off.", "taking a foe’s head clean off."),
("poor horses' stamina", "poor horses’ stamina"),
# "naptha" -> "naphtha"
("naptha attack", "naphtha attack"),
),
"wesnoth-utbs" : (
# Convert makeshift dashes:
# conversion added in 1.9.0-svn
("Strike hard and fast and also be careful--right, this is going to be fun.", "Strike hard and fast and also be careful—right, this is going to be fun."),
("There might be, but I don't--", "There might be, but I don’t—"),
("You idiot--", "You idiot—"),
("unfortunately we found your men dead--", "unfortunately we found your men dead—"),
("Well, actually they were fleeing from--", "Well, actually they were fleeing from—"),
("Look, if you'll just let me explain--", "Look, if you’ll just let me explain—"),
("This is--", "This is—"),
("This entire journey has been based on a lie--", "This entire journey has been based on a lie—"),
("the last few generations--if anything the land had grown even more", "the last few generations—if anything the land had grown even more"),
# Fix screw up
# conversion added in 1.9.0-svn
("Look, if you'll just let me explain—", "Look, if you’ll just let me explain—"),
("There might be, but I don't—", "There might be, but I don’t—"),
# Convert straight apostrophes and quotation marks
# conversion added in 1.9.0-svn
("What's happened? Oh Eloh, the craters are everywhere, everything is gone, ruined. I can hardly recognize our village. I didn't think it could be this bad.", "What’s happened? Oh Eloh, the craters are everywhere, everything is gone, ruined. I can hardly recognize our village. I didn’t think it could be this bad."),
("C'mon", "Come on"),
("Tanuil's", "Tanuil’s"),
("That's", "That’s"),
("Uria's", "Uria’s"),
("If we don't stop them", "If we don’t stop them"),
("Then let's join the battle!", "Then let’s join the battle!"),
("let's get rid of them", "let’s get rid of them"),
("If we don't", "If we don’t"),
("things won't be", "things won’t be"),
("we can't dwell on the dead", "we can’t dwell on the dead"),
("Let's keep exploring the wreckage.", "Let’s keep exploring the wreckage."),
("They're destroyed at last.", "They’re destroyed at last."),
("But there's no time", "But there’s no time"),
("so we'll have", "so we’ll have"),
("I'm fine. I'm afraid only", "I’m fine. I’m afraid only"),
("Maybe they're hiding in the stables. Let's go check.", "Maybe they’re hiding in the stables. Let’s go check."),
("We'll need your help", "We’ll need your help"),
("They've agreed to", "They’ve agreed to"),
("couldn't", "couldn’t"),
("Eloh's", "Eloh’s"),
("I've fought", "I’ve fought"),
("We'll just have to find out.", "We’ll just have to find out."),
("I suppose we wouldn't", "I suppose we wouldn’t"),
("there'll be one heck", "there’ll be one heck"),
("You've been working on", "You’ve been working on"),
("Hey Kaleh, how's", "Hey Kaleh, how’s"),
("I'm not quite", "I’m not quite"),
("Yechnagoth's", "Yechnagoth’s"),
("Go'hag", "Go’hag"),
("she's", "she’s"),
("Looks like that's the", "Looks like that’s the"),
("it's a long story", "it’s a long story"),
("you're a mage", "you’re a mage"),
("We'd", "We’d"),
("Let's cleanse", "Let’s cleanse"),
("if it's a fight they want, it's a fight they'll", "if it’s a fight they want, it’s a fight they’ll"),
("That's the last", "That’s the last"),
("there's still dried blood on the stones. It's", "there’s still dried blood on the stones. It’s"),
("they're going to be sorry", "they’re going to be sorry"),
("It's a good", "It’s a good"),
("don't kill me", "don’t kill me"),
("We're just", "We’re just"),
("It's Holy Water.", "It’s Holy Water."),
("we won't be able", "we won’t be able"),
("we've made it", "we’ve made it"),
("I've been searching", "I’ve been searching"),
("I'm searching for the", "I’m searching for the"),
("No, I haven't.", "No, I haven’t."),
("I'm not sure. I've read various references to it, but nothing specific. I've been searching for it for a long time. All I know is that it was a very powerful magical wand and that it was some sort of symbol of royalty in the old empire, but I have no idea where it might be. So I scour the land, learning all I can about the olden days. I'm sure it must be somewhere.", "I’m not sure. I’ve read various references to it, but nothing specific. I’ve been searching for it for a long time. All I know is that it was a very powerful magical wand and that it was some sort of symbol of royalty in the old empire, but I have no idea where it might be. So I scour the land, learning all I can about the olden days. I’m sure it must be somewhere."),
("exhausted. We've taken", "exhausted. We’ve taken"),
("I don't...", "I don’t..."),
("or what's left", "or what’s left"),
("I'm impressed.", "I’m impressed."),
("You've been working", "You’ve been working"),
("couldn't be worse", "couldn’t be worse"),
("they haven't had the", "they haven’t had the"),
("I don't think we've explored", "I don’t think we’ve explored"),
("We've explored the village and I think we've", "We’ve explored the village and I think we’ve"),
("glad you're here", "glad you’re here"),
("'You must be strong, young elf", "“You must be strong, young elf"),
("you and protect you.'", "you and protect you.”"),
("We can't leave them", "We can’t leave them"),
("I don't want to tarry", "I don’t want to tarry"),
("What's that to", "What’s that to"),
("won't be forgotten", "won’t be forgotten"),
("Zhul's", "Zhul’s"),
("friends'", "friends’"),
("you don't recruit", "you don’t recruit"),
("I don't see a thing", "I don’t see a thing"),
("poor person's body", "poor person’s body"),
("There doesn't seem", "There doesn’t seem"),
("wait...what's this", "wait...what’s this"),
("I've heard tales", "I’ve heard tales"),
("Traveler's Ring", "Traveler’s Ring"),
# Convert more apostrophes/quotes and makeshift dashes
# conversion added in 1.9.0+svn
("I'll be back soon!", "I’ll be back soon!"),
("Let's aid the dwarves.", "Let’s aid the dwarves."),
("Let's aid the trolls.", "Let’s aid the trolls."),
("Kaleh, a quick question-", "Kaleh, a quick question—"),
("'traders' used to come", "‘traders’ used to come"),
("'Yechnagoth' and 'Zhangor'. She said that", "‘Yechnagoth’ and ‘Zhangor’. She said that"),
("a land of 'kill or be killed'. Outlaws, ogres", "a land of ‘kill or be killed’. Outlaws, ogres"),
("a lesson they won't soon forget. Attack!", "a lesson they won’t soon forget. Attack!"),
("a minute, I don't see any targets", "a minute, I don’t see any targets"),
("after your enemies fall' Grave robbing is", "after your enemies fall” Grave robbing is"),
("again, Kaleh, this isn't the desert. The", "again, Kaleh, this isn’t the desert. The"),
("again. You numbskulls aren't getting the easy", "again. You numbskulls aren’t getting the easy"),
("Ah, it's a hard knock", "Ah, it’s a hard knock"),
("all contact. I don't know if any", "all contact. I don’t know if any"),
("all this way, I'm not going to", "all this way, I’m not going to"),
("also declared that Zhangor's name and his", "also declared that Zhangor’s name and his"),
("And besides, we can't afford to lose", "And besides, we can’t afford to lose"),
("and better. We haven't seen one of", "and better. We haven’t seen one of"),
("and joining Tanstafaal. I'm afraid that while", "and joining Tanstafaal. I’m afraid that while"),
("and protect you.” I'm", "and protect you.” I’m"),
("and so I wasn't told the location.", "and so I wasn’t told the location."),
("Anyway you've really gotten us", "Anyway you’ve really gotten us"),
("anyway? I'm going to have", "anyway? I’m going to have"),
("are close. Let's be far away", "are close. Let’s be far away"),
("are hard pressed. We're going to have", "are hard pressed. We’re going to have"),
("are pitch black! It's as bad as", "are pitch black! It’s as bad as"),
("are the words: 'May you have the", "are the words: “May you have the"),
("are, and we don't much care. Tremble", "are, and we don’t much care. Tremble"),
("aren't safe. Can you", "aren’t safe. Can you"),
("as a hermit crab's shell. It must", "as a hermit crab’s shell. It must"),
("as a warrior. Let's respect his wishes", "as a warrior. Let’s respect his wishes"),
("as you think. Let's explore the village", "as you think. Let’s explore the village"),
("attacking it. We can't stop now!", "attacking it. We can’t stop now!"),
("Aye, there's still life in", "Aye, there’s still life in"),
("be left that I'm not leading my", "be left that I’m not leading my"),
("be magical. Maybe that's why he was", "be magical. Maybe that’s why he was"),
("be too sure. I'll", "be too sure. I’ll"),
("beast you aren't familiar with. The", "beast you aren’t familiar with. The"),
("become useful eventually. I'll keep it just", "become useful eventually. I’ll keep it just"),
("been a mirage. I've been out", "been a mirage. I’ve been out"),
("Before we leave, there's one thing I", "Before we leave, there’s one thing I"),
("being burnt alive! It's terrible, I", "being burnt alive! It’s terrible, I"),
("better. But we don't know", "better. But we don’t know"),
("blue stuff. I don't know", "blue stuff. I don’t know"),
("blue. The pool isn't very deep, I", "blue. The pool isn’t very deep, I"),
("broken. But I don't think we have", "broken. But I don’t think we have"),
("but frankly we didn't have enough dwarves", "but frankly we didn’t have enough dwarves"),
("But I still don't understand how you", "But I still don’t understand how you"),
("But no, they wouldn't kill me. They", "But no, they wouldn’t kill me. They"),
("But still she doesn't stand a", "But still she doesn’t stand a"),
("but to forgive divine'?", "but to forgive divine”?"),
("but, well, we don't like going that", "but, well, we don’t like going that"),
("by orcs. I wouldn't be surprised if", "by orcs. I wouldn’t be surprised if"),
("by the Sea God's hand I call", "by the Sea God’s hand I call"),
("can damage it. It's almost as if", "can damage it. It’s almost as if"),
("can't bear to watch.", "can’t bear to watch."),
("can't last forever, and", "can’t last forever, and"),
("Can't the dead ever", "Can’t the dead ever"),
("caves, and we won't have the advantages", "caves, and we won’t have the advantages"),
("charges! If we can't stop them then", "charges! If we can’t stop them then"),
("charges! If we can't stop them", "charges! If we can’t stop them"),
("close to the trolls' main lair; you", "close to the trolls’ main lair; you"),
("Come on! I ain't going anywhere for", "Come on! I ain’t going anywhere for"),
("consequences would be. I'm not", "consequences would be. I’m not"),
("creature was, she doesn't smell any better", "creature was, she doesn’t smell any better"),
("cross our blades... It's time", "cross our blades... It’s time"),
("Curse that girl! She'll be the death", "Curse that girl! She’ll be the death"),
("dangers we don't. We really are", "dangers we don’t. We really are"),
("darkness, and you won't be lacking a", "darkness, and you won’t be lacking a"),
("darkness? And I'm afraid that while", "darkness? And I’m afraid that while"),
("Dead?! You 'found' them you say?", "Dead?! You ‘found’ them you say?"),
("defenses'. It would have", "defenses’. It would have"),
("didn't have vile humans", "didn’t have vile humans"),
("didn't talk much, but", "didn’t talk much, but"),
("died fighting gloriously. I'm so ashamed. I", "died fighting gloriously. I’m so ashamed. I"),
("disappears at a moment's notice. No one", "disappears at a moment’s notice. No one"),
("do you mean 'lead you to me'? You are a god, don't you exist", "do you mean ‘lead you to me’? You are a god, don’t you exist"),
("Do you think I'm stupid enough to", "Do you think I’m stupid enough to"),
("doesn't know where to", "doesn’t know where to"),
("doesn't look good.", "doesn’t look good."),
("doesn't make sense.", "doesn’t make sense."),
("Don't ask where the", "Don’t ask where the"),
("Don't blame yourself. You didn't know. If we", "Don’t blame yourself. You didn’t know. If we"),
("don't do anything dangerous.", "don’t do anything dangerous."),
("don't have to spend.", "don’t have to spend."),
("don't know how long.", "don’t know how long."),
("don't know what happened.", "don’t know what happened."),
("Don't listen to them,", "Don’t listen to them,"),
("don't seem as threatening", "don’t seem as threatening"),
("don't suppose they're going to mind", "don’t suppose they’re going to mind"),
("Don't think you know", "Don’t think you know"),
("don't want the rest", "don’t want the rest"),
("Don't worry. We're not the monsters", "Don’t worry. We’re not the monsters"),
("Don't you worry about us, Kaleh. We'll be careful. I'll organize five bands", "Don’t you worry about us, Kaleh. We’ll be careful. I’ll organize five bands"),
("Don't you worry, we won't rest until we", "Don’t you worry, we won’t rest until we"),
("down this way. It's odd, I guess", "down this way. It’s odd, I guess"),
("dry ground, and you've been through a", "dry ground, and you’ve been through a"),
("due north. From Esanoo's description it sounds", "due north. From Esanoo’s description it sounds"),
("duplicate their ancestors' previous success, the", "duplicate their ancestors’ previous success, the"),
("dwarves. And you don't really need me,", "dwarves. And you don’t really need me,"),
("effect! I don't think the trolls", "effect! I don’t think the trolls"),
("elf... But you won't be so lucky", "elf... But you won’t be so lucky"),
("Eloh and the merfolk's god, and delighting", "Eloh and the merfolk’s god, and delighting"),
("escapes the valley, we'll be in trouble.", "escapes the valley, we’ll be in trouble."),
("explored no further. I'm", "explored no further. I’m"),
("faster if you don't interrupt me. Now,", "faster if you don’t interrupt me. Now,"),
("fear not the dark.' And again, just", "fear not the dark.” And again, just"),
("fear will surely appear' It looks", "fear will surely appear’ It looks"),
("fighting, but she doesn't smell any better", "fighting, but she doesn’t smell any better"),
("find another infestation. They've", "find another infestation. They’ve"),
("first. I don't think I want", "first. I don’t think I want"),
("for rescuing me. We'll show those humans", "for rescuing me. We’ll show those humans"),
("former owner certainly won't miss it.", "former owner certainly won’t miss it."),
("forward to that. I'm afraid my scales", "forward to that. I’m afraid my scales"),
("free us. I can't believe the humans", "free us. I can’t believe the humans"),
("from them, I can't leave now.", "from them, I can’t leave now."),
("from, but she wouldn't answer, she just", "from, but she wouldn’t answer, she just"),
("from. You really don't want to know.", "from. You really don’t want to know."),
("fruitful lives. Don't let our sacrifices", "fruitful lives. Don’t let our sacrifices"),
("further. I'm freezing cold, wet, and I can't see a thing. I'm not exactly", "further. I’m freezing cold, wet, and I can’t see a thing. I’m not exactly"),
("Garak were here, he'd know more about", "Garak were here, he’d know more about"),
("go through those 'light", "go through those ‘light"),
("Good, then let's get out of", "Good, then let’s get out of"),
("Good, until then we'll settle around that", "Good, until then we’ll settle around that"),
("Good, we'll move our people", "Good, we’ll move our people"),
("Good. We're safe for now.", "Good. We’re safe for now."),
("Greetings. I'm afraid I am", "Greetings. I’m afraid I am"),
("group of enemies. Let's hope", "group of enemies. Let’s hope"),
("guard, if we don't come out in", "guard, if we don’t come out in"),
("Ha, you're trapped. I've got you right", "Ha, you’re trapped. I’ve got you right"),
("hadn't come Grog would", "hadn’t come Grog would"),
("half-manned. They obviously didn't expect any serious", "half-manned. They obviously didn’t expect any serious"),
("happened! Who in Moradin's name rigged the", "happened! Who in Moradin’s name rigged the"),
("have defeated me, don't you? Foolish boy,", "have defeated me, don’t you? Foolish boy,"),
("he died a warrior's", "he died a warrior’s"),
("He'll be slaughtered! He", "He’ll be slaughtered! He"),
("He's dead. Rest in", "He’s dead. Rest in"),
("He's probably dead, then,", "He’s probably dead, then,"),
("heh, no you didn't find me. I", "heh, no you didn’t find me. I"),
("here are insects, I'll be very", "here are insects, I’ll be very"),
("here too long we'll roast alive. I don't even want to", "here too long we’ll roast alive. I don’t even want to"),
("here, and yet there's so", "here, and yet there’s so"),
("here, but I don't know what made", "here, but I don’t know what made"),
("Hey, c'mon, maybe we can", "Hey, c’mon, maybe we can"),
("Hey, what's this? There seems", "Hey, what’s this? There seems"),
("him through. Well here's your chance. Come", "him through. Well here’s your chance. Come"),
("his scouts haven't reported back yet. I'm starting to get", "his scouts haven’t reported back yet. I’m starting to get"),
("horizon, but I don't know what it", "horizon, but I don’t know what it"),
("Humans aren't just the bandits and outlaws you're familiar with from", "Humans aren’t just the bandits and outlaws you’re familiar with from"),
("humans doing here? I've never seen so", "humans doing here? I’ve never seen so"),
("humans much anyway. I'll be glad to", "humans much anyway. I’ll be glad to"),
("hurts and I, I'm stuck!", "hurts and I, I’m stuck!"),
("I am, I'm a fire mage. I've been traveling for", "I am, I’m a fire mage. I’ve been traveling for"),
("I can't even begin to", "I can’t even begin to"),
("I do. But it's just a theory. I don't want to say", "I do. But it’s just a theory. I don’t want to say"),
("I don't care! Do you think I'm going to let", "I don’t care! Do you think I’m going to let"),
("I don't get it. All", "I don’t get it. All"),
("I don't know what 'they' are, but we can't go back. Prepare", "I don’t know what ‘they’ are, but we can’t go back. Prepare"),
("I don't know. When I", "I don’t know. When I"),
("I don't see any spiders,", "I don’t see any spiders,"),
("I don't want to be", "I don’t want to be"),
("I fear our people's fate", "I fear our people’s fate"),
("I for one can't wait to feel", "I for one can’t wait to feel"),
("I lived long, I'm not afraid of", "I lived long, I’m not afraid of"),
("I thought that's what all the", "I thought that’s what all the"),
("I'd better head back", "I’d better head back"),
("I'll blow the charges", "I’ll blow the charges"),
("I'll just be happy", "I’ll just be happy"),
("I'll muster the rest", "I’ll muster the rest"),
("I'm exhausted after walking", "I’m exhausted after walking"),
("I'm not sure. I've read various references", "I’m not sure. I’ve read various references"),
("I'm sorry Kaleh, I", "I’m sorry Kaleh, I"),
("I'm sorry, Kaleh. My", "I’m sorry, Kaleh. My"),
("I'm still not sure", "I’m still not sure"),
("I've followed that boy this far, I'm not going to", "I’ve followed that boy this far, I’m not going to"),
("I've heard of dwarves,", "I’ve heard of dwarves,"),
("I've heard of your", "I’ve heard of your"),
("I've saved his sorry", "I’ve saved his sorry"),
("I've seen the ancient", "I’ve seen the ancient"),
("in the end, I'm afraid you just", "in the end, I’m afraid you just"),
("in the wall. 'If you dare", "in the wall. “If you dare"),
("in this chamber. You're surprised to see", "in this chamber. You’re surprised to see"),
("Incoming! Ugh, it's big, hairy, and", "Incoming! Ugh, it’s big, hairy, and"),
("instead of a coward's. For that I", "instead of a coward’s. For that I"),
("interesting. In truth, I'd never heard of", "interesting. In truth, I’d never heard of"),
("into dust. All that's", "into dust. All that’s"),
("into the caves you'll", "into the caves you’ll"),
("is one they don't have to. Still,", "is one they don’t have to. Still,"),
("is so hot it's stifling; I can", "is so hot it’s stifling; I can"),
("island. I bet it's", "island. I bet it’s"),
("it can't kill it.", "it can’t kill it."),
("It wouldn't have been my", "It wouldn’t have been my"),
("it's a hard knock", "it’s a hard knock"),
("It's a huge underground", "It’s a huge underground"),
("It's a long story,", "It’s a long story,"),
("It's another one of", "It’s another one of"),
("It's complicated, and I don't know how much", "It’s complicated, and I don’t know how much"),
("It's hard to imagine.", "It’s hard to imagine."),
("It's not important. What's important is that", "It’s not important. What’s important is that"),
("It's okay, you're not used to", "It’s okay, you’re not used to"),
("It's some sort of", "It’s some sort of"),
("its power. We don't normally tolerate using", "its power. We don’t normally tolerate using"),
("jungle, but I can't tell what", "jungle, but I can’t tell what"),
("just fine. I don't know", "just fine. I don’t know"),
("Kaleh, I wouldn't be surprised if", "Kaleh, I wouldn’t be surprised if"),
("Kaleh, we don't have time for", "Kaleh, we don’t have time for"),
("Kali's just a hardass because he's bitter that he", "Kali’s just a hardass because he’s bitter that he"),
("keep practicing until I'm satisfied.", "keep practicing until I’m satisfied."),
("killing trolls. And you're not going anywhere until I'm done with you.", "killing trolls. And you’re not going anywhere until I’m done with you."),
("Lady's name are you", "Lady’s name are you"),
("lands when I don't know what's in front of us. We've lost too many", "lands when I don’t know what’s in front of us. We’ve lost too many"),
("Leader wouldn't want any unpleasant", "Leader wouldn’t want any unpleasant"),
("leaderless. I guess it's every ant for", "leaderless. I guess it’s every ant for"),
("leadership so far; I'll wait", "leadership so far; I’ll wait"),
("let her die? I've never seen fire", "let her die? I’ve never seen fire"),
("Let's blast those monsters", "Let’s blast those monsters"),
("lightly defended; he's sent most of", "lightly defended; he’s sent most of"),
("Like I haven't killed enough undead recently. Why can't these guys just", "Like I haven’t killed enough undead recently. Why can’t these guys just"),
("like. I suppose we'll just try", "like. I suppose we’ll just try"),
("listen to the mages' protests. So, seeking", "listen to the mages’ protests. So, seeking"),
("lived underground, Elyssa. I've", "lived underground, Elyssa. I’ve"),
("location. So she didn't tell us where", "location. So she didn’t tell us where"),
("long as you didn't try to cheat", "long as you didn’t try to cheat"),
("Looks like he's been squirreling away", "Looks like he’s been squirreling away"),
("Looks like we don't have any choice", "Looks like we don’t have any choice"),
("made it outside. I'd forgotten how big", "made it outside. I’d forgotten how big"),
("many of our people's faith is wavering.", "many of our people’s faith is wavering."),
("master know about 'Yechnagoth' and 'Zhangor'? If Eloh", "master know about ‘Yechnagoth’ and ‘Zhangor’? If Eloh"),
("May the Sea God's bounty bless you", "May the Sea God’s bounty bless you"),
("Maybe it's just fish, but", "Maybe it’s just fish, but"),
("me if I don't take you at", "me if I don’t take you at"),
("meantime, we'll go ahead and", "meantime, we’ll go ahead and"),
("merfolk's help we would", "merfolk’s help we would"),
("merfolk, then we don't have any time", "merfolk, then we don’t have any time"),
("mess. I think you're right", "mess. I think you’re right"),
("more fire guardians. I'm going to be", "more fire guardians. I’m going to be"),
("my hand. It doesn't seem to have", "my hand. It doesn’t seem to have"),
("my life. I can't believe I was", "my life. I can’t believe I was"),
("named it 'Quenoth Isle' in honor of", "named it ‘Quenoth Isle’ in honor of"),
("nasty orcses. And 'specially stinking elves.", "nasty orcses. And ’specially stinking elves."),
("need to explain, it's pretty obvious what you're up to. Here", "need to explain, it’s pretty obvious what you’re up to. Here"),
("No I haven't, I don't often explore underground", "No I haven’t, I don’t often explore underground"),
("no idea what you're babbling about, elf, but you're just lucky you", "no idea what you’re babbling about, elf, but you’re just lucky you"),
("No we can't. But for now", "No we can’t. But for now"),
("No, I don't think you shall", "No, I don’t think you shall"),
("No, you don't have to introduce", "No, you don’t have to introduce"),
("northeast and northwest. Don't go too far,", "northeast and northwest. Don’t go too far,"),
("not enough. Look, you're a wonderful girl", "not enough. Look, you’re a wonderful girl"),
("not sure why I'm the one she", "not sure why I’m the one she"),
("Not worth... Whoa! What's happening?!", "Not worth... Whoa! What’s happening?!"),
("Now that we've set up a", "Now that we’ve set up a"),
("Now that you've found the way", "Now that you’ve found the way"),
("Nym is Kaleh's childhood friend who", "Nym is Kaleh’s childhood friend who"),
("Nym, and Kaleh, you've", "Nym, and Kaleh, you’ve"),
("oases aren't easy to find", "oases aren’t easy to find"),
("obsidian wall. I can't find any entrances", "obsidian wall. I can’t find any entrances"),
("obsidian wall. I can't find any way", "obsidian wall. I can’t find any way"),
("of here, but it's already flooding.", "of here, but it’s already flooding."),
("of me boys didn't think you could", "of me boys didn’t think you could"),
("of the desert? It's a rather barren", "of the desert? It’s a rather barren"),
("of the orcs. I'd seen sandstorms", "of the orcs. I’d seen sandstorms"),
("of water: it's even creating a", "of water: it’s even creating a"),
("of Wesnoth. And it's isn't", "of Wesnoth. And it’s isn’t"),
("Oh they're no need to explain, it's pretty obvious what you're up to. Here", "Oh, there’s no need to explain, it’s pretty obvious what you’re up to. Here"),
("Oh, Eloh. They're all dead. Butchered.", "Oh, Eloh. They’re all dead. Butchered."),
("on the horizon. We'll surely be", "on the horizon. We’ll surely be"),
("on Trolls. But you'll see,", "on Trolls. But you’ll see,"),
("Once we've escaped and are", "Once we’ve escaped and are"),
("orcs and goblins. We'll never make it", "orcs and goblins. We’ll never make it"),
("our destination. It wasn't even an island,", "our destination. It wasn’t even an island,"),
("our god, I don't remember Zhul mentioning", "our god, I don’t remember Zhul mentioning"),
("our people dwindle, it's going to become", "our people dwindle, it’s going to become"),
("our people dwindle, it's going to", "our people dwindle, it’s going to"),
("outpost isn't guarded as heavily", "outpost isn’t guarded as heavily"),
("Part of me didn't think we would", "Part of me didn’t think we would"),
("past the orcs. It's odd, I guess", "past the orcs. It’s odd, I guess"),
("peacefully or I'll make you sorry you didn't.", "peacefully or I’ll make you sorry you didn’t."),
("people already. I don't want to lead", "people already. I don’t want to lead"),
("people, I don't want to kill", "people, I don’t want to kill"),
("Pinnacle Rock. It's the highest land", "Pinnacle Rock. It’s the highest land"),
("place and they can't reach him there.", "place and they can’t reach him there."),
("places you can't.", "places you can’t."),
("planned or not, what's important is what", "planned or not, what’s important is what"),
("possible, but we won't last long in", "possible, but we won’t last long in"),
("pray, but I'm going in there!", "pray, but I’m going in there!"),
("protecting our encampments isn't enough, darkness and", "protecting our encampments isn’t enough, darkness and"),
("put here, but it's obviously powerful and", "put here, but it’s obviously powerful and"),
("rally more reinforcements. We're hurtin', Kaleh, I'll", "rally more reinforcements. We’re hurtin’, Kaleh, I’ll"),
("range. When they don't have a Queen", "range. When they don’t have a Queen"),
("recent losses we don't have the", "recent losses we don’t have the"),
("reconnaissance. We'll send out small", "reconnaissance. We’ll send out small"),
("red glowing runes. I'm not", "red glowing runes. I’m not"),
("refer to as 'The Golden Age'.", "refer to as ‘The Golden Age’."),
("reinforcements have arrived. We'll surely", "reinforcements have arrived. We’ll surely"),
("Remember, Kaleh, it's nasty fighting underground.", "Remember, Kaleh, it’s nasty fighting underground."),
("right you runts, let's try this again.", "right you runts, let’s try this again."),
("right, I'm going in. The", "right, I’m going in. The"),
("river and lake. I'm glad we weren't downstream of that", "river and lake. I’m glad we weren’t downstream of that"),
("river. I sure wouldn't want to be", "river. I sure wouldn’t want to be"),
("sacrificed them on Zhangor's bloody altars. Zhangor", "sacrificed them on Zhangor’s bloody altars. Zhangor"),
("safety. We certainly wouldn't want", "safety. We certainly wouldn’t want"),
("said $intl_ally_name, that you'd been here before?", "said $intl_ally_name, that you’d been here before?"),
("seem particularly inhospitable! I've been", "seem particularly inhospitable! I’ve been"),
("seen. It's almost unnatural.", "seen. It’s almost unnatural."),
("serve the merfolk's foul god.", "serve the merfolk’s foul god."),
("set, then I won't try to convince", "set, then I won’t try to convince"),
("snowy peaks. So that's how several thousand", "snowy peaks. So that’s how several thousand"),
("so big. And what's", "so big. And what’s"),
("so far. I don't think I", "so far. I don’t think I"),
("Some of trolls didn't think you strong", "Some of trolls didn’t think you strong"),
("Sometimes called the 'hollow men', spectres form the", "Sometimes called the ‘hollow men’, spectres form the"),
("south. And we can't hang around here", "south. And we can’t hang around here"),
("spider. Aren't we lucky!", "spider. Aren’t we lucky!"),
("spilled already. Knock 'em out, wound them,", "spilled already. Knock ’em out, wound them,"),
("stay dead? And aren't they trapped by", "stay dead? And aren’t they trapped by"),
("stayed in the captain's quarters and", "stayed in the captain’s quarters and"),
("still haunted by Nym's death in that", "still haunted by Nym’s death in that"),
("Still, the trolls don't seem to be", "Still, the trolls don’t seem to be"),
("stone wall. This can't be a coincidence.", "stone wall. This can’t be a coincidence."),
("strange vision. I didn't have long to", "strange vision. I didn’t have long to"),
("such as the elves' stats and the", "such as the elves’ stats and the"),
("summoned back I don't think we", "summoned back I don’t think we"),
("sunlit lands. When you're ready I'll show you", "sunlit lands. When you’re ready I’ll show you"),
("suns. She said 'Have", "suns. She said “Have"),
("support, so we don't want to run", "support, so we don’t want to run"),
("sure what I'm doing up here.", "sure what I’m doing up here."),
("surface, and besides there's still lots of", "surface, and besides there’s still lots of"),
("swear those castles weren't there at sunset.", "swear those castles weren’t there at sunset."),
("tastes terrible, I can't drink this! What", "tastes terrible, I can’t drink this! What"),
("tell me to 'kill the unbelievers'? If we had", "tell me to ‘kill the unbelievers’? If we had"),
("Tenacious savages, aren't they? But these", "Tenacious savages, aren’t they? But these"),
("than I do. I've", "than I do. I’ve"),
("Thank you, I'm sure you will", "Thank you, I’m sure you will"),
("Thank you. I'm sorry, I don't know what came", "Thank you. I’m sorry, I don’t know what came"),
("that appears by Tanstafaal's", "that appears by Tanstafaal’s"),
("that I don't know where she", "that I don’t know where she"),
("that Yechnagoth wasn't the one who", "that Yechnagoth wasn’t the one who"),
("that? It sure doesn't look good. The", "that? It sure doesn’t look good. The"),
("the best, so I'll be your", "the best, so I’ll be your"),
("the cavern nicely. I'm just thankful the", "the cavern nicely. I’m just thankful the"),
("the dangers we don't. We really are", "the dangers we don’t. We really are"),
("the essence, so let's move out as", "the essence, so let’s move out as"),
("the hills, you can't even see all that water, it's", "the hills, you can’t even see all that water, it’s"),
("the island's foul inhabitants, and", "the island’s foul inhabitants, and"),
("the left. And it's big enough", "the left. And it’s big enough"),
("the olden days. I'm sure it must", "the olden days. I’m sure it must"),
("the past, and there's nothing we can", "the past, and there’s nothing we can"),
("The point is, it's not her fight, it's our fight too.", "The point is, it’s not her fight, it’s our fight too."),
("the rubble. It doesn't look", "the rubble. It doesn’t look"),
("the shamans. The troll's military leader was", "the shamans. The troll’s military leader was"),
("The stinkin' elves have freed", "The stinkin’ elves have freed"),
("the stone. But there's no", "the stone. But there’s no"),
("the suns. But I've", "the suns. But I’ve"),
("the troll menace. They've been", "the troll menace. They’ve been"),
("the unbelievers proves your-", "the unbelievers proves your—"),
("the way. It's not far.", "the way. It’s not far."),
("the world that hadn't been plagued with", "the world that hadn’t been plagued with"),
("their fear of Zhangor's wrath, the", "their fear of Zhangor’s wrath, the"),
("them. I think that's where they are", "them. I think that’s where they are"),
("them. Whatever 'them' was.", "them. Whatever ‘them’ was."),
("Then we'll just have to", "Then we’ll just have to"),
("There isn't much left of", "There isn’t much left of"),
("these branching tunnels we'll have no idea", "these branching tunnels we’ll have no idea"),
("these secret tunnels, you'd think they could", "these secret tunnels, you’d think they could"),
("These smaller hills aren't as hard to", "These smaller hills aren’t as hard to"),
("these tunnels deserted. I'd be", "these tunnels deserted. I’d be"),
("these tunnels deserted. I'll be", "these tunnels deserted. I’ll be"),
("they are, it can't be good.", "they are, it can’t be good."),
("they look. And they're huge! Well, the", "they look. And they’re huge! Well, the"),
("They obviously weren't enough. You go", "They obviously weren’t enough. You go"),
("they shan't. We shall kill", "they shan’t. We shall kill"),
("They're a proud people,", "They’re a proud people,"),
("They've come for my precious. It's mine, yes it is. They shan't have it, no", "They’ve come for my precious. It’s mine, yes it is. They shan’t have it, no"),
("They've crossed the chasm!", "They’ve crossed the chasm!"),
("thing to a god's actual", "thing to a god’s actual"),
("this time, we won't always be so", "this time, we won’t always be so"),
("those caves aren't safe. Can you", "those caves aren’t safe. Can you"),
("through this inferno I'm", "through this inferno I’m"),
("to a head. I'm worried about Tanstafaal", "to a head. I’m worried about Tanstafaal"),
("to be judged. They'll deal with you", "to be judged. They’ll deal with you"),
("to challenge me, I'll devour your soul", "to challenge me, I’ll devour your soul"),
("to give you. I'm afraid that where", "to give you. I’m afraid that where"),
("to ignore enemy units' zones of", "to ignore enemy units’ zones of"),
("to me? This isn't a negotiation. You", "to me? This isn’t a negotiation. You"),
("to meet the merfolk's leader!", "to meet the merfolk’s leader!"),
("to open it. Let's go, I'd really like to", "to open it. Let’s go, I’d really like to"),
("to respect your leaders' decisions. Kaleh has", "to respect your leaders’ decisions. Kaleh has"),
("to the base. They're going to sacrifice", "to the base. They’re going to sacrifice"),
("tomorrow night, then I'll tell you", "tomorrow night, then I’ll tell you"),
("Too late. And it's just sand inside.", "Too late. And it’s just sand inside."),
("trolls' secret weapon.", "trolls’ secret weapon."),
("trolls. And you don't really need me,", "trolls. And you don’t really need me,"),
("trolls. But there's no point dwelling", "trolls. But there’s no point dwelling"),
("truly came from. I'd been to some", "truly came from. I’d been to some"),
("tunnels towards the Dwarves'", "tunnels towards the Dwarves’"),
("Ugh. I'm covered in blood", "Ugh. I’m covered in blood"),
("Undead don't have to breathe and I don't think a little", "Undead don’t have to breathe and I don’t think a little"),
("Until I know what's out there, I'm not taking any", "Until I know what’s out there, I’m not taking any"),
("upon the elves' fears and demanded", "upon the elves’ fears and demanded"),
("us back. Well, we'll show them. It", "us back. Well, we’ll show them. It"),
("us if we can't end this battle", "us if we can’t end this battle"),
("us, we'll stick to the", "us, we’ll stick to the"),
("used to. So we'd best be extra", "used to. So we’d best be extra"),
("very close to dwarves' main lair. You", "very close to dwarves’ main lair. You"),
("Very well. I've trusted your decisions", "Very well. I’ve trusted your decisions"),
("vile, that I can't imagine them worshiping", "vile, that I can’t imagine them worshiping"),
("wait a minute, you're elves!?!", "wait a minute, you’re elves!?!"),
("wall of stone. I've had me boys", "wall of stone. I’ve had me boys"),
("wand. I don't have much experience", "wand. I don’t have much experience"),
("want to flee, don't you? But you", "want to flee, don’t you? But you"),
("want to know what's out there.", "want to know what’s out there."),
("warriors. I'm sure they have", "warriors. I’m sure they have"),
("wastes if we can't find", "wastes if we can’t find"),
("water-skins and rations. We'll have enough if", "water-skins and rations. We’ll have enough if"),
("way. But they aren't even elves,", "way. But they aren’t even elves,"),
("We don't know who you", "We don’t know who you"),
("we know you aren't servants of Yechnagoth,", "we know you aren’t servants of Yechnagoth,"),
("We will remember Esanoo's sacrifice.", "We will remember Esanoo’s sacrifice."),
("We won't. If we keep", "We won’t. If we keep"),
("We'll meet you along", "We’ll meet you along"),
("We're in luck, a", "We’re in luck, a"),
("We're still trying to", "We’re still trying to"),
("We've come far and", "We’ve come far and"),
("We've come out on", "We’ve come out on"),
("We've found both keys.", "We’ve found both keys."),
("We've reached what looks", "We’ve reached what looks"),
("We've run out of", "We’ve run out of"),
("We've taken too long", "We’ve taken too long"),
("Well, Nym's right, we don't know what's out there. So", "Well, Nym’s right, we don’t know what’s out there. So"),
("Well, we can't spend all day", "Well, we can’t spend all day"),
("west of here. Don't worry about losing", "west of here. Don’t worry about losing"),
("what's inside...", "what’s inside..."),
("What's that, boy? Are", "What’s that, boy? Are"),
("What's this on our borders? Stinkin' elves and more undead? We'll teach them", "What’s this on our borders? Stinkin’ elves and more undead? We’ll teach them"),
("What's this? Hidden underneath", "What’s this? Hidden underneath"),
("What's this? His clothes", "What’s this? His clothes"),
("What's this? Someone has", "What’s this? Someone has"),
("Who are you? What's behind that black", "Who are you? What’s behind that black"),
("who knows what we'll encounter deep under", "who knows what we’ll encounter deep under"),
("with their tactics; we've lost several of", "with their tactics; we’ve lost several of"),
("with us, but I'm afraid we don't", "with us, but I’m afraid we don’t"),
("won't help me, then", "won’t help me, then"),
("won't let a couple", "won’t let a couple"),
("won't lose all your", "won’t lose all your"),
("years. Her commands don't make any sense; she doesn't seem", "years. Her commands don’t make any sense; she doesn’t seem"),
("Yes, but I didn't explore very far.", "Yes, but I didn’t explore very far."),
("you can't drink it?", "you can’t drink it?"),
("You didn't expect the other", "You didn’t expect the other"),
("you first arrived, we've had a few", "you first arrived, we’ve had a few"),
("you know what you're doing, Kaleh.", "you know what you’re doing, Kaleh."),
("you not say: 'to err is elven,", "you not say: “to err is elven,"),
("You rescued Nog's brother, and he", "You rescued Nog’s brother, and he"),
("you sacrifice for Garak's sake?", "you sacrifice for Garak’s sake?"),
("You saved Grog's life. Grog still", "You saved Grog’s life. Grog still"),
("you see him, he's big, green and", "you see him, he’s big, green and"),
("you with. We haven't sent anyone to", "you with. We haven’t sent anyone to"),
("You're being very cryptic,", "You’re being very cryptic,"),
("you, I don't think that the", "you, I don’t think that the"),
("your word. We haven't seen elves for", "your word. We haven’t seen elves for"),
("Zur... Come mortal, let's cross our blades... It's time for", "Zur... Come mortal, let’s cross our blades... It’s time for"),
# Fix screw-up
# conversion added in 1.9.0+svn
("in the wall. ‘If you dare", "in the wall. “If you dare"),
# Fixed capitalization
("you ever heard of The", "you ever heard of the"),
# Kill the hyphens
("Mid-day", "Midday"),
("During the daytime (Dawn, Morning, Mid-day", "During the daytime (Dawn, Morning, Midday"),
),
"1.8-announcement" : (
# conversion added shortly before 1.8.0, might be relevant for the 1.10.0 announcement
("WML events an AI components", "WML events and AI components"),
("1.7.3", "1.7.13"),
("/tags/1.8/", "/tags/1.8.0/"),
),
"1.10-announcement" : (
("roleplaying", "role-playing"),
),
# Bulk apostrophe and dash conversion, pass 1, all campaigns EXCEPT
# LOW (already sent as another patch)
# HTTT and DW (being done by Espreon and ancestral)
# AOI, DID, L (already fully converted)
# Simons Mith, 23/08/2010, based on build 45968
# Also about twelve new pairs of <i> tags, marked with comments
# From here downwards to next comment block...
"wesnoth-tutorial" : (
("<big>You have selected Li'sar.</big>", "<big>You have selected Li’sar.</big>"),
("a ranged attack (3-3; or 3 damage,", "a ranged attack (3–3; or 3 damage,"),
("are in its 'Zone of Control'", "are in its <i>Zone of Control</i>"), # new <i> tags
("attack! I'd better grab more", "attack! I’d better grab more"),
("be? The sword (5-4) or the bow (3-3)? I suppose you'll", "be? The sword (5–4) or the bow (3–3)? I suppose you’ll"),
("but I think you're", "but I think you’re"),
("can make sure you've used all your", "can make sure you’ve used all your"),
("captured our village! You'd better get him", "captured our village! You’d better get him"),
("dark blue water? It's too deep for", "dark blue water? It’s too deep for"),
("designed as a beginner's campaign.", "designed as a beginner’s campaign."),
("Don't forget about your", "Don’t forget about your"),
("dummy's attacks are magical,", "dummy’s attacks are magical,"),
("enough experience and you'll become more powerful.", "enough experience and you’ll become more powerful."),
("every time, you'll drop from $student_hp", "every time, you’ll drop from $student_hp"),
("experience so it's more efficient to", "experience so it’s more efficient to"),
("female^Unfortunately, you've used up your", "female^Unfortunately, you’ve used up your"),
("female^You're about to be", "female^You’re about to be"),
("from the 'Actions' menu.", "from the ‘Actions’ menu."),
("gold per turn. You're only losing 1", "gold per turn. You’re only losing 1"),
("healed 2 hitpoints! I'd better attack it", "healed 2 hitpoints! I’d better attack it"),
("However, both units don't have much", "However, both units don’t have much"),
("If a unit doesn't do anything for", "If a unit doesn’t do anything for"),
("if your leader (Li'sar) is killed.", "if your leader (Li’sar) is killed."),
("into that village! He's not as stupid", "into that village! He’s not as stupid"),
("into the forest they'll", "into the forest they’ll"),
("It's very dangerous to", "It’s very dangerous to"),
("its melee attack (3-5).", "its melee attack (3–5)."),
("Konrad, Li'sar and Delfador are", "Konrad, Li’sar and Delfador are"),
("Li'sar will attack.", "Li’sar will attack."),
("miss $deadguy.name|, but it's better than one", "miss $deadguy.name|, but it’s better than one"),
("Now, Li'sar, I will leave", "Now, Li’sar, I will leave"),
("once with the 'Show Enemy Moves' command", "once with the <b>Show Enemy Moves</b> command"), # new <b> tags
("out of the enemy's reach!", "out of the enemy’s reach!"),
("right-clicking and selecting 'Recruit'. This time you", "right-clicking and selecting <b>Recruit</b>. This time you"), # new <b> tags
("select a unit, you'll see percentages for", "select a unit, you’ll see percentages for"),
("select the attacker (Li'sar) then the target", "select the attacker (Li’sar) then the target"),
("troops before clicking 'End Scenario' to continue to", "troops before clicking <b>End Scenario</b> to continue to"), # new <b> tags
("turn by pressing 'n' to", "turn by pressing <b>n</b> to"), # new <b> tags
("Unfortunately, you've used up your", "Unfortunately, you’ve used up your"),
("used a sword (5-4; or 5 damage,", "used a sword (5–4; or 5 damage,"),
("water, you'll have to kill", "water, you’ll have to kill"),
("When 'n' no longer selects", "When <b>n</b> no longer selects"), # new <b> tags
("Whenever you're on a keep,", "Whenever you’re on a keep,"),
("Yes. It's a magical quintain.", "Yes. It’s a magical quintain."),
("you are playing Li'sar.", "you are playing Li’sar."),
("You can press 'u' to undo most", "You can press <b>u</b> to undo most"), # new <b> tags
("you can press 'u' to undo,", "you can press <b>u</b> to undo,"), # new <b> tags
("you recruited first; they'll be a lot", "you recruited first; they’ll be a lot"),
("You wouldn't do anything stupid", "You wouldn’t do anything stupid"),
("You're about to be", "You’re about to be"),
("you're going to need", "you’re going to need"),
("You've captured all the", "You’ve captured all the"),
("your keep for Li'sar to capture, as", "your keep for Li’sar to capture, as"),
),
# Pass 2: Bulk apostrophe conversion by Simons Mith
# ~ 600 apostrophes changed to sexed single and double quotes
# No other changes made.
"unspecified" : (
# Single-word changes; these can create duplicate msgids, but since they're
# only names they should be safe to apply. Except possibly Li'sar.
("\"Al'Brock\"", "\"Al’Brock\""),
("\"Al'Mar\"", "\"Al’Mar\""),
("\"Al'Tar\"", "\"Al’Tar\""),
("\"Ar'Muff\"", "\"Ar’Muff\""),
("\"Bak'man\"", "\"Bak’man\""),
("\"Flar'Tar\"", "\"Flar’Tar\""),
("\"Ha'Tang\"", "\"Ha’Tang\""),
("\"Ha'Tuil\"", "\"Ha’Tuil\""),
("\"Hida'tul\"", "\"Hida’tul\""),
("\"J'rem\"", "\"J’rem\""),
("\"Kapou'e\"", "\"Kapou’e\""),
("\"Lanbec'h\"", "\"Lanbec’h\""),
("\"Ro'Arthian\"", "\"Ro’Arthian\""),
("\"Ro'Sothian\"", "\"Ro’Sothian\""),
("\"Shek'kahan\"", "\"Shek’kahan\""),
("\"T'bhan\"", "\"T’bhan\""),
("\"Thu'lok\"", "\"Thu’lok\""),
("\"Ug'lok\"", "\"Ug’lok\""),
("\"Ut'Tan-Grilg\"", "\"Ut’Tan-Grilg\""),
("\"Ut'Tan-Grorag\"", "\"Ut’Tan-Grorag\""),
("\"Ut'Tan-Vrork\"", "\"Ut’Tan-Vrork\""),
# Generates long po line: 86 bytes
# ("kind that won't leave <i>us</i>", "kind that won’t leave <i>us</i>"),
# May generate long po line: 85 bytes
("It's the Prince", "It’s the Prince"),
# The rest all appear safe to apply
("'Lord'? Oh no!", "‘Lord’? Oh no!"),
("'Prince Haldric the Dragonbane' sounds rather", "‘Prince Haldric the Dragonbane’ sounds rather"),
("'The Great Chamber'? Hmmm, wonder", "‘The Great Chamber’? Hmmm, wonder"),
("'The master’s study' eh? I bet we'll find Malifor", "‘The master’s study’ eh? I bet we’ll find Malifor"),
("(<i>Grunt... strain...</i>) It's not moving.", "(<i>Grunt... strain...</i>) It’s not moving."),
("(<i>Reads</i>) <i>Kannin a'kana du'masi a'forigln de'amp.</i>", "(<i>Reads</i>) <i>Kannin a’kana du’masi a’forigln de’amp.</i>"),
("(<i>Shrugs</i>) I don't think we", "(<i>Shrugs</i>) I don’t think we"),
("... Can't... The... heat...", "... Can’t... The... heat..."),
("... Don't tax yourself...", "... Don’t tax yourself..."),
("800 gold! We're rich!!", "800 gold! We’re rich!!"),
("<i>I</i> want? I'll tell you", "<i>I</i> want? I’ll tell you"),
("<i>Swamp</i>?! I'm under five", "<i>Swamp</i>?! I’m under five"),
("a hundred. Don't lose it.", "a hundred. Don’t lose it."),
("a minute, you're elves?!", "a minute, you’re elves?!"),
("a sec, Ro'Sothian, you don't want to", "a sec, Ro’Sothian, you don’t want to"),
("a sword. I'm fairly sure you'll win.", "a sword. I’m fairly sure you’ll win."),
("a true 'Kingdom of Wesnoth'. I... I", "a true ‘Kingdom of Wesnoth’. I... I"),
("aan ogras, us'll elp yoo!", "aan ogras, us’ll elp yoo!"),
("about trolls? I'm not sure I'd want to", "about trolls? I’m not sure I’d want to"),
("Actually... we won't be coming.", "Actually... we won’t be coming."),
("Afraid you'll find out you're not of", "Afraid you’ll find out you’re not of"),
("Ah, they don't scare me!", "Ah, they don’t scare me!"),
("Ahh yes, that's better.", "Ahh yes, that’s better."),
("Ahhh, I'm so tired.", "Ahhh, I’m so tired."),
("All I'm saying is", "All I’m saying is"),
("All right! Blast'n time!", "All right! Blast’n time!"),
("All right, let's move out", "All right, let’s move out"),
("All right, let's move out,", "All right, let’s move out,"),
("all... but I'm sure none", "all... but I’m sure none"),
("alliance, Earl Lanbec'h.", "alliance, Earl Lanbec’h."),
("Amazing, I haven't seen a", "Amazing, I haven’t seen a"),
("and arrows won't work.", "and arrows won’t work."),
("and Earl Lanbec'h", "and Earl Lanbec’h"),
("and fight... What's this?", "and fight... What’s this?"),
("and hammers o' every dwarf", "and hammers o’ every dwarf"),
("and I don't think I", "and I don’t think I"),
("and I— I'm stuck!", "and I— I’m stuck!"),
("And look, he's gone. Now", "And look, he’s gone. Now"),
("And now we'll never know", "And now we’ll never know"),
("and undisturbed, Ro'Arthian.", "and undisturbed, Ro’Arthian."),
("And what's the bad", "And what’s the bad"),
("any better, we'll probably die", "any better, we’ll probably die"),
("Anything you can't handle?", "Anything you can’t handle?"),
("are arriving. They've surrounded us! We're doomed.", "are arriving. They’ve surrounded us! We’re doomed."),
("are but children's tales... Still,", "are but children’s tales... Still,"),
("Argh! I'll just come", "Argh! I’ll just come"),
("Argh! I'm dead! Well,", "Argh! I’m dead! Well,"),
("Argh! I'm done. My", "Argh! I’m done. My"),
("at least he's finally at", "at least he’s finally at"),
("at least you're improving.", "at least you’re improving."),
("attacks more. You're becoming too", "attacks more. You’re becoming too"),
("Awesome, let's go!", "Awesome, let’s go!"),
("Aye! Down wi' the orcs!", "Aye! Down wi’ the orcs!"),
("Aye, it's still bandit", "Aye, it’s still bandit"),
("A’right then, we'll wait fa' later, eh?", "A’right then, we’ll wait fa’ later, eh?"),
("be called 'Dragonbane'.", "be called ‘Dragonbane’."),
("be mad! I'll offer one", "be mad! I’ll offer one"),
("be quiet, I'll explain it", "be quiet, I’ll explain it"),
("be useful, I'll take it.", "be useful, I’ll take it."),
("be... highly irregular.' Bah! I'll show you", "be... highly irregular.” Bah! I’ll show you"),
("bet this wasn't the welcome", "bet this wasn’t the welcome"),
("Black-Eye! We won't submit to", "Black-Eye! We won’t submit to"),
("break free! Don't let that", "break free! Don’t let that"),
("bridge yet, Cap'n?", "bridge yet, Cap’n?"),
("But don't you need", "But don’t you need"),
("But hasn't Eloh told", "But hasn’t Eloh told"),
("but it doesn't sound friendly.", "but it doesn’t sound friendly."),
("but we dinna' ha' much of", "but we dinna’ ha’ much of"),
("But we don't want to", "But we don’t want to"),
("but you haven't beat us", "but you haven’t beat us"),
("But... Asheviere, Garard's queen and Eldred's mother, seemed", "But... Asheviere, Garard’s queen and Eldred’s mother, seemed"),
("calling you 'the Dragonbane'.", "calling you ‘the Dragonbane’."),
("can just 'pass through' a Drake", "can just ‘pass through’ a Drake"),
("Can't our units", "Can’t our units"),
("Capture Ro'Arthian.", "Capture Ro’Arthian."),
("Careful, you don't know what's lurking in", "Careful, you don’t know what’s lurking in"),
("Chief! It's true, the", "Chief! It’s true, the"),
("Chief, we can't be blowing", "Chief, we can’t be blowing"),
("Chop 'em down. It's cool in", "Chop ’em down. It’s cool in"),
("click on Li'sar", "click on Li’sar"),
("coal that wi' melt this", "coal that wi’ melt this"),
("Come mortal, let's cross our", "Come mortal, let’s cross our"),
("coming! Smash 'em good!", "coming! Smash ’em good!"),
("course you didn't. That’s why it's called sneaking.", "course you didn’t. That’s why it’s called sneaking."),
("dares disturb Shek'kahan the Terrible?", "dares disturb Shek’kahan the Terrible?"),
("Dead-Man's Ford", "Dead-Man’s Ford"),
("Death of Al'Brock", "Death of Al’Brock"),
("Death of Flar'Tar", "Death of Flar’Tar"),
("Death of Kapou'e", "Death of Kapou’e"),
("Death of Li'sar", "Death of Li’sar"),
("Death of Ro'Arthian", "Death of Ro’Arthian"),
("Death of Ro'Sothian", "Death of Ro’Sothian"),
("Defeat Lanbec'h", "Defeat Lanbec’h"),
("Delfador to Ur-Thorodor's castle", "Delfador to Ur-Thorodor’s castle"),
("died a hero's death, one", "died a hero’s death, one"),
("died... a warrior's death... You", "died... a warrior’s death... You"),
("do. I don't know why,", "do. I don’t know why,"),
("do? We can't go much", "do? We can’t go much"),
("doing? I haven't let you", "doing? I haven’t let you"),
("Don't kill me,", "Don’t kill me,"),
("Don't make me", "Don’t make me"),
("Don't try to", "Don’t try to"),
("Don't we have", "Don’t we have"),
("Don't worry about", "Don’t worry about"),
("Don't worry Kaleh,", "Don’t worry Kaleh,"),
("Don't you intend", "Don’t you intend"),
("drama, but that's no excuse", "drama, but that’s no excuse"),
("dwarf. But he's been beaten", "dwarf. But he’s been beaten"),
("dwarves and stinkin' elves, we", "dwarves and stinkin’ elves, we"),
("dwarves, it don't matter!", "dwarves, it don’t matter!"),
("eh? You won't find many", "eh? You won’t find many"),
("elves, we can't trust them!", "elves, we can’t trust them!"),
("enough. Well, let's get them!", "enough. Well, let’s get them!"),
("envy your kind's prowess when", "envy your kind’s prowess when"),
("especially where you'll be going,", "especially where you’ll be going,"),
("expect, Chief? They're elves after", "expect, Chief? They’re elves after"),
("explain, Esanoo. We'll have to", "explain, Esanoo. We’ll have to"),
("Father, I'd like to", "Father, I’d like to"),
("feel like I'm forgetting something.", "feel like I’m forgetting something."),
("few here don't stand a", "few here don’t stand a"),
("filthy elves! I'll...", "filthy elves! I’ll..."),
("Finally! We'll fight at", "Finally! We’ll fight at"),
("Finally! You don't know how", "Finally! You don’t know how"),
("First, why don't you tell", "First, why don’t you tell"),
("for my family's memory, you", "for my family’s memory, you"),
("for now we've got to", "for now we’ve got to"),
("forbidden lore you've been taught.", "forbidden lore you’ve been taught."),
("Free, I'm free, and", "Free, I’m free, and"),
("Freedom! Don't worry about", "Freedom! Don’t worry about"),
("from? I can't see a", "from? I can’t see a"),
("frontier or it's all over!", "frontier or it’s all over!"),
("gave us, they're glowing!", "gave us, they’re glowing!"),
("GO HOME! AIN'T NUTTIN TO", "GO HOME! AIN’T NUTTIN TO"),
("go. Now let's see where", "go. Now let’s see where"),
("gold! We don't need more!", "gold! We don’t need more!"),
("grab him! Don't let him", "grab him! Don’t let him"),
("Greetings from Kapou'e, Son of", "Greetings from Kapou’e, Son of"),
("Grunt, and I'm done for!", "Grunt, and I’m done for!"),
("guy. We aren't gonna hurt", "guy. We aren’t gonna hurt"),
("Ha! <i>You</i> don't trust <i>me</i>?", "Ha! <i>You</i> don’t trust <i>me</i>?"),
("Haldric enters Southbay's sewer", "Haldric enters Southbay’s sewer"),
("Haldric, what's going on? What's the plan?", "Haldric, what’s going on? What’s the plan?"),
("Haldric, you're too paranoid.", "Haldric, you’re too paranoid."),
("hands of Kalenz's elves!", "hands of Kalenz’s elves!"),
("have allied wi' the humans...", "have allied wi’ the humans..."),
("he disappeared. Let's find him", "he disappeared. Let’s find him"),
("he go? Let's find him", "he go? Let’s find him"),
("He's in his", "He’s in his"),
("He's raising our", "He’s raising our"),
("He's still breathing.", "He’s still breathing."),
("He's stirring.", "He’s stirring."),
("He's... he's still breathing!", "He’s... he’s still breathing!"),
("heed the King's voice. My", "heed the King’s voice. My"),
("help you? You're not a", "help you? You’re not a"),
("Help! They're everywhere!", "Help! They’re everywhere!"),
("Help, I'm drowning!", "Help, I’m drowning!"),
("Hey look! It's a dwarf!", "Hey look! It’s a dwarf!"),
("Hey look! It's a troll!", "Hey look! It’s a troll!"),
("Hey look, it's a troll!", "Hey look, it’s a troll!"),
("Hey! Look who's back!", "Hey! Look who’s back!"),
("Hey, don't light it up, I'm not safe!", "Hey, don’t light it up, I’m not safe!"),
("Hey, don't you always", "Hey, don’t you always"),
("Hey, there's somebody hidden", "Hey, there’s somebody hidden"),
("Hey, what's going on", "Hey, what’s going on"),
("him — he's big, green", "him — he’s big, green"),
("him. We don't want him", "him. We don’t want him"),
("His brother Ro'Arthian is the", "His brother Ro’Arthian is the"),
("Hm... Me le' no'w!", "Hm... Me le’ no’w!"),
("Hmph! You're just happy", "Hmph! You’re just happy"),
("Hold it! What's going on", "Hold it! What’s going on"),
("hold Southbay. We're done for!", "hold Southbay. We’re done for!"),
("home, now we'll take yer", "home, now we’ll take yer"),
("How'd they get", "How’d they get"),
("Huh? Didn't a messenger", "Huh? Didn’t a messenger"),
("Huh? Who's there, who", "Huh? Who’s there, who"),
("Huh?! Can't be, the", "Huh?! Can’t be, the"),
("hundred, but don't go up,", "hundred, but don’t go up,"),
("Hurry, friends, let's set up", "Hurry, friends, let’s set up"),
("I almost can't believe it.", "I almost can’t believe it."),
("I can't be finished", "I can’t be finished"),
("I don't care, get", "I don’t care, get"),
("I don't find that", "I don’t find that"),
("I don't know, but", "I don’t know, but"),
("I don't know, see", "I don’t know, see"),
("I don't see how", "I don’t see how"),
("I don't think diplomacy", "I don’t think diplomacy"),
("I don't think I", "I don’t think I"),
("I expect we'll be facing", "I expect we’ll be facing"),
("I guess we'll just have", "I guess we’ll just have"),
("I just don't know who", "I just don’t know who"),
("I s'ppose we'll hav' to wade", "I s’ppose we’ll hav’ to wade"),
("I think I'll fight the", "I think I’ll fight the"),
("I think I'll say that", "I think I’ll say that"),
("I think I'll take the", "I think I’ll take the"),
("I think I'll wait a", "I think I’ll wait a"),
("I think I've figured something", "I think I’ve figured something"),
("I think I've found it.", "I think I’ve found it."),
("I think I've got it.", "I think I’ve got it."),
("I think you've the right", "I think you’ve the right"),
("I wasn't talking to", "I wasn’t talking to"),
("I'd like to", "I’d like to"),
("I'll do it, sir. I'll sacrifice myself", "I’ll do it, sir. I’ll sacrifice myself"),
("I'll not go", "I’ll not go"),
("I'll recruit some", "I’ll recruit some"),
("I'll take twenty-five", "I’ll take twenty-five"),
("I'll tell them", "I’ll tell them"),
("I'll try, sir.", "I’ll try, sir."),
("I'm afraid, Kaleh,", "I’m afraid, Kaleh,"),
("I'm always careful. I'll be back", "I’m always careful. I’ll be back"),
("I'm back, Kaleh.", "I’m back, Kaleh."),
("I'm coming... coming...", "I’m coming... coming..."),
("I'm fine. But", "I’m fine. But"),
("I'm gonna make", "I’m gonna make"),
("I'm here, chief!", "I’m here, chief!"),
("I'm still not", "I’m still not"),
("I'm thirsty... <i>Gulp", "I’m thirsty... <i>Gulp"),
("I'm too young", "I’m too young"),
("I've asked and", "I’ve asked and"),
("I've found the", "I’ve found the"),
("I've seen some", "I’ve seen some"),
("if anything, it's our road!", "if anything, it’s our road!"),
("if you don't mind, I'll go with", "if you don’t mind, I’ll go with"),
("Iliah-Malal's body lay", "Iliah-Malal’s body lay"),
("Impossible! I canna' believe any", "Impossible! I canna’ believe any"),
("is departing. We're trapped!", "is departing. We’re trapped!"),
("is I, Kapou'e, son of", "is I, Kapou’e, son of"),
("is this 'Landar' you speak", "is this ‘Landar’ you speak"),
("is this thing's daughter?", "is this thing’s daughter?"),
("It couldna' been more", "It couldna’ been more"),
("It couldna' ha' been more", "It couldna’ ha’ been more"),
("it sure isn't gonna be", "it sure isn’t gonna be"),
("It won't be said", "It won’t be said"),
("It won't be that", "It won’t be that"),
("it you wouldn't mind if", "it you wouldn’t mind if"),
("it! We weren't fast enough.", "it! We weren’t fast enough."),
("It's a 'he', and yes, they're actually very", "It’s a ‘he’, and yes, they’re actually very"),
("It's a dust", "It’s a dust"),
("It's a heck", "It’s a heck"),
("It's been... it's been years since I've been down", "It’s been... it’s been years since I’ve been down"),
("It's better than", "It’s better than"),
("It's called 'The End', foul orc.", "It’s called ‘The End’, foul orc."),
("It's cooler here,", "It’s cooler here,"),
("It's just ‘Tallin’,", "It’s just ‘Tallin’,"),
("It's locked, but", "It’s locked, but"),
("It's no good,", "It’s no good,"),
("It's no use", "It’s no use"),
("It's not very", "It’s not very"),
("It's Rakshas!", "It’s Rakshas!"),
("It's talking to", "It’s talking to"),
("It's the only", "It’s the only"),
("It's time to", "It’s time to"),
("It's too late!", "It’s too late!"),
("It's too quiet...", "It’s too quiet..."),
("It's very easy,", "It’s very easy,"),
("It's... it's monstrous!", "It’s... it’s monstrous!"),
("its some o' them cave-dwarves.", "its some o’ them cave-dwarves."),
("Kaleh, you can't just go", "Kaleh, you can’t just go"),
("Keep of El'Ithsomir", "Keep of El’Ithsomir"),
("kill Mal-Ravanal. He's here somewhere...", "kill Mal-Ravanal. He’s here somewhere..."),
("kill me, I'm not telling", "kill me, I’m not telling"),
("kin would ha' helped a", "kin would ha’ helped a"),
("Krawg sme' o'cz in iz 'ave!", "Krawg sme’ o’cz in iz ’ave!"),
("Let's clear out", "Let’s clear out"),
("Let's finish off", "Let’s finish off"),
("Let's make them", "Let’s make them"),
("Let's put those", "Let’s put those"),
("Let's send these", "Let’s send these"),
("Living... Soon, you'll be more", "Living... Soon, you’ll be more"),
("long as they're in charge you'll have to", "long as they’re in charge you’ll have to"),
("long enough. Let's go!", "long enough. Let’s go!"),
("Look, there's a dragon", "Look, there’s a dragon"),
("looks like he's been beaten", "looks like he’s been beaten"),
("looks like he's in bad", "looks like he’s in bad"),
("Lord El'Isomithir", "Lord El’Isomithir"),
("M' 'u!", "M’ ’u!"),
("makes your Master's punishment from", "makes your Master’s punishment from"),
("Mal-Ravanal's Capital", "Mal-Ravanal’s Capital"),
("Malifor the Great's Study", "Malifor the Great’s Study"),
("Many ships. We're being invaded!", "Many ships. We’re being invaded!"),
("mask. You... you're not with", "mask. You... you’re not with"),
("Maybe they won't be hostile.", "Maybe they won’t be hostile."),
("Maybe they'll clear it", "Maybe they’ll clear it"),
("me, Kaleh, I'm dying...", "me, Kaleh, I’m dying..."),
("mentioned I can't float.", "mentioned I can’t float."),
("mine, and don't touch it!", "mine, and don’t touch it!"),
("Monsters! We're being invaded", "Monsters! We’re being invaded"),
("Move Kapou'e to the", "Move Kapou’e to the"),
("Move Li'sar next to", "Move Li’sar next to"),
("Move Li'sar to another", "Move Li’sar to another"),
("Move Li'sar to capture", "Move Li’sar to capture"),
("Move Li'sar to the", "Move Li’sar to the"),
("much as I'd like to, we don't have time", "much as I’d like to, we don’t have time"),
("Must obey... Can't resist... I...", "Must obey... Can’t resist... I..."),
("Must... Can't... Must... Help", "Must... Can’t... Must... Help"),
("My love, I'll be there", "My love, I’ll be there"),
("my my, what's in that", "my my, what’s in that"),
("never mind, I'll get you", "never mind, I’ll get you"),
("no choice... I've never tasted", "no choice... I’ve never tasted"),
("no game, Li'sar!", "no game, Li’sar!"),
("no ghosts, I'll take it.", "no ghosts, I’ll take it."),
("No you won't, you soldier", "No you won’t, you soldier"),
("No! I'll not go", "No! I’ll not go"),
("No! It's all over!", "No! It’s all over!"),
("no! Please don't die!", "no! Please don’t die!"),
("No!! They'll kill me!", "No!! They’ll kill me!"),
("No, $unit.name can't die now!", "No, $unit.name can’t die now!"),
("No, don't...", "No, don’t..."),
("No, it's an elf!", "No, it’s an elf!"),
("No, we can't. But for", "No, we can’t. But for"),
("No. I can't ask you", "No. I can’t ask you"),
("No. It can't be!", "No. It can’t be!"),
("no. You don't understand! We", "no. You don’t understand! We"),
("Noo!! I can't be promoted", "Noo!! I can’t be promoted"),
("Nooo! Don't kill me,", "Nooo! Don’t kill me,"),
("not say, 'To err is", "not say, “To err is"),
("now $intl_ally_name, I'm busy.", "now $intl_ally_name, I’m busy."),
("now between Iliah-Malal's army and", "now between Iliah-Malal’s army and"),
("Now let's get going,", "Now let’s get going,"),
("Now let's make mush", "Now let’s make mush"),
("now that you've triggered it.", "now that you’ve triggered it."),
("now, and don't look back!", "now, and don’t look back!"),
("now, Father. I'm a little", "now, Father. I’m a little"),
("Nym! No! Don't open—", "Nym! No! Don’t open—"),
("Nym. I didn't hear you", "Nym. I didn’t hear you"),
("of here. Don't make me", "of here. Don’t make me"),
("of here. We're trapped!", "of here. We’re trapped!"),
("of surrender! Don't kill me...", "of surrender! Don’t kill me..."),
("of the Ka'lian do... How", "of the Ka’lian do... How"),
("of this. I'm feeling ill!", "of this. I’m feeling ill!"),
("of this. I'm out of", "of this. I’m out of"),
("of time... We'll never beat", "of time... We’ll never beat"),
("off the 'or the orcs", "off the ‘or the orcs"),
("Oh no, I'm defeated.", "Oh no, I’m defeated."),
("on boys, let's give it", "on boys, let’s give it"),
("on brother, let's get outta", "on brother, let’s get outta"),
("one escape. Let's hope they don't all!", "one escape. Let’s hope they don’t all!"),
("One of Iliah-Malal's creatures. So", "One of Iliah-Malal’s creatures. So"),
("one of Malifor's experiments.", "one of Malifor’s experiments."),
("one of Tallin's men...", "one of Tallin’s men..."),
("Oooooh, cool, it's a drake!", "Oooooh, cool, it’s a drake!"),
("Orcs sighted! They've seized the", "Orcs sighted! They’ve seized the"),
("Orcs, kill'em all! Humans,", "Orcs, kill’em all! Humans,"),
("orcs’ chest! It's filled with", "orcs’ chest! It’s filled with"),
("our hammers can't manage.", "our hammers can’t manage."),
("Ow, I'm stuck!", "Ow, I’m stuck!"),
("Perhaps he didn't want to", "Perhaps he didn’t want to"),
("Phew, it's hot down", "Phew, it’s hot down"),
("Pillage! Let's plunder these", "Pillage! Let’s plunder these"),
("powerful, why can't you destroy", "powerful, why can’t you destroy"),
("Prestim's walls are rock-hard, they'll waste themselves", "Prestim’s walls are rock-hard, they’ll waste themselves"),
("pretty small, let's see if", "pretty small, let’s see if"),
("Price?! Don't insult us,", "Price?! Don’t insult us,"),
("protect us, they're elves!", "protect us, they’re elves!"),
("Put Ro'Sothian into that", "Put Ro’Sothian into that"),
("Quick, honey, we've got to", "Quick, honey, we’ve got to"),
("quiet Zhul, I'll explain it", "quiet Zhul, I’ll explain it"),
("ran away, didn't you?", "ran away, didn’t you?"),
("reinforcements coming! We're trapped! All", "reinforcements coming! We’re trapped! All"),
("Relax, he's a friend.", "Relax, he’s a friend."),
("remain here we'll die.", "remain here we’ll die."),
("Reserves! We can't let them", "Reserves! We can’t let them"),
("Resist until Dacyn's return in", "Resist until Dacyn’s return in"),
("Resist until Dacyn's return tomorrow", "Resist until Dacyn’s return tomorrow"),
("right boys, let's go!", "right boys, let’s go!"),
("right people, let's move out!", "right people, let’s move out!"),
("Right. We'll probably have", "Right. We’ll probably have"),
("Ro'Arthian, are you", "Ro’Arthian, are you"),
("Ro'Arthian, bah! Don't utter that", "Ro’Arthian, bah! Don’t utter that"),
("Ro'Arthian, send a", "Ro’Arthian, send a"),
("Ro'Sothian, is that", "Ro’Sothian, is that"),
("rumbling. And what's that roaring", "rumbling. And what’s that roaring"),
("running, or we'll be fish-bait", "running, or we’ll be fish-bait"),
("said that you'd been here", "said that you’d been here"),
("save us, it's... it's an elf.", "save us, it’s... it’s an elf."),
("second thought, it's better to", "second thought, it’s better to"),
("secret door. Let's see where", "secret door. Let’s see where"),
("secrets of Crelanu's book...", "secrets of Crelanu’s book..."),
("shall spoil, we'll starve!", "shall spoil, we’ll starve!"),
("She's just full", "She’s just full"),
("She's... She's beautiful.", "She’s... She’s beautiful."),
("shut up. Let's go kill", "shut up. Let’s go kill"),
("Sigh, we'll talk later.", "Sigh, we’ll talk later."),
("Sister! Don't you recognize", "Sister! Don’t you recognize"),
("skeletons! Where?! Let's go burn 'em all!", "skeletons! Where?! Let’s go burn ’em all!"),
("So far it's been danger", "So far it’s been danger"),
("so sure it's that simple,", "so sure it’s that simple,"),
("so tough don't you? Well", "so tough don’t you? Well"),
("So you don't have the", "So you don’t have the"),
("So, d'ya want me", "So, d’ya want me"),
("so. I can't see anything.", "so. I can’t see anything."),
("someday maybe we'll meet again.", "someday maybe we’ll meet again."),
("Someone's coming. Quick,", "Someone’s coming. Quick,"),
("somewhere. We can't be sure.", "somewhere. We can’t be sure."),
("Sorry, won't do it", "Sorry, won’t do it"),
("sounds good. I'll stay here", "sounds good. I’ll stay here"),
("Spiders aren't insects.", "Spiders aren’t insects."),
("Stalrag from Ro'Sothian.", "Stalrag from Ro’Sothian."),
("stand still you're a dead", "stand still you’re a dead"),
("stay and 're-educate' these un-orcs.", "stay and ‘re-educate’ these un-orcs."),
("stinking trees, we'll show you!", "stinking trees, we’ll show you!"),
("stop him. Let's go!", "stop him. Let’s go!"),
("sure there aren't any spiders?", "sure there aren’t any spiders?"),
("Sure, we'll help you", "Sure, we’ll help you"),
("tale, for it's worth it.", "tale, for it’s worth it."),
("Tallin, we can't do it", "Tallin, we can’t do it"),
("Tallin, we ha' failed to", "Tallin, we ha’ failed to"),
("Tell him it's time.", "Tell him it’s time."),
("tell me what's going on", "tell me what’s going on"),
("tell you, Kapou'e?", "tell you, Kapou’e?"),
("than goblins. We'll break them!", "than goblins. We’ll break them!"),
("Thank you. I'm sure the", "Thank you. I’m sure the"),
("Thanks, father. Don't worry, I'll take care", "Thanks, father. Don’t worry, I’ll take care"),
("That hurts. Let's try not", "That hurts. Let’s try not"),
("That isn't him...", "That isn’t him..."),
("that snow...? We're doomed!", "that snow...? We’re doomed!"),
("that this isn't going as", "that this isn’t going as"),
("Thats right. I'll be coming", "That’s right. I’ll be coming"),
("That’s... that's the Rod", "That’s... that’s the Rod"),
("The 'true people' speak through", "The ‘true people’ speak through"),
("The dragon's cave has", "The dragon’s cave has"),
("the happier I'll be.", "the happier I’ll be."),
("the horizon. Jevyan's fleet is here! It's all over.", "the horizon. Jevyan’s fleet is here! It’s all over."),
("The Ka'lian", "The Ka’lian"),
("The maker's mark is", "The maker’s mark is"),
("the sewer. We're doomed!", "the sewer. We’re doomed!"),
("the ship isn't here yet.", "the ship isn’t here yet."),
("the swamp. I'm under five", "the swamp. I’m under five"),
("The world won't miss him", "The world won’t miss him"),
("them sacks o' bones.", "them sacks o’ bones."),
("Then it's time to", "Then it’s time to"),
("Then let's kill them!", "Then let’s kill them!"),
("then return Li'sar to the", "then return Li’sar to the"),
("Then, into Southbay's sewer.", "Then, into Southbay’s sewer."),
("there alone. She'll kill you!", "there alone. She’ll kill you!"),
("these tunnels aren't as bad", "these tunnels aren’t as bad"),
("They're coming this", "They’re coming this"),
("They're definitely of the 'attack first, ask questions later' variety.", "They’re definitely of the ‘attack first, ask questions later’ variety."),
("They're here!", "They’re here!"),
("They're raising the", "They’re raising the"),
("They're... the humans", "They’re... the humans"),
("thing just won't stay dead!", "thing just won’t stay dead!"),
("This can't be the", "This can’t be the"),
("This rabble won't be a", "This rabble won’t be a"),
("this real? I'm coming, I'm coming.", "this real? I’m coming, I’m coming."),
("this thing? It's huge!", "this thing? It’s huge!"),
("This trash's gone.", "This trash’s gone."),
("This'll splinter your", "This’ll splinter your"),
("those orcs can't get to", "those orcs can’t get to"),
("thousand, and I'll go no", "thousand, and I’ll go no"),
("Tirigaz. But what's going on?", "Tirigaz. But what’s going on?"),
("to back! Don't let them", "to back! Don’t let them"),
("to his master's aid. When", "to his master’s aid. When"),
("to move Li'sar", "to move Li’sar"),
("to say, 'I told you so'?", "to say, “I told you so”?"),
("to think. It's all so", "to think. It’s all so"),
("to you, Kapou'e, our imbecile", "to you, Kapou’e, our imbecile"),
("too late! They're burning the", "too late! They’re burning the"),
("too late. We've taken what", "too late. We’ve taken what"),
("too long! We'll never be", "too long! We’ll never be"),
("too! Chief doesn't let me", "too! Chief doesn’t let me"),
("Treasury! Cool, let's go loot", "Treasury! Cool, let’s go loot"),
("tribe anymore, Kapou'e.", "tribe anymore, Kapou’e."),
("troll hole? Didn't you?", "troll hole? Didn’t you?"),
("trolls. But you'll see, Griknagh", "trolls. But you’ll see, Griknagh"),
("Uh... I don't think so.", "Uh... I don’t think so."),
("Undead. Bah! We're pros at", "Undead. Bah! We’re pros at"),
("Ungrateful minx! I'm sorely tempted", "Ungrateful minx! I’m sorely tempted"),
("us <i>you</i> won't come back.", "us <i>you</i> won’t come back."),
("us but I'm afraid we can't help you.", "us but I’m afraid we can’t help you."),
("Very well. I'll do my", "Very well. I’ll do my"),
("Very well. We'll go south", "Very well. We’ll go south"),
("victory against Al'Tar.", "victory against Al’Tar."),
("was fun, wasn't it, Grüü?", "was fun, wasn’t it, Grüü?"),
("way already! We're running out", "way already! We’re running out"),
("we die, it's our only", "we die, it’s our only"),
("We don't have any", "We don’t have any"),
("We don't. Not everything", "We don’t. Not everything"),
("we go, let's give this", "we go, let’s give this"),
("We haven't a moment", "We haven’t a moment"),
("we should ha' allied wi' the humans...", "we should ha’ allied wi’ the humans..."),
("We'll ask later,", "We’ll ask later,"),
("We'll be back,", "We’ll be back,"),
("We'll go through", "We’ll go through"),
("We'll make sure.", "We’ll make sure."),
("We're in. Now:", "We’re in. Now:"),
("We're surrounded! I", "We’re surrounded! I"),
("We're surrounded! The", "We’re surrounded! The"),
("We're through the", "We’re through the"),
("We've captured all", "We’ve captured all"),
("We've cleared the beach. Let's return to", "We’ve cleared the beach. Let’s return to"),
("We've defeated the orcs. Let's rig the", "We’ve defeated the orcs. Let’s rig the"),
("We've slain the", "We’ve slain the"),
("we? I can't see where", "we? I can’t see where"),
("Well, fine, I'll hear your", "Well, fine, I’ll hear your"),
("Well, it's pointless to", "Well, it’s pointless to"),
("Well, let's see what's behind it.", "Well, let’s see what’s behind it."),
("Well, there's only one", "Well, there’s only one"),
("Well, what's behind the", "Well, what’s behind the"),
("Well, where's their leader? I don't see him.", "Well, where’s their leader? I don’t see him."),
("well. It isn't as if we weren't expecting this.", "well. It isn’t as if we weren’t expecting this."),
("Wha'? Yah fail'd to give", "Wha’? Yah fail’d to give"),
("what cause ha' I heard naught o' this?", "what cause ha’ I heard naught o’ this?"),
("What in Moradin's name are", "What in Moradin’s name are"),
("What'd he want? And why'd you have", "What’d he want? And why’d you have"),
("What's going on", "What’s going on"),
("What's that strange", "What’s that strange"),
("What's this? A", "What’s this? A"),
("What's this? You", "What’s this? You"),
("What's up big", "What’s up big"),
("What's with this 'Master' business? It's starting to", "What’s with this ‘Master’ business? It’s starting to"),
("What's wrong, Kaleh? Don't you trust", "What’s wrong, Kaleh? Don’t you trust"),
("What... Kapou'e! What in", "What... Kapou’e! What in"),
("What? What's going on", "What? What’s going on"),
("Whenever you're on a", "Whenever you’re on a"),
("Who is 'they'?", "Who is ‘they’?"),
("who murdered El'Isomithir!", "who murdered El’Isomithir!"),
("Why can't you wield", "Why can’t you wield"),
("Why d'ye say that?", "Why d’ye say that?"),
("Why don't you try 'knocking'?", "Why don’t you try ‘knocking’?"),
("Why? What's wrong?", "Why? What’s wrong?"),
("will eat you' part.", "will eat you’ part."),
("will surely appear.' It looks", "will surely appear.” It looks"),
("with them, Kapou'e, they aren't going to", "with them, Kapou’e, they aren’t going to"),
("Yeah, I can't wait to", "Yeah, I can’t wait to"),
("Yeah, we'll be heroes!", "Yeah, we’ll be heroes!"),
("yer mouth! Let's just get 'em.", "yer mouth! Let’s just get ’em."),
("Yes master, I'll make him", "Yes master, I’ll make him"),
("Yes, I'll take it.", "Yes, I’ll take it."),
("Yes, I'm still figuring", "Yes, I’m still figuring"),
("Yess! It's the elves", "Yess! It’s the elves"),
("you are, I'll come up", "you are, I’ll come up"),
("You aren't actually going", "You aren’t actually going"),
("You don't know where", "You don’t know where"),
("You don't turn your", "You don’t turn your"),
("you idiot, I'm still over", "you idiot, I’m still over"),
("you if you'd 'remove' just our", "you if you’d ‘remove’ just our"),
("you sure that's wise?", "you sure that’s wise?"),
("you sure you're okay?", "you sure you’re okay?"),
("you the undead's prisoners?", "you the undead’s prisoners?"),
("You won't get much", "You won’t get much"),
("You're going to", "You’re going to"),
("You're in no", "You’re in no"),
("You're making a", "You’re making a"),
("You're really beat", "You’re really beat"),
("You're right, Elenia,", "You’re right, Elenia,"),
("You're right, Hamel,", "You’re right, Hamel,"),
("You're telling me.", "You’re telling me."),
("You've been this", "You’ve been this"),
("You've fought orcs", "You’ve fought orcs"),
("You've learned well, Li'sar!", "You’ve learned well, Li’sar!"),
("You've learned well,", "You’ve learned well,"),
("You've put my", "You’ve put my"),
("you. Now let's get going,", "you. Now let’s get going,"),
("young prince, you're not as", "young prince, you’re not as"),
("your enemies fall.' Grave robbing", "your enemies fall.’ Grave robbing"),
("your face? We've never seen", "your face? We’ve never seen"),
("Your word can't be trusted.", "Your word can’t be trusted."),
# Added missing apostrophe
("Thats right", "That‘s right"),
),
# End of pass 2, bulk apostrophe conversion
# Apostrophe conversion, core data
# by Simons Mith, based on build 46240 or so (2010-09-03)
"wesnoth-editor" : (
("Can't Undo", "Can’t Undo"),
("Can't Redo", "Can’t Redo"),
# Fix capitalization
("Create new map", "Create New Map"),
("Choose a mask to apply", "Choose a Mask to Apply"),
("Choose target map", "Choose Target Map"),
),
"wesnoth-lib" : (
# Convert makeshift dashes
# conversion added in 1.9.0-svn
("Player Info - ", "Player Info — "),
("Don't ask me again!", "Don’t ask me again!"),
("dedicated server 'wesnothd'", "dedicated server ‘wesnothd’"),
("the game's settings", "the game’s settings"),
# Fix capitalization
("Lava chasm", "Lava Chasm"),
#this rule seems to be broken and not in sync with src/addon/manager.cpp:999,
#so it is commented out for the moment; please only reactivate it after the
#capitalization of 'add-ons' is/was fixed everywhere!
#("Update add-ons", "Update Add-ons"),
# Use "Wooden"
("Wood Bridge", "Wooden Bridge"),
# Use "Gray"
("Grey Deep Water", "Gray Deep Water"),
("Clean Grey Cobbles", "Clean Gray Cobbles"),
# Fix apostrophes
("Hides allies'", "Hides allies’"),
# Fix apostrophes and capitalization
("Hide Allies' Plans by Default", "Hide allies’ plans by default"),
# Fix capitalization
("Maximum Auto-Saves", "Maximum auto-saves"),
),
"wesnoth-manual" : (
("Mage Delfador - pay attention", "Mage Delfador — pay attention"),
("about 10-20 scenarios.", "about 10–20 scenarios."),
("Life and Death - Experience", "Life and Death — Experience"),
("- i.e. the leader", "— i.e. the leader"),
("voluntarily - usually", "voluntarily — usually"),
("2 + villages - maximum(0, upkeep - villages)", "2 + villages − maximum(0, upkeep − villages)"),
("-25%", "−25%"),
("number keys 2-7 will identify", "number keys 2–7 will identify"),
("want to attack -", "want to attack —"),
("written as 5-4,", "written as 5–4,"),
("level units - to give", "level units — to give"),
("Plan ahead - think about", "Plan ahead — think about"),
),
"wesnoth-help" : (
("Immune to drain, poison and plague", "Immune to drain, poison, and plague"),
("Receive only 50% defense in land-based", "Receives only 50% defense in land-based"),
# Kill useless initial single quote
("'The drakish tradition", "The drakish tradition"),
# 'whose' -> 'who'
("Drakish scrollkeeper whose has", "Drakish scrollkeeper who has"),
),
"wesnoth-manpages" : (
("of on-line players", "of online players"),
),
# conversion added in 1.10.0+svn
"wesnoth-tutorial" : (
("$unit.type", "$unit.language_name"),
),
}
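# stringfixes maps each textdomain name to a tuple of (old, new) replacement
# pairs; process_file() below applies every pair, in order, to each file
# passed on the command line.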
# Warn at the end of the run if all argument files are newer than this
# timestamp, since that suggests this script may be obsolete. Use UTC here:
# date --utc "+%s # %c"
timecheck = 1283156523 # Mo 30 Aug 2010 08:22:03 UTC
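# A Python equivalent of the shell one-liner above, for bumping the
# timestamp (a sketch; calendar.timegm of a UTC struct_time gives the same
# epoch seconds as `date --utc +%s`):
#   import calendar, time
#   print calendar.timegm(time.gmtime()), "#", time.asctime(time.gmtime())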
import os, sys, time, stat, re
try:
from multiprocessing import Pool, cpu_count
def parallel_map(*args, **kw):
pool = Pool(cpu_count())
return pool.map(*args, **kw)
except ImportError:
print "Failed to import 'multiprocessing' module. Multiple cpu cores won't be utilized"
parallel_map = map
def process_file(path):
before = open(path, "r").read()
decommented = re.sub("#.*", "", before)
lines = before.split('\n')
for (domain, fixes) in stringfixes.items():
# In case of screwed-up pairs that are hard to find, uncomment the following:
#for fix in fixes:
# if len(fix) != 2:
# print fix
for (old, new) in fixes:
            if old == new:  # compare string values, not object identity
#complain loudly
print "pofix: old string\n\t\"%s\"\n equals new string\n\t\"%s\"\nexiting." % (old, new)
sys.exit(1)
#this check is problematic and the last clause is added to prevent false
#positives in case that new is a substring of old, though this can also
#lead to "real" probs not found, the real check would be "does replacing
#old with new lead to duplicate msgids? (including old ones marked with #~)"
#which is not easily done in the current design...
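            #a sketch of that "real" check, assuming the file were parsed
            #into a list of msgids first (`msgids` is a hypothetical helper):
            #    replaced = [m.replace(old, new) for m in msgids]
            #    creates_duplicates = len(set(replaced)) != len(replaced)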
elif new in decommented and old in decommented and not new in old:
print "pofix: %s already includes the new string\n\t\"%s\"\nbut also the old\n\t\"%s\"\nthis needs handfixing for now since it likely creates duplicate msgids." % (path, new, old)
else:
for (i, line) in enumerate(lines):
if line and line[0] != '#':
lines[i] = lines[i].replace(old, new)
after = '\n'.join(lines)
if after != before:
print "pofix: %s modified" % path
# Save a backup
os.rename(path, path + "-bak")
# Write out transformed version
ofp = open(path, "w")
ofp.write(after)
ofp.close()
return 1
else:
return 0
if __name__ == '__main__':
newer = 0
modified = 0
pocount = 0
files = []
for path in sys.argv[1:]:
if not path.endswith(".po") and not path.endswith(".pot") and not path.endswith(".cfg"):
continue
pocount += 1
# Notice how many files are newer than the time check
statinfo = os.stat(path)
if statinfo.st_mtime > timecheck:
newer += 1
files.append(path)
modified = sum(parallel_map(process_file, files))
print "pofix: %d files processed, %d files modified, %d files newer" \
% (pocount, modified, newer)
if pocount > 1 and newer == pocount:
print "pofix: script may be obsolete"
|
asimonov-im/wesnoth
|
utils/pofix.py
|
Python
|
gpl-2.0
| 268,282
|
[
"BLAST",
"GULP",
"VisIt"
] |
e28ffc1be0337643c90e0216e5b5e5a40c2560119220da35d8faf44cdfc8ceef
|
from fabric.api import task
from fabric.api import run, sudo
from fabric.api import cd, settings, prefix
# build parameters
MAKEJOBS = '--jobs=4 ' # number of parallel jobs make may run simultaneously
SRCDIR = '~/src'
PROJ_VER = '4.7.0'
PROJ_SRC = SRCDIR + '/proj/' + PROJ_VER
PROJ_DIR = '/usr/local/proj/' + PROJ_VER
GEOS_VER = '3.2.2'
GEOS_SRC = SRCDIR + '/geos/' + GEOS_VER
GEOS_DIR = '/usr/local/geos/' + GEOS_VER
GDAL_VER = '1.8.1'
GDAL_SRC = SRCDIR + '/gdal/' + GDAL_VER
GDAL_DIR = '/usr/local/gdal/' + GDAL_VER
HDF5_VER = '1.8.7'
HDF5_SRC = SRCDIR + '/hdf5/' + HDF5_VER
HDF5_DIR = '/usr/local/hdf5/' + HDF5_VER
NETCDF4_VER = '4.1.1'
NETCDF4_SRC = SRCDIR + '/netcdf4/' + NETCDF4_VER
NETCDF4_DIR = '/usr/local/netCDF4/' + NETCDF4_VER
@task(default=True)
def install_system_dependencies():
'''Installs required geospatial libraries'''
update_system()
install_build_dependencies()
create_source_code_folder()
install_proj()
install_geos()
install_gdal()
install_hdf5()
install_netcdf4()
install_python_dependencies()
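# Typical invocation from a local shell (a sketch; assumes Fabric 1.x and SSH
# access to the target machine, with hypothetical host/user names):
#   fab -H user@host install_system_dependencies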
@task
def update_system():
'''Update the list of Ubuntu packages'''
sudo('apt-get -y update')
sudo('apt-get -y upgrade')
@task
def install_build_dependencies():
'''Install dependencies for building libraries'''
sudo('apt-get -y install wget')
sudo('apt-get -y install unzip')
sudo('apt-get -y install gcc')
sudo('apt-get -y install g++')
sudo('apt-get -y install python-dev')
sudo('apt-get -y install python-setuptools')
sudo('apt-get -y install python-pip')
sudo('apt-get -y install swig')
sudo('apt-get -y install git-core')
sudo('apt-get -y install mercurial')
@task
def create_source_code_folder():
'''Creates a source code folder'''
with settings(warn_only=True):
run('mkdir {0}'.format(SRCDIR))
@task
def install_proj():
'''Install Proj.4'''
run('mkdir -p {projsrc}'.format(projsrc=PROJ_SRC))
with cd(PROJ_SRC):
run('wget http://download.osgeo.org/proj/proj-datumgrid-1.5.zip')
run('wget http://download.osgeo.org/proj/proj-{projver}.tar.gz'.format(projver=PROJ_VER))
run('tar xzf proj-{projver}.tar.gz'.format(projver=PROJ_VER))
run('unzip proj-datumgrid-1.5.zip -d proj-{projver}/nad/'.format(projver=PROJ_VER))
with cd('proj-' + PROJ_VER):
run('./configure --prefix={projdir} > log_proj_configure.out'.format(projdir=PROJ_DIR))
run('make -j 4 > log_proj_make.out')
sudo('make install > log_proj_make_install.out')
sudo('sh -c "echo \'{projdir}/lib\' > /etc/ld.so.conf.d/proj.conf"'.format(projdir=PROJ_DIR))
sudo('ldconfig')
@task
def install_geos():
'''Install GEOS'''
run('mkdir -p ' + GEOS_SRC)
with cd(GEOS_SRC):
run('wget http://download.osgeo.org/geos/geos-' + GEOS_VER + '.tar.bz2')
run('tar xjf geos-' + GEOS_VER + '.tar.bz2')
with cd('geos-' + GEOS_VER):
run('./configure --prefix=' + GEOS_DIR + ' > log_geos_configure.out')
run('make -j 4 > log_geos_make.out')
sudo('make install > log_geos_make_install.out')
sudo('sh -c "echo \'' + GEOS_DIR + '/lib\' > /etc/ld.so.conf.d/geos.conf"')
sudo('ldconfig')
@task
def install_gdal():
'''Install GDAL'''
run('mkdir -p ' + GDAL_SRC)
with cd(GDAL_SRC):
run('wget http://download.osgeo.org/gdal/gdal-' + GDAL_VER + '.tar.gz')
run('tar xzf gdal-' + GDAL_VER + '.tar.gz')
with cd('gdal-' + GDAL_VER):
run('./configure' + \
' --prefix=' + GDAL_DIR + \
' --with-geos=' + GEOS_DIR + '/bin/geos-config' + \
' --with-python' + \
' >& log_gdal_configure.out')
run('make >& log_gdal_make.out')
sudo('make install >& log_gdal_make_install.out')
sudo('sh -c "echo \'' + GDAL_DIR + '/lib\' > /etc/ld.so.conf.d/gdal.conf"')
sudo('ldconfig')
@task
def install_hdf5():
'''Install HDF5'''
sudo('apt-get install -y libcurl3 libcurl4-openssl-dev')
run('mkdir -p ' + HDF5_SRC)
with cd(HDF5_SRC):
HDF5_TAR = 'hdf5-' + HDF5_VER + '.tar.gz'
run('wget http://www.hdfgroup.org/ftp/HDF5/current/src/' + HDF5_TAR)
run('tar -xzvf ' + HDF5_TAR)
with cd('hdf5-' + HDF5_VER):
run('./configure' + \
' --prefix=' + HDF5_DIR + \
' --enable-shared' + \
' --enable-hl' + \
' > log_hdf5_configure.log')
run('make -j 4 > log_hdf5_make.log')
sudo('make install >& log_hdf5_make_install.log')
sudo('sh -c "echo \'' + HDF5_DIR + '/lib\' > /etc/ld.so.conf.d/hdf5.conf"')
sudo('ldconfig')
@task
def install_netcdf4():
'''Install NetCDF4'''
run('mkdir -p ' + NETCDF4_SRC)
with cd(NETCDF4_SRC):
NETCDF4_TAR = 'netcdf-' + NETCDF4_VER + '.tar.gz'
run('wget ftp://ftp.unidata.ucar.edu/pub/netcdf/' + NETCDF4_TAR)
run('tar -xzvf ' + NETCDF4_TAR)
with cd('netcdf-' + NETCDF4_VER):
run('./configure' + \
' --prefix=' + NETCDF4_DIR + \
' --enable-netcdf-4' + \
' --with-hdf5=' + HDF5_DIR + \
' --enable-shared ' + \
' --enable-dap ' + \
' > log_netcdf4_configure.log')
run('make ' + MAKEJOBS + '> log_netcdf4_make.log')
sudo('make install >& log_netcdf4_make_install.log')
sudo('sh -c "echo \'' + NETCDF4_DIR + '/lib\' > /etc/ld.so.conf.d/hdf5.conf"')
sudo('ldconfig')
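    # A quick post-install sanity check (a sketch; nc-config ships with
    # netCDF and reports the installed version and build flags):
    #   run(NETCDF4_DIR + '/bin/nc-config --version')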
@task
def install_python_dependencies():
'''Install required Python packages'''
from virtualenv import VIRTUALENVDIR
from virtualenv import VIRTUALENVNAME
from virtualenv import VIRTUALENVWRAPPER_ACTIVATE
from virtualenv import virtualenv
sudo('apt-get -y install python-dev')
sudo('apt-get -y install python-setuptools')
sudo('apt-get -y install python-pip')
sudo('pip install virtualenv')
sudo('pip install virtualenvwrapper')
with settings(warn_only=True):
run('mkdir ' + VIRTUALENVDIR)
# create the Python virtual environment
with prefix(VIRTUALENVWRAPPER_ACTIVATE):
run('mkvirtualenv --no-site-packages ' + VIRTUALENVNAME)
# install symbolic link in the virtual environment to GDAL
with settings(warn_only=True):
run('ln -s ' + GDAL_DIR + '/bin/gdal-config ' + \
'~/.virtualenvs/' + VIRTUALENVNAME + '/bin/gdal-config')
with virtualenv():
run('pip install yolk')
run('pip install Django==1.3')
run('pip install django-piston')
run('pip install -e hg+https://bitbucket.org/tylere/django-piston#egg=piston')
run('pip install numpy==1.5.1')
run('pip install Shapely')
run('pip install geojson')
run('pip install geoalchemy')
with prefix('export HDF5_DIR=' + HDF5_DIR):
with prefix('export NETCDF4_DIR=' + NETCDF4_DIR):
run('pip install netCDF4==0.9.4')
# install the GDAL Python bindings
run('pip install --no-install GDAL')
# build package extensions
with cd('$HOME/.virtualenvs/' + VIRTUALENVNAME + '/build/GDAL'):
run('python setup.py build_ext' + \
' --gdal-config=' + GDAL_DIR + '/bin/gdal-config' + \
' --library-dirs=' + GDAL_DIR + '/include')
run('pip install --no-download GDAL')
@task
def install_pykml():
'''Install pyKML and dependencies'''
from virtualenv import virtualenv
sudo('apt-get -y install libxml2')
sudo('apt-get -y install libxslt1.1 libxslt-dev')
with virtualenv():
run('pip install pykml')
|
netcon-source/OpenClimateGIS
|
fabfile/tasks_system.py
|
Python
|
bsd-3-clause
| 7,759
|
[
"NetCDF"
] |
49dfc3b35cd780d5c3d448d643043bbadb8a86ec9821668fdf9bce80b6e3adf6
|
#!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import sys
import unittest
from PyQt5 import QtWidgets, QtCore
from peacock.ExodusViewer.ExodusViewer import main
from peacock.utils import Testing
class TestExodusViewer2(Testing.PeacockImageTestCase):
"""
A second set of tests for the ExodusViewer.
"""
#: QApplication: The main App for QT, this must be static to work correctly.
qapp = QtWidgets.QApplication(sys.argv)
#: str: The filename to load.
_filename = Testing.get_chigger_input('diffusion_4.e')
def setUp(self):
self._widget, self._main_window = main(size=[400,400])
self._widget.onSetFilenames([self._filename])
self._window = self._widget.currentWidget().VTKWindowPlugin
def testLocalRange(self):
"""Tests the local range toggle."""
clip = self._widget.currentWidget().ClipPlugin
cbar = self._widget.currentWidget().ColorbarPlugin
clip.setChecked(QtCore.Qt.Checked)
clip.clicked.emit(QtCore.Qt.Checked)
clip.ClipSlider.setSliderPosition(35)
clip.ClipSlider.sliderReleased.emit()
Testing.process_events(1)
self.assertTrue(cbar.RangeMinimum.text().startswith('0.635'))
        self.assertEqual(cbar.RangeMaximum.text(), '1')
self.assertImage('testLocalRange.png')
cbar.ColorBarRangeType.setChecked(QtCore.Qt.Unchecked)
cbar.ColorBarRangeType.stateChanged.emit(QtCore.Qt.Unchecked)
Testing.process_events(1)
        self.assertEqual(cbar.RangeMinimum.text(), '0')
        self.assertEqual(cbar.RangeMaximum.text(), '1')
self.assertImage('testLocalRange2.png')
if __name__ == '__main__':
unittest.main(module=__name__, verbosity=2)
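# To run a single case headlessly (a sketch; assumes an X virtual framebuffer
# wrapper such as xvfb-run is available for the Qt/VTK window):
#   xvfb-run -a python3 test_ExodusViewer2.py TestExodusViewer2.testLocalRange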
|
nuclear-wizard/moose
|
python/peacock/tests/exodus_tab/test_ExodusViewer2.py
|
Python
|
lgpl-2.1
| 2,007
|
[
"MOOSE"
] |
1f887165ae1fde3c5ad9ed0e7a3cc4682273f2a0f27ba34250b13753d801ba98
|
# -*- coding: utf-8 -*-
"""
forms.py
~~~~~~~~
    Flask-WTF form definitions
"""
from flask_wtf import FlaskForm
from wtforms import TextField, TextAreaField, SelectField
from wtforms.fields.html5 import EmailField
from wtforms.validators import InputRequired, Email
pitchPlaceholder = 'You have 300 characters to sell your talk. This is known as the "elevator pitch". \
Make it exciting.'
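# Each choice below is a (value, label) pair for a SelectField; the 0 entry
# serves as the unselected "-- select --" placeholder.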
talkFormats = [
(0, "-- select --"),
("IN-DEPTH", "In-Depth Talk (~20-30 minutes, 5-10 minute Q&A)"),
("LIGHTNING", "Lightning Talk (~5-10 minutes, no Q&A)"),
("DEMO", "Short Demo (~15-20 minutes, < 5 minute Q&A)"),
("BEGINNER", "Beginner Track (20 minutes, 5 minute Q&A)"),
]
audienceLevels = [
(0, "-- select --"),
("BEGINNER", "Beginner"),
("INTERMEDIATE", "Intermediate"),
("ADVANCED", "Advanced"),
]
descPlaceholder = "This field supports Markdown. The description will be seen by reviewers during \
the CFP process and may eventually be seen by the attendees of the event."
notesPlaceholder = "This field supports Markdown. Notes will only be seen by reviewers during the CFP \
process. This is where you should explain things such as technical requirements, \
why you're the best person to speak on this subject, etc..."
class SubmissionForm(FlaskForm):
email = EmailField(
"Email",
validators=[InputRequired("Please enter your email address."), Email("Please enter your email address.")],
render_kw={"placeholder": "Email"},
)
title = TextField(
"Title", validators=[InputRequired("Your talk needs a name.")], render_kw={"placeholder": "Talk Title"}
)
pitch = TextAreaField(
"Pitch", validators=[InputRequired("Field is required.")], render_kw={"placeholder": pitchPlaceholder}
)
format = SelectField("Talk Format", choices=talkFormats)
audience = SelectField("Audience Level", choices=audienceLevels)
description = TextAreaField(
"Description", validators=[InputRequired("Field is required.")], render_kw={"placeholder": descPlaceholder}
)
notes = TextAreaField("Notes", render_kw={"placeholder": notesPlaceholder})
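# A minimal usage sketch (assumptions: an `app` Flask instance, a `submit.html`
# template, and a `save_submission` persistence helper exist elsewhere in the
# application; none of them are defined in this module):
#
#   from flask import render_template
#
#   @app.route("/submit", methods=["GET", "POST"])
#   def submit():
#       form = SubmissionForm()
#       if form.validate_on_submit():
#           save_submission(form)
#       return render_template("submit.html", form=form)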
|
boulder-python/boulderpython.org
|
application/forms.py
|
Python
|
mit
| 2,150
|
[
"exciting"
] |
478114d2a785819800cac6879278c1f4d26e0ec6ff4cd123fac7dd8270836ee6
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
Factory functions producing ABINIT Works.
Works are packed together in a flow. A flow can be ran using abirun (abipy)
Entry points for client code (high-level interface)
"""
from __future__ import unicode_literals, division, print_function
import os
from .abiobjects import KSampling, Screening, SelfEnergy, ExcHamiltonian, HilbertTransform
# Note: ScfStrategy, NscfStrategy, ScreeningStrategy and SelfEnergyStrategy are used
# by g0w0_extended_work below, so this import must remain live.
from .strategies import ScfStrategy, NscfStrategy, ScreeningStrategy, SelfEnergyStrategy, MdfBse_Strategy
from .works import BandStructureWork, G0W0Work, BseMdfWork
__author__ = "Matteo Giantomassi"
__copyright__ = "Copyright 2013, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Matteo Giantomassi"
__email__ = "gmatteo at gmail.com"
#def bandstructure_work(structure, pseudos, scf_kppa, nscf_nband,
# ndivsm, accuracy="normal", spin_mode="polarized",
# smearing="fermi_dirac:0.1 eV", charge=0.0, scf_algorithm=None,
# dos_kppa=None, workdir=None, manager=None, work_class=None, **extra_abivars):
# """
# Returns a :class:`Work` for bandstructure calculations.
#
# Args:
# structure: Pymatgen structure.
# pseudos: List of `Pseudo` objects.
# scf_kppa: Defines the sampling used for the SCF run.
# nscf_nband: Number of bands included in the NSCF run.
# ndivs: Number of divisions used to sample the smallest segment of the k-path.
# accuracy: Accuracy of the calculation.
# spin_mode: Spin polarization.
# smearing: Smearing technique.
# charge: Electronic charge added to the unit cell.
# scf_algorithm: Algorithm used for solving of the SCF cycle.
# dos_kppa: Defines the k-point sampling used for the computation of the DOS
# (None if DOS is not wanted).
# workdir: Working directory.
# manager: :class:`TaskManager` instance.
# extra_abivars: Dictionary with extra variables passed to ABINIT.
# """
# #multi = MultiDataset(structure, pseudos, ndtset=2 if dos_kppa is None else 2 + len(dos_kppa))
#
# # SCF calculation.
# scf_ksampling = KSampling.automatic_density(structure, scf_kppa, chksymbreak=0)
#
# scf_strategy = ScfStrategy(structure, pseudos, scf_ksampling,
# accuracy=accuracy, spin_mode=spin_mode,
# smearing=smearing, charge=charge,
# scf_algorithm=scf_algorithm, **extra_abivars)
#
# #scf_electrons = Electrons(spin_mode=spin_mode, smearing=smearing, algorithm=scf_algorithm,
# # charge=charge, nband=scf_nband, fband=None)
# #multi[0].set_vars(scf_ksampling.to_abivars())
# #multi[0].set_vars(scf_electrons.to_abivars())
#
# # Band structure calculation.
# nscf_ksampling = KSampling.path_from_structure(ndivsm, structure)
#
# nscf_strategy = NscfStrategy(scf_strategy, nscf_ksampling, nscf_nband, **extra_abivars)
#
# # DOS calculation.
# dos_strategy = None
# if dos_kppa is not None:
# dos_ksampling = KSampling.automatic_density(structure, dos_kppa, chksymbreak=0)
# #dos_ksampling = KSampling.monkhorst(dos_ngkpt, shiftk=dos_shiftk, chksymbreak=0)
# dos_strategy = NscfStrategy(scf_strategy, dos_ksampling, nscf_nband, nscf_solver=None, **extra_abivars)
# #dos_electrons = aobj.Electrons(spin_mode=spin_mode, smearing=smearing, algorithm={"iscf": -2},
# # charge=charge, nband=nscf_nband)
#
# #dt = 2 + i
# #multi[dt].set_vars(dos_ksampling.to_abivars())
# #multi[dt].set_vars(dos_electrons.to_abivars())
# #multi[dt].set_vars(_stopping_criterion("nscf", accuracy))
#
# if work_class is None: work_class = BandStructureWork
# return work_class(scf_strategy, nscf_strategy, dos_inputs=dos_strategy, workdir=workdir, manager=manager)
#
#
#def g0w0_with_ppmodel_work(structure, pseudos, scf_kppa, nscf_nband, ecuteps, ecutsigx,
# accuracy="normal", spin_mode="polarized", smearing="fermi_dirac:0.1 eV",
# ppmodel="godby", charge=0.0, scf_algorithm=None, inclvkb=2, scr_nband=None,
# sigma_nband=None, gw_qprange=1, workdir=None, manager=None, work_class=None, **extra_abivars):
# """
# Returns a :class:`Work` object that performs G0W0 calculations for the given the material.
#
# Args:
# structure: Pymatgen structure.
# pseudos: List of `Pseudo` objects.
# scf_kppa: Defines the sampling used for the SCF run.
# nscf_nband: Number of bands included in the NSCF run.
# ecuteps: Cutoff energy [Ha] for the screening matrix.
# ecutsigx: Cutoff energy [Ha] for the exchange part of the self-energy.
# accuracy: Accuracy of the calculation.
# spin_mode: Spin polarization.
# smearing: Smearing technique.
# ppmodel: Plasmonpole technique.
# charge: Electronic charge added to the unit cell.
# scf_algorithm: Algorithm used for solving of the SCF cycle.
# inclvkb: Treatment of the dipole matrix elements (see abinit variable).
# scr_nband: Number of bands used to compute the screening (default is nscf_nband)
# sigma_nband: Number of bands used to compute the self-energy (default is nscf_nband)
# gw_qprange: Option for the automatic selection of k-points and bands for GW corrections.
# See Abinit docs for more detail. The default value makes the code compute the
# QP energies for all the point in the IBZ and one band above and one band below the Fermi level.
# workdir: Working directory.
# manager: :class:`TaskManager` instance.
# extra_abivars: Dictionary with extra variables passed to ABINIT.
# """
# # TODO: Cannot use istwfk != 1.
# if "istwfk" not in extra_abivars:
# extra_abivars["istwfk"] = "*1"
#
# scf_ksampling = KSampling.automatic_density(structure, scf_kppa, chksymbreak=0)
#
# scf_strategy = ScfStrategy(structure, pseudos, scf_ksampling,
# accuracy=accuracy, spin_mode=spin_mode,
# smearing=smearing, charge=charge,
# scf_algorithm=scf_algorithm, **extra_abivars)
#
# nscf_ksampling = KSampling.automatic_density(structure, scf_kppa, chksymbreak=0)
#
# nscf_strategy = NscfStrategy(scf_strategy, nscf_ksampling, nscf_nband, **extra_abivars)
#
# if scr_nband is None: scr_nband = nscf_nband
# if sigma_nband is None: sigma_nband = nscf_nband
#
# screening = Screening(ecuteps, scr_nband, w_type="RPA", sc_mode="one_shot",
# hilbert=None, ecutwfn=None, inclvkb=inclvkb)
#
# self_energy = SelfEnergy("gw", "one_shot", sigma_nband, ecutsigx, screening,
# gw_qprange=gw_qprange, ppmodel=ppmodel)
#
# scr_strategy = ScreeningStrategy(scf_strategy, nscf_strategy, screening, **extra_abivars)
#
# sigma_strategy = SelfEnergyStrategy(scf_strategy, nscf_strategy, scr_strategy, self_energy,
# **extra_abivars)
#
# if work_class is None: work_class = G0W0Work
# return work_class(scf_strategy, nscf_strategy, scr_strategy, sigma_strategy, workdir=workdir, manager=manager)
def g0w0_extended_work(structure, pseudos, kppa, nscf_nband, ecuteps, ecutsigx, scf_nband, accuracy="normal",
spin_mode="polarized", smearing="fermi_dirac:0.1 eV", response_models=["godby"], charge=0.0,
inclvkb=2, scr_nband=None, sigma_nband=None, workdir=None, manager=None, gamma=True, nksmall=20,
work_class=None, **extra_abivars):
"""
Returns a :class:`Work` object that performs G0W0 calculations for the given the material.
Args:
structure: Pymatgen structure.
pseudos: List of `Pseudo` objects.
        kppa: Defines the k-point sampling used for the SCF run.
nscf_nband: Number of bands included in the NSCF run.
ecuteps: Cutoff energy [Ha] for the screening matrix.
ecutsigx: Cutoff energy [Ha] for the exchange part of the self-energy.
accuracy: Accuracy of the calculation.
spin_mode: Spin polarization.
smearing: Smearing technique.
        response_models: List of response-function models ("cd" for contour deformation,
            otherwise the name of a plasmon-pole model such as "godby").
        charge: Electronic charge added to the unit cell.
        inclvkb: Treatment of the dipole matrix elements (see abinit variable).
        scr_nband: Number of bands used to compute the screening (default is nscf_nband).
        sigma_nband: Number of bands used to compute the self-energy (default is nscf_nband).
        workdir: Working directory.
        manager: :class:`TaskManager` instance.
        gamma: If True, use Gamma-centered k-meshes (see the kppa handling below).
        nksmall: If not None, a DFT band structure calculation will be added after the SCF run.
        extra_abivars: Dictionary with extra variables passed to ABINIT.
"""
# TODO: Cannot use istwfk != 1.
    # All of these options exist for development only; the current idea for the final version is:
#if gamma:
# scf_ksampling = KSampling.automatic_density(structure=structure, kppa=10000, chksymbreak=0, shifts=(0, 0, 0))
# nscf_ksampling = KSampling.gamma_centered(kpts=(2, 2, 2))
# if kppa <= 13:
# nscf_ksampling = KSampling.gamma_centered(kpts=(scf_kppa, scf_kppa, scf_kppa))
# else:
# nscf_ksampling = KSampling.automatic_density(structure, scf_kppa, chksymbreak=0, shifts=(0, 0, 0))
#else:
# scf_ksampling = KSampling.automatic_density(structure, scf_kppa, chksymbreak=0)
# nscf_ksampling = KSampling.automatic_density(structure, scf_kppa, chksymbreak=0)
if gamma:
if kppa == 1:
scf_ksampling = KSampling.gamma_centered(kpts=(1, 1, 1))
nscf_ksampling = KSampling.gamma_centered(kpts=(1, 1, 1))
elif kppa == 2:
scf_ksampling = KSampling.gamma_centered(kpts=(2, 2, 2))
nscf_ksampling = KSampling.gamma_centered(kpts=(2, 2, 2))
elif kppa < 0:
scf_ksampling = KSampling.gamma_centered(kpts=(-kppa, -kppa, -kppa))
nscf_ksampling = KSampling.gamma_centered(kpts=(2, 2, 2))
elif kppa <= 13:
scf_ksampling = KSampling.gamma_centered(kpts=(kppa, kppa, kppa))
nscf_ksampling = KSampling.gamma_centered(kpts=(kppa, kppa, kppa))
else:
scf_ksampling = KSampling.automatic_density(structure, kppa, chksymbreak=0, shifts=(0, 0, 0))
nscf_ksampling = KSampling.automatic_density(structure, kppa, chksymbreak=0, shifts=(0, 0, 0))
else:
        # This is the original behaviour before the development of the GW wrapper.
scf_ksampling = KSampling.automatic_density(structure, kppa, chksymbreak=0)
nscf_ksampling = KSampling.automatic_density(structure, kppa, chksymbreak=0)
if "istwfk" not in extra_abivars:
extra_abivars["istwfk"] = "*1"
scf_strategy = []
to_add = {}
#scf_nband = min(nscf_nband)
#print(scf_nband)
extra_abivars.update(to_add)
for k in extra_abivars.keys():
if k[-2:] == '_s':
var = k[:len(k)-2]
values = extra_abivars.pop(k)
to_add.update({k: values[-1]})
for value in values:
extra_abivars[var] = value
extra_abivars['pawecutdg'] = extra_abivars['ecut']*2
scf_strategy.append(ScfStrategy(structure, pseudos, scf_ksampling, accuracy=accuracy,
spin_mode=spin_mode, smearing=smearing, charge=charge,
scf_algorithm=None, nband=scf_nband, **extra_abivars))
    # Temporary testing hook: the presence of a 'no_spread_scr' file in the working
    # directory disables spreading the screening calculation.
spread_scr = False if os.path.isfile('no_spread_scr') else True
if len(scf_strategy) == 0:
scf_strategy.append(ScfStrategy(structure, pseudos, scf_ksampling, accuracy=accuracy, spin_mode=spin_mode,
smearing=smearing, charge=charge, scf_algorithm=None, nband=scf_nband,
**extra_abivars))
nscf_strategy = NscfStrategy(scf_strategy[-1], nscf_ksampling, int(max(nscf_nband)*1.1)+1,
nbdbuf=int(0.1*max(nscf_nband)), nstep=200, **extra_abivars)
if scr_nband is None:
scr_nband = nscf_nband
if sigma_nband is None:
sigma_nband = nscf_nband
if ecutsigx < max(ecuteps):
ecutsigx = max(ecuteps)
sigma_strategy = []
if 'cd' in response_models:
hilbert = HilbertTransform(nomegasf=100, domegasf=None, spmeth=1, nfreqre=None, freqremax=None, nfreqim=None,
freqremin=None)
for response_model in response_models:
for ecuteps_v in ecuteps:
for nscf_nband_v in nscf_nband:
scr_nband = nscf_nband_v
sigma_nband = nscf_nband_v
if response_model == 'cd':
screening = Screening(ecuteps_v, scr_nband, w_type="RPA", sc_mode="one_shot", hilbert=hilbert,
ecutwfn=None, inclvkb=inclvkb)
self_energy = SelfEnergy("gw", "one_shot", sigma_nband, ecutsigx, screening, hilbert=hilbert)
else:
ppmodel = response_model
screening = Screening(ecuteps_v, scr_nband, w_type="RPA", sc_mode="one_shot", ecutwfn=None,
inclvkb=inclvkb)
self_energy = SelfEnergy("gw", "one_shot", sigma_nband, ecutsigx, screening, ppmodel=ppmodel,
gw_qprange=1)
scr_strategy = ScreeningStrategy(scf_strategy[-1], nscf_strategy, screening, **extra_abivars)
sigma_strategy.append(SelfEnergyStrategy(scf_strategy[-1], nscf_strategy, scr_strategy, self_energy,
**extra_abivars))
if work_class is None: work_class = G0W0Work
return work_class(scf_strategy, nscf_strategy, scr_strategy, sigma_strategy, workdir=workdir, manager=manager,
spread_scr=spread_scr, nksmall=nksmall)
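# A minimal usage sketch (assumptions: `structure` is a pymatgen Structure, `pseudos`
# a list of Pseudo objects, and `flow` an abipy Flow assembled elsewhere; the numeric
# values are illustrative, not converged settings):
#
#   work = g0w0_extended_work(structure, pseudos, kppa=1000, nscf_nband=[50, 100],
#                             ecuteps=[4.0, 8.0], ecutsigx=8.0, scf_nband=25, ecut=16)
#   flow.register_work(work)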
#def bse_with_mdf_work(structure, pseudos, scf_kppa, nscf_nband, nscf_ngkpt, nscf_shiftk,
# ecuteps, bs_loband, bs_nband, soenergy, mdf_epsinf,
# exc_type="TDA", bs_algo="haydock", accuracy="normal", spin_mode="polarized",
# smearing="fermi_dirac:0.1 eV", charge=0.0, scf_algorithm=None, workdir=None, manager=None,
# work_class=None, **extra_abivars):
# """
# Returns a :class:`Work` object that performs a GS + NSCF + Bethe-Salpeter calculation.
# The self-energy corrections are approximated with the scissors operator.
# The screening in modeled by the model dielectric function.
#
# Args:
# structure: :class:`Structure` object.
# pseudos: List of `Pseudo` objects.
# scf_kppa: Defines the sampling used for the SCF run.
# nscf_nband: Number of bands included in the NSCF run.
# nscf_ngkpt: Divisions of the k-mesh used for the NSCF and the BSE run.
# nscf_shiftk: Shifts used for the NSCF and the BSE run.
# ecuteps: Cutoff energy [Ha] for the screening matrix.
#        bs_loband: Index of the first occupied band included in the e-h basis set
#            (ABINIT convention i.e. first band starts at 1).
#            Can be scalar or array of shape (nsppol,)
#        bs_nband: Highest band index used for the construction of the e-h basis set.
#        soenergy: Scissor energy in Hartree.
#        mdf_epsinf: Value of the macroscopic dielectric function used in the expression for the model dielectric function.
#        exc_type: Approximation used for the BSE Hamiltonian (Tamm-Dancoff or coupling).
#        bs_algo: Algorithm for the computation of the macroscopic dielectric function.
# accuracy: Accuracy of the calculation.
# spin_mode: Spin polarization.
# smearing: Smearing technique.
# charge: Electronic charge added to the unit cell.
# scf_algorithm: Algorithm used for solving the SCF cycle.
# workdir: Working directory.
# manager: :class:`TaskManger` instance.
# extra_abivars: Dictionary with extra variables passed to ABINIT.
# """
# # TODO: Cannot use istwfk != 1.
# if "istwfk" not in extra_abivars:
# extra_abivars["istwfk"] = "*1"
#
# # Ground-state strategy.
# scf_ksampling = KSampling.automatic_density(structure, scf_kppa, chksymbreak=0)
#
# scf_strategy = ScfStrategy(structure, pseudos, scf_ksampling,
# accuracy=accuracy, spin_mode=spin_mode,
# smearing=smearing, charge=charge, scf_algorithm=None, **extra_abivars)
#
# # NSCF calculation with the randomly-shifted k-mesh.
# nscf_ksampling = KSampling.monkhorst(nscf_ngkpt, shiftk=nscf_shiftk, chksymbreak=0)
#
# nscf_strategy = NscfStrategy(scf_strategy, nscf_ksampling, nscf_nband, **extra_abivars)
#
# # Strategy for the BSE calculation.
# exc_ham = ExcHamiltonian(bs_loband, bs_nband, soenergy, coulomb_mode="model_df", ecuteps=ecuteps,
# spin_mode=spin_mode, mdf_epsinf=mdf_epsinf, exc_type=exc_type, algo=bs_algo,
# bs_freq_mesh=None, with_lf=True, zcut=None)
#
# bse_strategy = MdfBse_Strategy(scf_strategy, nscf_strategy, exc_ham, **extra_abivars)
#
# if work_class is None: work_class = BseMdfWork
# return work_class(scf_strategy, nscf_strategy, bse_strategy, workdir=workdir, manager=manager)
|
migueldiascosta/pymatgen
|
pymatgen/io/abinit/calculations.py
|
Python
|
mit
| 17,847
|
[
"ABINIT",
"pymatgen"
] |
2f17b7d050ec51ad56b3bcdd8a4cc4a174cb73da708ede5e3db21042d3d81423
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This module is also sponsored by E.T.A.I. (www.etai.fr)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: vmware_guest
short_description: Manages virtual machines in vCenter
description: >
This module can be used to create new virtual machines from templates or other virtual machines,
manage power state of virtual machine such as power on, power off, suspend, shutdown, reboot, restart etc.,
modify various virtual machine components like network, disk, customization etc.,
rename a virtual machine and remove a virtual machine with associated components.
version_added: '2.2'
author:
- Loic Blot (@nerzhul) <loic.blot@unix-experience.fr>
- Philippe Dellaert (@pdellaert) <philippe@dellaert.org>
- Abhijeet Kasurde (@Akasurde) <akasurde@redhat.com>
requirements:
- python >= 2.6
- PyVmomi
notes:
- Please make sure that the user used for vmware_guest has the correct level of privileges.
- For example, following is the list of minimum privileges required by users to create virtual machines.
- " DataStore > Allocate Space"
- " Virtual Machine > Configuration > Add New Disk"
- " Virtual Machine > Configuration > Add or Remove Device"
- " Virtual Machine > Inventory > Create New"
- " Network > Assign Network"
- " Resource > Assign Virtual Machine to Resource Pool"
- "Module may require additional privileges as well, which may be required for gathering facts - e.g. ESXi configurations."
- Tested on vSphere 5.5, 6.0, 6.5 and 6.7
  - Use SCSI disks instead of IDE when you want to resize disks online, by specifying a SCSI controller.
- "For additional information please visit Ansible VMware community wiki - U(https://github.com/ansible/community/wiki/VMware)."
options:
state:
description:
- Specify the state the virtual machine should be in.
- 'If C(state) is set to C(present) and virtual machine exists, ensure the virtual machine
configurations conforms to task arguments.'
- 'If C(state) is set to C(absent) and virtual machine exists, then the specified virtual machine
is removed with its associated components.'
    - 'If C(state) is set to one of the following C(poweredon), C(poweredoff), C(present), C(restarted), C(suspended)
       and the virtual machine does not exist, then the virtual machine is deployed with the given parameters.'
- 'If C(state) is set to C(poweredon) and virtual machine exists with powerstate other than powered on,
then the specified virtual machine is powered on.'
- 'If C(state) is set to C(poweredoff) and virtual machine exists with powerstate other than powered off,
then the specified virtual machine is powered off.'
- 'If C(state) is set to C(restarted) and virtual machine exists, then the virtual machine is restarted.'
- 'If C(state) is set to C(suspended) and virtual machine exists, then the virtual machine is set to suspended mode.'
- 'If C(state) is set to C(shutdownguest) and virtual machine exists, then the virtual machine is shutdown.'
- 'If C(state) is set to C(rebootguest) and virtual machine exists, then the virtual machine is rebooted.'
default: present
choices: [ present, absent, poweredon, poweredoff, restarted, suspended, shutdownguest, rebootguest ]
name:
description:
- Name of the virtual machine to work with.
- Virtual machine names in vCenter are not necessarily unique, which may be problematic, see C(name_match).
- 'If multiple virtual machines with same name exists, then C(folder) is required parameter to
identify uniqueness of the virtual machine.'
    - This parameter is required, if C(state) is set to C(poweredon), C(poweredoff), C(present), C(restarted), C(suspended)
      and the virtual machine does not exist.
- This parameter is case sensitive.
required: yes
name_match:
description:
    - If multiple virtual machines match the name, use the first or last one found.
default: 'first'
choices: [ first, last ]
uuid:
description:
- UUID of the virtual machine to manage if known, this is VMware's unique identifier.
- This is required if C(name) is not supplied.
    - If the virtual machine does not exist, then this parameter is ignored.
- Please note that a supplied UUID will be ignored on virtual machine creation, as VMware creates the UUID internally.
template:
description:
- Template or existing virtual machine used to create new virtual machine.
- If this value is not set, virtual machine is created without using a template.
- If the virtual machine already exists, this parameter will be ignored.
- This parameter is case sensitive.
- You can also specify template or VM UUID for identifying source. version_added 2.8. Use C(hw_product_uuid) from M(vmware_guest_facts) as UUID value.
- From version 2.8 onwards, absolute path to virtual machine or template can be used.
aliases: [ 'template_src' ]
is_template:
description:
- Flag the instance as a template.
- This will mark the given virtual machine as template.
default: 'no'
type: bool
version_added: '2.3'
folder:
description:
- Destination folder, absolute path to find an existing guest or create the new guest.
- The folder should include the datacenter. ESX's datacenter is ha-datacenter.
- This parameter is case sensitive.
- This parameter is required, while deploying new virtual machine. version_added 2.5.
- 'If multiple machines are found with same name, this parameter is used to identify
uniqueness of the virtual machine. version_added 2.5'
- 'Examples:'
- ' folder: /ha-datacenter/vm'
- ' folder: ha-datacenter/vm'
- ' folder: /datacenter1/vm'
- ' folder: datacenter1/vm'
- ' folder: /datacenter1/vm/folder1'
- ' folder: datacenter1/vm/folder1'
- ' folder: /folder1/datacenter1/vm'
- ' folder: folder1/datacenter1/vm'
- ' folder: /folder1/datacenter1/vm/folder2'
hardware:
description:
- Manage virtual machine's hardware attributes.
- All parameters case sensitive.
- 'Valid attributes are:'
- ' - C(hotadd_cpu) (boolean): Allow virtual CPUs to be added while the virtual machine is running.'
- ' - C(hotremove_cpu) (boolean): Allow virtual CPUs to be removed while the virtual machine is running.
version_added: 2.5'
- ' - C(hotadd_memory) (boolean): Allow memory to be added while the virtual machine is running.'
- ' - C(memory_mb) (integer): Amount of memory in MB.'
- ' - C(nested_virt) (bool): Enable nested virtualization. version_added: 2.5'
- ' - C(num_cpus) (integer): Number of CPUs.'
- ' - C(num_cpu_cores_per_socket) (integer): Number of Cores Per Socket. Value should be multiple of C(num_cpus).'
- ' - C(scsi) (string): Valid values are C(buslogic), C(lsilogic), C(lsilogicsas) and C(paravirtual) (default).'
- ' - C(memory_reservation) (integer): Amount of memory in MB to set resource limits for memory. version_added: 2.5'
- " - C(memory_reservation_lock) (boolean): If set true, memory resource reservation for the virtual machine
will always be equal to the virtual machine's memory size. version_added: 2.5"
- ' - C(max_connections) (integer): Maximum number of active remote display connections for the virtual machines.
version_added: 2.5.'
- ' - C(mem_limit) (integer): The memory utilization of a virtual machine will not exceed this limit. Unit is MB.
version_added: 2.5'
- ' - C(mem_reservation) (integer): The amount of memory resource that is guaranteed available to the virtual
machine. Unit is MB. version_added: 2.5'
- ' - C(cpu_limit) (integer): The CPU utilization of a virtual machine will not exceed this limit. Unit is MHz.
version_added: 2.5'
- ' - C(cpu_reservation) (integer): The amount of CPU resource that is guaranteed available to the virtual machine.
Unit is MHz. version_added: 2.5'
- ' - C(version) (integer): The Virtual machine hardware versions. Default is 10 (ESXi 5.5 and onwards).
Please check VMware documentation for correct virtual machine hardware version.
Incorrect hardware version may lead to failure in deployment. If hardware version is already equal to the given
version then no action is taken. version_added: 2.6'
- ' - C(boot_firmware) (string): Choose which firmware should be used to boot the virtual machine.
Allowed values are "bios" and "efi". version_added: 2.7'
- ' - C(virt_based_security) (bool): Enable Virtualization Based Security feature for Windows 10.
(Support from Virtual machine hardware version 14, Guest OS Windows 10 64 bit, Windows Server 2016)'
guest_id:
description:
- Set the guest ID.
- This parameter is case sensitive.
- 'Examples:'
- " virtual machine with RHEL7 64 bit, will be 'rhel7_64Guest'"
- " virtual machine with CensOS 64 bit, will be 'centos64Guest'"
- " virtual machine with Ubuntu 64 bit, will be 'ubuntu64Guest'"
- This field is required when creating a virtual machine.
- >
Valid values are referenced here:
U(http://pubs.vmware.com/vsphere-6-5/topic/com.vmware.wssdk.apiref.doc/vim.vm.GuestOsDescriptor.GuestOsIdentifier.html)
version_added: '2.3'
disk:
description:
- A list of disks to add.
- This parameter is case sensitive.
- Resizing disks is not supported.
- Removing existing disks of the virtual machine is not supported.
- 'Valid attributes are:'
- ' - C(size_[tb,gb,mb,kb]) (integer): Disk storage size in specified unit.'
- ' - C(type) (string): Valid values are:'
- ' - C(thin) thin disk'
- ' - C(eagerzeroedthick) eagerzeroedthick disk, added in version 2.5'
- ' Default: C(None) thick disk, no eagerzero.'
- ' - C(datastore) (string): Datastore to use for the disk. If C(autoselect_datastore) is enabled, filter datastore selection.'
- ' - C(autoselect_datastore) (bool): select the less used datastore. Specify only if C(datastore) is not specified.'
- ' - C(disk_mode) (string): Type of disk mode. Added in version 2.6'
- ' - Available options are :'
- ' - C(persistent): Changes are immediately and permanently written to the virtual disk. This is default.'
- ' - C(independent_persistent): Same as persistent, but not affected by snapshots.'
- ' - C(independent_nonpersistent): Changes to virtual disk are made to a redo log and discarded at power off, but not affected by snapshots.'
cdrom:
description:
- A CD-ROM configuration for the virtual machine.
- 'Valid attributes are:'
- ' - C(type) (string): The type of CD-ROM, valid options are C(none), C(client) or C(iso). With C(none) the CD-ROM will be disconnected but present.'
- ' - C(iso_path) (string): The datastore path to the ISO file to use, in the form of C([datastore1] path/to/file.iso). Required if type is set C(iso).'
version_added: '2.5'
resource_pool:
description:
- Use the given resource pool for virtual machine operation.
- This parameter is case sensitive.
- Resource pool should be child of the selected host parent.
version_added: '2.3'
wait_for_ip_address:
description:
- Wait until vCenter detects an IP address for the virtual machine.
- This requires vmware-tools (vmtoolsd) to properly work after creation.
- "vmware-tools needs to be installed on the given virtual machine in order to work with this parameter."
default: 'no'
type: bool
wait_for_customization:
description:
- Wait until vCenter detects all guest customizations as successfully completed.
- When enabled, the VM will automatically be powered on.
default: 'no'
type: bool
version_added: '2.8'
state_change_timeout:
description:
- If the C(state) is set to C(shutdownguest), by default the module will return immediately after sending the shutdown signal.
- If this argument is set to a positive integer, the module will instead wait for the virtual machine to reach the poweredoff state.
- The value sets a timeout in seconds for the module to wait for the state change.
default: 0
version_added: '2.6'
snapshot_src:
description:
- Name of the existing snapshot to use to create a clone of a virtual machine.
- This parameter is case sensitive.
- While creating linked clone using C(linked_clone) parameter, this parameter is required.
version_added: '2.4'
linked_clone:
description:
- Whether to create a linked clone from the snapshot specified.
- If specified, then C(snapshot_src) is required parameter.
default: 'no'
type: bool
version_added: '2.4'
force:
description:
- Ignore warnings and complete the actions.
    - This parameter is useful when removing a virtual machine which is in the powered on state.
    - 'This module reflects the VMware vCenter API and UI workflow; as such, in some cases the `force` flag will
      be mandatory to perform the action, to ensure you are certain the action has to be taken, no matter what the consequence.
      This is specifically the case for removing a powered-on virtual machine when C(state) is set to C(absent).'
default: 'no'
type: bool
datacenter:
description:
- Destination datacenter for the deploy operation.
- This parameter is case sensitive.
default: ha-datacenter
cluster:
description:
- The cluster name where the virtual machine will run.
- This is a required parameter, if C(esxi_hostname) is not set.
- C(esxi_hostname) and C(cluster) are mutually exclusive parameters.
- This parameter is case sensitive.
version_added: '2.3'
esxi_hostname:
description:
- The ESXi hostname where the virtual machine will run.
- This is a required parameter, if C(cluster) is not set.
- C(esxi_hostname) and C(cluster) are mutually exclusive parameters.
- This parameter is case sensitive.
annotation:
description:
- A note or annotation to include in the virtual machine.
version_added: '2.3'
customvalues:
description:
- Define a list of custom values to set on virtual machine.
- A custom value object takes two fields C(key) and C(value).
- Incorrect key and values will be ignored.
version_added: '2.3'
networks:
description:
- A list of networks (in the order of the NICs).
- Removing NICs is not allowed, while reconfiguring the virtual machine.
    - All parameters and VMware object names are case sensitive.
- 'One of the below parameters is required per entry:'
- ' - C(name) (string): Name of the portgroup or distributed virtual portgroup for this interface.
When specifying distributed virtual portgroup make sure given C(esxi_hostname) or C(cluster) is associated with it.'
- ' - C(vlan) (integer): VLAN number for this interface.'
- 'Optional parameters per entry (used for virtual hardware):'
- ' - C(device_type) (string): Virtual network device (one of C(e1000), C(e1000e), C(pcnet32), C(vmxnet2), C(vmxnet3) (default), C(sriov)).'
- ' - C(mac) (string): Customize MAC address.'
- ' - C(dvswitch_name) (string): Name of the distributed vSwitch.
This value is required if multiple distributed portgroups exists with the same name. version_added 2.7'
- ' - C(start_connected) (bool): Indicates that virtual network adapter starts with associated virtual machine powers on. version_added: 2.5'
- 'Optional parameters per entry (used for OS customization):'
- ' - C(type) (string): Type of IP assignment (either C(dhcp) or C(static)). C(dhcp) is default.'
- ' - C(ip) (string): Static IP address (implies C(type: static)).'
- ' - C(netmask) (string): Static netmask required for C(ip).'
- ' - C(gateway) (string): Static gateway.'
- ' - C(dns_servers) (string): DNS servers for this network interface (Windows).'
- ' - C(domain) (string): Domain name for this network interface (Windows).'
- ' - C(wake_on_lan) (bool): Indicates if wake-on-LAN is enabled on this virtual network adapter. version_added: 2.5'
- ' - C(allow_guest_control) (bool): Enables guest control over whether the connectable device is connected. version_added: 2.5'
version_added: '2.3'
customization:
description:
- Parameters for OS customization when cloning from the template or the virtual machine.
- Not all operating systems are supported for customization with respective vCenter version,
please check VMware documentation for respective OS customization.
- For supported customization operating system matrix, (see U(http://partnerweb.vmware.com/programs/guestOS/guest-os-customization-matrix.pdf))
- All parameters and VMware object names are case sensitive.
- Linux based OSes requires Perl package to be installed for OS customizations.
- 'Common parameters (Linux/Windows):'
- ' - C(dns_servers) (list): List of DNS servers to configure.'
- ' - C(dns_suffix) (list): List of domain suffixes, also known as DNS search path (default: C(domain) parameter).'
- ' - C(domain) (string): DNS domain name to use.'
    - ' - C(hostname) (string): Computer hostname (default: shortened C(name) parameter). Allowed characters are alphanumeric (uppercase and lowercase)
and minus, rest of the characters are dropped as per RFC 952.'
- 'Parameters related to Windows customization:'
- ' - C(autologon) (bool): Auto logon after virtual machine customization (default: False).'
- ' - C(autologoncount) (int): Number of autologon after reboot (default: 1).'
- ' - C(domainadmin) (string): User used to join in AD domain (mandatory with C(joindomain)).'
- ' - C(domainadminpassword) (string): Password used to join in AD domain (mandatory with C(joindomain)).'
- ' - C(fullname) (string): Server owner name (default: Administrator).'
- ' - C(joindomain) (string): AD domain to join (Not compatible with C(joinworkgroup)).'
- ' - C(joinworkgroup) (string): Workgroup to join (Not compatible with C(joindomain), default: WORKGROUP).'
- ' - C(orgname) (string): Organisation name (default: ACME).'
- ' - C(password) (string): Local administrator password.'
- ' - C(productid) (string): Product ID.'
- ' - C(runonce) (list): List of commands to run at first user logon.'
- ' - C(timezone) (int): Timezone (See U(https://msdn.microsoft.com/en-us/library/ms912391.aspx)).'
version_added: '2.3'
vapp_properties:
description:
- A list of vApp properties
- 'For full list of attributes and types refer to: U(https://github.com/vmware/pyvmomi/blob/master/docs/vim/vApp/PropertyInfo.rst)'
- 'Basic attributes are:'
- ' - C(id) (string): Property id - required.'
- ' - C(value) (string): Property value.'
- ' - C(type) (string): Value type, string type by default.'
- ' - C(operation): C(remove): This attribute is required only when removing properties.'
version_added: '2.6'
customization_spec:
description:
- Unique name identifying the requested customization specification.
- This parameter is case sensitive.
- If set, then overrides C(customization) parameter values.
version_added: '2.6'
datastore:
description:
- Specify datastore or datastore cluster to provision virtual machine.
    - 'This will take precedence over the "disk.datastore" parameter.'
- This parameter is useful to override datastore or datastore cluster setting.
- For example, when user has different datastore or datastore cluster for templates and virtual machines.
- Please see example for more usage.
version_added: '2.7'
convert:
description:
    - Specify the target disk type while cloning a template or virtual machine.
choices: [ thin, thick, eagerzeroedthick ]
version_added: '2.8'
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Create a virtual machine on given ESXi hostname
vmware_guest:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
validate_certs: no
folder: /DC1/vm/
name: test_vm_0001
state: poweredon
guest_id: centos64Guest
# This is hostname of particular ESXi server on which user wants VM to be deployed
esxi_hostname: "{{ esxi_hostname }}"
disk:
- size_gb: 10
type: thin
datastore: datastore1
hardware:
memory_mb: 512
num_cpus: 4
scsi: paravirtual
networks:
- name: VM Network
mac: aa:bb:dd:aa:00:14
ip: 10.10.10.100
netmask: 255.255.255.0
device_type: vmxnet3
wait_for_ip_address: yes
delegate_to: localhost
register: deploy_vm
- name: Create a virtual machine from a template
vmware_guest:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
validate_certs: no
folder: /testvms
name: testvm_2
state: poweredon
template: template_el7
disk:
- size_gb: 10
type: thin
datastore: g73_datastore
hardware:
memory_mb: 512
num_cpus: 6
num_cpu_cores_per_socket: 3
scsi: paravirtual
memory_reservation: 512
memory_reservation_lock: True
mem_limit: 8096
mem_reservation: 4096
cpu_limit: 8096
cpu_reservation: 4096
max_connections: 5
hotadd_cpu: True
hotremove_cpu: True
hotadd_memory: False
version: 12 # Hardware version of virtual machine
boot_firmware: "efi"
cdrom:
type: iso
iso_path: "[datastore1] livecd.iso"
networks:
- name: VM Network
mac: aa:bb:dd:aa:00:14
wait_for_ip_address: yes
delegate_to: localhost
register: deploy
- name: Clone a virtual machine from Windows template and customize
vmware_guest:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
validate_certs: no
datacenter: datacenter1
cluster: cluster
name: testvm-2
template: template_windows
networks:
- name: VM Network
ip: 192.168.1.100
netmask: 255.255.255.0
gateway: 192.168.1.1
mac: aa:bb:dd:aa:00:14
domain: my_domain
dns_servers:
- 192.168.1.1
- 192.168.1.2
- vlan: 1234
type: dhcp
customization:
autologon: yes
dns_servers:
- 192.168.1.1
- 192.168.1.2
domain: my_domain
password: new_vm_password
runonce:
- powershell.exe -ExecutionPolicy Unrestricted -File C:\Windows\Temp\ConfigureRemotingForAnsible.ps1 -ForceNewSSLCert -EnableCredSSP
delegate_to: localhost
- name: Clone a virtual machine from Linux template and customize
vmware_guest:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
validate_certs: no
datacenter: "{{ datacenter }}"
state: present
folder: /DC1/vm
template: "{{ template }}"
name: "{{ vm_name }}"
cluster: DC1_C1
networks:
- name: VM Network
ip: 192.168.10.11
netmask: 255.255.255.0
wait_for_ip_address: True
customization:
domain: "{{ guest_domain }}"
dns_servers:
- 8.9.9.9
- 7.8.8.9
dns_suffix:
- example.com
- example2.com
delegate_to: localhost
- name: Rename a virtual machine (requires the virtual machine's uuid)
vmware_guest:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
validate_certs: no
uuid: "{{ vm_uuid }}"
name: new_name
state: present
delegate_to: localhost
- name: Remove a virtual machine by uuid
vmware_guest:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
validate_certs: no
uuid: "{{ vm_uuid }}"
state: absent
delegate_to: localhost
- name: Manipulate vApp properties
vmware_guest:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
validate_certs: no
name: vm_name
state: present
vapp_properties:
- id: remoteIP
category: Backup
label: Backup server IP
type: str
value: 10.10.10.1
- id: old_property
operation: remove
delegate_to: localhost
- name: Set powerstate of a virtual machine to poweroff by using UUID
vmware_guest:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
validate_certs: no
uuid: "{{ vm_uuid }}"
state: poweredoff
delegate_to: localhost
- name: Deploy a virtual machine in a datastore different from the datastore of the template
vmware_guest:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
name: "{{ vm_name }}"
state: present
template: "{{ template_name }}"
# Here datastore can be different which holds template
datastore: "{{ virtual_machine_datastore }}"
hardware:
memory_mb: 512
num_cpus: 2
scsi: paravirtual
delegate_to: localhost
'''
RETURN = r'''
instance:
description: metadata about the new virtual machine
returned: always
type: dict
sample: None
'''
import re
import time
HAS_PYVMOMI = False
try:
from pyVmomi import vim, vmodl
HAS_PYVMOMI = True
except ImportError:
pass
from random import randint
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text, to_native
from ansible.module_utils.vmware import (find_obj, gather_vm_facts, get_all_objs,
compile_folder_path_for_object, serialize_spec,
vmware_argument_spec, set_vm_power_state, PyVmomi,
find_dvs_by_name, find_dvspg_by_name, wait_for_vm_ip)
class PyVmomiDeviceHelper(object):
""" This class is a helper to create easily VMWare Objects for PyVmomiHelper """
def __init__(self, module):
self.module = module
self.next_disk_unit_number = 0
self.scsi_device_type = {
'lsilogic': vim.vm.device.VirtualLsiLogicController,
'paravirtual': vim.vm.device.ParaVirtualSCSIController,
'buslogic': vim.vm.device.VirtualBusLogicController,
'lsilogicsas': vim.vm.device.VirtualLsiLogicSASController,
}
def create_scsi_controller(self, scsi_type):
scsi_ctl = vim.vm.device.VirtualDeviceSpec()
scsi_ctl.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
scsi_device = self.scsi_device_type.get(scsi_type, vim.vm.device.ParaVirtualSCSIController)
scsi_ctl.device = scsi_device()
scsi_ctl.device.busNumber = 0
# While creating a new SCSI controller, temporary key value
# should be unique negative integers
scsi_ctl.device.key = -randint(1000, 9999)
scsi_ctl.device.hotAddRemove = True
scsi_ctl.device.sharedBus = 'noSharing'
scsi_ctl.device.scsiCtlrUnitNumber = 7
return scsi_ctl
def is_scsi_controller(self, device):
return isinstance(device, tuple(self.scsi_device_type.values()))
@staticmethod
def create_ide_controller():
ide_ctl = vim.vm.device.VirtualDeviceSpec()
ide_ctl.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
ide_ctl.device = vim.vm.device.VirtualIDEController()
ide_ctl.device.deviceInfo = vim.Description()
# While creating a new IDE controller, temporary key value
# should be unique negative integers
ide_ctl.device.key = -randint(200, 299)
ide_ctl.device.busNumber = 0
return ide_ctl
@staticmethod
def create_cdrom(ide_ctl, cdrom_type, iso_path=None):
cdrom_spec = vim.vm.device.VirtualDeviceSpec()
cdrom_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
cdrom_spec.device = vim.vm.device.VirtualCdrom()
cdrom_spec.device.controllerKey = ide_ctl.device.key
cdrom_spec.device.key = -1
cdrom_spec.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
cdrom_spec.device.connectable.allowGuestControl = True
cdrom_spec.device.connectable.startConnected = (cdrom_type != "none")
if cdrom_type in ["none", "client"]:
cdrom_spec.device.backing = vim.vm.device.VirtualCdrom.RemotePassthroughBackingInfo()
elif cdrom_type == "iso":
cdrom_spec.device.backing = vim.vm.device.VirtualCdrom.IsoBackingInfo(fileName=iso_path)
return cdrom_spec
@staticmethod
def is_equal_cdrom(vm_obj, cdrom_device, cdrom_type, iso_path):
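        # Decide whether an existing CD-ROM device already matches the requested
        # configuration (backing type, ISO path, and connectable state), so callers
        # can skip reconfiguration when nothing would change.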
if cdrom_type == "none":
return (isinstance(cdrom_device.backing, vim.vm.device.VirtualCdrom.RemotePassthroughBackingInfo) and
cdrom_device.connectable.allowGuestControl and
not cdrom_device.connectable.startConnected and
(vm_obj.runtime.powerState != vim.VirtualMachinePowerState.poweredOn or not cdrom_device.connectable.connected))
elif cdrom_type == "client":
return (isinstance(cdrom_device.backing, vim.vm.device.VirtualCdrom.RemotePassthroughBackingInfo) and
cdrom_device.connectable.allowGuestControl and
cdrom_device.connectable.startConnected and
(vm_obj.runtime.powerState != vim.VirtualMachinePowerState.poweredOn or cdrom_device.connectable.connected))
elif cdrom_type == "iso":
return (isinstance(cdrom_device.backing, vim.vm.device.VirtualCdrom.IsoBackingInfo) and
cdrom_device.backing.fileName == iso_path and
cdrom_device.connectable.allowGuestControl and
cdrom_device.connectable.startConnected and
(vm_obj.runtime.powerState != vim.VirtualMachinePowerState.poweredOn or cdrom_device.connectable.connected))
def create_scsi_disk(self, scsi_ctl, disk_index=None):
diskspec = vim.vm.device.VirtualDeviceSpec()
diskspec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
diskspec.fileOperation = vim.vm.device.VirtualDeviceSpec.FileOperation.create
diskspec.device = vim.vm.device.VirtualDisk()
diskspec.device.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
diskspec.device.controllerKey = scsi_ctl.device.key
        # Unit number 7 is reserved for the SCSI controller itself and must never
        # be assigned to a disk.
        if self.next_disk_unit_number == 7:
            raise AssertionError("disk unit number 7 is reserved for the SCSI controller")
        if disk_index == 7:
            raise AssertionError("disk unit number 7 is reserved for the SCSI controller")
        # Configure the disk unit number.
if disk_index is not None:
diskspec.device.unitNumber = disk_index
self.next_disk_unit_number = disk_index + 1
else:
diskspec.device.unitNumber = self.next_disk_unit_number
self.next_disk_unit_number += 1
# unit number 7 is reserved to SCSI controller, increase next index
if self.next_disk_unit_number == 7:
self.next_disk_unit_number += 1
return diskspec
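        # e.g. successive calls hand out unit numbers 0, 1, ..., 6, 8, ...; number 7
        # is skipped because it is reserved for the SCSI controller itself.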
def get_device(self, device_type, name):
nic_dict = dict(pcnet32=vim.vm.device.VirtualPCNet32(),
vmxnet2=vim.vm.device.VirtualVmxnet2(),
vmxnet3=vim.vm.device.VirtualVmxnet3(),
e1000=vim.vm.device.VirtualE1000(),
e1000e=vim.vm.device.VirtualE1000e(),
sriov=vim.vm.device.VirtualSriovEthernetCard(),
)
if device_type in nic_dict:
return nic_dict[device_type]
else:
self.module.fail_json(msg='Invalid device_type "%s"'
' for network "%s"' % (device_type, name))
def create_nic(self, device_type, device_label, device_infos):
nic = vim.vm.device.VirtualDeviceSpec()
nic.device = self.get_device(device_type, device_infos['name'])
nic.device.wakeOnLanEnabled = bool(device_infos.get('wake_on_lan', True))
nic.device.deviceInfo = vim.Description()
nic.device.deviceInfo.label = device_label
nic.device.deviceInfo.summary = device_infos['name']
nic.device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
nic.device.connectable.startConnected = bool(device_infos.get('start_connected', True))
nic.device.connectable.allowGuestControl = bool(device_infos.get('allow_guest_control', True))
nic.device.connectable.connected = True
if 'mac' in device_infos and self.is_valid_mac_addr(device_infos['mac']):
nic.device.addressType = 'manual'
nic.device.macAddress = device_infos['mac']
else:
nic.device.addressType = 'generated'
return nic
@staticmethod
def is_valid_mac_addr(mac_addr):
"""
Function to validate MAC address for given string
Args:
mac_addr: string to validate as MAC address
Returns: (Boolean) True if string is valid MAC address, otherwise False
"""
mac_addr_regex = re.compile('[0-9a-f]{2}([-:])[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$')
return bool(mac_addr_regex.match(mac_addr))
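        # e.g. is_valid_mac_addr('aa:bb:cc:dd:ee:ff') -> True
        #      is_valid_mac_addr('aa:bb:cc:dd:ee')    -> False (only 5 octets)
        # Note: the pattern accepts lowercase hex digits only.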
def integer_value(self, input_value, name):
"""
Function to return int value for given input, else return error
Args:
            input_value: Input value to retrieve int value from
name: Name of the Input value (used to build error message)
        Returns: (int) if an integer value can be obtained, otherwise fails with an error message.
"""
if isinstance(input_value, int):
return input_value
elif isinstance(input_value, str) and input_value.isdigit():
return int(input_value)
else:
self.module.fail_json(msg='"%s" attribute should be an'
' integer value.' % name)
class PyVmomiCache(object):
""" This class caches references to objects which are requested multiples times but not modified """
def __init__(self, content, dc_name=None):
self.content = content
self.dc_name = dc_name
self.networks = {}
self.clusters = {}
self.esx_hosts = {}
self.parent_datacenters = {}
def find_obj(self, content, types, name, confine_to_datacenter=True):
""" Wrapper around find_obj to set datacenter context """
result = find_obj(content, types, name)
if result and confine_to_datacenter:
if to_text(self.get_parent_datacenter(result).name) != to_text(self.dc_name):
result = None
objects = self.get_all_objs(content, types, confine_to_datacenter=True)
for obj in objects:
if name is None or to_text(obj.name) == to_text(name):
return obj
return result
def get_all_objs(self, content, types, confine_to_datacenter=True):
""" Wrapper around get_all_objs to set datacenter context """
objects = get_all_objs(content, types)
if confine_to_datacenter:
if hasattr(objects, 'items'):
# resource pools come back as a dictionary
# make a copy
tmpobjs = objects.copy()
for k, v in objects.items():
parent_dc = self.get_parent_datacenter(k)
if parent_dc.name != self.dc_name:
tmpobjs.pop(k, None)
objects = tmpobjs
else:
# everything else should be a list
objects = [x for x in objects if self.get_parent_datacenter(x).name == self.dc_name]
return objects
def get_network(self, network):
if network not in self.networks:
self.networks[network] = self.find_obj(self.content, [vim.Network], network)
return self.networks[network]
def get_cluster(self, cluster):
if cluster not in self.clusters:
self.clusters[cluster] = self.find_obj(self.content, [vim.ClusterComputeResource], cluster)
return self.clusters[cluster]
def get_esx_host(self, host):
if host not in self.esx_hosts:
self.esx_hosts[host] = self.find_obj(self.content, [vim.HostSystem], host)
return self.esx_hosts[host]
def get_parent_datacenter(self, obj):
""" Walk the parent tree to find the objects datacenter """
if isinstance(obj, vim.Datacenter):
return obj
if obj in self.parent_datacenters:
return self.parent_datacenters[obj]
datacenter = None
while True:
if not hasattr(obj, 'parent'):
break
obj = obj.parent
if isinstance(obj, vim.Datacenter):
datacenter = obj
break
self.parent_datacenters[obj] = datacenter
return datacenter
class PyVmomiHelper(PyVmomi):
def __init__(self, module):
super(PyVmomiHelper, self).__init__(module)
self.device_helper = PyVmomiDeviceHelper(self.module)
self.configspec = None
self.change_detected = False
self.customspec = None
self.cache = PyVmomiCache(self.content, dc_name=self.params['datacenter'])
def gather_facts(self, vm):
return gather_vm_facts(self.content, vm)
def remove_vm(self, vm):
# https://www.vmware.com/support/developer/converter-sdk/conv60_apireference/vim.ManagedEntity.html#destroy
if vm.summary.runtime.powerState.lower() == 'poweredon':
self.module.fail_json(msg="Virtual machine %s found in 'powered on' state, "
"please use 'force' parameter to remove or poweroff VM "
"and try removing VM again." % vm.name)
task = vm.Destroy()
self.wait_for_task(task)
if task.info.state == 'error':
return {'changed': False, 'failed': True, 'msg': task.info.error.msg}
else:
return {'changed': True, 'failed': False}
def configure_guestid(self, vm_obj, vm_creation=False):
# guest_id is not required when using templates
if self.params['template'] and not self.params['guest_id']:
return
# guest_id is only mandatory on VM creation
if vm_creation and self.params['guest_id'] is None:
self.module.fail_json(msg="guest_id attribute is mandatory for VM creation")
if self.params['guest_id'] and \
(vm_obj is None or self.params['guest_id'].lower() != vm_obj.summary.config.guestId.lower()):
self.change_detected = True
self.configspec.guestId = self.params['guest_id']
def configure_resource_alloc_info(self, vm_obj):
"""
Function to configure resource allocation information about virtual machine
:param vm_obj: VM object in case of reconfigure, None in case of deploy
:return: None
"""
rai_change_detected = False
memory_allocation = vim.ResourceAllocationInfo()
cpu_allocation = vim.ResourceAllocationInfo()
if 'hardware' in self.params:
if 'mem_limit' in self.params['hardware']:
mem_limit = None
try:
mem_limit = int(self.params['hardware'].get('mem_limit'))
except ValueError as e:
self.module.fail_json(msg="hardware.mem_limit attribute should be an integer value.")
memory_allocation.limit = mem_limit
if vm_obj is None or memory_allocation.limit != vm_obj.config.memoryAllocation.limit:
rai_change_detected = True
if 'mem_reservation' in self.params['hardware']:
mem_reservation = None
try:
mem_reservation = int(self.params['hardware'].get('mem_reservation'))
except ValueError as e:
self.module.fail_json(msg="hardware.mem_reservation should be an integer value.")
memory_allocation.reservation = mem_reservation
if vm_obj is None or \
memory_allocation.reservation != vm_obj.config.memoryAllocation.reservation:
rai_change_detected = True
if 'cpu_limit' in self.params['hardware']:
cpu_limit = None
try:
cpu_limit = int(self.params['hardware'].get('cpu_limit'))
except ValueError as e:
self.module.fail_json(msg="hardware.cpu_limit attribute should be an integer value.")
cpu_allocation.limit = cpu_limit
if vm_obj is None or cpu_allocation.limit != vm_obj.config.cpuAllocation.limit:
rai_change_detected = True
if 'cpu_reservation' in self.params['hardware']:
cpu_reservation = None
try:
cpu_reservation = int(self.params['hardware'].get('cpu_reservation'))
except ValueError as e:
self.module.fail_json(msg="hardware.cpu_reservation should be an integer value.")
cpu_allocation.reservation = cpu_reservation
if vm_obj is None or \
cpu_allocation.reservation != vm_obj.config.cpuAllocation.reservation:
rai_change_detected = True
if rai_change_detected:
self.configspec.memoryAllocation = memory_allocation
self.configspec.cpuAllocation = cpu_allocation
self.change_detected = True
def configure_cpu_and_memory(self, vm_obj, vm_creation=False):
# set cpu/memory/etc
if 'hardware' in self.params:
if 'num_cpus' in self.params['hardware']:
try:
num_cpus = int(self.params['hardware']['num_cpus'])
except ValueError as e:
self.module.fail_json(msg="hardware.num_cpus attribute should be an integer value.")
# check VM power state and cpu hot-add/hot-remove state before re-config VM
if vm_obj and vm_obj.runtime.powerState == vim.VirtualMachinePowerState.poweredOn:
if not vm_obj.config.cpuHotRemoveEnabled and num_cpus < vm_obj.config.hardware.numCPU:
self.module.fail_json(msg="Configured cpu number is less than the cpu number of the VM, "
"cpuHotRemove is not enabled")
if not vm_obj.config.cpuHotAddEnabled and num_cpus > vm_obj.config.hardware.numCPU:
self.module.fail_json(msg="Configured cpu number is more than the cpu number of the VM, "
"cpuHotAdd is not enabled")
if 'num_cpu_cores_per_socket' in self.params['hardware']:
try:
num_cpu_cores_per_socket = int(self.params['hardware']['num_cpu_cores_per_socket'])
except ValueError as e:
self.module.fail_json(msg="hardware.num_cpu_cores_per_socket attribute "
"should be an integer value.")
if num_cpus % num_cpu_cores_per_socket != 0:
self.module.fail_json(msg="hardware.num_cpus attribute should be a multiple "
"of hardware.num_cpu_cores_per_socket")
self.configspec.numCoresPerSocket = num_cpu_cores_per_socket
if vm_obj is None or self.configspec.numCoresPerSocket != vm_obj.config.hardware.numCoresPerSocket:
self.change_detected = True
self.configspec.numCPUs = num_cpus
if vm_obj is None or self.configspec.numCPUs != vm_obj.config.hardware.numCPU:
self.change_detected = True
# num_cpu is mandatory for VM creation
elif vm_creation and not self.params['template']:
self.module.fail_json(msg="hardware.num_cpus attribute is mandatory for VM creation")
if 'memory_mb' in self.params['hardware']:
try:
memory_mb = int(self.params['hardware']['memory_mb'])
except ValueError:
self.module.fail_json(msg="Failed to parse hardware.memory_mb value."
" Please refer the documentation and provide"
" correct value.")
# check VM power state and memory hotadd state before re-config VM
if vm_obj and vm_obj.runtime.powerState == vim.VirtualMachinePowerState.poweredOn:
if vm_obj.config.memoryHotAddEnabled and memory_mb < vm_obj.config.hardware.memoryMB:
self.module.fail_json(msg="Configured memory is less than memory size of the VM, "
"operation is not supported")
elif not vm_obj.config.memoryHotAddEnabled and memory_mb != vm_obj.config.hardware.memoryMB:
self.module.fail_json(msg="memoryHotAdd is not enabled")
self.configspec.memoryMB = memory_mb
if vm_obj is None or self.configspec.memoryMB != vm_obj.config.hardware.memoryMB:
self.change_detected = True
# memory_mb is mandatory for VM creation
elif vm_creation and not self.params['template']:
self.module.fail_json(msg="hardware.memory_mb attribute is mandatory for VM creation")
if 'hotadd_memory' in self.params['hardware']:
if vm_obj and vm_obj.runtime.powerState == vim.VirtualMachinePowerState.poweredOn and \
vm_obj.config.memoryHotAddEnabled != bool(self.params['hardware']['hotadd_memory']):
self.module.fail_json(msg="Configure hotadd memory operation is not supported when VM is power on")
self.configspec.memoryHotAddEnabled = bool(self.params['hardware']['hotadd_memory'])
if vm_obj is None or self.configspec.memoryHotAddEnabled != vm_obj.config.memoryHotAddEnabled:
self.change_detected = True
if 'hotadd_cpu' in self.params['hardware']:
if vm_obj and vm_obj.runtime.powerState == vim.VirtualMachinePowerState.poweredOn and \
vm_obj.config.cpuHotAddEnabled != bool(self.params['hardware']['hotadd_cpu']):
self.module.fail_json(msg="Configure hotadd cpu operation is not supported when VM is power on")
self.configspec.cpuHotAddEnabled = bool(self.params['hardware']['hotadd_cpu'])
if vm_obj is None or self.configspec.cpuHotAddEnabled != vm_obj.config.cpuHotAddEnabled:
self.change_detected = True
if 'hotremove_cpu' in self.params['hardware']:
if vm_obj and vm_obj.runtime.powerState == vim.VirtualMachinePowerState.poweredOn and \
vm_obj.config.cpuHotRemoveEnabled != bool(self.params['hardware']['hotremove_cpu']):
self.module.fail_json(msg="Configure hotremove cpu operation is not supported when VM is power on")
self.configspec.cpuHotRemoveEnabled = bool(self.params['hardware']['hotremove_cpu'])
if vm_obj is None or self.configspec.cpuHotRemoveEnabled != vm_obj.config.cpuHotRemoveEnabled:
self.change_detected = True
if 'memory_reservation' in self.params['hardware']:
memory_reservation_mb = 0
try:
memory_reservation_mb = int(self.params['hardware']['memory_reservation'])
except ValueError as e:
self.module.fail_json(msg="Failed to set memory_reservation value."
"Valid value for memory_reservation value in MB (integer): %s" % e)
mem_alloc = vim.ResourceAllocationInfo()
mem_alloc.reservation = memory_reservation_mb
self.configspec.memoryAllocation = mem_alloc
if vm_obj is None or self.configspec.memoryAllocation.reservation != vm_obj.config.memoryAllocation.reservation:
self.change_detected = True
if 'memory_reservation_lock' in self.params['hardware']:
self.configspec.memoryReservationLockedToMax = bool(self.params['hardware']['memory_reservation_lock'])
if vm_obj is None or self.configspec.memoryReservationLockedToMax != vm_obj.config.memoryReservationLockedToMax:
self.change_detected = True
if 'boot_firmware' in self.params['hardware']:
                # boot firmware re-config can cause boot issues
if vm_obj is not None:
return
boot_firmware = self.params['hardware']['boot_firmware'].lower()
if boot_firmware not in ('bios', 'efi'):
self.module.fail_json(msg="hardware.boot_firmware value is invalid [%s]."
" Need one of ['bios', 'efi']." % boot_firmware)
self.configspec.firmware = boot_firmware
self.change_detected = True
def configure_cdrom(self, vm_obj):
# Configure the VM CD-ROM
if "cdrom" in self.params and self.params["cdrom"]:
if "type" not in self.params["cdrom"] or self.params["cdrom"]["type"] not in ["none", "client", "iso"]:
self.module.fail_json(msg="cdrom.type is mandatory")
if self.params["cdrom"]["type"] == "iso" and ("iso_path" not in self.params["cdrom"] or not self.params["cdrom"]["iso_path"]):
self.module.fail_json(msg="cdrom.iso_path is mandatory in case cdrom.type is iso")
if vm_obj and vm_obj.config.template:
# Changing CD-ROM settings on a template is not supported
return
cdrom_spec = None
cdrom_device = self.get_vm_cdrom_device(vm=vm_obj)
iso_path = self.params["cdrom"]["iso_path"] if "iso_path" in self.params["cdrom"] else None
if cdrom_device is None:
# Creating new CD-ROM
ide_device = self.get_vm_ide_device(vm=vm_obj)
if ide_device is None:
# Creating new IDE device
ide_device = self.device_helper.create_ide_controller()
self.change_detected = True
self.configspec.deviceChange.append(ide_device)
elif len(ide_device.device) > 3:
self.module.fail_json(msg="hardware.cdrom specified for a VM or template which already has 4 IDE devices of which none are a cdrom")
cdrom_spec = self.device_helper.create_cdrom(ide_ctl=ide_device, cdrom_type=self.params["cdrom"]["type"], iso_path=iso_path)
if vm_obj and vm_obj.runtime.powerState == vim.VirtualMachinePowerState.poweredOn:
cdrom_spec.device.connectable.connected = (self.params["cdrom"]["type"] != "none")
elif not self.device_helper.is_equal_cdrom(vm_obj=vm_obj, cdrom_device=cdrom_device, cdrom_type=self.params["cdrom"]["type"], iso_path=iso_path):
# Updating an existing CD-ROM
if self.params["cdrom"]["type"] in ["client", "none"]:
cdrom_device.backing = vim.vm.device.VirtualCdrom.RemotePassthroughBackingInfo()
elif self.params["cdrom"]["type"] == "iso":
cdrom_device.backing = vim.vm.device.VirtualCdrom.IsoBackingInfo(fileName=iso_path)
cdrom_device.connectable = vim.vm.device.VirtualDevice.ConnectInfo()
cdrom_device.connectable.allowGuestControl = True
cdrom_device.connectable.startConnected = (self.params["cdrom"]["type"] != "none")
if vm_obj and vm_obj.runtime.powerState == vim.VirtualMachinePowerState.poweredOn:
cdrom_device.connectable.connected = (self.params["cdrom"]["type"] != "none")
cdrom_spec = vim.vm.device.VirtualDeviceSpec()
cdrom_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
cdrom_spec.device = cdrom_device
if cdrom_spec:
self.change_detected = True
self.configspec.deviceChange.append(cdrom_spec)
def configure_hardware_params(self, vm_obj):
"""
Function to configure hardware related configuration of virtual machine
Args:
vm_obj: virtual machine object
"""
if 'hardware' in self.params:
if 'max_connections' in self.params['hardware']:
# maxMksConnections == max_connections
self.configspec.maxMksConnections = int(self.params['hardware']['max_connections'])
if vm_obj is None or self.configspec.maxMksConnections != vm_obj.config.hardware.maxMksConnections:
self.change_detected = True
if 'nested_virt' in self.params['hardware']:
self.configspec.nestedHVEnabled = bool(self.params['hardware']['nested_virt'])
if vm_obj is None or self.configspec.nestedHVEnabled != bool(vm_obj.config.nestedHVEnabled):
self.change_detected = True
if 'version' in self.params['hardware']:
hw_version_check_failed = False
temp_version = self.params['hardware'].get('version', 10)
try:
temp_version = int(temp_version)
except ValueError:
hw_version_check_failed = True
if temp_version not in range(3, 15):
hw_version_check_failed = True
if hw_version_check_failed:
self.module.fail_json(msg="Failed to set hardware.version '%s' value as valid"
" values range from 3 (ESX 2.x) to 14 (ESXi 6.5 and greater)." % temp_version)
# Hardware version is denoted as "vmx-10"
version = "vmx-%02d" % temp_version
self.configspec.version = version
if vm_obj is None or self.configspec.version != vm_obj.config.version:
self.change_detected = True
if vm_obj is not None:
# VM exists and we need to update the hardware version
current_version = vm_obj.config.version
# current_version = "vmx-10"
version_digit = int(current_version.split("-", 1)[-1])
if temp_version < version_digit:
self.module.fail_json(msg="Current hardware version '%d' which is greater than the specified"
" version '%d'. Downgrading hardware version is"
" not supported. Please specify version greater"
" than the current version." % (version_digit,
temp_version))
new_version = "vmx-%02d" % temp_version
try:
task = vm_obj.UpgradeVM_Task(new_version)
self.wait_for_task(task)
if task.info.state != 'error':
self.change_detected = True
except vim.fault.AlreadyUpgraded:
# Don't fail if VM is already upgraded.
pass
if 'virt_based_security' in self.params['hardware']:
host_version = self.select_host().summary.config.product.version
if int(host_version.split('.')[0]) < 6 or (int(host_version.split('.')[0]) == 6 and int(host_version.split('.')[1]) < 7):
self.module.fail_json(msg="ESXi version %s not support VBS." % host_version)
guest_ids = ['windows9_64Guest', 'windows9Server64Guest']
if vm_obj is None:
guestid = self.configspec.guestId
else:
guestid = vm_obj.summary.config.guestId
if guestid not in guest_ids:
self.module.fail_json(msg="Guest '%s' not support VBS." % guestid)
if (vm_obj is None and int(self.configspec.version.split('-')[1]) >= 14) or \
(vm_obj and int(vm_obj.config.version.split('-')[1]) >= 14 and (vm_obj.runtime.powerState == vim.VirtualMachinePowerState.poweredOff)):
self.configspec.flags = vim.vm.FlagInfo()
self.configspec.flags.vbsEnabled = bool(self.params['hardware']['virt_based_security'])
if bool(self.params['hardware']['virt_based_security']):
self.configspec.flags.vvtdEnabled = True
self.configspec.nestedHVEnabled = True
if (vm_obj is None and self.configspec.firmware == 'efi') or \
(vm_obj and vm_obj.config.firmware == 'efi'):
self.configspec.bootOptions = vim.vm.BootOptions()
self.configspec.bootOptions.efiSecureBootEnabled = True
else:
self.module.fail_json(msg="Not support VBS when firmware is BIOS.")
if vm_obj is None or self.configspec.flags.vbsEnabled != vm_obj.config.flags.vbsEnabled:
self.change_detected = True
def get_device_by_type(self, vm=None, type=None):
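        """
        Return the first device of the given type attached to the VM
        Args:
            vm: virtual machine object
            type: pyVmomi device class to look for
        Returns: matching device object, or None if not found
        """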
if vm is None or type is None:
return None
for device in vm.config.hardware.device:
if isinstance(device, type):
return device
return None
def get_vm_cdrom_device(self, vm=None):
return self.get_device_by_type(vm=vm, type=vim.vm.device.VirtualCdrom)
def get_vm_ide_device(self, vm=None):
return self.get_device_by_type(vm=vm, type=vim.vm.device.VirtualIDEController)
def get_vm_network_interfaces(self, vm=None):
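        """
        Return all network interface devices attached to the VM
        Args:
            vm: virtual machine object
        Returns: list of virtual network adapter devices, empty if vm is None
        """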
device_list = []
if vm is None:
return device_list
nw_device_types = (vim.vm.device.VirtualPCNet32, vim.vm.device.VirtualVmxnet2,
vim.vm.device.VirtualVmxnet3, vim.vm.device.VirtualE1000,
vim.vm.device.VirtualE1000e, vim.vm.device.VirtualSriovEthernetCard)
for device in vm.config.hardware.device:
if isinstance(device, nw_device_types):
device_list.append(device)
return device_list
def sanitize_network_params(self):
"""
Sanitize user provided network provided params
Returns: A sanitized list of network params, else fails
"""
network_devices = list()
# Clean up user data here
for network in self.params['networks']:
if 'name' not in network and 'vlan' not in network:
self.module.fail_json(msg="Please specify at least a network name or"
" a VLAN name under VM network list.")
if 'name' in network and self.cache.get_network(network['name']) is None:
self.module.fail_json(msg="Network '%(name)s' does not exist." % network)
elif 'vlan' in network:
dvps = self.cache.get_all_objs(self.content, [vim.dvs.DistributedVirtualPortgroup])
for dvp in dvps:
if hasattr(dvp.config.defaultPortConfig, 'vlan') and \
isinstance(dvp.config.defaultPortConfig.vlan.vlanId, int) and \
str(dvp.config.defaultPortConfig.vlan.vlanId) == str(network['vlan']):
network['name'] = dvp.config.name
break
if 'dvswitch_name' in network and \
dvp.config.distributedVirtualSwitch.name == network['dvswitch_name'] and \
dvp.config.name == network['vlan']:
network['name'] = dvp.config.name
break
if dvp.config.name == network['vlan']:
network['name'] = dvp.config.name
break
else:
self.module.fail_json(msg="VLAN '%(vlan)s' does not exist." % network)
if 'type' in network:
if network['type'] not in ['dhcp', 'static']:
self.module.fail_json(msg="Network type '%(type)s' is not a valid parameter."
" Valid parameters are ['dhcp', 'static']." % network)
if network['type'] != 'static' and ('ip' in network or 'netmask' in network):
self.module.fail_json(msg='Static IP information provided for network "%(name)s",'
' but "type" is set to "%(type)s".' % network)
else:
                # 'type' is an optional parameter; if the user provided an IP or netmask,
                # assume the network type is 'static'
if 'ip' in network or 'netmask' in network:
network['type'] = 'static'
else:
# User wants network type as 'dhcp'
network['type'] = 'dhcp'
if network.get('type') == 'static':
if 'ip' in network and 'netmask' not in network:
self.module.fail_json(msg="'netmask' is required if 'ip' is"
" specified under VM network list.")
if 'ip' not in network and 'netmask' in network:
self.module.fail_json(msg="'ip' is required if 'netmask' is"
" specified under VM network list.")
validate_device_types = ['pcnet32', 'vmxnet2', 'vmxnet3', 'e1000', 'e1000e', 'sriov']
if 'device_type' in network and network['device_type'] not in validate_device_types:
self.module.fail_json(msg="Device type specified '%s' is not valid."
" Please specify correct device"
" type from ['%s']." % (network['device_type'],
"', '".join(validate_device_types)))
if 'mac' in network and not PyVmomiDeviceHelper.is_valid_mac_addr(network['mac']):
self.module.fail_json(msg="Device MAC address '%s' is invalid."
" Please provide correct MAC address." % network['mac'])
network_devices.append(network)
return network_devices
def configure_network(self, vm_obj):
        # Ignore empty networks; this permits keeping existing networks when deploying from a template or cloning a VM
if len(self.params['networks']) == 0:
return
network_devices = self.sanitize_network_params()
# List current device for Clone or Idempotency
current_net_devices = self.get_vm_network_interfaces(vm=vm_obj)
if len(network_devices) < len(current_net_devices):
self.module.fail_json(msg="Given network device list is lesser than current VM device list (%d < %d). "
"Removing interfaces is not allowed"
% (len(network_devices), len(current_net_devices)))
for key in range(0, len(network_devices)):
nic_change_detected = False
network_name = network_devices[key]['name']
if key < len(current_net_devices) and (vm_obj or self.params['template']):
                # We are editing existing network devices; this happens when we
                # are cloning from a VM or a template
nic = vim.vm.device.VirtualDeviceSpec()
nic.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
nic.device = current_net_devices[key]
if ('wake_on_lan' in network_devices[key] and
nic.device.wakeOnLanEnabled != network_devices[key].get('wake_on_lan')):
nic.device.wakeOnLanEnabled = network_devices[key].get('wake_on_lan')
nic_change_detected = True
if ('start_connected' in network_devices[key] and
nic.device.connectable.startConnected != network_devices[key].get('start_connected')):
nic.device.connectable.startConnected = network_devices[key].get('start_connected')
nic_change_detected = True
if ('allow_guest_control' in network_devices[key] and
nic.device.connectable.allowGuestControl != network_devices[key].get('allow_guest_control')):
nic.device.connectable.allowGuestControl = network_devices[key].get('allow_guest_control')
nic_change_detected = True
if nic.device.deviceInfo.summary != network_name:
nic.device.deviceInfo.summary = network_name
nic_change_detected = True
if 'device_type' in network_devices[key]:
device = self.device_helper.get_device(network_devices[key]['device_type'], network_name)
device_class = type(device)
if not isinstance(nic.device, device_class):
self.module.fail_json(msg="Changing the device type is not possible when interface is already present. "
"The failing device type is %s" % network_devices[key]['device_type'])
# Changing mac address has no effect when editing interface
if 'mac' in network_devices[key] and nic.device.macAddress != current_net_devices[key].macAddress:
self.module.fail_json(msg="Changing MAC address has not effect when interface is already present. "
"The failing new MAC address is %s" % nic.device.macAddress)
else:
                # Default device type is vmxnet3, a VMware best practice
device_type = network_devices[key].get('device_type', 'vmxnet3')
nic = self.device_helper.create_nic(device_type,
'Network Adapter %s' % (key + 1),
network_devices[key])
nic.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
nic_change_detected = True
if hasattr(self.cache.get_network(network_name), 'portKeys'):
# VDS switch
pg_obj = None
if 'dvswitch_name' in network_devices[key]:
dvs_name = network_devices[key]['dvswitch_name']
dvs_obj = find_dvs_by_name(self.content, dvs_name)
if dvs_obj is None:
self.module.fail_json(msg="Unable to find distributed virtual switch %s" % dvs_name)
pg_obj = find_dvspg_by_name(dvs_obj, network_name)
if pg_obj is None:
self.module.fail_json(msg="Unable to find distributed port group %s" % network_name)
else:
pg_obj = self.cache.find_obj(self.content, [vim.dvs.DistributedVirtualPortgroup], network_name)
if (nic.device.backing and
(not hasattr(nic.device.backing, 'port') or
(nic.device.backing.port.portgroupKey != pg_obj.key or
nic.device.backing.port.switchUuid != pg_obj.config.distributedVirtualSwitch.uuid))):
nic_change_detected = True
dvs_port_connection = vim.dvs.PortConnection()
dvs_port_connection.portgroupKey = pg_obj.key
                # If the user specifies a distributed port group that is not associated with the
                # host system on which the virtual machine is going to be deployed, we get an error.
                # We can infer that there is no association between the given distributed port group
                # and the host system.
host_system = self.params.get('esxi_hostname')
if host_system and host_system not in [host.config.host.name for host in pg_obj.config.distributedVirtualSwitch.config.host]:
self.module.fail_json(msg="It seems that host system '%s' is not associated with distributed"
" virtual portgroup '%s'. Please make sure host system is associated"
" with given distributed virtual portgroup" % (host_system, pg_obj.name))
# TODO: (akasurde) There is no way to find association between resource pool and distributed virtual portgroup
# For now, check if we are able to find distributed virtual switch
if not pg_obj.config.distributedVirtualSwitch:
self.module.fail_json(msg="Failed to find distributed virtual switch which is associated with"
" distributed virtual portgroup '%s'. Make sure hostsystem is associated with"
" the given distributed virtual portgroup." % pg_obj.name)
dvs_port_connection.switchUuid = pg_obj.config.distributedVirtualSwitch.uuid
nic.device.backing = vim.vm.device.VirtualEthernetCard.DistributedVirtualPortBackingInfo()
nic.device.backing.port = dvs_port_connection
elif isinstance(self.cache.get_network(network_name), vim.OpaqueNetwork):
# NSX-T Logical Switch
nic.device.backing = vim.vm.device.VirtualEthernetCard.OpaqueNetworkBackingInfo()
network_id = self.cache.get_network(network_name).summary.opaqueNetworkId
nic.device.backing.opaqueNetworkType = 'nsx.LogicalSwitch'
nic.device.backing.opaqueNetworkId = network_id
nic.device.deviceInfo.summary = 'nsx.LogicalSwitch: %s' % network_id
else:
# vSwitch
if not isinstance(nic.device.backing, vim.vm.device.VirtualEthernetCard.NetworkBackingInfo):
nic.device.backing = vim.vm.device.VirtualEthernetCard.NetworkBackingInfo()
nic_change_detected = True
net_obj = self.cache.get_network(network_name)
if nic.device.backing.network != net_obj:
nic.device.backing.network = net_obj
nic_change_detected = True
if nic.device.backing.deviceName != network_name:
nic.device.backing.deviceName = network_name
nic_change_detected = True
if nic_change_detected:
self.configspec.deviceChange.append(nic)
self.change_detected = True
def configure_vapp_properties(self, vm_obj):
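        """
        Function to configure vApp properties of virtual machine
        Args:
            vm_obj: virtual machine object
        """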
if len(self.params['vapp_properties']) == 0:
return
for x in self.params['vapp_properties']:
if not x.get('id'):
self.module.fail_json(msg="id is required to set vApp property")
new_vmconfig_spec = vim.vApp.VmConfigSpec()
# This is primarily for vcsim/integration tests, unset vAppConfig was not seen on my deployments
orig_spec = vm_obj.config.vAppConfig if vm_obj.config.vAppConfig else new_vmconfig_spec
vapp_properties_current = dict((x.id, x) for x in orig_spec.property)
vapp_properties_to_change = dict((x['id'], x) for x in self.params['vapp_properties'])
# each property must have a unique key
# init key counter with max value + 1
all_keys = [x.key for x in orig_spec.property]
new_property_index = max(all_keys) + 1 if all_keys else 0
for property_id, property_spec in vapp_properties_to_change.items():
is_property_changed = False
new_vapp_property_spec = vim.vApp.PropertySpec()
if property_id in vapp_properties_current:
if property_spec.get('operation') == 'remove':
new_vapp_property_spec.operation = 'remove'
new_vapp_property_spec.removeKey = vapp_properties_current[property_id].key
is_property_changed = True
else:
# this is 'edit' branch
new_vapp_property_spec.operation = 'edit'
new_vapp_property_spec.info = vapp_properties_current[property_id]
try:
for property_name, property_value in property_spec.items():
if property_name == 'operation':
# operation is not an info object property
# if set to anything other than 'remove' we don't fail
continue
# Updating attributes only if needed
if getattr(new_vapp_property_spec.info, property_name) != property_value:
setattr(new_vapp_property_spec.info, property_name, property_value)
is_property_changed = True
except Exception as e:
self.module.fail_json(msg="Failed to set vApp property field='%s' and value='%s'. Error: %s"
% (property_name, property_value, to_text(e)))
else:
if property_spec.get('operation') == 'remove':
                    # attempt to delete a non-existent property
continue
# this is add new property branch
new_vapp_property_spec.operation = 'add'
property_info = vim.vApp.PropertyInfo()
property_info.classId = property_spec.get('classId')
property_info.instanceId = property_spec.get('instanceId')
property_info.id = property_spec.get('id')
property_info.category = property_spec.get('category')
property_info.label = property_spec.get('label')
property_info.type = property_spec.get('type', 'string')
property_info.userConfigurable = property_spec.get('userConfigurable', True)
property_info.defaultValue = property_spec.get('defaultValue')
property_info.value = property_spec.get('value', '')
property_info.description = property_spec.get('description')
new_vapp_property_spec.info = property_info
new_vapp_property_spec.info.key = new_property_index
new_property_index += 1
is_property_changed = True
if is_property_changed:
new_vmconfig_spec.property.append(new_vapp_property_spec)
if new_vmconfig_spec.property:
self.configspec.vAppConfig = new_vmconfig_spec
self.change_detected = True
def customize_customvalues(self, vm_obj, config_spec):
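        """
        Append user-provided custom values as extraConfig key/value options to the config spec
        Args:
            vm_obj: virtual machine object
            config_spec: vim.vm.ConfigSpec object to extend
        """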
if len(self.params['customvalues']) == 0:
return
vm_custom_spec = config_spec
vm_custom_spec.extraConfig = []
changed = False
facts = self.gather_facts(vm_obj)
for kv in self.params['customvalues']:
            if 'key' not in kv or 'value' not in kv:
                self.module.fail_json(msg="customvalues items require both 'key' and 'value' fields.")
            # If the kv differs from the value fetched from facts, change it
if kv['key'] not in facts['customvalues'] or facts['customvalues'][kv['key']] != kv['value']:
option = vim.option.OptionValue()
option.key = kv['key']
option.value = kv['value']
vm_custom_spec.extraConfig.append(option)
changed = True
if changed:
self.change_detected = True
def customize_vm(self, vm_obj):
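        """
        Build the guest OS customization specification (self.customspec) from module parameters
        Args:
            vm_obj: virtual machine object
        """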
# User specified customization specification
custom_spec_name = self.params.get('customization_spec')
if custom_spec_name:
cc_mgr = self.content.customizationSpecManager
if cc_mgr.DoesCustomizationSpecExist(name=custom_spec_name):
temp_spec = cc_mgr.GetCustomizationSpec(name=custom_spec_name)
self.customspec = temp_spec.spec
return
else:
self.module.fail_json(msg="Unable to find customization specification"
" '%s' in given configuration." % custom_spec_name)
# Network settings
adaptermaps = []
for network in self.params['networks']:
guest_map = vim.vm.customization.AdapterMapping()
guest_map.adapter = vim.vm.customization.IPSettings()
if 'ip' in network and 'netmask' in network:
guest_map.adapter.ip = vim.vm.customization.FixedIp()
guest_map.adapter.ip.ipAddress = str(network['ip'])
guest_map.adapter.subnetMask = str(network['netmask'])
elif 'type' in network and network['type'] == 'dhcp':
guest_map.adapter.ip = vim.vm.customization.DhcpIpGenerator()
if 'gateway' in network:
guest_map.adapter.gateway = network['gateway']
# On Windows, DNS domain and DNS servers can be set by network interface
# https://pubs.vmware.com/vi3/sdk/ReferenceGuide/vim.vm.customization.IPSettings.html
if 'domain' in network:
guest_map.adapter.dnsDomain = network['domain']
elif 'domain' in self.params['customization']:
guest_map.adapter.dnsDomain = self.params['customization']['domain']
if 'dns_servers' in network:
guest_map.adapter.dnsServerList = network['dns_servers']
elif 'dns_servers' in self.params['customization']:
guest_map.adapter.dnsServerList = self.params['customization']['dns_servers']
adaptermaps.append(guest_map)
# Global DNS settings
globalip = vim.vm.customization.GlobalIPSettings()
if 'dns_servers' in self.params['customization']:
globalip.dnsServerList = self.params['customization']['dns_servers']
# TODO: Maybe list the different domains from the interfaces here by default ?
if 'dns_suffix' in self.params['customization']:
dns_suffix = self.params['customization']['dns_suffix']
if isinstance(dns_suffix, list):
globalip.dnsSuffixList = " ".join(dns_suffix)
else:
globalip.dnsSuffixList = dns_suffix
elif 'domain' in self.params['customization']:
globalip.dnsSuffixList = self.params['customization']['domain']
if self.params['guest_id']:
guest_id = self.params['guest_id']
else:
guest_id = vm_obj.summary.config.guestId
# For windows guest OS, use SysPrep
# https://pubs.vmware.com/vi3/sdk/ReferenceGuide/vim.vm.customization.Sysprep.html#field_detail
if 'win' in guest_id:
ident = vim.vm.customization.Sysprep()
ident.userData = vim.vm.customization.UserData()
            # Setting hostName, orgName and fullName is mandatory, so we set some defaults when missing
ident.userData.computerName = vim.vm.customization.FixedName()
ident.userData.computerName.name = str(self.params['customization'].get('hostname', self.params['name'].split('.')[0]))
ident.userData.fullName = str(self.params['customization'].get('fullname', 'Administrator'))
ident.userData.orgName = str(self.params['customization'].get('orgname', 'ACME'))
if 'productid' in self.params['customization']:
ident.userData.productId = str(self.params['customization']['productid'])
ident.guiUnattended = vim.vm.customization.GuiUnattended()
if 'autologon' in self.params['customization']:
ident.guiUnattended.autoLogon = self.params['customization']['autologon']
ident.guiUnattended.autoLogonCount = self.params['customization'].get('autologoncount', 1)
if 'timezone' in self.params['customization']:
                # Check if the timezone value is an int before proceeding.
ident.guiUnattended.timeZone = self.device_helper.integer_value(
self.params['customization']['timezone'],
'customization.timezone')
ident.identification = vim.vm.customization.Identification()
if self.params['customization'].get('password', '') != '':
ident.guiUnattended.password = vim.vm.customization.Password()
ident.guiUnattended.password.value = str(self.params['customization']['password'])
ident.guiUnattended.password.plainText = True
if 'joindomain' in self.params['customization']:
if 'domainadmin' not in self.params['customization'] or 'domainadminpassword' not in self.params['customization']:
self.module.fail_json(msg="'domainadmin' and 'domainadminpassword' entries are mandatory in 'customization' section to use "
"joindomain feature")
ident.identification.domainAdmin = str(self.params['customization']['domainadmin'])
ident.identification.joinDomain = str(self.params['customization']['joindomain'])
ident.identification.domainAdminPassword = vim.vm.customization.Password()
ident.identification.domainAdminPassword.value = str(self.params['customization']['domainadminpassword'])
ident.identification.domainAdminPassword.plainText = True
elif 'joinworkgroup' in self.params['customization']:
ident.identification.joinWorkgroup = str(self.params['customization']['joinworkgroup'])
if 'runonce' in self.params['customization']:
ident.guiRunOnce = vim.vm.customization.GuiRunOnce()
ident.guiRunOnce.commandList = self.params['customization']['runonce']
else:
# FIXME: We have no clue whether this non-Windows OS is actually Linux, hence it might fail!
# For Linux guest OS, use LinuxPrep
# https://pubs.vmware.com/vi3/sdk/ReferenceGuide/vim.vm.customization.LinuxPrep.html
ident = vim.vm.customization.LinuxPrep()
# TODO: Maybe add domain from interface if missing ?
if 'domain' in self.params['customization']:
ident.domain = str(self.params['customization']['domain'])
ident.hostName = vim.vm.customization.FixedName()
hostname = str(self.params['customization'].get('hostname', self.params['name'].split('.')[0]))
            # Remove all characters except alphanumerics and the minus sign, which are allowed by RFC 952
valid_hostname = re.sub(r"[^a-zA-Z0-9\-]", "", hostname)
ident.hostName.name = valid_hostname
self.customspec = vim.vm.customization.Specification()
self.customspec.nicSettingMap = adaptermaps
self.customspec.globalIPSettings = globalip
self.customspec.identity = ident
def get_vm_scsi_controller(self, vm_obj):
# If vm_obj doesn't exist there is no SCSI controller to find
if vm_obj is None:
return None
for device in vm_obj.config.hardware.device:
if self.device_helper.is_scsi_controller(device):
scsi_ctl = vim.vm.device.VirtualDeviceSpec()
scsi_ctl.device = device
return scsi_ctl
return None
def get_configured_disk_size(self, expected_disk_spec):
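        """
        Compute the configured disk size in kilobytes from a disk specification
        Args:
            expected_disk_spec: disk specification dictionary using 'size' or 'size_[tb,gb,mb,kb]'
        Returns: disk size in KB, else fails
        """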
# what size is it?
if [x for x in expected_disk_spec.keys() if x.startswith('size_') or x == 'size']:
# size, size_tb, size_gb, size_mb, size_kb
if 'size' in expected_disk_spec:
size_regex = re.compile(r'(\d+(?:\.\d+)?)([tgmkTGMK][bB])')
disk_size_m = size_regex.match(expected_disk_spec['size'])
try:
if disk_size_m:
expected = disk_size_m.group(1)
unit = disk_size_m.group(2)
else:
raise ValueError
if re.match(r'\d+\.\d+', expected):
# We found float value in string, let's typecast it
expected = float(expected)
else:
# We found int value in string, let's typecast it
expected = int(expected)
if not expected or not unit:
raise ValueError
except (TypeError, ValueError, NameError):
                    # Common failure
                    self.module.fail_json(msg="Failed to parse the disk size. Please review the value"
                                              " provided using the documentation.")
else:
param = [x for x in expected_disk_spec.keys() if x.startswith('size_')][0]
unit = param.split('_')[-1].lower()
expected = [x[1] for x in expected_disk_spec.items() if x[0].startswith('size_')][0]
expected = int(expected)
disk_units = dict(tb=3, gb=2, mb=1, kb=0)
if unit in disk_units:
unit = unit.lower()
return expected * (1024 ** disk_units[unit])
else:
self.module.fail_json(msg="%s is not a supported unit for disk size."
" Supported units are ['%s']." % (unit,
"', '".join(disk_units.keys())))
        # No size was found for the disk, fail
self.module.fail_json(
msg="No size, size_kb, size_mb, size_gb or size_tb attribute found into disk configuration")
def configure_disks(self, vm_obj):
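        """
        Function to manage virtual disks of virtual machine
        Args:
            vm_obj: virtual machine object
        """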
        # Ignore an empty disk list; this permits keeping existing disks when deploying from a template or cloning a VM
if len(self.params['disk']) == 0:
return
scsi_ctl = self.get_vm_scsi_controller(vm_obj)
# Create scsi controller only if we are deploying a new VM, not a template or reconfiguring
if vm_obj is None or scsi_ctl is None:
scsi_ctl = self.device_helper.create_scsi_controller(self.get_scsi_type())
self.change_detected = True
self.configspec.deviceChange.append(scsi_ctl)
disks = [x for x in vm_obj.config.hardware.device if isinstance(x, vim.vm.device.VirtualDisk)] \
if vm_obj is not None else None
if disks is not None and self.params.get('disk') and len(self.params.get('disk')) < len(disks):
self.module.fail_json(msg="Provided disks configuration has less disks than "
"the target object (%d vs %d)" % (len(self.params.get('disk')), len(disks)))
disk_index = 0
for expected_disk_spec in self.params.get('disk'):
disk_modified = False
            # If we are manipulating an existing object which has disks and disk_index is in disks
if vm_obj is not None and disks is not None and disk_index < len(disks):
diskspec = vim.vm.device.VirtualDeviceSpec()
# set the operation to edit so that it knows to keep other settings
diskspec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
diskspec.device = disks[disk_index]
else:
diskspec = self.device_helper.create_scsi_disk(scsi_ctl, disk_index)
disk_modified = True
if 'disk_mode' in expected_disk_spec:
disk_mode = expected_disk_spec.get('disk_mode', 'persistent').lower()
valid_disk_mode = ['persistent', 'independent_persistent', 'independent_nonpersistent']
if disk_mode not in valid_disk_mode:
self.module.fail_json(msg="disk_mode specified is not valid."
" Should be one of ['%s']" % "', '".join(valid_disk_mode))
if (vm_obj and diskspec.device.backing.diskMode != disk_mode) or (vm_obj is None):
diskspec.device.backing.diskMode = disk_mode
disk_modified = True
else:
diskspec.device.backing.diskMode = "persistent"
# is it thin?
if 'type' in expected_disk_spec:
disk_type = expected_disk_spec.get('type', '').lower()
if disk_type == 'thin':
diskspec.device.backing.thinProvisioned = True
elif disk_type == 'eagerzeroedthick':
diskspec.device.backing.eagerlyScrub = True
# which datastore?
if expected_disk_spec.get('datastore'):
# TODO: This is already handled by the relocation spec,
# but it needs to eventually be handled for all the
# other disks defined
pass
# increment index for next disk search
disk_index += 1
            # index 7 is reserved for the SCSI controller
if disk_index == 7:
disk_index += 1
kb = self.get_configured_disk_size(expected_disk_spec)
            # VMware doesn't allow reducing disk sizes
if kb < diskspec.device.capacityInKB:
self.module.fail_json(
msg="Given disk size is smaller than found (%d < %d). Reducing disks is not allowed." %
(kb, diskspec.device.capacityInKB))
if kb != diskspec.device.capacityInKB or disk_modified:
diskspec.device.capacityInKB = kb
self.configspec.deviceChange.append(diskspec)
self.change_detected = True
def select_host(self):
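        """
        Find the ESXi host given by the esxi_hostname parameter and validate its state
        Returns: host system managed object, else fails
        """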
hostsystem = self.cache.get_esx_host(self.params['esxi_hostname'])
if not hostsystem:
self.module.fail_json(msg='Failed to find ESX host "%(esxi_hostname)s"' % self.params)
if hostsystem.runtime.connectionState != 'connected' or hostsystem.runtime.inMaintenanceMode:
self.module.fail_json(msg='ESXi "%(esxi_hostname)s" is in invalid state or in maintenance mode.' % self.params)
return hostsystem
def autoselect_datastore(self):
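        """
        Select the datastore with the most free space among all datastores
        Returns: datastore managed object with the most free space
        """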
datastore = None
datastores = self.cache.get_all_objs(self.content, [vim.Datastore])
if datastores is None or len(datastores) == 0:
self.module.fail_json(msg="Unable to find a datastore list when autoselecting")
datastore_freespace = 0
for ds in datastores:
if ds.summary.freeSpace > datastore_freespace:
datastore = ds
datastore_freespace = ds.summary.freeSpace
return datastore
def get_recommended_datastore(self, datastore_cluster_obj=None):
"""
Function to return Storage DRS recommended datastore from datastore cluster
Args:
datastore_cluster_obj: datastore cluster managed object
Returns: Name of recommended datastore from the given datastore cluster
"""
if datastore_cluster_obj is None:
return None
# Check if Datastore Cluster provided by user is SDRS ready
sdrs_status = datastore_cluster_obj.podStorageDrsEntry.storageDrsConfig.podConfig.enabled
if sdrs_status:
            # We can get a storage recommendation only if SDRS is enabled on the given datastore cluster
pod_sel_spec = vim.storageDrs.PodSelectionSpec()
pod_sel_spec.storagePod = datastore_cluster_obj
storage_spec = vim.storageDrs.StoragePlacementSpec()
storage_spec.podSelectionSpec = pod_sel_spec
storage_spec.type = 'create'
try:
rec = self.content.storageResourceManager.RecommendDatastores(storageSpec=storage_spec)
rec_action = rec.recommendations[0].action[0]
return rec_action.destination.name
except Exception as e:
                # There was an error, so fall back to the general workflow
pass
datastore = None
datastore_freespace = 0
for ds in datastore_cluster_obj.childEntity:
if isinstance(ds, vim.Datastore) and ds.summary.freeSpace > datastore_freespace:
                # Keep track of the datastore with the most free space
datastore = ds
datastore_freespace = ds.summary.freeSpace
if datastore:
return datastore.name
return None
def select_datastore(self, vm_obj=None):
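        """
        Select the destination datastore for the virtual machine and its disks
        Args:
            vm_obj: virtual machine object, used to fall back to the template's datastore
        Returns: tuple of (datastore managed object, datastore name), else fails
        """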
datastore = None
datastore_name = None
if len(self.params['disk']) != 0:
# TODO: really use the datastore for newly created disks
if 'autoselect_datastore' in self.params['disk'][0] and self.params['disk'][0]['autoselect_datastore']:
datastores = self.cache.get_all_objs(self.content, [vim.Datastore])
datastores = [x for x in datastores if self.cache.get_parent_datacenter(x).name == self.params['datacenter']]
if datastores is None or len(datastores) == 0:
self.module.fail_json(msg="Unable to find a datastore list when autoselecting")
datastore_freespace = 0
for ds in datastores:
if (ds.summary.freeSpace > datastore_freespace) or (ds.summary.freeSpace == datastore_freespace and not datastore):
# If datastore field is provided, filter destination datastores
if 'datastore' in self.params['disk'][0] and \
isinstance(self.params['disk'][0]['datastore'], str) and \
ds.name.find(self.params['disk'][0]['datastore']) < 0:
continue
datastore = ds
datastore_name = datastore.name
datastore_freespace = ds.summary.freeSpace
elif 'datastore' in self.params['disk'][0]:
datastore_name = self.params['disk'][0]['datastore']
# Check if user has provided datastore cluster first
datastore_cluster = self.cache.find_obj(self.content, [vim.StoragePod], datastore_name)
if datastore_cluster:
                    # If the user specified a datastore cluster, get the recommended datastore
datastore_name = self.get_recommended_datastore(datastore_cluster_obj=datastore_cluster)
# Check if get_recommended_datastore or user specified datastore exists or not
datastore = self.cache.find_obj(self.content, [vim.Datastore], datastore_name)
else:
self.module.fail_json(msg="Either datastore or autoselect_datastore should be provided to select datastore")
if not datastore and self.params['template']:
# use the template's existing DS
disks = [x for x in vm_obj.config.hardware.device if isinstance(x, vim.vm.device.VirtualDisk)]
if disks:
datastore = disks[0].backing.datastore
datastore_name = datastore.name
# validation
if datastore:
dc = self.cache.get_parent_datacenter(datastore)
if dc.name != self.params['datacenter']:
datastore = self.autoselect_datastore()
datastore_name = datastore.name
if not datastore:
if len(self.params['disk']) != 0 or self.params['template'] is None:
self.module.fail_json(msg="Unable to find the datastore with given parameters."
" This could mean, %s is a non-existent virtual machine and module tried to"
" deploy it as new virtual machine with no disk. Please specify disks parameter"
" or specify template to clone from." % self.params['name'])
self.module.fail_json(msg="Failed to find a matching datastore")
return datastore, datastore_name
def obj_has_parent(self, obj, parent):
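        """
        Walk up the inventory tree from obj and check whether parent is one of its ancestors
        Args:
            obj: inventory object to start from
            parent: candidate parent object (matched by name)
        Returns: True if parent is found in the ancestry of obj, else False
        """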
if obj is None and parent is None:
raise AssertionError()
current_parent = obj
while True:
if current_parent.name == parent.name:
return True
            # Check if we have reached the root folder
moid = current_parent._moId
if moid in ['group-d1', 'ha-folder-root']:
return False
current_parent = current_parent.parent
if current_parent is None:
return False
def get_scsi_type(self):
disk_controller_type = "paravirtual"
        # use the SCSI controller type provided by the user, if any
if 'hardware' in self.params:
if 'scsi' in self.params['hardware']:
if self.params['hardware']['scsi'] in ['buslogic', 'paravirtual', 'lsilogic', 'lsilogicsas']:
disk_controller_type = self.params['hardware']['scsi']
else:
self.module.fail_json(msg="hardware.scsi attribute should be 'paravirtual' or 'lsilogic'")
return disk_controller_type
def find_folder(self, searchpath):
""" Walk inventory objects one position of the searchpath at a time """
# split the searchpath so we can iterate through it
paths = [x.replace('/', '') for x in searchpath.split('/')]
paths_total = len(paths) - 1
position = 0
# recursive walk while looking for next element in searchpath
root = self.content.rootFolder
while root and position <= paths_total:
change = False
if hasattr(root, 'childEntity'):
for child in root.childEntity:
if child.name == paths[position]:
root = child
position += 1
change = True
break
elif isinstance(root, vim.Datacenter):
if hasattr(root, 'vmFolder'):
if root.vmFolder.name == paths[position]:
root = root.vmFolder
position += 1
change = True
else:
root = None
if not change:
root = None
return root
def get_resource_pool(self, cluster=None, host=None, resource_pool=None):
""" Get a resource pool, filter on cluster, esxi_hostname or resource_pool if given """
cluster_name = cluster or self.params.get('cluster', None)
host_name = host or self.params.get('esxi_hostname', None)
resource_pool_name = resource_pool or self.params.get('resource_pool', None)
# get the datacenter object
datacenter = find_obj(self.content, [vim.Datacenter], self.params['datacenter'])
if not datacenter:
self.module.fail_json(msg='Unable to find datacenter "%s"' % self.params['datacenter'])
# if cluster is given, get the cluster object
if cluster_name:
cluster = find_obj(self.content, [vim.ComputeResource], cluster_name, folder=datacenter)
if not cluster:
self.module.fail_json(msg='Unable to find cluster "%s"' % cluster_name)
# if host is given, get the cluster object using the host
elif host_name:
host = find_obj(self.content, [vim.HostSystem], host_name, folder=datacenter)
if not host:
self.module.fail_json(msg='Unable to find host "%s"' % host_name)
cluster = host.parent
else:
cluster = None
# get resource pools limiting search to cluster or datacenter
resource_pool = find_obj(self.content, [vim.ResourcePool], resource_pool_name, folder=cluster or datacenter)
if not resource_pool:
if resource_pool_name:
self.module.fail_json(msg='Unable to find resource_pool "%s"' % resource_pool_name)
else:
self.module.fail_json(msg='Unable to find resource pool, need esxi_hostname, resource_pool, or cluster')
return resource_pool
def deploy_vm(self):
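        """
        Deploy a new virtual machine, either from scratch or by cloning a template or VM
        Returns: dictionary with 'changed', 'failed' and either 'instance' facts or error details
        """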
# https://github.com/vmware/pyvmomi-community-samples/blob/master/samples/clone_vm.py
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.vm.CloneSpec.html
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.vm.ConfigSpec.html
# https://www.vmware.com/support/developer/vc-sdk/visdk41pubs/ApiReference/vim.vm.RelocateSpec.html
# FIXME:
# - static IPs
self.folder = self.params.get('folder', None)
if self.folder is None:
self.module.fail_json(msg="Folder is required parameter while deploying new virtual machine")
# Prepend / if it was missing from the folder path, also strip trailing slashes
if not self.folder.startswith('/'):
self.folder = '/%(folder)s' % self.params
self.folder = self.folder.rstrip('/')
datacenter = self.cache.find_obj(self.content, [vim.Datacenter], self.params['datacenter'])
if datacenter is None:
self.module.fail_json(msg='No datacenter named %(datacenter)s was found' % self.params)
dcpath = compile_folder_path_for_object(datacenter)
# Nested folder does not have trailing /
if not dcpath.endswith('/'):
dcpath += '/'
# Check for full path first in case it was already supplied
if (self.folder.startswith(dcpath + self.params['datacenter'] + '/vm') or
self.folder.startswith(dcpath + '/' + self.params['datacenter'] + '/vm')):
fullpath = self.folder
elif self.folder.startswith('/vm/') or self.folder == '/vm':
fullpath = "%s%s%s" % (dcpath, self.params['datacenter'], self.folder)
elif self.folder.startswith('/'):
fullpath = "%s%s/vm%s" % (dcpath, self.params['datacenter'], self.folder)
else:
fullpath = "%s%s/vm/%s" % (dcpath, self.params['datacenter'], self.folder)
f_obj = self.content.searchIndex.FindByInventoryPath(fullpath)
# abort if no strategy was successful
if f_obj is None:
            # Add some debugging values on failure.
details = {
'datacenter': datacenter.name,
'datacenter_path': dcpath,
'folder': self.folder,
'full_search_path': fullpath,
}
self.module.fail_json(msg='No folder %s matched in the search path : %s' % (self.folder, fullpath),
details=details)
destfolder = f_obj
if self.params['template']:
vm_obj = self.get_vm_or_template(template_name=self.params['template'])
if vm_obj is None:
self.module.fail_json(msg="Could not find a template named %(template)s" % self.params)
else:
vm_obj = None
# always get a resource_pool
resource_pool = self.get_resource_pool()
# set the destination datastore for VM & disks
if self.params['datastore']:
            # Give precedence to the datastore value provided by the user
# User may want to deploy VM to specific datastore.
datastore_name = self.params['datastore']
# Check if user has provided datastore cluster first
datastore_cluster = self.cache.find_obj(self.content, [vim.StoragePod], datastore_name)
if datastore_cluster:
                # If the user specified a datastore cluster, get the recommended datastore
datastore_name = self.get_recommended_datastore(datastore_cluster_obj=datastore_cluster)
# Check if get_recommended_datastore or user specified datastore exists or not
datastore = self.cache.find_obj(self.content, [vim.Datastore], datastore_name)
else:
(datastore, datastore_name) = self.select_datastore(vm_obj)
self.configspec = vim.vm.ConfigSpec()
self.configspec.deviceChange = []
self.configure_guestid(vm_obj=vm_obj, vm_creation=True)
self.configure_cpu_and_memory(vm_obj=vm_obj, vm_creation=True)
self.configure_hardware_params(vm_obj=vm_obj)
self.configure_resource_alloc_info(vm_obj=vm_obj)
self.configure_disks(vm_obj=vm_obj)
self.configure_network(vm_obj=vm_obj)
self.configure_cdrom(vm_obj=vm_obj)
        # Find out if we need network customizations (find keys in the dictionary that require customization)
network_changes = False
for nw in self.params['networks']:
for key in nw:
# We don't need customizations for these keys
if key not in ('device_type', 'mac', 'name', 'vlan', 'type', 'start_connected'):
network_changes = True
break
if len(self.params['customization']) > 0 or network_changes or self.params.get('customization_spec'):
self.customize_vm(vm_obj=vm_obj)
clonespec = None
clone_method = None
try:
if self.params['template']:
# create the relocation spec
relospec = vim.vm.RelocateSpec()
# Only select specific host when ESXi hostname is provided
if self.params['esxi_hostname']:
relospec.host = self.select_host()
relospec.datastore = datastore
                # Convert the disks present in the template if 'convert' is set
if self.params['convert']:
for device in vm_obj.config.hardware.device:
if hasattr(device.backing, 'fileName'):
disk_locator = vim.vm.RelocateSpec.DiskLocator()
disk_locator.diskBackingInfo = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
if self.params['convert'] in ['thin']:
disk_locator.diskBackingInfo.thinProvisioned = True
if self.params['convert'] in ['eagerzeroedthick']:
disk_locator.diskBackingInfo.eagerlyScrub = True
if self.params['convert'] in ['thick']:
disk_locator.diskBackingInfo.diskMode = "persistent"
disk_locator.diskId = device.key
disk_locator.datastore = datastore
relospec.disk.append(disk_locator)
# https://www.vmware.com/support/developer/vc-sdk/visdk41pubs/ApiReference/vim.vm.RelocateSpec.html
# > pool: For a clone operation from a template to a virtual machine, this argument is required.
relospec.pool = resource_pool
linked_clone = self.params.get('linked_clone')
snapshot_src = self.params.get('snapshot_src', None)
if linked_clone:
if snapshot_src is not None:
relospec.diskMoveType = vim.vm.RelocateSpec.DiskMoveOptions.createNewChildDiskBacking
else:
self.module.fail_json(msg="Parameter 'linked_src' and 'snapshot_src' are"
" required together for linked clone operation.")
clonespec = vim.vm.CloneSpec(template=self.params['is_template'], location=relospec)
if self.customspec:
clonespec.customization = self.customspec
if snapshot_src is not None:
if vm_obj.snapshot is None:
self.module.fail_json(msg="No snapshots present for virtual machine or template [%(template)s]" % self.params)
snapshot = self.get_snapshots_by_name_recursively(snapshots=vm_obj.snapshot.rootSnapshotList,
snapname=snapshot_src)
if len(snapshot) != 1:
self.module.fail_json(msg='virtual machine "%(template)s" does not contain'
' snapshot named "%(snapshot_src)s"' % self.params)
clonespec.snapshot = snapshot[0].snapshot
clonespec.config = self.configspec
clone_method = 'Clone'
try:
task = vm_obj.Clone(folder=destfolder, name=self.params['name'], spec=clonespec)
except vim.fault.NoPermission as e:
self.module.fail_json(msg="Failed to clone virtual machine %s to folder %s "
"due to permission issue: %s" % (self.params['name'],
destfolder,
to_native(e.msg)))
self.change_detected = True
else:
                # ConfigSpec requires a name for VM creation
self.configspec.name = self.params['name']
self.configspec.files = vim.vm.FileInfo(logDirectory=None,
snapshotDirectory=None,
suspendDirectory=None,
vmPathName="[" + datastore_name + "]")
clone_method = 'CreateVM_Task'
try:
task = destfolder.CreateVM_Task(config=self.configspec, pool=resource_pool)
except vmodl.fault.InvalidRequest as e:
self.module.fail_json(msg="Failed to create virtual machine due to invalid configuration "
"parameter %s" % to_native(e.msg))
except vim.fault.RestrictedVersion as e:
self.module.fail_json(msg="Failed to create virtual machine due to "
"product versioning restrictions: %s" % to_native(e.msg))
self.change_detected = True
self.wait_for_task(task)
except TypeError as e:
self.module.fail_json(msg="TypeError was returned, please ensure to give correct inputs. %s" % to_text(e))
if task.info.state == 'error':
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2021361
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2173
# provide these to the user for debugging
clonespec_json = serialize_spec(clonespec)
configspec_json = serialize_spec(self.configspec)
kwargs = {
'changed': self.change_detected,
'failed': True,
'msg': task.info.error.msg,
'clonespec': clonespec_json,
'configspec': configspec_json,
'clone_method': clone_method
}
return kwargs
else:
# set annotation
vm = task.info.result
if self.params['annotation']:
annotation_spec = vim.vm.ConfigSpec()
annotation_spec.annotation = str(self.params['annotation'])
task = vm.ReconfigVM_Task(annotation_spec)
self.wait_for_task(task)
if self.params['customvalues']:
vm_custom_spec = vim.vm.ConfigSpec()
self.customize_customvalues(vm_obj=vm, config_spec=vm_custom_spec)
task = vm.ReconfigVM_Task(vm_custom_spec)
self.wait_for_task(task)
if self.params['wait_for_ip_address'] or self.params['wait_for_customization'] or self.params['state'] in ['poweredon', 'restarted']:
set_vm_power_state(self.content, vm, 'poweredon', force=False)
if self.params['wait_for_ip_address']:
self.wait_for_vm_ip(vm)
if self.params['wait_for_customization']:
is_customization_ok = self.wait_for_customization(vm)
if not is_customization_ok:
vm_facts = self.gather_facts(vm)
return {'changed': self.change_detected, 'failed': True, 'instance': vm_facts}
vm_facts = self.gather_facts(vm)
return {'changed': self.change_detected, 'failed': False, 'instance': vm_facts}
def get_snapshots_by_name_recursively(self, snapshots, snapname):
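        """
        Recursively collect all snapshots matching the given name
        Args:
            snapshots: list of snapshot tree objects to search
            snapname: snapshot name to match
        Returns: list of matching snapshot tree objects
        """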
snap_obj = []
for snapshot in snapshots:
if snapshot.name == snapname:
snap_obj.append(snapshot)
else:
snap_obj = snap_obj + self.get_snapshots_by_name_recursively(snapshot.childSnapshotList, snapname)
return snap_obj
def reconfigure_vm(self):
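        """
        Reconfigure the existing virtual machine to match the module parameters
        Returns: dictionary with 'changed', 'failed' and 'instance' facts
        """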
self.configspec = vim.vm.ConfigSpec()
self.configspec.deviceChange = []
self.configure_guestid(vm_obj=self.current_vm_obj)
self.configure_cpu_and_memory(vm_obj=self.current_vm_obj)
self.configure_hardware_params(vm_obj=self.current_vm_obj)
self.configure_disks(vm_obj=self.current_vm_obj)
self.configure_network(vm_obj=self.current_vm_obj)
self.configure_cdrom(vm_obj=self.current_vm_obj)
self.customize_customvalues(vm_obj=self.current_vm_obj, config_spec=self.configspec)
self.configure_resource_alloc_info(vm_obj=self.current_vm_obj)
self.configure_vapp_properties(vm_obj=self.current_vm_obj)
if self.params['annotation'] and self.current_vm_obj.config.annotation != self.params['annotation']:
self.configspec.annotation = str(self.params['annotation'])
self.change_detected = True
change_applied = False
relospec = vim.vm.RelocateSpec()
if self.params['resource_pool']:
relospec.pool = self.get_resource_pool()
if relospec.pool != self.current_vm_obj.resourcePool:
task = self.current_vm_obj.RelocateVM_Task(spec=relospec)
self.wait_for_task(task)
change_applied = True
        # Only send a VMware reconfiguration task if we detected a modification
if self.change_detected:
task = None
try:
task = self.current_vm_obj.ReconfigVM_Task(spec=self.configspec)
except vim.fault.RestrictedVersion as e:
self.module.fail_json(msg="Failed to reconfigure virtual machine due to"
" product versioning restrictions: %s" % to_native(e.msg))
self.wait_for_task(task)
change_applied = True
if task.info.state == 'error':
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2021361
# https://kb.vmware.com/selfservice/microsites/search.do?language=en_US&cmd=displayKC&externalId=2173
return {'changed': change_applied, 'failed': True, 'msg': task.info.error.msg}
# Rename VM
if self.params['uuid'] and self.params['name'] and self.params['name'] != self.current_vm_obj.config.name:
task = self.current_vm_obj.Rename_Task(self.params['name'])
self.wait_for_task(task)
change_applied = True
if task.info.state == 'error':
return {'changed': change_applied, 'failed': True, 'msg': task.info.error.msg}
# Mark VM as Template
if self.params['is_template'] and not self.current_vm_obj.config.template:
try:
self.current_vm_obj.MarkAsTemplate()
except vmodl.fault.NotSupported as e:
self.module.fail_json(msg="Failed to mark virtual machine [%s] "
"as template: %s" % (self.params['name'], e.msg))
change_applied = True
# Mark Template as VM
elif not self.params['is_template'] and self.current_vm_obj.config.template:
resource_pool = self.get_resource_pool()
kwargs = dict(pool=resource_pool)
if self.params.get('esxi_hostname', None):
host_system_obj = self.select_host()
kwargs.update(host=host_system_obj)
try:
self.current_vm_obj.MarkAsVirtualMachine(**kwargs)
except vim.fault.InvalidState as invalid_state:
self.module.fail_json(msg="Virtual machine is not marked"
" as template : %s" % to_native(invalid_state.msg))
except vim.fault.InvalidDatastore as invalid_ds:
self.module.fail_json(msg="Converting template to virtual machine"
" operation cannot be performed on the"
" target datastores: %s" % to_native(invalid_ds.msg))
except vim.fault.CannotAccessVmComponent as cannot_access:
self.module.fail_json(msg="Failed to convert template to virtual machine"
" as operation unable access virtual machine"
" component: %s" % to_native(cannot_access.msg))
except vmodl.fault.InvalidArgument as invalid_argument:
self.module.fail_json(msg="Failed to convert template to virtual machine"
" due to : %s" % to_native(invalid_argument.msg))
except Exception as generic_exc:
self.module.fail_json(msg="Failed to convert template to virtual machine"
" due to generic error : %s" % to_native(generic_exc))
            # Automatically update the VMware UUID when converting a template to a VM.
            # This avoids an interactive prompt during VM startup.
uuid_action = [x for x in self.current_vm_obj.config.extraConfig if x.key == "uuid.action"]
if not uuid_action:
uuid_action_opt = vim.option.OptionValue()
uuid_action_opt.key = "uuid.action"
uuid_action_opt.value = "create"
self.configspec.extraConfig.append(uuid_action_opt)
self.change_detected = True
change_applied = True
vm_facts = self.gather_facts(self.current_vm_obj)
return {'changed': change_applied, 'failed': False, 'instance': vm_facts}
@staticmethod
def wait_for_task(task):
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.Task.html
# https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.TaskInfo.html
# https://github.com/virtdevninja/pyvmomi-community-samples/blob/master/samples/tools/tasks.py
while task.info.state not in ['error', 'success']:
time.sleep(1)
def wait_for_vm_ip(self, vm, poll=100, sleep=5):
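        """
        Poll until the VM reports an IPv4 or IPv6 address or the poll limit is reached
        Args:
            vm: virtual machine object
            poll: maximum number of polling attempts
            sleep: seconds to sleep between attempts
        Returns: dictionary of facts gathered from the VM
        """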
ips = None
facts = {}
thispoll = 0
while not ips and thispoll <= poll:
newvm = self.get_vm()
facts = self.gather_facts(newvm)
if facts['ipv4'] or facts['ipv6']:
ips = True
else:
time.sleep(sleep)
thispoll += 1
return facts
def get_vm_events(self, eventTypeIdList):
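        """
        Query vCenter events of the given types for the current virtual machine
        Args:
            eventTypeIdList: list of event type identifiers to filter on
        Returns: list of matching events
        """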
newvm = self.get_vm()
byEntity = vim.event.EventFilterSpec.ByEntity(entity=newvm, recursion="self")
filterSpec = vim.event.EventFilterSpec(entity=byEntity, eventTypeId=eventTypeIdList)
eventManager = self.content.eventManager
return eventManager.QueryEvent(filterSpec)
def wait_for_customization(self, vm, poll=10000, sleep=10):
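        """
        Wait for guest OS customization events to report success or failure
        Args:
            vm: virtual machine object
            poll: maximum number of polling attempts
            sleep: seconds to sleep between attempts
        Returns: True if customization succeeded, False if it failed, else fails on timeout
        """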
facts = {}
thispoll = 0
while thispoll <= poll:
eventStarted = self.get_vm_events(['CustomizationStartedEvent'])
if len(eventStarted):
thispoll = 0
while thispoll <= poll:
eventsFinishedResult = self.get_vm_events(['CustomizationSucceeded', 'CustomizationFailed'])
if len(eventsFinishedResult):
if not isinstance(eventsFinishedResult[0], vim.event.CustomizationSucceeded):
self.module.fail_json(msg='Customization failed with error {0}:\n{1}'.format(
eventsFinishedResult[0]._wsdlName, eventsFinishedResult[0].fullFormattedMessage))
return False
break
else:
time.sleep(sleep)
thispoll += 1
return True
else:
time.sleep(sleep)
thispoll += 1
        self.module.fail_json(msg='Waiting for customization timed out.')
return False
def main():
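    # Build the argument spec: common VMware connection options extended with
    # the vmware_guest specific parameters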
argument_spec = vmware_argument_spec()
argument_spec.update(
state=dict(type='str', default='present',
choices=['absent', 'poweredoff', 'poweredon', 'present', 'rebootguest', 'restarted', 'shutdownguest', 'suspended']),
template=dict(type='str', aliases=['template_src']),
is_template=dict(type='bool', default=False),
annotation=dict(type='str', aliases=['notes']),
customvalues=dict(type='list', default=[]),
name=dict(type='str'),
name_match=dict(type='str', choices=['first', 'last'], default='first'),
uuid=dict(type='str'),
folder=dict(type='str'),
guest_id=dict(type='str'),
disk=dict(type='list', default=[]),
cdrom=dict(type='dict', default={}),
hardware=dict(type='dict', default={}),
force=dict(type='bool', default=False),
datacenter=dict(type='str', default='ha-datacenter'),
esxi_hostname=dict(type='str'),
cluster=dict(type='str'),
wait_for_ip_address=dict(type='bool', default=False),
state_change_timeout=dict(type='int', default=0),
snapshot_src=dict(type='str'),
linked_clone=dict(type='bool', default=False),
networks=dict(type='list', default=[]),
resource_pool=dict(type='str'),
customization=dict(type='dict', default={}, no_log=True),
customization_spec=dict(type='str', default=None),
wait_for_customization=dict(type='bool', default=False),
vapp_properties=dict(type='list', default=[]),
datastore=dict(type='str'),
convert=dict(type='str', choices=['thin', 'thick', 'eagerzeroedthick']),
)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True,
mutually_exclusive=[
['cluster', 'esxi_hostname'],
],
required_one_of=[
['name', 'uuid'],
],
)
result = {'failed': False, 'changed': False}
pyv = PyVmomiHelper(module)
# Check if the VM exists before continuing
vm = pyv.get_vm()
# VM already exists
if vm:
if module.params['state'] == 'absent':
# destroy it
if module.check_mode:
result.update(
vm_name=vm.name,
changed=True,
current_powerstate=vm.summary.runtime.powerState.lower(),
desired_operation='remove_vm',
)
module.exit_json(**result)
if module.params['force']:
# has to be poweredoff first
set_vm_power_state(pyv.content, vm, 'poweredoff', module.params['force'])
result = pyv.remove_vm(vm)
elif module.params['state'] == 'present':
if module.check_mode:
result.update(
vm_name=vm.name,
changed=True,
desired_operation='reconfigure_vm',
)
module.exit_json(**result)
result = pyv.reconfigure_vm()
elif module.params['state'] in ['poweredon', 'poweredoff', 'restarted', 'suspended', 'shutdownguest', 'rebootguest']:
if module.check_mode:
result.update(
vm_name=vm.name,
changed=True,
current_powerstate=vm.summary.runtime.powerState.lower(),
desired_operation='set_vm_power_state',
)
module.exit_json(**result)
# set powerstate
tmp_result = set_vm_power_state(pyv.content, vm, module.params['state'], module.params['force'], module.params['state_change_timeout'])
if tmp_result['changed']:
result["changed"] = True
if module.params['state'] in ['poweredon', 'restarted', 'rebootguest'] and module.params['wait_for_ip_address']:
wait_result = wait_for_vm_ip(pyv.content, vm)
if not wait_result:
module.fail_json(msg='Waiting for IP address timed out')
tmp_result['instance'] = wait_result
if not tmp_result["failed"]:
result["failed"] = False
result['instance'] = tmp_result['instance']
else:
# This should not happen
raise AssertionError()
# VM doesn't exist
else:
if module.params['state'] in ['poweredon', 'poweredoff', 'present', 'restarted', 'suspended']:
if module.check_mode:
result.update(
changed=True,
desired_operation='deploy_vm',
)
module.exit_json(**result)
result = pyv.deploy_vm()
if result['failed']:
module.fail_json(msg='Failed to create a virtual machine : %s' % result['msg'])
if result['failed']:
module.fail_json(**result)
else:
module.exit_json(**result)
if __name__ == '__main__':
main()
|
orgito/ansible
|
lib/ansible/modules/cloud/vmware/vmware_guest.py
|
Python
|
gpl-3.0
| 128,055
|
[
"VisIt"
] |
a9914fefcece181e166ed005c6744bb253cc24b7945d10ada538717c304fe89b
|
#!/usr/bin/env python
"""
Do the initial installation and configuration of a DIRAC service based on tornado
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = "$Id$"
from DIRAC import exit as DIRACexit
from DIRAC import gConfig, gLogger, S_OK
from DIRAC.Core.Utilities.DIRACScript import DIRACScript as Script
from DIRAC.Core.Utilities.Extensions import extensionsByPriority
from DIRAC.FrameworkSystem.Utilities import MonitoringUtilities
overwrite = False
def setOverwrite(opVal):
global overwrite
overwrite = True
return S_OK()
module = ""
specialOptions = {}
def setModule(optVal):
global specialOptions, module
specialOptions["Module"] = optVal
module = optVal
return S_OK()
def setSpecialOption(optVal):
global specialOptions
option, value = optVal.split("=")
specialOptions[option] = value
return S_OK()
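# Illustrative example (not part of the original script): passing
# "-p MaxCycles=100" on the command line is split by setSpecialOption into
# specialOptions = {"MaxCycles": "100"}. "MaxCycles" is a made-up option
# name used only for illustration.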
@Script()
def main():
global overwrite
global specialOptions
global module
from DIRAC.FrameworkSystem.Client.ComponentInstaller import gComponentInstaller
gComponentInstaller.exitOnError = True
Script.registerSwitch("w", "overwrite", "Overwrite the configuration in the global CS", setOverwrite)
Script.registerSwitch("m:", "module=", "Python module name for the component code", setModule)
Script.registerSwitch("p:", "parameter=", "Special component option ", setSpecialOption)
# Registering arguments will automatically add their description to the help menu
Script.registerArgument(
(
"System/Component: Full component name (ie: WorkloadManagement/Matcher)",
"System: Name of the DIRAC system (ie: WorkloadManagement)",
)
)
Script.registerArgument(" Component: Name of the DIRAC service (ie: Matcher)", mandatory=False)
Script.parseCommandLine()
args = Script.getPositionalArgs()
if len(args) == 1:
args = args[0].split("/")
if len(args) != 2:
Script.showHelp()
DIRACexit(1)
system = args[0]
component = args[1]
result = gComponentInstaller.addDefaultOptionsToCS(
gConfig,
"service",
system,
component,
extensionsByPriority(),
specialOptions=specialOptions,
overwrite=overwrite,
)
if not result["OK"]:
gLogger.error(result["Message"])
DIRACexit(1)
result = gComponentInstaller.addTornadoOptionsToCS(gConfig)
if not result["OK"]:
gLogger.error(result["Message"])
DIRACexit(1)
result = gComponentInstaller.installTornado()
if not result["OK"]:
gLogger.error(result["Message"])
DIRACexit(1)
gLogger.notice("Successfully installed component %s in %s system, now setting it up" % (component, system))
result = gComponentInstaller.setupTornadoService(system, component, extensionsByPriority(), module)
if not result["OK"]:
gLogger.error(result["Message"])
DIRACexit(1)
result = MonitoringUtilities.monitorInstallation("service", system, component, module)
if not result["OK"]:
gLogger.error(result["Message"])
DIRACexit(1)
gLogger.notice("Successfully completed the installation of %s/%s" % (system, component))
DIRACexit()
if __name__ == "__main__":
main()
|
ic-hep/DIRAC
|
src/DIRAC/FrameworkSystem/scripts/dirac_install_tornado_service.py
|
Python
|
gpl-3.0
| 3,388
|
[
"DIRAC"
] |
cb385e1f1516c399b30af94da3eda7798ba54afb0e4ff9e366a0252733e65c88
|
from compose.config.errors import DependencyError
def get_service_name_from_network_mode(network_mode):
return get_source_name_from_network_mode(network_mode, 'service')
def get_container_name_from_network_mode(network_mode):
return get_source_name_from_network_mode(network_mode, 'container')
def get_source_name_from_network_mode(network_mode, source_type):
if not network_mode:
return
if not network_mode.startswith(source_type+':'):
return
_, net_name = network_mode.split(':', 1)
return net_name
def get_service_names(links):
return [link.split(':', 1)[0] for link in links]
def get_service_names_from_volumes_from(volumes_from):
return [volume_from.source for volume_from in volumes_from]
def get_service_dependents(service_dict, services):
name = service_dict['name']
return [
service for service in services
if (name in get_service_names(service.get('links', [])) or
name in get_service_names_from_volumes_from(service.get('volumes_from', [])) or
name == get_service_name_from_network_mode(service.get('network_mode')) or
name == get_service_name_from_network_mode(service.get('pid')) or
name == get_service_name_from_network_mode(service.get('ipc')) or
name in service.get('depends_on', []))
]
def sort_service_dicts(services):
# Topological sort (Cormen/Tarjan algorithm).
unmarked = services[:]
temporary_marked = set()
sorted_services = []
def visit(n):
if n['name'] in temporary_marked:
if n['name'] in get_service_names(n.get('links', [])):
raise DependencyError('A service can not link to itself: %s' % n['name'])
if n['name'] in n.get('volumes_from', []):
raise DependencyError('A service can not mount itself as volume: %s' % n['name'])
if n['name'] in n.get('depends_on', []):
raise DependencyError('A service can not depend on itself: %s' % n['name'])
raise DependencyError('Circular dependency between %s' % ' and '.join(temporary_marked))
if n in unmarked:
temporary_marked.add(n['name'])
for m in get_service_dependents(n, services):
visit(m)
temporary_marked.remove(n['name'])
unmarked.remove(n)
sorted_services.insert(0, n)
while unmarked:
visit(unmarked[-1])
return sorted_services
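# Minimal usage sketch (illustrative, not part of the original module):
# dependencies are emitted before their dependents, so a "web" service that
# links to "db" sorts after it.
#
#   services = [{'name': 'web', 'links': ['db:db']}, {'name': 'db'}]
#   sort_service_dicts(services)
#   # -> [{'name': 'db'}, {'name': 'web', 'links': ['db:db']}]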
|
thaJeztah/compose
|
compose/config/sort_services.py
|
Python
|
apache-2.0
| 2,487
|
[
"VisIt"
] |
68df8b6c7c23a7b39e73e32c0d5e98c33c96e6f4583cde9610e729a1b4f7df70
|
# -*- coding: utf-8 -*-
# creates: ener.png distance.png angle.png
import os
from ase import *
import matplotlib
matplotlib.use('Agg')
import pylab as plt
e_s = [0.01,0.1,0.2,0.3,0.4,0.5]
E = [-463.2160, -462.9633, -462.4891, -462.0551,
-461.5426, -461.1714]
d = [1.1131, 1.1046, 1.0960, 1.0901,
1.0857, 1.0810]
alpha = [100.832453365, 99.568214268, 99.1486065462,
98.873671379, 98.1726341945, 98.0535643778]
fig=plt.figure(figsize=(3, 2.5))
fig.subplots_adjust(left=.29, right=.96, top=.9, bottom=0.16)
plt.plot(e_s, E, 'o-')
plt.xlabel(u'Energy shift [eV]')
plt.ylabel(u'Energy [eV]')
plt.title('Total Energy vs Eshift')
plt.savefig('ener.png')
fig=plt.figure(figsize=(3, 2.5))
fig.subplots_adjust(left=.24, right=.96, top=.9, bottom=0.16)
plt.plot(e_s, d, 'o-')
plt.xlabel(u'Energy shift [eV]')
plt.ylabel(u'O-H distance [Å]')
limits = plt.axis('tight')
plt.title('O-H distance vs Eshift')
plt.savefig('distance.png')
fig=plt.figure(figsize=(3, 2.5))
fig.subplots_adjust(left=.26, right=.96, top=.9, bottom=0.16)
plt.plot(e_s, alpha, 'o-')
plt.xlabel(u'Energy shift [eV]')
plt.ylabel(u'H20 angle')
limits = plt.axis('tight')
plt.title('O-H distance vs Eshift')
plt.savefig('angle.png')
|
freephys/python_ase
|
doc/exercises/siesta1/answer1.py
|
Python
|
gpl-3.0
| 1,215
|
[
"ASE"
] |
3b46e723ada2d015e362ad940f5a5d0694c82dbf6dc2d0ef77c577a837950602
|
#
# Yasara.py
# Python_Xcode
#
# Created by Jens on 31/05/2010.
# Copyright (c) 2010 University College Dublin. All rights reserved.
#
class yasara_handler:
def __init__(self,yasaradir='/local/nielsen/bin/yasara/',console=True):
#
# Default on amylase, mac below
#
import os
if not os.path.isdir(yasaradir):
yasaradir='/Users/nielsen/desktop/yasara.app'
dirname=os.path.split(yasaradir)[1]
if dirname.lower()=='yasara.app':
yasaradir=os.path.join(yasaradir,'yasara')
#
import sys,os
sys.path.append(os.path.join(yasaradir,'pym'))
sys.path.append(os.path.join(yasaradir,'plg'))
import yasaramodule as yasara
self.yasara=yasara
if not console:
self.yasara.info.mode='txt'
self.yasara.info.licenseshown=0
self.yasara.Console('Off')
return
def load_mol(self,pdbfile,center=None):
"""Load a molecule"""
#self.yasara.run('DelAll')
import os
obj=self.yasara.run('LOADPDB %s' %os.path.join(os.getcwd(),pdbfile))
self.yasara.run('Style Stick')
self.yasara.run('HideRes Hoh')
res=self.yasara.ColorObj(obj,'Grey')
self.yasara.run('HUD Off')
return obj
def AlignObj(self,obj1,obj2):
"""Align all objects with MOTIF. Return rmsd"""
resultlist=self.yasara.AlignObj(obj1,obj2,method='MOTIF')
return resultlist[0]
def buildobj(self,lines):
"""Build a yasara object from a list of pdblines"""
return self.yasara.BuildPDB(lines)
def col_res(self,obj,residues,color):
"""Color a number of residues a color"""
if type(residues) is type([]):
for res in residues:
self.yasara.ColorRes("%d Obj %d" %(res,obj),color)
else:
for res in residues.keys():
#print "%d Obj %d" %(res,obj),int(180*residues[res])
self.yasara.ColorRes("%d Obj %d" %(res,obj),int(120*residues[res]))
return
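# Minimal usage sketch (hypothetical file name; assumes a local YASARA
# installation is found by __init__):
#
#   yh = yasara_handler(console=False)
#   obj = yh.load_mol('protein.pdb')
#   yh.col_res(obj, [10, 11, 12], 'Red')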
|
dmnfarrell/peat
|
Protool/Yasara_handler.py
|
Python
|
mit
| 2,097
|
[
"YASARA"
] |
a860cfb07a66715e4ec75019eae6c9c3d29413cfc24129b75998665c3e244576
|
# Copyright (c) 2021, TU Wien, Department of Geodesy and Geoinformation
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of TU Wien, Department of Geodesy and Geoinformation
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL TU WIEN DEPARTMENT OF GEODESY AND
# GEOINFORMATION BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Readers for ASCAT Level 2 data for various file formats.
"""
import os
import numpy as np
from datetime import datetime
from ascat.read_native.nc import AscatL2NcFile
from ascat.read_native.bufr import AscatL2BufrFile
from ascat.read_native.eps_native import AscatL2EpsFile
from ascat.utils import get_toi_subset, get_roi_subset
from ascat.file_handling import ChronFiles
class AscatL2File:
"""
Class reading ASCAT Level 2 files.
"""
def __init__(self, filename, file_format=None):
"""
Initialize AscatL2File.
Parameters
----------
filename : str
Filename.
file_format : str, optional
File format: '.nat', '.nc', '.bfr', '.h5' (default: None).
If None file format will be guessed based on the file ending.
"""
self.filename = filename
self.fid = None
if file_format is None:
file_format = get_file_format(self.filename)
self.file_format = file_format
if self.file_format in ['.nat', '.nat.gz']:
self.fid = AscatL2EpsFile(self.filename)
elif self.file_format in ['.nc', '.nc.gz']:
self.fid = AscatL2NcFile(self.filename)
elif self.file_format in ['.bfr', '.bfr.gz', '.buf', '.buf.gz']:
self.fid = AscatL2BufrFile(self.filename)
else:
raise RuntimeError("ASCAT Level 2 file format unknown")
def read(self, toi=None, roi=None, generic=True, to_xarray=False):
"""
Read ASCAT Level 2 data.
Parameters
----------
toi : tuple of datetime, optional
Filter data for given time of interest (default: None).
roi : tuple of 4 float, optional
Filter data for region of interest (default: None).
e.g. latmin, lonmin, latmax, lonmax
generic : boolean, optional
Convert original data field names to generic field names
(default: True).
to_xarray : boolean, optional
Convert data to xarray.Dataset otherwise numpy.ndarray will be
returned (default: False).
Returns
-------
data : xarray.Dataset or numpy.ndarray
ASCAT data.
metadata : dict
Metadata.
"""
data, metadata = self.fid.read(generic=generic, to_xarray=to_xarray)
if toi:
data = get_toi_subset(data, toi)
if roi:
data = get_roi_subset(data, roi)
return data, metadata
def read_period(self, dt_start, dt_end, **kwargs):
"""
Read interval.
Parameters
----------
dt_start : datetime
Start datetime.
dt_end : datetime
End datetime.
Returns
-------
data : xarray.Dataset or numpy.ndarray
ASCAT data.
metadata : dict
Metadata.
"""
return self.read(toi=(dt_start, dt_end), **kwargs)
def close(self):
"""
Close file.
"""
self.fid.close()
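# Minimal usage sketch (hypothetical file name; the format is guessed from
# the '.nc' extension via get_file_format below):
#
#   f = AscatL2File('ascat_l2_ssm.nc')
#   data, metadata = f.read(roi=(40.0, -10.0, 55.0, 5.0))  # latmin, lonmin, latmax, lonmax
#   f.close()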
def get_file_format(filename):
"""
Try to guess the file format from the extension.
Parameters
----------
filename : str
File name.
Returns
-------
file_format : str
File format indicator.
"""
if os.path.splitext(filename)[1] == '.gz':
file_format = os.path.splitext(os.path.splitext(filename)[0])[1]
else:
file_format = os.path.splitext(filename)[1]
return file_format
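# Illustrative examples: get_file_format('f.nc') -> '.nc'; for gzipped files
# the inner extension is returned, e.g. get_file_format('f.nc.gz') -> '.nc'.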
class AscatL2BufrFileList(ChronFiles):
"""
Class reading ASCAT L2 BUFR files.
"""
def __init__(self, path, sat, product, filename_template=None):
"""
Initialize.
"""
sat_lut = {'a': 2, 'b': 1, 'c': 3}
self.sat = sat_lut[sat]
self.product = product
if filename_template is None:
filename_template = ('M0{sat}-ASCA-ASC{product}*-*-*-'
'{date}.000000000Z-*-*.bfr')
super().__init__(path, AscatL2File, filename_template)
def _fmt(self, timestamp):
"""
Definition of filename and subfolder format.
Parameters
----------
timestamp : datetime
Time stamp.
Returns
-------
fn_fmt : dict
Filename format.
sf_fmt : dict
Subfolder format.
"""
fn_read_fmt = {'date': timestamp.strftime('%Y%m%d%H%M%S'),
'sat': self.sat, 'product': self.product.upper()}
fn_write_fmt = None
sf_read_fmt = None
sf_write_fmt = sf_read_fmt
return fn_read_fmt, sf_read_fmt, fn_write_fmt, sf_write_fmt
def _parse_date(self, filename):
"""
Parse date from filename.
Parameters
----------
filename : str
Filename.
Returns
-------
date : datetime
Parsed date.
"""
return datetime.strptime(os.path.basename(filename)[25:39],
'%Y%m%d%H%M%S')
def _merge_data(self, data):
"""
Merge data.
Parameters
----------
data : list
List of array.
Returns
-------
data : numpy.ndarray
Data.
"""
if type(data) == list:
if type(data[0]) == tuple:
metadata = [element[1] for element in data]
data = np.hstack([element[0] for element in data])
data = (data, metadata)
else:
data = np.hstack(data)
return data
class AscatL2NcFileList(ChronFiles):
"""
Class reading ASCAT L1b NetCDF files.
"""
def __init__(self, path, sat, product, filename_template=None):
"""
Initialize.
Parameters
----------
path : str
Path to input data.
sat : str
Metop satellite ('a', 'b', 'c').
product : str
Product type ('smr', 'smo').
filename_template : str, optional
Filename template (default:
'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,METOP{sat}+'
'ASCAT_C_EUMP_{date}_*_eps_o_{product}_ssm_l2.nc')
"""
self.sat = sat
lut = {'smr': '125', 'smo': '250'}
self.product = lut[product]
if filename_template is None:
filename_template = (
'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,METOP{sat}+'
'ASCAT_C_EUMP_{date}_*_eps_o_{product}_ssm_l2.nc')
super().__init__(path, AscatL2File, filename_template)
def _fmt(self, timestamp):
"""
Definition of filename and subfolder format.
Parameters
----------
timestamp : datetime
Time stamp.
Returns
-------
fn_fmt : dict
Filename format.
sf_fmt : dict
Subfolder format.
"""
fn_read_fmt = {'date': timestamp.strftime('%Y%m%d%H%M%S'),
'sat': self.sat.upper(),
'product': self.product.upper()}
fn_write_fmt = None
sf_read_fmt = None
sf_write_fmt = sf_read_fmt
return fn_read_fmt, sf_read_fmt, fn_write_fmt, sf_write_fmt
def _parse_date(self, filename):
"""
Parse date from filename.
Parameters
----------
filename : str
Filename.
Returns
-------
date : datetime
Parsed date.
"""
return datetime.strptime(os.path.basename(filename)[62:76],
'%Y%m%d%H%M%S')
def _merge_data(self, data):
"""
Merge data.
Parameters
----------
data : list
List of array.
Returns
-------
data : numpy.ndarray
Data.
"""
if type(data) == list:
if type(data[0]) == tuple:
metadata = [element[1] for element in data]
data = np.hstack([element[0] for element in data])
data = (data, metadata)
else:
data = np.hstack(data)
return data
class AscatL2EpsFileList(ChronFiles):
"""
Class reading ASCAT L2 Eps files.
"""
def __init__(self, path, sat, product, filename_template=None):
"""
Initialize.
Parameters
----------
path : str
Path to input data.
sat : str
Metop satellite ('a', 'b', 'c').
product : str
Product type ('smr', 'smo').
filename_template : str, optional
Filename template (default:
'ASCA_{product}_02_M0{sat}_{date}Z_*_*_*_*.nat')
"""
sat_lut = {'a': 2, 'b': 1, 'c': 3, '?': '?'}
self.sat = sat_lut[sat]
self.product = product
if filename_template is None:
filename_template = 'ASCA_{product}_02_M0{sat}_{date}Z_*_*_*_*.nat'
super().__init__(path, AscatL2File, filename_template)
def _fmt(self, timestamp):
"""
Definition of filename and subfolder format.
Parameters
----------
timestamp : datetime
Time stamp.
Returns
-------
fn_fmt : dict
Filename format.
sf_fmt : dict
Subfolder format.
"""
fn_read_fmt = {'date': timestamp.strftime('%Y%m%d%H%M%S'),
'sat': self.sat, 'product': self.product.upper()}
fn_write_fmt = None
sf_read_fmt = None
sf_write_fmt = sf_read_fmt
return fn_read_fmt, sf_read_fmt, fn_write_fmt, sf_write_fmt
def _parse_date(self, filename):
"""
Parse date from filename.
Parameters
----------
filename : str
Filename.
Returns
-------
date : datetime
Parsed date.
"""
return datetime.strptime(os.path.basename(filename)[16:30],
'%Y%m%d%H%M%S')
def _merge_data(self, data):
"""
Merge data.
Parameters
----------
data : list
List of array.
Returns
-------
data : numpy.ndarray
Data.
"""
if type(data) == list:
if type(data[0]) == tuple:
metadata = [element[1] for element in data]
data = np.hstack([element[0] for element in data])
data = (data, metadata)
else:
data = np.hstack(data)
return data
|
TUW-GEO/ascat
|
src/ascat/eumetsat/level2.py
|
Python
|
mit
| 12,334
|
[
"NetCDF"
] |
eb4344c8f43ab4b10dacdd7939e9b9484a7fbaf3e628c3c8e9f78263ae41e6dd
|
import ast
import re
from collections import defaultdict, namedtuple
from itertools import count
from ..core.ast_helper import get_call_names_as_string
from .right_hand_side_visitor import RHSVisitor
class CallVisitorResults(
namedtuple(
"CallVisitorResults",
("args", "kwargs", "unknown_args", "unknown_kwargs")
)
):
__slots__ = ()
def all_results(self):
for x in self.args:
yield from x
for x in self.kwargs.values():
yield from x
yield from self.unknown_args
yield from self.unknown_kwargs
class CallVisitor(ast.NodeVisitor):
def __init__(self, trigger_str):
self.unknown_arg_visitor = RHSVisitor()
self.unknown_kwarg_visitor = RHSVisitor()
self.argument_visitors = defaultdict(lambda: RHSVisitor())
self._trigger_str = trigger_str
def visit_Call(self, call_node):
func_name = get_call_names_as_string(call_node.func)
trigger_re = r"(^|\.){}$".format(re.escape(self._trigger_str))
if re.search(trigger_re, func_name):
seen_starred = False
for index, arg in enumerate(call_node.args):
if isinstance(arg, ast.Starred):
seen_starred = True
if seen_starred:
self.unknown_arg_visitor.visit(arg)
else:
self.argument_visitors[index].visit(arg)
for keyword in call_node.keywords:
if keyword.arg is None:
self.unknown_kwarg_visitor.visit(keyword.value)
else:
self.argument_visitors[keyword.arg].visit(keyword.value)
self.generic_visit(call_node)
@classmethod
def get_call_visit_results(cls, trigger_str, node):
visitor = cls(trigger_str)
visitor.visit(node)
arg_results = []
for i in count():
try:
arg_results.append(set(visitor.argument_visitors.pop(i).result))
except KeyError:
break
return CallVisitorResults(
arg_results,
{k: set(v.result) for k, v in visitor.argument_visitors.items()},
set(visitor.unknown_arg_visitor.result),
set(visitor.unknown_kwarg_visitor.result),
)
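# Minimal usage sketch (illustrative; assumes RHSVisitor collects the
# identifier names appearing in each argument expression):
#
#   import ast
#   tree = ast.parse("execute(a, key=b)")
#   res = CallVisitor.get_call_visit_results('execute', tree)
#   # res.args   -> [{'a'}]
#   # res.kwargs -> {'key': {'b'}}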
|
python-security/pyt
|
pyt/helper_visitors/call_visitor.py
|
Python
|
gpl-2.0
| 2,319
|
[
"VisIt"
] |
e653b32165b1d1bd5a7e01c2b84d7dbc2b43b15348b94398c871fb8278359d38
|
#!/usr/bin/env python
import argparse
import logging
import sys
from BCBio import GFF
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqFeature import (
FeatureLocation,
SeqFeature
)
from Bio.SeqRecord import SeqRecord
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
# Patch bcbio gff to work around url encoding issue. This is clearly
# sub-optimal but we should transition to the newer library.
def _new_format_keyvals(self, keyvals):
return ";".join(["%s=%s" % (k, ",".join(v)) for (k, v) in sorted(keyvals.items())])
GFF.GFFOutput.GFF3Writer._format_keyvals = _new_format_keyvals
def parse_xmfa(xmfa):
"""Simple XMFA parser until https://github.com/biopython/biopython/pull/544
"""
current_lcb = []
current_seq = {}
for line in xmfa.readlines():
if line.startswith('#'):
continue
if line.strip() == '=':
if 'id' in current_seq:
current_lcb.append(current_seq)
current_seq = {}
yield current_lcb
current_lcb = []
else:
line = line.strip()
if line.startswith('>'):
if 'id' in current_seq:
current_lcb.append(current_seq)
current_seq = {}
data = line.strip().split()
id, loc = data[1].split(':')
start, end = loc.split('-')
current_seq = {
'rid': '_'.join(data[1:]),
'id': id,
'start': int(start),
'end': int(end),
'strand': 1 if data[2] == '+' else -1,
'seq': ''
}
else:
current_seq['seq'] += line.strip()
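# For reference, a minimal XMFA block as consumed above (coordinates are
# illustrative): each sequence header is '> id:start-end strand source',
# the aligned sequence follows, and '=' terminates the LCB:
#
#   > 1:1-8 + genome_a.fa
#   ACGT-ACGT
#   > 2:5-12 - genome_b.fa
#   ACGTTACGT
#   =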
def _percent_identity(a, b):
"""Calculate % identity, ignoring gaps in the host sequence
"""
match = 0
mismatch = 0
for char_a, char_b in zip(list(a), list(b)):
if char_a == '-':
continue
if char_a == char_b:
match += 1
else:
mismatch += 1
if match + mismatch == 0:
return 0
return 100 * float(match) / (match + mismatch)
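# Worked example (illustrative): _percent_identity('AC-GT', 'ACTGA') skips
# the gap in the first sequence and compares A/A, C/C, G/G, T/A, i.e.
# 3 matches out of 4 -> 75.0.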
def _id_tn_dict(sequences):
"""Figure out sequence IDs
"""
label_convert = {}
if sequences is not None:
if len(sequences) == 1:
for i, record in enumerate(SeqIO.parse(sequences[0], 'fasta')):
label_convert[str(i + 1)] = record.id
else:
for i, sequence in enumerate(sequences):
for record in SeqIO.parse(sequence, 'fasta'):
label_convert[str(i + 1)] = record.id
continue
return label_convert
def convert_xmfa_to_gff3(xmfa_file, relative_to='1', sequences=None, window_size=1000):
label_convert = _id_tn_dict(sequences)
lcbs = parse_xmfa(xmfa_file)
records = [SeqRecord(Seq("A"), id=label_convert.get(relative_to, relative_to))]
for lcb in lcbs:
ids = [seq['id'] for seq in lcb]
# Doesn't match part of our sequence
if relative_to not in ids:
continue
# Skip sequences that are JUST our "relative_to" genome
if len(ids) == 1:
continue
parent = [seq for seq in lcb if seq['id'] == relative_to][0]
others = [seq for seq in lcb if seq['id'] != relative_to]
for other in others:
other['feature'] = SeqFeature(
FeatureLocation(parent['start'], parent['end'] + 1),
type="match", strand=parent['strand'],
qualifiers={
"source": "progressiveMauve",
"Target": " ".join(map(str, [label_convert.get(other['id'], other['id']), other['start'], other['end'], '+' if other['strand'] > 0 else '-'])),
"ID": label_convert.get(other['id'], 'xmfa_' + other['rid'])
}
)
for i in range(0, len(lcb[0]['seq']), window_size):
block_seq = parent['seq'][i:i + window_size]
real_window_size = len(block_seq)
real_start = abs(parent['start']) - parent['seq'][0:i].count('-') + i
real_end = real_start + real_window_size - block_seq.count('-')
if (real_end - real_start) < 10:
continue
if parent['start'] < 0:
strand = -1
else:
strand = 1
for other in others:
pid = _percent_identity(block_seq, other['seq'][i:i + real_window_size])
# Ignore 0% identity sequences
if pid == 0:
continue
# Support for Biopython 1.68 and above, which removed sub_features
if not hasattr(other['feature'], "sub_features"):
other['feature'].sub_features = []
other['feature'].sub_features.append(
SeqFeature(
FeatureLocation(real_start, real_end),
type="match_part", strand=strand,
qualifiers={
"source": "progressiveMauve",
'score': pid
}
)
)
for other in others:
records[0].features.append(other['feature'])
return records
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert XMFA alignments to gff3', prog='xmfa2gff3')
parser.add_argument('xmfa_file', type=argparse.FileType('r'), help='XMFA File')
parser.add_argument('--window_size', type=int, help='Window size for analysis', default=1000)
parser.add_argument('--relative_to', type=str, help='Index of the parent sequence in the MSA', default='1')
parser.add_argument('--sequences', type=argparse.FileType('r'), nargs='+',
help='Fasta files (in same order) passed to parent for reconstructing proper IDs')
parser.add_argument('--version', action='version', version='%(prog)s 1.0')
args = parser.parse_args()
result = convert_xmfa_to_gff3(**vars(args))
GFF.write(result, sys.stdout)
|
jj-umn/tools-iuc
|
tools/progressivemauve/xmfa2gff3.py
|
Python
|
mit
| 6,214
|
[
"Biopython"
] |
e3791b47ac1c29cce164b127cc2c739a39267ef87b8bbd85430c2237534eccd1
|
"""Excitation lists base classes
"""
from math import sqrt
import numpy as np
import gpaw.mpi as mpi
from gpaw.output import initialize_text_stream
from ase.units import A, m, s, Bohr, _aut, C
class ExcitationList(list):
"""General Excitation List class.
"""
def __init__(self, calculator=None, txt=None):
# initialise empty list
list.__init__(self)
self.calculator = calculator
if not txt and calculator:
txt = calculator.txt
self.txt, firsttime = initialize_text_stream(txt, mpi.rank)
def get_calculator(self):
return self.calculator
def get_energies(self):
"""Get excitation energies in Hartrees"""
el = []
for ex in self:
el.append(ex.get_energy())
return np.array(el)
def get_trk(self):
"""Evaluate the Thomas Reiche Kuhn sum rule"""
trkm = np.zeros((3))
for ex in self:
trkm += ex.get_energy() * ex.get_dipol_me()**2
return 2. * trkm # scale to get the number of electrons XXX spinpol ?
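# For reference (component-wise TRK sum rule in atomic units; an assumption
# about the normalisation used here): sum_I 2 E_I |<0|x_a|I>|^2 = N_e for
# each Cartesian direction a, so each entry of the returned vector should
# approach the number of electrons for a complete excitation space.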
def get_polarizabilities(self, lmax=7):
"""Calculate the Polarisabilities
see Jamorski et al. J. Chem. Phys. 104 (1996) 5134"""
S = np.zeros((lmax+1))
for ex in self:
e = ex.get_energy()
f = ex.get_oscillator_strength()[0]
for l in range(lmax+1):
S[l] += e**(-2 * l) * f
return S
def set_calculator(self, calculator):
self.calculator = calculator
def __str__(self):
string = '# ' + str(type(self))
if len(self) != 0:
string += ', %d excitations:' % len(self)
string += '\n'
for ex in self:
string += '# '+ex.__str__()+"\n"
return string
def get_alpha(self, omega):
"""Return the polarization tensor"""
alpha_cc = np.zeros((3,3))
for ex in self:
alpha_cc += ex.get_alpha(omega)
return alpha_cc
class Excitation:
def get_energy(self):
"""Get the excitations energy relative to the ground state energy
in Hartrees.
"""
return self.energy
def get_dipol_me(self):
"""return the excitations dipole matrix element
including the occupation factor"""
return self.me / sqrt(self.energy)
def get_oscillator_strength(self, form='r'):
"""Return the excitations dipole oscillator strength.
self.me is assumed to be::
form='r': sqrt(f * E) * <I|r|J>,
form='v': sqrt(f / E) * <I|d/(dr)|J>
for f = multiplicity, E = transition energy and initial and
final states::
|I>, |J>
"""
if form == 'r':
# length form
me = self.me
elif form == 'v':
# velocity form
me = self.muv * np.sqrt(self.energy)
else:
raise RuntimeError('Unknown form >' + form + '<')
osz = [0.]
for c in range(3):
val = 2. * me[c]**2
osz.append(val)
osz[0] += val / 3.
return osz
def get_rotatory_strength(self, form='r', units='cgs'):
"""Return rotatory strength"""
if self.magn is None:
raise RuntimeError('Magnetic moment not available.')
if units =='cgs':
# 10^-40 esu cm erg / G
# = 3.33564095 * 10^-15 A^2 m^3 s
# conversion factor after
# T. B. Pedersen and A. E. Hansen,
# Chem. Phys. Lett. 246 (1995) 1
# pre = 471.43
# From TurboMole
pre = 64604.8164
elif units == 'a.u.':
pre = 1.
else:
raise RuntimeError('Unknown units >' + units + '<')
if form == 'r':
# length form
mu = self.mur
elif form == 'v':
# velocity form
mu = self.muv
else:
raise RuntimeError('Unknown form >' + form + '<')
return pre * np.dot(mu, self.magn)
def set_energy(self, E):
"""Set the excitations energy relative to the ground state energy"""
self.energy = E
def get_alpha(self, omega):
"""Return the polarization tensor"""
me = self.me
alpha_cc = np.zeros((3,3))
for c1 in range(3):
for c2 in range(c1, 3):
alpha_cc[c1, c2] = alpha_cc[c2, c1] = me[c1] * me[c2]
return 2 * self.energy / (self.energy**2 - omega**2) * alpha_cc
|
robwarm/gpaw-symm
|
gpaw/lrtddft/excitation.py
|
Python
|
gpl-3.0
| 4,616
|
[
"ASE",
"GPAW",
"TURBOMOLE"
] |
fc9094a3addfb8c41994538275fe2b377fd2e04f146b31139278694bc2f6842d
|
import sys, os
import vtkCommonCorePython
def vtkLoadPythonTkWidgets(interp):
"""vtkLoadPythonTkWidgets(interp) -- load vtk-tk widget extensions
This is a mess of mixed python and tcl code that searches for the
shared object file that contains the python-vtk-tk widgets. Both
the python path and the tcl path are searched.
"""
X = vtkCommonCorePython.vtkVersion.GetVTKMajorVersion()
Y = vtkCommonCorePython.vtkVersion.GetVTKMinorVersion()
modname = 'vtkRenderingPythonTkWidgets'
name = '%s-%d.%d' % (modname,X,Y)
pkgname = modname.lower().capitalize()
# find out if the module is already loaded
loadedpkgs = interp.call('info', 'loaded')
found = False
try:
# check for result returned as a string
found = (loadedpkgs.find(pkgname) >= 0)
except AttributeError:
# check for result returned as nested tuples
for pkgtuple in loadedpkgs:
found |= (pkgname in pkgtuple)
if found:
return
# create the platform-dependent file name
prefix = ''
if sys.platform == 'cygwin':
prefix = 'cyg'
elif os.name == 'posix':
prefix = 'lib'
extension = interp.call('info', 'sharedlibextension')
filename = prefix+name+extension
# create an extensive list of paths to search
pathlist = sys.path
# add tcl paths, ensure that {} is handled properly
try:
auto_paths = interp.getvar('auto_path').split()
except AttributeError:
auto_paths = interp.getvar('auto_path')
for path in auto_paths:
prev = str(pathlist[-1])
try:
# try block needed when one uses Gordon McMillan's Python
# Installer.
if len(prev) > 0 and prev[0] == '{' and prev[-1] != '}':
pathlist[-1] = prev+' '+path
else:
pathlist.append(path)
except AttributeError:
pass
# a common place for these sorts of things
if os.name == 'posix':
pathlist.append('/usr/local/lib')
# if python 3, there is no separate "unicode" type
if sys.hexversion >= 0x03000000:
unicode = str
# attempt to load
for path in pathlist:
try:
# If the path object is not str, it means that it is a
# Tkinter path object.
if (not isinstance(path, str) and not isinstance(path, unicode)):
path = path.string
# try block needed when one uses Gordon McMillan's Python
# Installer.
if len(path) > 0 and path[0] == '{' and path[-1] == '}':
path = path[1:-1]
fullpath = os.path.join(path, filename)
except AttributeError:
pass
if ' ' in fullpath:
fullpath = '{'+fullpath+'}'
if interp.eval('catch {load '+fullpath+' '+pkgname+'}') == '0':
return
# re-generate the error
interp.call('load', filename, pkgname)
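# Illustrative example of the file this function searches for: on a Linux
# build of VTK 7.0, prefix is 'lib' and the Tcl shared-library extension is
# '.so', giving 'libvtkRenderingPythonTkWidgets-7.0.so'.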
|
hlzz/dotfiles
|
graphics/VTK-7.0.0/Wrapping/Python/vtk/tk/vtkLoadPythonTkWidgets.py
|
Python
|
bsd-3-clause
| 3,047
|
[
"VTK"
] |
c3ca7eeeb0bbd7326df9accfd4143b3d94012c2b6385f80b1dfc661b7dddaf40
|
#!/usr/bin/env python
# generate peak set complete files, binding region files, and report files!
import sys
import time
import optparse
import general
import numpy
import hyper
import pickle
import pdb
import metrn
import modencode
import os
from scipy import stats
print "Command:", " ".join(sys.argv)
print "Timestamp:", time.asctime(time.localtime())
def main():
parser = optparse.OptionParser()
parser.add_option("--path", action = "store", type = "string", dest = "path", help = "Path from script to files")
parser.add_option("--peaks", action = "store", type = "string", dest = "peaks", help = "Peaks to be used for analysis")
parser.add_option("--mode", action = "store", type = "string", dest = "mode", help = "Operations to be performed")
parser.add_option("--organism", action = "store", type = "string", dest = "organism", help = "Target organism for operations...", default="OFF")
parser.add_option("--species", action = "store", type = "string", dest = "species", help = "Species to compare", default="OFF")
parser.add_option("--orthology", action = "store", type = "string", dest = "orthology", help = "Use 'direct', 'family' (Yong's), or 'group' (Pouya's) orthologs?", default="direct")
parser.add_option("--nametag", action = "store", type = "string", dest = "nametag", help = "Orthology nametag: nametagHsCe", default="ortho")
parser.add_option("--commonNames", action = "store", type = "string", dest = "commonNames", help = "Grab common names file?", default="ON")
parser.add_option("--familyFiles", action = "store", type = "string", dest = "familyFiles", help = "Grab cleaned files?", default="formatted")
parser.add_option("--target", action = "store", type = "string", dest = "target", help = "Target identification", default="OFF")
parser.add_option("--label", action = "store", type = "string", dest = "label", help = "How should labels be generated?", default="rebuild")
parser.add_option("--indexes", action = "store", type = "string", dest = "indexes", help = "Indexes for matrix construction...", default="OFF")
parser.add_option("--values", action = "store", type = "string", dest = "values", help = "Values for matrix construction...", default="OFF")
parser.add_option("--contexts", action = "store", type = "string", dest = "contexts", help = "What contexts of development should I track?", default="OFF")
parser.add_option("--source", action = "store", type = "string", dest = "source", help = "Path to source files...", default="OFF")
parser.add_option("--server", action = "store", type = "string", dest = "server", help = "Are we on the server?", default="OFF")
parser.add_option("--name", action = "store", type = "string", dest = "name", help = "Output name?", default="OFF")
parser.add_option("--A", action = "store", type = "string", dest = "a", help = "Paths to files of interest", default="OFF")
parser.add_option("--B", action = "store", type = "string", dest = "b", help = "Files to be hybridized", default="OFF")
parser.add_option("--cutoff", action = "store", type = "float", dest = "cutoff", help = "P-value cutoff for fraction calculation", default=0.05)
parser.add_option("--fraction", action = "store", type = "int", dest = "fraction", help = "Tolerated fractional representation of GO terms", default=10)
parser.add_option("--rename", action = "store", type = "string", dest = "rename", help = "Targets to rename. Comma-separated list of 'target:replacement' pairs to search and replace.", default="OFF")
(option, args) = parser.parse_args()
# import paths:
if option.server == "OFF":
path_dict = modencode.configBuild(option.path + "/input/" + "configure_path.txt")
elif option.server == "ON":
path_dict = modencode.configBuild(option.path + "/input/" + "configure_server.txt")
# specify input and output paths:
inpath = path_dict["input"]
extraspath = path_dict["extras"]
pythonpath = path_dict["python"]
scriptspath = path_dict["scripts"]
downloadpath = path_dict["download"]
fastqpath = path_dict["fastq"]
bowtiepath = path_dict["bowtie"]
bwapath = path_dict["bwa"]
macspath = path_dict["macs"]
memepath = path_dict["meme"]
idrpath = path_dict["idr"]
igvpath = path_dict["igv"]
testpath = path_dict["test"]
processingpath = path_dict["processing"]
annotationspath = path_dict["annotations"]
orthologspath = path_dict["orthologs"]
coassociationspath = path_dict["coassociations"]
cellspath = path_dict["cells"]
peakspath = path_dict["peaks"]
gopath = path_dict["go"]
hotpath = path_dict["hot"]
qsubpath = path_dict["qsub"]
# standardize paths for analysis:
alignerpath = bwapath
indexpath = alignerpath + "index/"
alignmentpath = alignerpath + "alignment/"
qcfilterpath = alignerpath + "qcfilter/"
qcmergepath = alignerpath + "qcmerge/"
# import configuration dictionaries:
source_dict = modencode.configBuild(inpath + "configure_source.txt")
method_dict = modencode.configBuild(inpath + "configure_method.txt")
context_dict = modencode.configBuild(inpath + "configure_context.txt")
# define organisms:
organismTags = ["hs","mm","ce","dm"]
# define organism parameters:
if option.organism == "h.sapiens" or option.organism == "human" or option.organism == "hs":
organismTag = "hs"
contextTag = "cells"
idColumns = ["name", "code", "hgcn","ensembl"]
idComplexList = list()
elif option.organism == "m.musculus" or option.organism == "mouse" or option.organism == "mm":
organismTag = "mm"
contextTag = "cells"
idColumns = ["name", "code", "hgcn","ensembl"]
idComplexList = list()
elif option.organism == "c.elegans" or option.organism == "worm" or option.organism == "ce":
organismTag = "ce"
contextTag = "stage"
idColumns = ["name", "code", "wormbase","ensembl"]
idComplexList = list()
elif option.organism == "d.melanogaster" or option.organism == "fly" or option.organism == "dm":
organismTag = "dm"
contextTag = "stage"
idColumns = ["name", "code", "flybase","ensembl"]
idComplexList = ["dataset",":","url"]
# update analysis path:
#if option.analysis == "families":
# analysispath = orthologspath
# outheader = ["family.id", "species.a", "species.b", "gene.a", "gene.b"]
# matchTag = "family.txt"
#elif option.analysis == "orthologs":
# analysispath = orthologspath + "orthologs/"
# outheader = ["family.id", "species.a", "species.b", "gene.a", "gene.b", "count.a", "count.b"]
# matchTag = "orthologs.txt"
#elif option.analysis == "paralogs":
# analysispath = orthologspath + "paralogous/"
# outheader = ["family.id", "species", "gene.a", "gene.b"]
# matchTag = "paralog.txt"
# define P-value cutoff handle:
pvaluecutoff_handle = "%.0e" % (float(option.cutoff))
# merge (datatypes) matrix mode:
if option.mode == "merge.direct":
# find species-comparison orthologs:
speciesTags = option.species.split(",")
aspecies, bspecies = speciesTags
# make comparison output folders:
comparisonpath = path_dict[option.source] + "comparison/" + aspecies + "/" + bspecies + "/"
general.pathGenerator(comparisonpath)
# define input files:
ainfile = str(path_dict[option.source] + "/" + option.a).replace("//","/")
binfile = str(path_dict[option.source] + "/" + option.b).replace("//","/")
# find target matrix indexes (keys):
aindexes, bindexes = option.indexes.split(",")
ai, aj = aindexes.split(":")
bi, bj = bindexes.split(":")
# find target matrix values:
ax, bx = option.values.split(",")
# load input matrixes:
amatrix = general.matrixBuilder(i=ai, j=aj, x=ax, infile=ainfile, datatype="float")
bmatrix = general.matrixBuilder(i=bi, j=bj, x=bx, infile=binfile, datatype="float")
#print amatrix.keys()
#print bmatrix.keys()
#print set(amatrix.keys()).intersection(set(bmatrix.keys()))
#pdb.set_trace()
# define output file:
f_outfile = comparisonpath + "maphybrid_" + option.source + "_" + option.name + "_combined.txt"
f_output = open(f_outfile, "w")
print >>f_output, "\t".join(["i", "j", "a.value", "b.value", "difference", "log2.ratio"])
# merge matrixes:
for ai in amatrix:
for aj in amatrix:
if ai in bmatrix and aj in bmatrix:
output = [ai, aj, amatrix[ai][aj], bmatrix[ai][aj], amatrix[ai][aj]-bmatrix[ai][aj], numpy.log2(amatrix[ai][aj]/bmatrix[ai][aj])]
print >>f_output, "\t".join(map(str, output))
f_output.close()
# merge ortholog matrix mode:
if option.mode == "merge.matrix":
# import orthologs dictionary:
#orthologs_dict = buildOrthologs(inpath + "configure_orthologs_" + option.analysis + ".txt")
# find species-comparison orthologs:
speciesTags = option.species.split(",")
aspecies, bspecies = speciesTags
# generate output peaks name:
orthologTag = option.nametag + metrn.orthologLabel(aspecies, speciesTags)
# define orthology path:
if option.orthology == "direct":
orthologypath = orthologspath + "orthologs/"
elif option.orthology == "family":
orthologypath = orthologspath + "families/"
elif option.orthology == "groups":
orthologypath = orthologspath + "groups/"
# generate orthology dictionary:
ortholog_dict = metrn.orthologBuilder(speciesTags, path=orthologypath, orthology=option.orthology, commonNames=option.commonNames, familyFiles=option.familyFiles, verbose="OFF")
# target specie orthologs:
aorthologs = metrn.orthologFinder(aspecies, speciesTags, path=orthologypath, orthology=option.orthology, commonNames=option.commonNames, familyFiles=option.familyFiles, verbose="OFF")
borthologs = metrn.orthologFinder(bspecies, speciesTags, path=orthologypath, orthology=option.orthology, commonNames=option.commonNames, familyFiles=option.familyFiles, verbose="OFF")
# define input files:
ainfile = str(path_dict[option.source] + "/" + option.a).replace("//","/")
binfile = str(path_dict[option.source] + "/" + option.b).replace("//","/")
# find target matrix indexes (keys):
aindexes, bindexes = option.indexes.split(",")
ai, aj = aindexes.split(":")
bi, bj = bindexes.split(":")
# find target matrix values:
ax, bx = option.values.split(",")
# load organism matrixes:
amatrix = general.matrixBuilder(i=ai, j=aj, x=ax, infile=ainfile, datatype="float")
bmatrix = general.matrixBuilder(i=bi, j=bj, x=bx, infile=binfile, datatype="float")
# load organism matrixes:
aicontext = general.matrixBuilder(i=ai, j=aj, x="i.context", infile=ainfile)
ajcontext = general.matrixBuilder(i=ai, j=aj, x="j.context", infile=ainfile)
bicontext = general.matrixBuilder(i=bi, j=bj, x="i.context", infile=binfile)
bjcontext = general.matrixBuilder(i=bi, j=bj, x="j.context", infile=binfile)
# build expanded matrixes:
aexpand, bexpand = dict(), dict()
# generate comparison matrix:
ak, acombined, acomplete = 0, dict(), dict()
for aifactor in ortholog_dict[aspecies]:
for ajfactor in ortholog_dict[aspecies]:
for ai in amatrix:
for aj in amatrix[ai]:
if aifactor in ai and ajfactor in aj:
#print ai, aj, aifactor, ajfactor, aicontext[ai][aj], ajcontext[ai][aj]
ak += 1
if not aifactor in acombined:
acombined[aifactor] = dict()
if not ajfactor in acombined[aifactor]:
acombined[aifactor][ajfactor] = list()
acombined[aifactor][ajfactor].append(amatrix[ai][aj])
if not aifactor in acomplete:
acomplete[aifactor] = dict()
if not ai in acomplete[aifactor]:
acomplete[aifactor][ai] = dict()
if not ajfactor in acomplete[aifactor][ai]:
acomplete[aifactor][ai][ajfactor] = dict()
acomplete[aifactor][ai][ajfactor][aj] = amatrix[ai][aj]
# generate comparison matrix:
bk, bcombined, bcomplete = 0, dict(), dict()
for aifactor in ortholog_dict[aspecies]:
for ajfactor in ortholog_dict[aspecies]:
for bifactor in ortholog_dict[aspecies][aifactor][bspecies]:
for bjfactor in ortholog_dict[aspecies][ajfactor][bspecies]:
for bi in bmatrix:
for bj in bmatrix[bi]:
if bifactor in bi and bjfactor in bj:
#print bi, bj, bifactor, bjfactor, bicontext[bi][bj], bjcontext[bi][bj]
bk += 1
if not aifactor in bcombined:
bcombined[aifactor] = dict()
if not ajfactor in bcombined[aifactor]:
bcombined[aifactor][ajfactor] = list()
bcombined[aifactor][ajfactor].append(bmatrix[bi][bj])
if not aifactor in bcomplete:
bcomplete[aifactor] = dict()
if not bi in bcomplete[aifactor]:
bcomplete[aifactor][bi] = dict()
if not ajfactor in bcomplete[aifactor][bi]:
bcomplete[aifactor][bi][ajfactor] = dict()
bcomplete[aifactor][bi][ajfactor][bj] = bmatrix[bi][bj]
# make comparison output folders:
comparisonpath = path_dict[option.source] + "comparison/" + aspecies + "/" + bspecies + "/"
general.pathGenerator(comparisonpath)
# make combined output file:
x = 0
processed = list()
f_outfile = comparisonpath + "maphybrid_" + option.source + "_" + option.name + "_combined.txt"
f_output = open(f_outfile, "w")
print >>f_output, "\t".join(["i", "j", "a.species", "b.species", "label", "a.mean", "b.mean", "a.max", "b.max", "a.std", "b.std"])
for aifactor in acombined:
for ajfactor in acombined:
if aifactor in bcombined and ajfactor in bcombined:
label = ":".join(sorted([aifactor, ajfactor]))
print >>f_output, "\t".join(map(str, [aifactor, ajfactor, aspecies, bspecies, label, numpy.mean(acombined[aifactor][ajfactor]), numpy.mean(bcombined[aifactor][ajfactor]), max(acombined[aifactor][ajfactor]), max(bcombined[aifactor][ajfactor]), numpy.std(acombined[aifactor][ajfactor]), numpy.std(bcombined[aifactor][ajfactor])]))
x += 1
f_output.close()
# make complete output file:
y = 0
f_outfile = comparisonpath + "maphybrid_" + option.source + "_" + option.name + "_complete.txt"
f_output = open(f_outfile, "w")
print >>f_output, "\t".join(["i.ortholog", "j.ortholog", "a.species", "b.species", "a.i", "a.j", "b.i", "b.j", "a.value", "b.value", "a.comparison", "b.comparison", "i", "j"])
for aifactor in acomplete:
for ai in acomplete[aifactor]:
for ajfactor in acomplete[aifactor][ai]:
for aj in acomplete[aifactor][ai][ajfactor]:
process = False
if aifactor in bcomplete:
for bi in bcomplete[aifactor]:
if ajfactor in bcomplete[aifactor][bi]:
for bj in bcomplete[aifactor][bi][ajfactor]:
process = True
if process:
print >>f_output, "\t".join(map(str, [aifactor, ajfactor, aspecies, bspecies, ai, aj, bi, bj, acomplete[aifactor][ai][ajfactor][aj], bcomplete[aifactor][bi][ajfactor][bj], ":".join([ai, aj]), ":".join([bi, bj]), ":".join([aifactor, ai, aj]), ":".join([ajfactor, bi, bj])]))
y += 1
f_output.close()
print ak, bk
print x, y
print
# load GO lines from input files:
#orthologs = list()
#asublines, bsublines = list(), list()
#ahd = general.build_header_dict(option.a)
#bhd = general.build_header_dict(option.b)
#adict = loader(option.a, ahd)
#bdict = loader(option.b, bhd)
# generate a-file and b-file headers, as well as output header:
#aheader, bheader = list(), list()
#for header in general.valuesort(ahd):
# aheader.append(header + ".a")
#for header in general.valuesort(bhd):
# bheader.append(header + ".b")
#outheader = ["i", "j", "items.a", "items.b", "overlap.a", "overlap.b", "overlap.avg", "overlap.sum", "overlap.max", "overlap.count", "items.count"] #"a.only.goids", "b.only.goids", "overlap.goids"]
#print >>f_output, "\t".join(outheader)
"""
# prefilter goids:
print
print "Finding shared GO ids..."
gxids, axids, bxids = list(), list(), list()
ghits, ahits, bhits = list(), list(), list()
for afactor in adict:
for aline in adict[afactor]:
aitems = aline.strip().split("\t")
dataset, strain, factor, stage, institute, method = aitems[ahd["dataset"]], aitems[ahd["strain"]], aitems[ahd["factor"]], aitems[ahd["stage"]], aitems[ahd["institute"]], aitems[ahd["method"]]
goid, goterm, gocount, pvalue = aitems[ahd["go.id"]], aitems[ahd["go.term"]], aitems[ahd["go.count"]], aitems[ahd["adj.pvalue"]]
if float(pvalue) < option.cutoff: # and int(gocount) > 50 and int(gocount) < 500 :
ahits.append(goid)
ghits.append(goid)
axids.append(goid)
gxids.append(goid)
for bfactor in bdict:
for bline in bdict[bfactor]:
bitems = bline.strip().split("\t")
dataset, strain, factor, stage, institute, method = bitems[bhd["dataset"]], bitems[bhd["strain"]], bitems[bhd["factor"]], bitems[bhd["stage"]], bitems[bhd["institute"]], bitems[bhd["method"]]
goid, goterm, gocount, pvalue = bitems[bhd["go.id"]], bitems[bhd["go.term"]], bitems[bhd["go.count"]], bitems[bhd["adj.pvalue"]]
if float(pvalue) < option.cutoff: # and int(gocount) > 50 and int(gocount) < 500 :
bhits.append(goid)
ghits.append(goid)
bxids.append(goid)
gxids.append(goid)
print
"""
# merge ortholog values mode:
if option.mode == "merge.overlap":
# import orthologs dictionary:
#orthologs_dict = buildOrthologs(inpath + "configure_orthologs_" + option.analysis + ".txt")
# find species-comparison orthologs:
speciesTags = option.species.split(",")
aspecies, bspecies = speciesTags
# generate output peaks name:
orthologTag = option.nametag + metrn.orthologLabel(aspecies, speciesTags)
# define orthology path:
if option.orthology == "direct":
orthologypath = orthologspath + "orthologs/"
elif option.orthology == "family":
orthologypath = orthologspath + "families/"
elif option.orthology == "groups":
orthologypath = orthologspath + "groups/"
# generate orthology dictionary:
ortholog_dict = metrn.orthologBuilder(speciesTags, path=orthologypath, orthology=option.orthology, commonNames=option.commonNames, familyFiles=option.familyFiles, verbose="OFF")
print
print "Evaluating orthologs:"
for targetFactor in ortholog_dict[option.organism]:
for specieTag in ortholog_dict[option.organism][targetFactor]:
print option.organism, targetFactor, specieTag, ":", ",".join(ortholog_dict[option.organism][targetFactor][specieTag])
print
#pdb.set_trace()
# target specie orthologs:
aorthologs = metrn.orthologFinder(aspecies, speciesTags, path=orthologypath, orthology=option.orthology, commonNames=option.commonNames, familyFiles=option.familyFiles, verbose="OFF")
borthologs = metrn.orthologFinder(bspecies, speciesTags, path=orthologypath, orthology=option.orthology, commonNames=option.commonNames, familyFiles=option.familyFiles, verbose="OFF")
# define input files:
ainfile = str(path_dict[option.source] + "/" + option.a).replace("//","/")
binfile = str(path_dict[option.source] + "/" + option.b).replace("//","/")
# find target matrix indexes:
#aindexes, bindexes = option.indexes.split(",")
#ai, aj = aindexes.split(":")
#bi, bj = aindexes.split(":")
# find target matrix values:
ax, bx = option.values.split(",")
# load header dictionaries:
aHeader = general.build_header_dict(ainfile)
bHeader = general.build_header_dict(binfile)
# capture universe of values:
universe = list()
# load input dictionaries:
adict, acomplex = dict(), dict()
for inline in open(ainfile).readlines()[1:]:
initems = inline.strip().split("\t")
invalue = initems[aHeader[ax]]
inlabel = metrn.labelExtractor(initems, target="dataset", mode=option.label, headerDict=aHeader)
if int(initems[aHeader["genome.count"]]) < int(initems[aHeader["genome.total"]])/option.fraction:
universe.append(invalue)
if not inlabel in adict:
adict[inlabel] = list()
adict[inlabel].append(invalue)
if option.source == "go":
if not inlabel in acomplex:
acomplex[inlabel] = dict()
acomplex[inlabel][invalue] = [initems[aHeader["dataset.count"]], initems[aHeader["genome.count"]], initems[aHeader["adjusted.pvalue"]]]
bdict, bcomplex = dict(), dict()
for inline in open(binfile).readlines()[1:]:
initems = inline.strip().split("\t")
invalue = initems[bHeader[bx]]
inlabel = metrn.labelExtractor(initems, target="dataset", mode=option.label, headerDict=bHeader)
if int(initems[bHeader["genome.count"]]) < int(initems[bHeader["genome.total"]])/option.fraction:
universe.append(invalue)
if not inlabel in bdict:
bdict[inlabel] = list()
bdict[inlabel].append(invalue)
if option.source == "go":
if not inlabel in bcomplex:
bcomplex[inlabel] = dict()
bcomplex[inlabel][invalue] = [initems[bHeader["dataset.count"]], initems[bHeader["genome.count"]], initems[bHeader["adjusted.pvalue"]]]
# reduce universe of values to set:
universe = set(universe)
# make output folders:
comparisonpath = path_dict[option.source] + "comparison/" + aspecies + "/" + bspecies + "/"
general.pathGenerator(comparisonpath)
# setup output file:
f_outfile = comparisonpath + "maphybrid_" + option.source + "_" + option.name + "_combined.txt"
f_output = open(f_outfile, "w")
print >>f_output, "\t".join(["i", "j", "match", "i.values", "j.values", "overlap", "total", "i.fraction", "j.fraction", "overlap.avg", "overlap.sum", "overlap.max", "pvalue", "adjusted.pvalue", "overlap.values"])
# count number of tests:
adjust = 0
for alabel in adict:
for blabel in bdict:
adjust += 1
# generate matrix:
matrix = dict()
for alabel in adict:
for blabel in bdict:
# extract label info:
aorganism, astrain, afactor, acontext, ainstitute, amethod = alabel.split("_")[:6]
borganism, bstrain, bfactor, bcontext, binstitute, bmethod = blabel.split("_")[:6]
# determine orthology:
if bfactor in ortholog_dict[aorganism][afactor][borganism]:
match = "+"
else:
match = ""
# regenerate labels:
i = metrn.labelGenerator(target=option.target, mode="label", dataset=alabel)
j = metrn.labelGenerator(target=option.target, mode="label", dataset=blabel)
if not alabel in matrix:
matrix[alabel] = dict()
if not blabel in matrix[alabel]:
matrix[alabel][blabel] = dict()
avalues = set(adict[alabel]).intersection(universe)
bvalues = set(bdict[blabel]).intersection(universe)
aonly = set(avalues).difference(set(bvalues))
bonly = set(bvalues).difference(set(avalues))
overlap = set(avalues).intersection(set(bvalues))
total = set(avalues).union(set(bvalues))
#if afactor in ["MXI1", "MDL-1"] and bfactor in ["MXI1", "MDL-1"]:
# print len(avalues)
# print len(bvalues)
# print len(overlap)
# print overlap
# pdb.set_trace()
if len(overlap) == 0:
aoverlap, boverlap, overlap_avg, overlap_max, overlap_sum = 0, 0, 0, 0, 0
pvalue, adjPvalue = 1, 1
else:
aoverlap = float(len(overlap))/len(avalues)
boverlap = float(len(overlap))/len(bvalues)
overlap_avg = numpy.mean([aoverlap, boverlap])
overlap_max = max([aoverlap, boverlap])
overlap_sum = float(len(overlap))/len(total)
# Hypergeometric parameters:
m = len(avalues) # number of white balls in urn
n = len(universe) - len(avalues) # number of black balls in urn
N = len(bvalues) # number of balls drawn from urn
x = len(overlap) # number of white balls in drawn
# If I pull out all balls with elephant tattoos (N), is the draw enriched in white balls?:
pvalue = hyper.fishers(x, m+n, m, N, method="right")
adjPvalue = hyper.limit(pvalue*adjust)
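# For reference, an equivalent right-tail computed with scipy (a sketch,
# assuming hyper.fishers follows the standard convention; scipy.stats is
# already imported at the top of this script):
# pvalue = stats.hypergeom.sf(x - 1, m + n, m, N)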
i = i.replace("-S3", "S3").replace("-hESC", "hesc")
j = j.replace("-S3", "S3").replace("-hESC", "hesc")
output = [i, j, match, len(avalues), len(bvalues), len(overlap), len(universe), aoverlap, boverlap, overlap_avg, overlap_sum, overlap_max, pvalue, adjPvalue, ",".join(sorted(list(overlap)))]
matrix[alabel][blabel] = output
print >>f_output, "\t".join(map(str, output))
# close output file:
f_output.close()
# merge ortholog binding frequencies mode:
if option.mode == "merge.binding":
# find species-comparison orthologs:
speciesTags = option.species.split(",")
aspecies, bspecies = speciesTags
# generate output peaks name:
orthologTag = option.nametag + metrn.orthologLabel(aspecies, speciesTags)
# define orthology path:
if option.orthology == "direct":
orthologypath = orthologspath + "orthologs/"
elif option.orthology == "family":
orthologypath = orthologspath + "families/"
elif option.orthology == "groups":
orthologypath = orthologspath + "groups/"
# generate orthology dictionary:
ortholog_dict = metrn.orthologBuilder(speciesTags, path=orthologypath, orthology=option.orthology, commonNames=option.commonNames, familyFiles=option.familyFiles, verbose="OFF")
#for targetFactor in ortholog_dict[option.organism]:
# for specieTag in ortholog_dict[option.organism][targetFactor]:
# print option.organism, targetFactor, specieTag, ":", ",".join(ortholog_dict[option.organism][targetFactor][specieTag])
# target specie orthologs:
aorthologs = metrn.orthologFinder(aspecies, speciesTags, path=orthologypath, orthology=option.orthology, commonNames=option.commonNames, familyFiles=option.familyFiles, verbose="OFF")
borthologs = metrn.orthologFinder(bspecies, speciesTags, path=orthologypath, orthology=option.orthology, commonNames=option.commonNames, familyFiles=option.familyFiles, verbose="OFF")
# define input files:
ainfile = str(path_dict[option.source] + "/" + option.a).replace("//","/")
binfile = str(path_dict[option.source] + "/" + option.b).replace("//","/")
# load header dictionaries:
aHeader = general.build_header_dict(ainfile)
bHeader = general.build_header_dict(binfile)
# load binding data:
print
print "Loading binding data..."
aDict = general.build2(ainfile, id_column="dataset")
bDict = general.build2(binfile, id_column="dataset")
# make comparison path
comparisonpath = path_dict[option.source] + "comparison/" + aspecies + "/" + bspecies + "/" + option.name + "/"
general.pathGenerator(comparisonpath)
# explicit labels for the chromatin states or promoter regions:
if option.indexes == "iHMM":
inlabels = ["1_Pro", "2_Enh1", "3_Enh2", "4_Egn1", "5_Egn2", "6_Egn3", "7_Egn4", "8_Egn5", "9_Egn6", "10_Rep1", "11_Rep2", "12_Het1", "13_Het2", "14_Low1", "15_Low2", "16_Low3"]
elif option.indexes == "125kb":
inlabels = ["0:1000", "1001:2000", "2001:3000", "3001:4000", "4001:5000", "others"]
elif option.indexes == "125EN":
inlabels = ["0:500", "501:1000", "1001:2000", "2001:10000", "others", "enhancer"]
# export fractions:
print "Exporting binding ratios..."
index = 1
f_output = open(comparisonpath + "maphybrid_binding_" + option.a.split("/")[0] + "_vs_" + option.b.split("/")[0] + "_summary.txt", "w")
print >>f_output, "\t".join(["i", "j", "index", "label", "type", "color", "i.value", "j.value", "i.fraction", "j.fraction"])
for afactor in sorted(list(set(aDict.keys()).intersection(set(aorthologs)))):
for bfactor in sorted(list(set(ortholog_dict[aspecies][afactor][bspecies]).intersection(set(bDict)))):
label = ":".join([afactor, bfactor])
avalues, bvalues, xratios = list(), list(), list()
color = 1
for inlabel in inlabels:
#print aspecies, bspecies, afactor, bfactor, aDict[afactor][inlabel], bDict[bfactor][inlabel]
avalue = float(aDict[afactor][inlabel])
bvalue = float(bDict[bfactor][inlabel])
if (avalue + bvalue) > 0:
aratio = float(avalue)/(avalue + bvalue)
bratio = float(bvalue)/(avalue + bvalue)
else:
aratio, bratio = 0, 0
avalues.append(avalue)
bvalues.append(bvalue)
xratios.append(aratio)
output = [afactor, bfactor, index, label, inlabel, color, avalue, bvalue, aratio, bratio]
print >>f_output, "\t".join(map(str, output))
color += 1
#print afactor, bfactor
#print avalues
#print bvalues
#print xratios
#print
index += 1
f_output.close()
print
if __name__ == "__main__":
main()
print "Completed:", time.asctime(time.localtime())
#python mapHybrid.py --path ~/meTRN --mode merge.matrix --organism hs --species hs,ce --orthology family --source coassociations --A hs_orthoHsCe_com_cx_xot/promoter_regions/summary/mapcas_report_promoter_regions_p5e-02_matrix.txt --B ce_orthoHsCe_com_cx_xot/promoter_regions/summary/mapcas_report_promoter_regions_p5e-02_matrix.txt --indexes i:j,i:j --values mirror.passing,mirror.passing --name orthoHsCe_com_cx_xot
#python mapHybrid.py --path ~/meTRN --mode merge.overlap --organism hs --species hs,ce --orthology family --source go --A hs_orthoHsCe_com_cx_xot/p5e-1/summary/mapgo_complete_hs_orthoHsCe_com_cx_xot_p5_hc1_hp5e-02_summary --B ce_orthoHsCe_com_cx_xot/p5e-1/summary/mapgo_complete_ce_orthoHsCe_com_cx_xot_p5_hc1_hp5e-02_summary --values id,id --label rebuild --target 'factor(context)' --name orthoHsCe_com_cx_xot
|
claraya/meTRN
|
python/mapHybrid.py
|
Python
|
mit
| 29,002
|
[
"BWA",
"Bowtie"
] |
cbc81f32465f5d40f63a2723d727e5706246a5790dc3e81291129e564619c849
|
# ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
__author__ = 'Brian Wickman'
from sys import version_info as sys_version_info
from numbers import Integral, Real
try:
# CPython 2.x
from cStringIO import StringIO
except ImportError:
try:
# Python 2.x
from StringIO import StringIO
except ImportError:
# Python 3.x
from io import StringIO
from io import BytesIO
class SingletonMetaclass(type):
"""
Singleton metaclass.
"""
def __init__(cls, name, bases, attrs):
super(SingletonMetaclass, cls).__init__(name, bases, attrs)
cls.instance = None
def __call__(cls, *args, **kw):
if cls.instance is None:
cls.instance = super(SingletonMetaclass, cls).__call__(*args, **kw)
return cls.instance
Singleton = SingletonMetaclass('Singleton', (object,), {})
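# Usage sketch: any class derived from Singleton shares a single instance.
#   class Config(Singleton):
#       pass
#   assert Config() is Config()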
try:
from functools import total_ordering
except ImportError:
# Taken from Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/functools.py
def total_ordering(cls):
"""Class decorator that fills in missing ordering methods"""
convert = {
'__lt__': [('__gt__', lambda self, other: not (self < other or self == other)),
('__le__', lambda self, other: self < other or self == other),
('__ge__', lambda self, other: not self < other)],
'__le__': [('__ge__', lambda self, other: not self <= other or self == other),
('__lt__', lambda self, other: self <= other and not self == other),
('__gt__', lambda self, other: not self <= other)],
'__gt__': [('__lt__', lambda self, other: not (self > other or self == other)),
('__ge__', lambda self, other: self > other or self == other),
('__le__', lambda self, other: not self > other)],
'__ge__': [('__le__', lambda self, other: (not self >= other) or self == other),
('__gt__', lambda self, other: self >= other and not self == other),
('__lt__', lambda self, other: not self >= other)]
}
roots = set(dir(cls)) & set(convert)
if not roots:
raise ValueError('must define at least one ordering operation: < > <= >=')
root = max(roots) # prefer __lt__ to __le__ to __gt__ to __ge__
for opname, opfunc in convert[root]:
if opname not in roots:
opfunc.__name__ = opname
opfunc.__doc__ = getattr(int, opname).__doc__
setattr(cls, opname, opfunc)
return cls
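# Usage sketch for the backport: define __eq__ plus one rich comparison and
# the decorator derives the remaining three.
#   @total_ordering
#   class Version(object):
#       def __init__(self, n): self.n = n
#       def __eq__(self, other): return self.n == other.n
#       def __lt__(self, other): return self.n < other.n
#   assert Version(1) <= Version(2)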
class Compatibility(object):
"""2.x + 3.x compatibility"""
PY2 = sys_version_info[0] == 2
PY3 = sys_version_info[0] == 3
StringIO = StringIO
BytesIO = BytesIO if PY3 else StringIO
integer = (Integral,)
real = (Real,)
numeric = integer + real
string = (str,) if PY3 else (str, unicode)
bytes = (bytes,)
if PY2:
@staticmethod
def to_bytes(st):
return str(st)
else:
@staticmethod
def to_bytes(st):
return bytes(st, encoding='utf8')
if PY3:
@staticmethod
def exec_function(ast, globals_map):
locals_map = globals_map
exec(ast, globals_map, locals_map)
return locals_map
else:
eval(compile(
"""
@staticmethod
def exec_function(ast, globals_map):
locals_map = globals_map
exec ast in globals_map, locals_map
return locals_map
""", "<exec_function>", "exec"))
__all__ = [
'Singleton',
'Compatibility',
]
|
imsut/commons
|
src/python/twitter/common/lang/__init__.py
|
Python
|
apache-2.0
| 4,283
|
[
"Brian"
] |
030dcf2f342c87784c74a0d5e6722fb63a85f1d4197e45b49ad4aac9b961f73a
|
# License: BSD 3 clause
import pickle
import itertools
import numpy as np
import pytest
from sklearn.metrics import DistanceMetric
from sklearn.neighbors._ball_tree import (
BallTree,
kernel_norm,
DTYPE,
ITYPE,
NeighborsHeap as NeighborsHeapBT,
simultaneous_sort as simultaneous_sort_bt,
nodeheap_sort as nodeheap_sort_bt,
)
from sklearn.neighbors._kd_tree import (
KDTree,
NeighborsHeap as NeighborsHeapKDT,
simultaneous_sort as simultaneous_sort_kdt,
nodeheap_sort as nodeheap_sort_kdt,
)
from sklearn.utils import check_random_state
from numpy.testing import assert_array_almost_equal, assert_allclose
rng = np.random.RandomState(42)
V_mahalanobis = rng.rand(3, 3)
V_mahalanobis = np.dot(V_mahalanobis, V_mahalanobis.T)
DIMENSION = 3
METRICS = {
"euclidean": {},
"manhattan": {},
"minkowski": dict(p=3),
"chebyshev": {},
"seuclidean": dict(V=rng.random_sample(DIMENSION)),
"wminkowski": dict(p=3, w=rng.random_sample(DIMENSION)),
"mahalanobis": dict(V=V_mahalanobis),
}
KD_TREE_METRICS = ["euclidean", "manhattan", "chebyshev", "minkowski"]
BALL_TREE_METRICS = list(METRICS)
def dist_func(x1, x2, p):
return np.sum((x1 - x2) ** p) ** (1.0 / p)
def compute_kernel_slow(Y, X, kernel, h):
d = np.sqrt(((Y[:, None, :] - X) ** 2).sum(-1))
norm = kernel_norm(h, X.shape[1], kernel)
if kernel == "gaussian":
return norm * np.exp(-0.5 * (d * d) / (h * h)).sum(-1)
elif kernel == "tophat":
return norm * (d < h).sum(-1)
elif kernel == "epanechnikov":
return norm * ((1.0 - (d * d) / (h * h)) * (d < h)).sum(-1)
elif kernel == "exponential":
return norm * (np.exp(-d / h)).sum(-1)
elif kernel == "linear":
return norm * ((1 - d / h) * (d < h)).sum(-1)
elif kernel == "cosine":
return norm * (np.cos(0.5 * np.pi * d / h) * (d < h)).sum(-1)
else:
raise ValueError("kernel not recognized")
def brute_force_neighbors(X, Y, k, metric, **kwargs):
D = DistanceMetric.get_metric(metric, **kwargs).pairwise(Y, X)
ind = np.argsort(D, axis=1)[:, :k]
dist = D[np.arange(Y.shape[0])[:, None], ind]
return dist, ind
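# Sketch: for each row of Y this returns the k smallest distances to rows of X
# (sorted ascending) and the matching indices into X, e.g.
#   dist, ind = brute_force_neighbors(X, Y, k=3, metric="euclidean")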
@pytest.mark.parametrize("Cls", [KDTree, BallTree])
@pytest.mark.parametrize(
"kernel", ["gaussian", "tophat", "epanechnikov", "exponential", "linear", "cosine"]
)
@pytest.mark.parametrize("h", [0.01, 0.1, 1])
@pytest.mark.parametrize("rtol", [0, 1e-5])
@pytest.mark.parametrize("atol", [1e-6, 1e-2])
@pytest.mark.parametrize("breadth_first", [True, False])
def test_kernel_density(
Cls, kernel, h, rtol, atol, breadth_first, n_samples=100, n_features=3
):
rng = check_random_state(1)
X = rng.random_sample((n_samples, n_features))
Y = rng.random_sample((n_samples, n_features))
dens_true = compute_kernel_slow(Y, X, kernel, h)
tree = Cls(X, leaf_size=10)
dens = tree.kernel_density(
Y, h, atol=atol, rtol=rtol, kernel=kernel, breadth_first=breadth_first
)
assert_allclose(dens, dens_true, atol=atol, rtol=max(rtol, 1e-7))
@pytest.mark.parametrize("Cls", [KDTree, BallTree])
def test_neighbor_tree_query_radius(Cls, n_samples=100, n_features=10):
rng = check_random_state(0)
X = 2 * rng.random_sample(size=(n_samples, n_features)) - 1
query_pt = np.zeros(n_features, dtype=float)
eps = 1e-15 # roundoff error can cause test to fail
tree = Cls(X, leaf_size=5)
rad = np.sqrt(((X - query_pt) ** 2).sum(1))
for r in np.linspace(rad[0], rad[-1], 100):
ind = tree.query_radius([query_pt], r + eps)[0]
i = np.where(rad <= r + eps)[0]
ind.sort()
i.sort()
assert_array_almost_equal(i, ind)
@pytest.mark.parametrize("Cls", [KDTree, BallTree])
def test_neighbor_tree_query_radius_distance(Cls, n_samples=100, n_features=10):
rng = check_random_state(0)
X = 2 * rng.random_sample(size=(n_samples, n_features)) - 1
query_pt = np.zeros(n_features, dtype=float)
eps = 1e-15 # roundoff error can cause test to fail
tree = Cls(X, leaf_size=5)
rad = np.sqrt(((X - query_pt) ** 2).sum(1))
for r in np.linspace(rad[0], rad[-1], 100):
ind, dist = tree.query_radius([query_pt], r + eps, return_distance=True)
ind = ind[0]
dist = dist[0]
d = np.sqrt(((query_pt - X[ind]) ** 2).sum(1))
assert_array_almost_equal(d, dist)
@pytest.mark.parametrize("Cls", [KDTree, BallTree])
@pytest.mark.parametrize("dualtree", (True, False))
def test_neighbor_tree_two_point(Cls, dualtree, n_samples=100, n_features=3):
rng = check_random_state(0)
X = rng.random_sample((n_samples, n_features))
Y = rng.random_sample((n_samples, n_features))
r = np.linspace(0, 1, 10)
tree = Cls(X, leaf_size=10)
D = DistanceMetric.get_metric("euclidean").pairwise(Y, X)
counts_true = [(D <= ri).sum() for ri in r]
counts = tree.two_point_correlation(Y, r=r, dualtree=dualtree)
assert_array_almost_equal(counts, counts_true)
@pytest.mark.parametrize("NeighborsHeap", [NeighborsHeapBT, NeighborsHeapKDT])
def test_neighbors_heap(NeighborsHeap, n_pts=5, n_nbrs=10):
heap = NeighborsHeap(n_pts, n_nbrs)
rng = check_random_state(0)
for row in range(n_pts):
d_in = rng.random_sample(2 * n_nbrs).astype(DTYPE, copy=False)
i_in = np.arange(2 * n_nbrs, dtype=ITYPE)
for d, i in zip(d_in, i_in):
heap.push(row, d, i)
ind = np.argsort(d_in)
d_in = d_in[ind]
i_in = i_in[ind]
d_heap, i_heap = heap.get_arrays(sort=True)
assert_array_almost_equal(d_in[:n_nbrs], d_heap[row])
assert_array_almost_equal(i_in[:n_nbrs], i_heap[row])
@pytest.mark.parametrize("nodeheap_sort", [nodeheap_sort_bt, nodeheap_sort_kdt])
def test_node_heap(nodeheap_sort, n_nodes=50):
rng = check_random_state(0)
vals = rng.random_sample(n_nodes).astype(DTYPE, copy=False)
i1 = np.argsort(vals)
vals2, i2 = nodeheap_sort(vals)
assert_array_almost_equal(i1, i2)
assert_array_almost_equal(vals[i1], vals2)
@pytest.mark.parametrize(
"simultaneous_sort", [simultaneous_sort_bt, simultaneous_sort_kdt]
)
def test_simultaneous_sort(simultaneous_sort, n_rows=10, n_pts=201):
rng = check_random_state(0)
dist = rng.random_sample((n_rows, n_pts)).astype(DTYPE, copy=False)
ind = (np.arange(n_pts) + np.zeros((n_rows, 1))).astype(ITYPE, copy=False)
dist2 = dist.copy()
ind2 = ind.copy()
# simultaneous sort rows using function
simultaneous_sort(dist, ind)
# simultaneous sort rows using numpy
i = np.argsort(dist2, axis=1)
row_ind = np.arange(n_rows)[:, None]
dist2 = dist2[row_ind, i]
ind2 = ind2[row_ind, i]
assert_array_almost_equal(dist, dist2)
assert_array_almost_equal(ind, ind2)
@pytest.mark.parametrize("Cls", [KDTree, BallTree])
def test_gaussian_kde(Cls, n_samples=1000):
# Compare gaussian KDE results to scipy.stats.gaussian_kde
from scipy.stats import gaussian_kde
rng = check_random_state(0)
x_in = rng.normal(0, 1, n_samples)
x_out = np.linspace(-5, 5, 30)
for h in [0.01, 0.1, 1]:
tree = Cls(x_in[:, None])
gkde = gaussian_kde(x_in, bw_method=h / np.std(x_in))
dens_tree = tree.kernel_density(x_out[:, None], h) / n_samples
dens_gkde = gkde.evaluate(x_out)
assert_array_almost_equal(dens_tree, dens_gkde, decimal=3)
# TODO: Remove filterwarnings in 1.3 when wminkowski is removed
@pytest.mark.filterwarnings("ignore:WMinkowskiDistance:FutureWarning:sklearn")
@pytest.mark.parametrize(
"Cls, metric",
itertools.chain(
[(KDTree, metric) for metric in KD_TREE_METRICS],
[(BallTree, metric) for metric in BALL_TREE_METRICS],
),
)
@pytest.mark.parametrize("k", (1, 3, 5))
@pytest.mark.parametrize("dualtree", (True, False))
@pytest.mark.parametrize("breadth_first", (True, False))
def test_nn_tree_query(Cls, metric, k, dualtree, breadth_first):
rng = check_random_state(0)
X = rng.random_sample((40, DIMENSION))
Y = rng.random_sample((10, DIMENSION))
kwargs = METRICS[metric]
kdt = Cls(X, leaf_size=1, metric=metric, **kwargs)
dist1, ind1 = kdt.query(Y, k, dualtree=dualtree, breadth_first=breadth_first)
dist2, ind2 = brute_force_neighbors(X, Y, k, metric, **kwargs)
# don't check indices here: if there are any duplicate distances,
# the indices may not match. Distances should not have this problem.
assert_array_almost_equal(dist1, dist2)
@pytest.mark.parametrize(
"Cls, metric",
[(KDTree, "euclidean"), (BallTree, "euclidean"), (BallTree, dist_func)],
)
@pytest.mark.parametrize("protocol", (0, 1, 2))
def test_pickle(Cls, metric, protocol):
rng = check_random_state(0)
X = rng.random_sample((10, 3))
if hasattr(metric, "__call__"):
kwargs = {"p": 2}
else:
kwargs = {}
tree1 = Cls(X, leaf_size=1, metric=metric, **kwargs)
ind1, dist1 = tree1.query(X)
s = pickle.dumps(tree1, protocol=protocol)
tree2 = pickle.loads(s)
ind2, dist2 = tree2.query(X)
assert_array_almost_equal(ind1, ind2)
assert_array_almost_equal(dist1, dist2)
assert isinstance(tree2, Cls)
|
manhhomienbienthuy/scikit-learn
|
sklearn/neighbors/tests/test_neighbors_tree.py
|
Python
|
bsd-3-clause
| 9,221
|
[
"Gaussian"
] |
22fe40daa1f82d00699cdf02758c84d972e2cac531e7eb42b11397731fbf5248
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
import datetime
from io import BytesIO
from os.path import dirname, join
from reportlab.lib.colors import black
from reportlab.lib.pagesizes import A4
from reportlab.lib.styles import StyleSheet1, ParagraphStyle
from reportlab.lib.units import cm
from reportlab.platypus import SimpleDocTemplate, Paragraph, Image, Table, Spacer
from reportlab.platypus.flowables import HRFlowable
from reportlab.pdfbase.pdfmetrics import stringWidth
from reportlab.rl_config import defaultPageSize
from reportlab.lib.enums import TA_JUSTIFY, TA_RIGHT, TA_CENTER
from reportlab.graphics.barcode import qr
from reportlab.graphics.shapes import Drawing
from pycroft.model.finance import BankAccount
from pycroft import config
ASSETS_DIRECTORY = join(dirname(__file__), 'assets')
ASSETS_LOGO_FILENAME = join(ASSETS_DIRECTORY, 'logo.png')
ASSETS_EMAIL_FILENAME = join(ASSETS_DIRECTORY, 'email.png')
ASSETS_FACEBOOK_FILENAME = join(ASSETS_DIRECTORY, 'facebook.png')
ASSETS_TWITTER_FILENAME = join(ASSETS_DIRECTORY, 'twitter.png')
ASSETS_WEB_FILENAME = join(ASSETS_DIRECTORY, 'web.png')
ASSETS_HOUSE_FILENAME = join(ASSETS_DIRECTORY, 'house.png')
def generate_user_sheet(user, user_id, plain_password, generation_purpose=''):
"""Create a „new member“ datasheet for the given user
:param User user: A pycroft user
:param str user_id: The user's ID. It has to be passed separately,
because the user_id is not apparent from the ORM object
itself; encoding is done in the library.
:param str plain_password: The password
"""
# Create the PDF document (page size: DIN A4, portrait)
buf = BytesIO()
pdf = SimpleDocTemplate(buf, pagesize=A4,
rightMargin=2 * cm,
leftMargin=2 * cm,
topMargin=0.5 * cm,
bottomMargin=0.5 * cm)
style = getStyleSheet()
story = []
PAGE_WIDTH = defaultPageSize[0]
PAGE_HEIGHT = defaultPageSize[1]
# HEADER
im_web = Image(ASSETS_WEB_FILENAME, 0.4 * cm, 0.4 * cm)
im_house = Image(ASSETS_HOUSE_FILENAME, 0.4 * cm, 0.4 * cm)
im_email = Image(ASSETS_EMAIL_FILENAME, 0.4 * cm, 0.4 * cm)
im_fb = Image(ASSETS_FACEBOOK_FILENAME, 0.4 * cm, 0.4 * cm)
im_t = Image(ASSETS_TWITTER_FILENAME, 0.4 * cm, 0.4 * cm)
im_logo = Image(ASSETS_LOGO_FILENAME, 3.472 * cm, 1 * cm)
if user.room:
shortinfo = Paragraph('{dorm}<br/>{name}<br/>{level}/{room}'.format(
dorm=str(user.room.building.short_name),
name=user.name,
level=str(user.room.level),
room=str(user.room.number)
), style['RightText'])
else:
shortinfo = Paragraph('{name}'.format(
name=user.name
), style['RightText'])
data = [
[im_web, 'https://agdsn.de', im_t, '/ag_dsn'],
[im_email, 'support@agdsn.de', im_fb, '/DresdnerStudentenNetz']
]
social = Table(data, colWidths=[0.5 * cm, 3.5 * cm, 0.5 * cm],
rowHeights=[0.5 * cm] * 2)
data = [[im_logo, social, shortinfo]]
t = Table(data, colWidths=[3.972 * cm, 9.5 * cm, 4 * cm],
style=[
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
])
story.append(t)
######################
story.append(HRFlowable(width="100%",
thickness=1,
color=black,
spaceBefore=0.0 * cm,
spaceAfter=0.8 * cm))
story.append(
Paragraph('Welcome as a member of the AG DSN, {}!'
.format(user.name),
style['BodyText']))
story.append(
Paragraph('We are proud to announce that your network access has been '
'activated. If you encounter any problems, drop us a mail or '
'visit us during our office hours. You can find contact '
'information below on this page.',
style['BodyText']))
story.append(
Paragraph(
'Please make sure to pay your membership contribution on time.'
' You can find further details on the bottom of this sheet.',
style['Bold']))
story.append(Paragraph('Wishing you all the best,', style['BodyText']))
story.append(Paragraph('Your AG DSN', style['BodyText']))
story.append(HRFlowable(width="100%",
thickness=3,
color=black,
spaceBefore=0.4 * cm,
spaceAfter=0.4 * cm))
macs = []
for user_host in user.hosts:
for ip in user_host.ips:
macs.append(ip.interface.mac)
data = [['Name:', user.name, 'User-ID:', user_id],
['Username:', user.login, 'MAC-Address:', ', '.join(macs)],
['Password:', plain_password, 'Location:',
str(user.room) if user.room else ""],
['E-Mail:', user.email, "", ""]]
t = Table(data,
style=[
('FONTNAME', (1, 2), (1, 2), 'Courier'),
],
colWidths=[pdf.width * 0.15, pdf.width * 0.34] * 2,)
story.append(t)
story.append(
HRFlowable(width="100%", thickness=3, color=black, spaceBefore=0.4 * cm,
spaceAfter=0.6 * cm))
# offices
im_web = Image(ASSETS_WEB_FILENAME, 0.4 * cm, 0.4 * cm)
im_house = Image(ASSETS_HOUSE_FILENAME, 0.4 * cm, 0.4 * cm)
im_email = Image(ASSETS_EMAIL_FILENAME, 0.4 * cm, 0.4 * cm)
im_fb = Image(ASSETS_FACEBOOK_FILENAME, 0.4 * cm, 0.4 * cm)
im_t = Image(ASSETS_TWITTER_FILENAME, 0.4 * cm, 0.4 * cm)
data = [
['', im_house, 'Wundtstraße 5', im_house, 'Hochschulstr. 46', im_house,
'Borsbergstr. 34'],
['', '', 'Doorbell 0100', '', 'Basement', '', '7th floor'],
['', '', '01217 Dresden', '', '01069 Dresden', '', '01309 Dresden'],
['', '', '', '', '', '', ''],
['Office hours:', '', 'Mon, 7pm - 8pm', '', 'Mon, 7pm - 7.30pm', '',
'Mon, 8pm - 9pm'],
['', '', 'Thu, 7pm - 8pm', '', 'Thu, 7pm - 7.30pm', '',
'Thu, 8pm - 9pm']
]
rowHeight = 0.4 * cm
t = Table(data, colWidths=[2.5 * cm, 0.5 * cm, 3.5 * cm, 0.5 * cm, 3.5 * cm,
0.5 * cm, 3.5 * cm],
rowHeights=[rowHeight, rowHeight, rowHeight, rowHeight, rowHeight,
rowHeight],
hAlign='CENTER'
)
story.append(t)
story.append(
Paragraph('''<b>Interested in our work?</b>
In the podcast MultiCast you can hear about the latest developments and
our day-to-day work in the student network: https://podcast.agdsn.de/''', \
style['JustifyText']))
story.append(Paragraph('''<b>Join us:</b>\nThe student network was created and is run by students like yourself. If you are interested in our work, don’t
hesitate to visit us at our office. There are many ways of contributing to our cause without needing to be a
computer science engineer. Just to mention some possible contributions: administration and finances, network
maintenance, software development and many more. Besides, you can add an extracurricular
activity to your CV and have the opportunity to see and work with usually hidden technology. We would be
happy to welcome you. Be our guest at our office hours.''',
style['JustifyText']))
story.append(
HRFlowable(width="100%", thickness=3, color=black, spaceBefore=0.4 * cm,
spaceAfter=0.4 * cm))
# Payment details
contribution = 500 # monthly membership contribution in euro cents
story.append(Paragraph('''<b>Payment details:</b> As a member, you have to transfer a monthly contribution of {0:1.2f}€ to our bank account.
Paying cash is not possible. The contribution is due at the end of each month. You can pay as much in advance as you want, we will simply subtract
the monthly contribution at the end of each month. We recommend that you pay at the beginning of each semester in advance, meaning you transfer
six monthly contributions at once.'''.format(
(contribution / 100)), style['JustifyText']))
bank = config.membership_fee_bank_account
recipient = 'Studentenrat TUD - AG DSN'
if user.room:
purpose = '{id}, {name}, {dorm} {level} {room}'.format(
id=user_id,
name=user.name,
dorm=str(user.room.building.short_name),
level=str(user.room.level),
room=str(user.room.number)
)
else:
purpose = '{id}, {name}'.format(
id=user_id,
name=user.name
)
amount = contribution / 100
data = [
['Beneficiary:', recipient],
['Bank:', bank.bank],
['IBAN:', bank.iban],
['BIC:', bank.bic],
['Purpose/Intended use/\nDescription:', purpose],
['Amount:', '{0:1.2f}€'.format(amount)]
]
payment_table = Table(data, colWidths=[4 * cm, 4 * cm],
style=[
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
])
qr_size = 4 * cm
qr_code = qr.QrCodeWidget(
generate_epc_qr_code(bank, recipient, amount, purpose))
bounds = qr_code.getBounds()
width = bounds[2] - bounds[0]
height = bounds[3] - bounds[1]
girocode = Drawing(qr_size, qr_size,
transform=[qr_size / width, 0, 0, qr_size / height, 0,
0])
girocode.add(qr_code)
data = [[payment_table, girocode]]
t = Table(data, colWidths=[13 * cm, 4 * cm],
style=[
('VALIGN', (0, 0), (-1, -1), 'BOTTOM'),
])
story.append(t)
story.append(
Paragraph(
'<i>Scan the QR-Code with your banking app to import the payment details.</i>',
style['CenterText'])
)
if generation_purpose:
generation_purpose = ' ({})'.format(generation_purpose)
story.append(
Paragraph(
'<i>Generated on {date}{purpose}</i>'.format(
date=datetime.date.today(),
purpose=generation_purpose
),
ParagraphStyle(name='SmallRightText',
parent=style['Normal'],
alignment=TA_RIGHT,
fontSize=8,
spaceBefore=15))
)
# Generate the PDF and return its contents
pdf.build(story)
return buf.getvalue()
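# Usage sketch (hypothetical ORM user object): the function returns raw PDF bytes.
#   pdf_bytes = generate_user_sheet(user, user_id="1337-42", plain_password="secret")
#   with open("datasheet.pdf", "wb") as f:
#       f.write(pdf_bytes)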
def generate_wifi_user_sheet(user, user_id, plain_password):
"""Create a „wifi“ datasheet for the given user
:param User user: A pycroft user
:param str user_id: The user's ID. It has to be passed separately,
because the user_id is not apparent from the ORM object
itself; encoding is done in the library.
:param str plain_password: The password
"""
# Create the PDF document (page size: DIN A4, portrait)
buf = BytesIO()
pdf = SimpleDocTemplate(buf, pagesize=A4,
rightMargin=2 * cm,
leftMargin=2 * cm,
topMargin=0.5 * cm,
bottomMargin=0.5 * cm)
style = getStyleSheet()
story = []
PAGE_WIDTH = defaultPageSize[0]
PAGE_HEIGHT = defaultPageSize[1]
# HEADER
im_web = Image(ASSETS_WEB_FILENAME, 0.4 * cm, 0.4 * cm)
im_house = Image(ASSETS_HOUSE_FILENAME, 0.4 * cm, 0.4 * cm)
im_email = Image(ASSETS_EMAIL_FILENAME, 0.4 * cm, 0.4 * cm)
im_fb = Image(ASSETS_FACEBOOK_FILENAME, 0.4 * cm, 0.4 * cm)
im_t = Image(ASSETS_TWITTER_FILENAME, 0.4 * cm, 0.4 * cm)
im_logo = Image(ASSETS_LOGO_FILENAME, 3.472 * cm, 1 * cm)
if user.room:
shortinfo = Paragraph('{dorm}<br/>{name}<br/>{level}/{room}'.format(
dorm=str(user.room.building.short_name),
name=user.name,
level=str(user.room.level),
room=str(user.room.number)
), style['RightText'])
else:
shortinfo = Paragraph('{name}'.format(
name=user.name
), style['RightText'])
data = [
[im_web, 'https://agdsn.de', im_t, '/ag_dsn'],
[im_email, 'support@agdsn.de', im_fb, '/DresdnerStudentenNetz']
]
social = Table(data, colWidths=[0.5 * cm, 3.5 * cm, 0.5 * cm],
rowHeights=[0.5 * cm] * 2)
data = [[im_logo, social, shortinfo]]
t = Table(data, colWidths=[3.972 * cm, 9.5 * cm, 4 * cm],
style=[
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
])
story.append(t)
######################
story.append(HRFlowable(width="100%",
thickness=1,
color=black,
spaceBefore=0.0 * cm,
spaceAfter=0.8 * cm))
story.append(
Paragraph('Hello {},'
.format(user.name),
style['BodyText']))
story.append(
Paragraph("We've been working on provisioning Wi-Fi for all members lately. "
"To gain experience with operating an Wi-Fi in larger scale, "
"we are putting it into test operation at the Gret-Palucca-Strasse dormitory. "
"With this test operation we want to find out, among other things, "
"how good the coverage of the signal is in different parts of the building and "
"whether the underlying services for managing the networks and "
"the conversion to the public IP address work. ",
style['BodyText']))
story.append(
Paragraph(
'You can find instructions to connect, further information and data protection notices at: '
'https://agdsn.de/sipa/pages/service/wlan-test',
style['BodyText'])
)
story.append(
Paragraph(
'We would really like it if you tried our Wi-Fi. '
'If you have any questions, feedback or problems, please come to our office or write to us.',
style['BodyText'])
)
data = [
['Username:', user.login],
['Password:', plain_password],
['SSID:', 'agdsn-gps-test'],
]
credential_table = Table(data, colWidths=[3 * cm, 4 * cm],
style=[
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
])
qr_size = 4 * cm
qr_code = qr.QrCodeWidget('https://agdsn.de/sipa/pages/service/wlan-test')
bounds = qr_code.getBounds()
width = bounds[2] - bounds[0]
height = bounds[3] - bounds[1]
qrcode = Drawing(qr_size, qr_size,
transform=[qr_size / width, 0, 0, qr_size / height, 0,
0])
qrcode.add(qr_code)
data = [[credential_table, qrcode]]
t = Table(data, colWidths=[13 * cm, 4 * cm],
style=[
('VALIGN', (0, 0), (-1, -1), 'MIDDLE'),
])
story.append(t)
story.append(Paragraph('Best regards,', style['BodyText']))
story.append(Paragraph('Your AG DSN', style['BodyText']))
s = Spacer(width=1 * cm, height=10 * cm)
story.append(s)
story.append(HRFlowable(width="100%",
thickness=3,
color=black,
spaceBefore=0.4 * cm,
spaceAfter=0.4 * cm))
# offices
im_web = Image(ASSETS_WEB_FILENAME, 0.4 * cm, 0.4 * cm)
im_house = Image(ASSETS_HOUSE_FILENAME, 0.4 * cm, 0.4 * cm)
im_email = Image(ASSETS_EMAIL_FILENAME, 0.4 * cm, 0.4 * cm)
im_fb = Image(ASSETS_FACEBOOK_FILENAME, 0.4 * cm, 0.4 * cm)
im_t = Image(ASSETS_TWITTER_FILENAME, 0.4 * cm, 0.4 * cm)
data = [
['', im_house, 'Wundtstraße 5', im_house, 'Hochschulstr. 46', im_house,
'Borsbergstr. 34'],
['', '', 'Doorbell 0100', '', 'Basement', '', '7th floor'],
['', '', '01217 Dresden', '', '01069 Dresden', '', '01309 Dresden'],
['', '', '', '', '', '', ''],
['Office hours:', '', 'Mon, 7pm - 8pm', '', 'Mon, 7pm - 7.30pm', '',
'Mon, 8pm - 9pm'],
['', '', 'Thu, 7pm - 8pm', '', 'Thu, 7pm - 7.30pm', '',
'Thu, 8pm - 9pm']
]
rowHeight = 0.4 * cm
t = Table(data, colWidths=[2.5 * cm, 0.5 * cm, 3.5 * cm, 0.5 * cm, 3.5 * cm,
0.5 * cm, 3.5 * cm],
rowHeights=[rowHeight, rowHeight, rowHeight, rowHeight, rowHeight,
rowHeight],
hAlign='CENTER'
)
story.append(t)
story.append(
Paragraph(
'<i>Generated on {date}</i>'.format(
date=datetime.date.today(),
),
ParagraphStyle(name='SmallRightText',
parent=style['Normal'],
alignment=TA_RIGHT,
fontSize=8,
spaceBefore=15))
)
# Generate the PDF and return its contents
pdf.build(story)
return buf.getvalue()
def getStyleSheet():
"""Returns a stylesheet object"""
stylesheet = StyleSheet1()
stylesheet.add(ParagraphStyle(name='Normal',
fontName="Helvetica",
fontSize=10,
leading=12))
stylesheet.add(ParagraphStyle(name='BodyText',
parent=stylesheet['Normal'],
spaceBefore=7))
stylesheet.add(ParagraphStyle(name='RightText',
parent=stylesheet['Normal'],
alignment=TA_RIGHT,
spaceBefore=14))
stylesheet.add(ParagraphStyle(name='JustifyText',
parent=stylesheet['Normal'],
alignment=TA_JUSTIFY,
spaceBefore=14))
stylesheet.add(ParagraphStyle(name='CenterText',
parent=stylesheet['Normal'],
alignment=TA_CENTER,
spaceBefore=14))
stylesheet.add(ParagraphStyle(name='Bold',
parent=stylesheet['BodyText'],
fontName="Helvetica-Bold"))
return stylesheet
def generate_epc_qr_code(bank: BankAccount, recipient, amount, purpose):
# generate content for epc-qr-code (also known as giro-code)
EPC_FORMAT = \
"BCD\n001\n1\nSCT\n{bic}\n{recipient}\n{iban}\nEUR{amount}\n\n\n{purpose}\n\n"
return EPC_FORMAT.format(
bic=bank.bic,
recipient=recipient,
iban=bank.iban,
amount=amount,
purpose=purpose)
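# Rendered payload sketch (hypothetical field values), one field per line as
# laid out in EPC_FORMAT above: "BCD", version "001", encoding "1", "SCT",
# the BIC, the recipient name, the IBAN, "EUR5.00", two blank fields, the
# purpose text, and a trailing blank field.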
|
lukasjuhrich/pycroft
|
pycroft/helpers/printing/__init__.py
|
Python
|
apache-2.0
| 19,274
|
[
"VisIt"
] |
2385467d4ae95ad3e69449c3bf78847d358b930e01618e1ac6124ff7c6f647db
|
#
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2019 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of Psi4.
#
# Psi4 is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# Psi4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with Psi4; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
import time
from abc import ABC, abstractmethod
import numpy as np
from psi4 import core
from .exceptions import ValidationError
"""
Generalized iterative solvers for Psi4.
"""
def cg_solver(rhs_vec, hx_function, preconditioner, guess=None, printer=None, printlvl=1, maxiter=20, rcond=1.e-6):
"""
Solves the Ax = b linear equations via Conjugate Gradient. The `A` matrix must be a hermitian, positive definite matrix.
Parameters
----------
rhs_vec : list of :py:class:`~psi4.core.Matrix`
The RHS vector in the Ax=b equation.
hx_function : function
Takes in a list of :py:class:`~psi4.core.Matrix` objects and a mask of active indices. Returns the Hessian-vector product.
preconditioner : function
Takes in a list of :py:class:`~psi4.core.Matrix` objects and a mask of active indices. Returns the preconditioned value.
guess : list of :py:class:`~psi4.core.Matrix`, optional
Starting vectors, if None use a preconditioner(rhs) guess
printer : function, optional
Takes in a list of current x and residual vectors and provides a print function. This function can also
return a value that represents the current residual.
printlvl : int, optional
The level of printing provided by this function.
maxiter : int, optional
The maximum number of iterations this function will take.
rcond : float, optional
The residual norm for convergence.
Returns
-------
ret : tuple, list of :py:class:`~psi4.core.Matrix`
Returns the solved `x` vectors and `r` vectors.
Notes
-----
This is a generalized CG solver that can solve multiple RHS's simultaneously when
it is advantageous to do so.
Examples
--------
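A minimal sketch (``hx`` and ``precond`` are hypothetical callables matching
the signatures described above; not a runnable doctest)::

    >>> b = [core.Matrix.from_array(rhs_array)]
    >>> x, r = cg_solver(b, hx, precond, maxiter=50)  # doctest: +SKIP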
"""
tstart = time.time()
if printlvl:
core.print_out("\n -----------------------------------------------------\n")
core.print_out(" " + "Generalized CG Solver".center(52) + "\n")
core.print_out(" " + "by Daniel. G. A. Smith".center(52) + "\n")
core.print_out(" -----------------------------------------------------\n")
core.print_out(" Maxiter = %11d\n" % maxiter)
core.print_out(" Convergence = %11.3E\n" % rcond)
core.print_out(" Number of equations = %11ld\n\n" % len(rhs_vec))
core.print_out(" %4s %14s %12s %6s %6s\n" % ("Iter", "Residual RMS", "Max RMS", "Remain", "Time [s]"))
core.print_out(" -----------------------------------------------------\n")
nrhs = len(rhs_vec)
active_mask = [True for x in range(nrhs)]
# Start function
if guess is None:
x_vec = preconditioner(rhs_vec, active_mask)
else:
if len(guess) != len(rhs_vec):
raise ValidationError("CG Solver: Guess vector length does not match RHS vector length.")
x_vec = [x.clone() for x in guess]
Ax_vec = hx_function(x_vec, active_mask)
# Set it up
r_vec = [] # Residual vectors
for x in range(nrhs):
tmp_r = rhs_vec[x].clone()
tmp_r.axpy(-1.0, Ax_vec[x])
r_vec.append(tmp_r)
z_vec = preconditioner(r_vec, active_mask)
p_vec = [x.clone() for x in z_vec]
# First RMS
grad_dot = [x.sum_of_squares() for x in rhs_vec]
resid = [(r_vec[x].sum_of_squares() / grad_dot[x])**0.5 for x in range(nrhs)]
if printer:
resid = printer(0, x_vec, r_vec)
elif printlvl:
# core.print_out(' CG Iteration Guess: Rel. RMS = %1.5e\n' % np.mean(resid))
core.print_out(" %5s %14.3e %12.3e %7d %9d\n" % ("Guess", np.mean(resid), np.max(resid), len(z_vec),
time.time() - tstart))
rms = np.mean(resid)
rz_old = [0.0 for x in range(nrhs)]
alpha = [0.0 for x in range(nrhs)]
active = np.where(active_mask)[0]
# CG iterations
for rot_iter in range(maxiter):
# Build old RZ so we can discard vectors
for x in active:
rz_old[x] = r_vec[x].vector_dot(z_vec[x])
# Build Hx product
Ap_vec = hx_function(p_vec, active_mask)
# Update x and r
for x in active:
alpha[x] = rz_old[x] / Ap_vec[x].vector_dot(p_vec[x])
if np.isnan(alpha[x]):
core.print_out("CG: Alpha is NaN for vector %d. Stopping vector." % x)
active_mask[x] = False
continue
x_vec[x].axpy(alpha[x], p_vec[x])
r_vec[x].axpy(-alpha[x], Ap_vec[x])
resid[x] = (r_vec[x].sum_of_squares() / grad_dot[x])**0.5
# Print out or compute the resid function
if printer:
resid = printer(rot_iter + 1, x_vec, r_vec)
# Update the active mask
for x in active:
if (resid[x] < rcond):
active_mask[x] = False
# Print out if requested
if printlvl:
core.print_out(" %5d %14.3e %12.3e %7d %9d\n" % (rot_iter + 1, np.mean(resid), np.max(resid),
sum(active_mask), time.time() - tstart))
active = np.where(active_mask)[0]
if sum(active_mask) == 0:
break
# Update p
z_vec = preconditioner(r_vec, active_mask)
for x in active:
beta = r_vec[x].vector_dot(z_vec[x]) / rz_old[x]
p_vec[x].scale(beta)
p_vec[x].axpy(1.0, z_vec[x])
if printlvl:
core.print_out(" -----------------------------------------------------\n")
return x_vec, r_vec
class DIIS(object):
"""
An object to assist in the DIIS extrapolation procedure.
"""
def __init__(self, max_vec=6, removal_policy="OLDEST"):
"""
An object to assist in the DIIS extrapolation procedure.
Parameters
----------
max_vec : int, optional
The maximum number of error and state vectors to hold. These are pruned based on the removal policy.
removal_policy : {"OLDEST", "LARGEST"}, optional
How the state and error vectors are removed once at the maximum. OLDEST will remove the oldest vector while
LARGEST will remove the residual with the largest RMS value.
"""
self.error = []
self.state = []
self.max_vec = max_vec
self.removal_policy = removal_policy.upper()
if self.removal_policy not in ["LARGEST", "OLDEST"]:
raise ValidationError("DIIS: removal_policy must either be oldest or largest.")
def add(self, state, error):
"""
Adds a DIIS state and error vector to the DIIS object.
Parameters
----------
state : :py:class:`~psi4.core.Matrix`
The current state vector.
error : :py:class:`~psi4.core.Matrix`
The current error vector.
"""
self.error.append(error.clone())
self.state.append(state.clone())
def extrapolate(self, out=None):
"""
Extrapolates next state vector from the current set of state and error vectors.
Parameters
----------
out : :py:class:`~psi4.core.Matrix`, optional
A array in which to place the next state vector.
Returns
-------
ret : :py:class:`~psi4.core.Matrix`
Returns the next state vector.
"""
# Limit size of DIIS vector
diis_count = len(self.state)
if diis_count == 0:
raise ValidationError("DIIS: No previous vectors.")
if diis_count == 1:
return self.state[0]
if diis_count > self.max_vec:
if self.removal_policy == "OLDEST":
pos = 0
else:
pos = np.argmax([x.rms() for x in self.error])
del self.state[pos]
del self.error[pos]
diis_count -= 1
# Build error matrix B
B = np.empty((diis_count + 1, diis_count + 1))
B[-1, :] = 1
B[:, -1] = 1
B[-1, -1] = 0
for num1, e1 in enumerate(self.error):
B[num1, num1] = e1.vector_dot(e1)
for num2, e2 in enumerate(self.error):
if num2 >= num1:
continue
val = e1.vector_dot(e2)
B[num1, num2] = B[num2, num1] = val
# Build residual vector
resid = np.zeros(diis_count + 1)
resid[-1] = 1
# Solve pulay equations
# Yea, yea this is unstable make it stable
iszero = np.any(np.diag(B)[:-1] <= 0.0)
if iszero:
S = np.ones((diis_count + 1))
else:
S = np.diag(B).copy()
S[:-1] **= -0.5
S[-1] = 1
# Then we gotta do a custom inverse
B *= S[:, None] * S
invB = core.Matrix.from_array(B)
invB.power(-1.0, 1.e-12)
ci = np.dot(invB, resid)
ci *= S
# linear combination of previous state vectors
if out is None:
out = core.Matrix("DIIS result", self.state[0].rowdim(), self.state[1].coldim())
else:
out.zero()
for num, c in enumerate(ci[:-1]):
out.axpy(c, self.state[num])
return out
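# Usage sketch (hypothetical psi4.core.Matrix state/error pairs):
#   diis = DIIS(max_vec=6, removal_policy="OLDEST")
#   diis.add(fock, gradient)        # store current state and error vector
#   fock_new = diis.extrapolate()   # next extrapolated state vector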
def _diag_print_heading(title_lines, solver_name, max_ss_size, nroot, e_tol, r_tol, maxiter, verbose=1):
"""Print a message to the output file when the solver has processed all options and is ready to begin"""
if verbose < 1:
# no printing
return
# show title if not silent
core.print_out("\n\n")
core.print_out("\n".join([x.center(77) for x in title_lines]))
core.print_out("\n")
if verbose > 1:
# summarize options for verbose
core.print_out(" " + "{} options".format(solver_name) + "\n")
core.print_out("\n -----------------------------------------------------\n")
core.print_out(" Maxiter = {:<5d}\n".format(maxiter))
core.print_out(" Eigenvalue tolerance = {:11.5e}\n".format(e_tol))
core.print_out(" Eigenvector tolerance = {:11.5e}\n".format(r_tol))
core.print_out(" Max number of expansion vectors = {:<5d}\n".format(max_ss_size))
core.print_out("\n")
# show iteration info headings if not silent
core.print_out(" => Iterations <=\n")
if verbose == 1:
# default printing one line per iter max delta value and max residual norm
core.print_out(" {} {} {}\n".format(" " * len(solver_name), "Max[D[value]]", "Max[|R|]"))
else:
# verbose printing, value, delta, and |R| for each root
core.print_out(" {} {} {} {}\n".format(" " * len(solver_name), "value", "D[value]", "|R|"))
def _diag_print_info(solver_name, info, verbose=1):
"""Print a message to the output file at each iteration"""
if verbose < 1:
# no printing
return
elif verbose == 1:
# print iter maxde max|R| conv/restart
flags = []
if info['collapse']:
flags.append("Restart")
if info['done']:
flags.append("Converged")
core.print_out(" {name} iter {ni:3d}: {m_de:-11.5e} {m_r:12.5e} {flgs}\n".format(
name=solver_name,
ni=info['count'],
m_de=np.max(info['delta_val']),
m_r=np.max(info['res_norm']),
flgs="/".join(flags)))
else:
# print iter / ssdim folowed by de/|R| for each root
core.print_out(" {name} iter {ni:3d}: {nv:4d} guess vectors\n".format(
name=solver_name, ni=info['count'], nv=info['nvec']))
for i, (e, de, rn) in enumerate(zip(info['val'], info['delta_val'], info['res_norm'])):
core.print_out(" {nr:2d}: {s:} {e:-11.5f} {de:-11.5e} {rn:12.5e}\n".format(
nr=i + 1, s=" " * (len(solver_name) - 8), e=e, de=de, rn=rn))
if info['done']:
core.print_out(" Solver Converged! all roots\n\n")
elif info['collapse']:
core.print_out(" Subspace limits exceeded restarting\n\n")
def _diag_print_converged(solver_name, stats, vals, verbose=1, **kwargs):
"""Print a message to the output file when the solver is converged."""
if verbose < 1:
# no printing
return
if verbose >= 1:
# print values summary + number of iterations + # of "big" product evals
core.print_out(" {} converged in {} iterations\n".format(solver_name, stats[-1]['count']))
core.print_out(" Root # eigenvalue\n")
for (i, vi) in enumerate(vals):
core.print_out(" {:^6} {:20.12f}\n".format(i + 1, vi))
max_nvec = max(istat['nvec'] for istat in stats)
core.print_out(" Computed a total of {} Large products\n\n".format(stats[-1]['product_count']))
def _print_array(name, arr, verbose):
"""print an subspace quantity (numpy array) to the output file
Parameters
----------
name : str
The name to print above the array
arr : :py:class:`np.ndarray`
The array to print
verbose : int
The amount of information to print. Only prints for verbose > 2
"""
if verbose > 2:
core.print_out("\n\n{}:\n{}\n".format(name, str(arr)))
def _gs_orth(engine, U, V, thresh):
"""Perform GS orthonormalization of a set V against a previously orthonormalized set U
Parameters
----------
engine : object
The engine passed to the solver, required to define vector algebraic operations needed
U : list of `vector`
A set of orthonormal vectors, len(U) = l; satisfies ||I^{lxl}-U^tU|| < thresh
V : list of `vectors`
The vectors used to augment U
thresh : float
If the orthogonalized vector has a norm smaller than this value it is considered linearly dependent on the set
Returns
-------
U_aug : list of `vector`
The orthonormal set of vectors U' with span(U') = span(U) + span(V), len(U) <= len(U_aug) <= len(U) + len(V)
"""
for vi in V:
for j in range(len(U)):
dij = engine.vector_dot(vi, U[j])
vi = engine.vector_axpy(-1.0 * dij, U[j], vi)
norm_vi = np.sqrt(engine.vector_dot(vi, vi))
if norm_vi >= thresh:
U.append(engine.vector_scale(1.0 / norm_vi, vi))
return U
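# Sanity sketch (hypothetical numpy-backed engine): with vector_dot=np.dot,
# vector_axpy=lambda a, X, Y: Y + a * X and vector_scale=lambda a, X: a * X,
# the returned set satisfies U[i].dot(U[j]) ~= (1 if i == j else 0).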
def _best_vectors(engine, ss_vectors, basis_vectors):
r"""Compute the best approximation of the true eigenvectors as a linear combination of basis vectors:
..math:: V_{k} = \Sum_{i} \tilde{V}_{i,k}X_{i}
Where :math:`\tilde{V}` is the matrix with columns that are eigenvectors of the subspace matrix, and
:math:`X_{i}` is a basis vector.
Parameters
----------
engine : object
The engine passed to the solver, required to define vector algebraic operations needed
ss_vectors : :py:class:`np.ndarray` {l, k}
The k eigenvectors of the subspace problem, l = dimension of the subspace basis, and k is the number of roots
basis_vectors : list of `vector` {l}
The current basis vectors
Returns
-------
new_vecs : list of `vector` {k}
The approximations of the k true eigenvectors.
"""
l, n = ss_vectors.shape
new_vecs = []
for i in range(n):
cv_i = engine.new_vector()
for j in range(l):
cv_i = engine.vector_axpy(ss_vectors[j, i], basis_vectors[j], cv_i)
new_vecs.append(cv_i)
return new_vecs
class SolverEngine(ABC):
"""Abstract Base Class defining the API required by solver engines
Engines implement the correct product functions for iterative solvers that do not require the target matrix be stored directly.
Classes intended to be used as an `engine` for :func:`davidson_solver` or :func:`hamiltonian_solver` should inherit from this base class
to ensure that the required methods are defined.
.. note:: The `vector` referred to here is intentionally vague; the solver does not care what it is and only
holds individual or sets of them. In fact an individual `vector` could be split across two elements in a list,
such as for different spin. Whatever data type is used, an individual vector should be a single element in a list such that
len(list) returns the number of vector-like objects.
"""
@abstractmethod
def compute_products(self, X):
r"""Compute a Matrix * trial vector products
Parameters
----------
X : list of `vectors`
Returns
-------
Expected by :func:`davidson_solver`
AX : list of `vectors`
The product :math:`A x X_{i}` for each `X_{i}` in `X`, in that order. Where `A` is the hermitian matrix to be diagonalized. `len(AX) == len(X)`
n : int
The number of products that were evaluated. If the object implements product caching this may be less than len(X)
Expected by :func:`hamiltonian_solver`
H1X : list of `vectors`
The product :math:`H1 x X_{i}` for each `X_{i}` in `X`, in that order. Where H1 is described in :func:`hamiltonian_solver`. `len(H1X) == len(X)`
H2X : list of `vectors`
The product :math:`H2 x X_{i}` for each `X_{i}` in `X`, in that order. Where H2 is described in :func:`hamiltonian_solver`. `len(H2X) == len(X)`
"""
pass
@abstractmethod
def precondition(self, R_k, w_k):
r"""Apply the preconditioner to a Residual vector
The preconditioner is usually defined as :math:`(w_k - D_{i})^-1` where `D` is an approximation of the diagonal of the
matrix that is being diagonalized.
Parameters
----------
R_k : single `vector`
The residual vector
w_k : float
The eigenvalue associated with this vector
Returns
-------
new_X_k : single `vector`
The preconditioned residual vector, a correction vector that will be used to augment the guess space
"""
pass
@abstractmethod
def new_vector(self):
"""Return a new `vector` object.
The solver is oblivious to the data structure used for a `vector`; this method provides the solver with a means to create `vector`-like
quantities.
Parameters
----------
The solver calls this method with no arguments, so any arguments defined by the engine for its own use should be optional
Returns
-------
X : single `vector`
This should be a new vector object with the correct dimensions, assumed to be zeroed out
"""
pass
@staticmethod
@abstractmethod
def vector_dot(X, Y):
"""Compute a dot product between two `vectors`
Parameters
----------
X : single `vector`
Y : single `vector`
Returns
-------
a : float
The dot product (X x Y)
"""
pass
@abstractmethod
def vector_axpy(a, X, Y):
"""Compute scaled `vector` addition operation `a*X + Y`
Parameters
----------
a : float
The scale factor applied to `X`
X : single `vector`
The `vector` which will be scaled and added to `Y`
Y : single `vector`
The `vector` which the result of `a*X` is added to
Returns
-------
Y : single `vector`
The solver assumes that Y is updated, and returned. So it is safe to avoid a copy of Y if possible
"""
pass
@abstractmethod
def vector_scale(a, X):
"""Scale a vector by some factor
Parameters
----------
a : float
The scale factor
X : single `vector`
The vector that will be scaled
Returns
-------
X : single `vector`
The solver assumes that the passed vector is modified, so it is safe to avoid a copy of X if possible.
"""
pass
@abstractmethod
def vector_copy(X):
"""Make a copy of a `vector`
Parameters
----------
X : single `vector`
The `vector` to copy
Returns
-------
X' : single `vector`
A copy of `X`; should be a distinct object that can be modified independently of the passed object and has the same data when returned.
"""
pass
def davidson_solver(engine,
guess,
e_tol=1.0E-6,
r_tol=1.0E-8,
nroot=1,
max_vecs_per_root=20,
maxiter=100,
verbose=1,
schmidt_tol=1.0e-8):
"""
Solves for the lowest few eigenvalues and eigenvectors of a large problem emulated through an engine.
If the large matrix `A` has dimension `{NxN}` and N is very large, and only a small number of roots `k`
are desired, this algorithm is preferable to standard methods as it uses on the order of `N * k` memory.
One only needs the ability to compute the product of `A` times a vector.
For non-hermitian `A` the basis of the algorithm breaks down. However in practice, for strongly diagonally-dominant `A`
such as the similarity-transformed hamiltonian in EOM-CC, this algorithm is still commonly used.
Parameters
-----------
engine : object (subclass of :class:`SolverEngine`)
The engine drives all operations involving data structures that have at least one "large" dimension. See :class:`SolverEngine` for requirements
guess : list {engine dependent}
At least `nroot` initial expansion vectors
e_tol : float
Convergence tolerance for eigenvalues
r_tol : float
Convergence tolerance for residual vectors
nroot : int
Number of roots desired
maxiter : int
The maximum number of iterations
schmidt_tol : float
Correction vectors must have norm larger than this value to be added to the guess space
verbose : int
The amount of logging info to print (0 -> none, 1 -> some, >1 -> everything)
Returns
-------
best_values : np.ndarray (nroots, )
The best approximation of the eigenvalues of A, computed on the last iteration of the solver
best_vectors: list of `vector` (nroots)
The best approximation of the eigenvectors of A, computed on the last iteration of the solver
stats : list of `dict`
Statistics collected on each iteration
count : int, iteration number
res_norm : np.ndarray (nroots, ), the norm of the residual vector for each root
val : np.ndarray (nroots, ), the eigenvalue corresponding to each root
delta_val : np.ndarray (nroots, ), the change in eigenvalue from the last iteration to this one
collapse : bool, if a subspace collapse was performed
product_count : int, the running total of product evaluations that was performed
done : bool, if all roots were converged
.. note:: The solver will return even when ``maxiter`` iterations are performed without convergence. The caller should check `stats[-1]['done']`
for convergence/failure and handle each case accordingly.
"""
nk = nroot
iter_info = {
"count": 0,
"res_norm": np.zeros((nk)),
"val": np.zeros((nk)),
"delta_val": np.zeros((nk)),
# conv defaults to true, and will be flipped when a non-conv root is hit
"done": True,
"nvec": 0,
"collapse": False,
"product_count": 0,
}
print_name = "DavidsonSolver"
title_lines = ["Generalized Davidson Solver", "By Ruhee Dcunha"]
max_ss_size = max_vecs_per_root * nk
_diag_print_heading(title_lines, print_name, max_ss_size, nroot, r_tol, e_tol, maxiter, verbose)
vecs = guess
stats = []
best_eigvecs = []
best_eigvals = []
while iter_info['count'] < maxiter:
# increment iteration/ save old vals
iter_info['count'] += 1
old_vals = iter_info['val'].copy()
# reset flags
iter_info['collapse'] = False
iter_info['done'] = True
# get subspace dimension
l = len(vecs)
iter_info['nvec'] = l
# check if ss dimension has exceeded limits
if l >= max_ss_size:
iter_info['collapse'] = True
# compute A times trial vector products
Ax, nprod = engine.compute_products(vecs)
iter_info['product_count'] += nprod
# Build Subspace matrix
G = np.zeros((l, l))
for i in range(l):
for j in range(i):
G[i, j] = G[j, i] = engine.vector_dot(vecs[i], Ax[j])
G[i, i] = engine.vector_dot(vecs[i], Ax[i])
_print_array("SS transformed A", G, verbose)
# diagonalize subspace matrix
lam, alpha = np.linalg.eigh(G)
_print_array("SS eigenvectors", alpha, verbose)
_print_array("SS eigenvalues", lam, verbose)
# remove zeros/negatives
alpha = alpha[:, lam > 1.0e-10]
lam = lam[lam > 1.0e-10]
# sort/truncate to nroot
idx = np.argsort(lam)
lam = lam[idx]
alpha = alpha[:, idx]
# update best_solution
best_eigvecs = _best_vectors(engine, alpha[:, :nk], vecs)
best_eigvals = lam[:nk]
# check convergence of each solution
new_vecs = []
for k in range(nk):
# residual vector
Rk = engine.new_vector()
lam_k = lam[k]
for i in range(l):
Axi = Ax[i]
Rk = engine.vector_axpy(alpha[i, k], Axi, Rk)
Rk = engine.vector_axpy(-1.0 * lam_k, best_eigvecs[k], Rk)
norm = engine.vector_dot(Rk, Rk)
norm = np.sqrt(norm)
iter_info['val'][k] = lam_k
iter_info['delta_val'][k] = abs(old_vals[k] - lam_k)
iter_info['res_norm'][k] = norm
converged = (norm < r_tol) and (abs(old_vals[k] - lam_k) < e_tol)
# augment guess vector for non-converged roots
if (not converged):
iter_info['done'] = False
Qk = engine.precondition(Rk, lam_k)
new_vecs.append(Qk)
# print iteration info to output
_diag_print_info(print_name, iter_info, verbose)
# save stats for this iteration
stats.append(iter_info.copy())
if iter_info['done']:
# finished
_diag_print_converged(print_name, stats, best_eigvals, verbose)
break
elif iter_info['collapse']:
# restart needed
vecs = best_eigvecs
else:
# Regular subspace update, orthonormalize preconditioned residuals and add to the trial set
vecs = _gs_orth(engine, vecs, new_vecs, schmidt_tol)
# always return, the caller should check stats[-1]['done'] == True for convergence
return best_eigvals, best_eigvecs, stats
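# Usage sketch (hypothetical engine implementing SolverEngine):
#   vals, vecs, stats = davidson_solver(engine, guess, nroot=3)
#   if not stats[-1]['done']:
#       raise RuntimeError("Davidson solver did not converge")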
def hamiltonian_solver(engine,
guess,
e_tol=1.0E-6,
r_tol=1.0E-8,
nroot=1,
max_vecs_per_root=20,
maxiter=100,
verbose=1,
schmidt_tol=1.0e-8):
"""
Finds the smallest eigenvalues and associated right and left hand eigenvectors of a large real Hamiltonian eigenvalue problem
emulated through an engine.
A Hamiltonian EVP has the following structure: with A, B of some large dimension N, the 2Nx2N problem is:
[A B][X] = [1 0](w)[X]
[B A][Y] [0 -1](w)[Y]
Which can be written as the NxN, non-hermitian EVP:
(A+B)(A-B)(X+Y) = w^2(X+Y)
With left-hand eigenvectors:
(X-Y)(A-B)(A+B) = w^2(X-Y)
if (A-B) is positive definite, we can transform the problem to arrive at the hermitian NxN EVP:
(A-B)^1/2(A+B)(A-B)^1/2 T = w^2 T
Where T = (A-B)^-1/2(X+Y).
We use a Davidson-like iteration where we transform (A+B) (H1) and (A-B) (H2) into the subspace defined by the trial vectors.
The subspace analog of the NxN hermitian EVP is diagonalized and left (X-Y) and right (X+Y) eigenvectors of the NxN
non-hermitian EVP are approximated. Residual vectors are formed for both and the guess space is augmented with two
correction vectors per iteration. The advantages and properties of this algorithm are described in the literature [stratmann:1998]_ .
Parameters
-----------
engine : object (subclass of :class:`SolverEngine`)
The engine drives all operations involving data structures that have at least one "large" dimension. See :class:`SolverEngine` for requirements
guess : list {engine dependent}
At least `nroot` initial expansion vectors
e_tol : float
Convergence tolerance for eigenvalues
r_tol : float
Convergence tolerance for residual vectors
nroot : int
Number of roots desired
maxiter : int
The maximum number of iterations
schmidt_tol : float
Correction vectors must have norm larger than this value to be added to the guess space
verbose : int
The amount of logging info to print (0 -> none, 1 -> some, >1 -> everything)
Returns
-------
best_values : np.ndarray (nroots, )
The best approximation of the eigenvalues of `w`, computed on the last iteration of the solver
best_R: list of `vector` (nroots)
The best approximation of the right hand eigenvectors, `X+Y`, computed on the last iteration of the solver.
best_L: list of `vector` (nroots)
The best approximation of the left hand eigenvectors, `X-Y`, computed on the last iteration of the solver.
stats : list of `dict`
Statistics collected on each iteration
count : int, iteration number
        res_norm : np.ndarray (nroots, ), the norm of the residual vector for each root
        val : np.ndarray (nroots, ), the eigenvalue corresponding to each root
        delta_val : np.ndarray (nroots, ), the change in eigenvalue from the last iteration to this one
collapse : bool, if a subspace collapse was performed
product_count : int, the running total of product evaluations that was performed
done : bool, if all roots were converged
.. note:: The solver will return even when ``maxiter`` iterations are performed without convergence. The caller should check `stats[-1]['done']`
        for convergence/failure and handle each case accordingly.
References
----------
.. [stratmann:1998] R. Eric Stratmann, G. E. Scuseria, and M. J. Frisch, "An efficient implementation of time-dependent density-functional
theory for the calculation of excitation energies of large molecules." J. Chem. Phys., 109, 8218 (1998)
"""
nk = nroot
iter_info = {
"count": 0,
"res_norm": np.zeros((nk)),
"val": np.zeros((nk)),
"delta_val": np.zeros((nk)),
        # done defaults to True, and is flipped when a non-converged root is hit
        "done": True,
"nvec": 0,
"product_count": 0,
}
print_name = "HamiltonianSolver"
title_lines = ["Generalized Hamiltonian Solver", "By Andrew M. James"]
ss_max = max_vecs_per_root * nk
_diag_print_heading(title_lines, print_name, ss_max, nroot, r_tol, e_tol, maxiter, verbose)
vecs = guess
best_L = []
best_R = []
best_vals = []
stats = []
while iter_info['count'] < maxiter:
# increment iteration/ save old vals
iter_info['count'] += 1
old_w = iter_info['val'].copy()
# reset flags
iter_info['collapse'] = False
iter_info['done'] = True
# get subspace dimension
l = len(vecs)
iter_info['nvec'] = l
# check if subspace dimension has exceeded limits
if l >= ss_max:
iter_info['collapse'] = True
# compute [A+B]*v(H1x) and [A-B]*v (H2x)
H1x, H2x, nprod = engine.compute_products(vecs)
iter_info['product_count'] += nprod
# form x*H1x (H1_ss) and x*H2x (H2_ss)
H1_ss = np.zeros((l, l))
H2_ss = np.zeros((l, l))
for i in range(l):
for j in range(l):
H1_ss[i, j] = engine.vector_dot(vecs[i], H1x[j])
H2_ss[i, j] = engine.vector_dot(vecs[i], H2x[j])
_print_array("Subspace Transformed (A+B)", H1_ss, verbose)
_print_array("Subspace Transformed (A-B)", H2_ss, verbose)
# Diagonalize H2 in the subspace (eigen-decomposition to compute H2^(1/2))
H2_ss_val, H2_ss_vec = np.linalg.eigh(H2_ss)
_print_array("eigenvalues H2_ss", H2_ss_val, verbose)
_print_array("eigenvectors H2_ss", H2_ss_vec, verbose)
# Check H2 is PD
# NOTE: If this triggers failure the SCF solution is not stable. A few ways to handle this
# 1. Use davidson solver where product function evaluates (H2 * (H1 * X))
        #    - Poor convergence
# 2. Switch to CIS/TDA
# - User would probably not expect this
# 3. Perform Stability update and restart with new reference
if np.any(H2_ss_val < 0.0):
raise Exception("H2 is not Positive Definite")
# Build H2^(1/2)
H2_ss_half = np.dot(H2_ss_vec, np.diag(np.sqrt(H2_ss_val))).dot(H2_ss_vec.T)
_print_array("SS Transformed (A-B)^(1/2)", H2_ss_half, verbose)
# Build Hermitian SS product (H2)^(1/2)(H1)(H2)^(1/2)
Hss = np.einsum('ij,jk,km->im', H2_ss_half, H1_ss, H2_ss_half)
_print_array("(H2)^(1/2)(H1)(H2)^(1/2)", Hss, verbose)
#diagonalize Hss -> w^2, Tss
w2, Tss = np.linalg.eigh(Hss)
_print_array("Eigenvalues (A-B)^(1/2)(A+B)(A-B)^(1/2)", w2, verbose)
_print_array("Eigvectors (A-B)^(1/2)(A+B)(A-B)^(1/2)", Tss, verbose)
# pick positive roots
Tss = Tss[:, w2 > 1.0e-10]
w2 = w2[w2 > 1.0e-10]
# check for invalid eigvals
with np.errstate(invalid='raise'):
w = np.sqrt(w2)
# sort roots
idx = w.argsort()[:nk]
Tss = Tss[:, idx]
w = w[idx]
# Extract Rss = H2^{1/2}Tss
Rss = np.dot(H2_ss_half, Tss)
# Extract Lss = (H1 R)/ w
Lss = np.dot(H1_ss, Rss).dot(np.diag(1.0 / w))
# Save best R/L vectors and eigenvalues
best_R = _best_vectors(engine, Rss[:, :nk], vecs)
best_L = _best_vectors(engine, Lss[:, :nk], vecs)
best_vals = w[:nk]
# check convergence of each solution
new_vecs = []
for k in range(nk):
# residual vectors for right and left eigenvectors
WR_k = engine.new_vector()
WL_k = engine.new_vector()
wk = w[k]
for i in range(l):
H1x_i = H1x[i]
H2x_i = H2x[i]
WL_k = engine.vector_axpy(Rss[i, k], H1x_i, WL_k)
WR_k = engine.vector_axpy(Lss[i, k], H2x_i, WR_k)
WL_k = engine.vector_axpy(-1.0 * wk, best_L[k], WL_k)
WR_k = engine.vector_axpy(-1.0 * wk, best_R[k], WR_k)
norm_R = np.sqrt(engine.vector_dot(WR_k, WR_k))
norm_L = np.sqrt(engine.vector_dot(WL_k, WL_k))
norm = norm_R + norm_L
WL_k = engine.vector_scale(norm_L, WL_k)
WR_k = engine.vector_scale(norm_R, WR_k)
iter_info['res_norm'][k] = norm
iter_info['delta_val'][k] = np.abs(old_w[k] - w[k])
iter_info['val'][k] = w[k]
# augment the guess space for non-converged roots
if (iter_info['res_norm'][k] > r_tol) or (iter_info['delta_val'][k] > e_tol):
iter_info['done'] = False
new_vecs.append(engine.precondition(WR_k, w[k]))
new_vecs.append(engine.precondition(WL_k, w[k]))
# print iteration info to output
_diag_print_info(print_name, iter_info, verbose)
# save stats for this iteration
stats.append(iter_info.copy())
if iter_info['done']:
# Finished
_diag_print_converged(print_name, stats, w[:nk], rvec=best_R, lvec=best_L, verbose=verbose)
break
elif iter_info['collapse']:
# need to orthonormalize union of the Left/Right solutions on restart
vecs = _gs_orth(engine, [], best_R + best_L, schmidt_tol)
else:
# Regular subspace update, orthonormalize preconditioned residuals and add to the trial set
vecs = _gs_orth(engine, vecs, new_vecs, schmidt_tol)
    # Always return; the caller should check stats[-1]['done'] == True for convergence
return best_vals, best_R, best_L, stats
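

# ---------------------------------------------------------------------------
# Hedged usage sketch (not part of the original module): a minimal dense numpy
# "engine" satisfying the interface hamiltonian_solver expects (new_vector,
# vector_dot, vector_axpy, vector_scale, precondition, compute_products).
# The class and the demo below are illustrative only, and assume the
# module-level helpers (_gs_orth, _best_vectors) need no engine methods
# beyond those implemented here.
class _ToyDenseEngine(object):
    def __init__(self, A, B):
        self.ApB = A + B  # H1 = (A+B)
        self.AmB = A - B  # H2 = (A-B), assumed positive definite
        self.dim = A.shape[0]

    def new_vector(self):
        return np.zeros(self.dim)

    def vector_dot(self, x, y):
        return float(np.dot(x, y))

    def vector_axpy(self, a, x, y):
        return y + a * x

    def vector_scale(self, a, x):
        return a * x

    def precondition(self, r, w):
        # identity preconditioner: adequate for a toy problem, slow in general
        return r

    def compute_products(self, vecs):
        H1x = [np.dot(self.ApB, v) for v in vecs]
        H2x = [np.dot(self.AmB, v) for v in vecs]
        return H1x, H2x, len(vecs)


def _toy_hamiltonian_demo():
    """Smoke-test sketch: a small random Hamiltonian EVP solved to convergence."""
    rng = np.random.RandomState(0)
    S = rng.rand(6, 6)
    A = np.diag(np.arange(1.0, 7.0)) + 0.01 * (S + S.T)
    B = 0.01 * (S + S.T)  # (A-B) stays positive definite by construction
    engine = _ToyDenseEngine(A, B)
    guess = [np.eye(6)[:, i] for i in range(2)]
    vals, R, L, stats = hamiltonian_solver(engine, guess, nroot=2, verbose=0)
    return vals, stats[-1]['done']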
|
CDSherrill/psi4
|
psi4/driver/p4util/solvers.py
|
Python
|
lgpl-3.0
| 37,711
|
[
"Psi4"
] |
f766c530342223a1add7ce17ab92c8456a2455a89cb3073ed3a14e1e87fd102a
|
import random
import numpy as np
"""
Various useful functions for use within TPG, and for using TPG, like metrics,
etc.
"""
"""
Coin flips, at varying levels of success based on prob.
"""
def flip(prob):
return random.uniform(0.0,1.0) < prob
"""
Returns the teams that this team references, either immediate or
recursively.
"""
def getTeams(team, rec=True, visited=None, result=None):
if rec:
        # recursively search all teams
# track visited teams to not repeat
if visited is None:
visited = set()
result = list()
visited.add(str(team.id))
if team not in result:
result.append(team)
# get team count from each learner that has a team
for lrnr in team.learners:
lrnrTeam = lrnr.getActionTeam()
if lrnrTeam is not None and str(lrnrTeam.id) not in visited:
getTeams(lrnrTeam, rec=True, visited=visited, result=result)
if len(visited) != len(result):
print("[getTeams]Visited {} teams but got {} teans. Something is a miss!".format(len(visited), len(result)))
print("[getTeams]visited team ids:")
for cursor in visited:
print(cursor)
print("[getTeams]result team id's")
for cursor in result:
print(cursor.id)
return result
else:
# just the teams attached directly to this team
return [lrnr.getActionTeam() for lrnr in team.learners
if not lrnr.isActionAtomic()]
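
"""
Hedged illustration (not part of the original module): minimal stand-in
Team/Learner objects satisfying the attribute contract that getTeams,
getLearners and pathDepths rely on (.id, .learners, .getActionTeam(),
.isActionAtomic()). These stubs are ad-hoc, not the real tpg classes.
"""
class _StubLearner:
    def __init__(self, team=None):
        self.id = id(self)
        self._team = team

    def getActionTeam(self):
        return self._team

    def isActionAtomic(self):
        return self._team is None

class _StubTeam:
    def __init__(self, learners):
        self.id = id(self)
        self.learners = learners

def _exampleGetTeams():
    leaf = _StubTeam([_StubLearner()])
    root = _StubTeam([_StubLearner(leaf), _StubLearner()])
    # the recursive walk finds both the root team and the referenced leaf team
    assert len(getTeams(root)) == 2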
"""
Returns the learners on this team, immediately or recursively.
"""
def getLearners(team, rec=True, tVisited=None, lVisited=None, result=None, map=None):
if rec:
# track visited learners/teams to not repeat
if tVisited is None:
tVisited = set()
lVisited = set()
result = []
map = {}
tVisited.add(str(team.id))
[lVisited.add(str(lrnr.id)) for lrnr in team.learners]
for cursor in team.learners:
if str(team.id) not in map:
map[str(team.id)] = [str(cursor.id)]
else:
map[str(team.id)].append(str(cursor.id))
if cursor not in result:
result.append(cursor)
# get learner count from each learner that has a team
for lrnr in team.learners:
lrnrTeam = lrnr.getActionTeam()
if lrnrTeam is not None and str(lrnrTeam.id) not in tVisited:
getLearners(lrnrTeam, rec=True, tVisited=tVisited, lVisited=lVisited, result=result, map=map)
if len(lVisited) != len(result):
print("[getLearners]Visited {} learners but got {} learners. Something is a miss!".format(len(lVisited), len(result)))
print("[getLearners]visited learner ids:")
for cursor in lVisited:
print(cursor)
print("[getLearners]result learner id's")
freq = {}
for cursor in result:
if str(cursor.id) not in freq:
freq[str(cursor.id)] = 1
else:
freq[str(cursor.id)] = freq[str(cursor.id)] + 1
print(freq)
for cursor in freq.items():
if cursor[1] > 1:
first = None
second = None
for j in result:
if str(j.id) == cursor[0]:
if first == None:
first = j
else:
second = j
break
print("first == second? {}".format(first.debugEq(second)))
print("id appears in the following teams: ")
for entry in map.items():
if str(first.id) in entry[1]:
print(entry[0])
return result
else:
# just the teams attached directly to this team
return list(team.learners)
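
"""
Hedged illustration (ad-hoc, reuses the stub classes defined above).
"""
def _exampleGetLearners():
    leaf = _StubTeam([_StubLearner()])
    root = _StubTeam([_StubLearner(leaf), _StubLearner()])
    # two learners on the root team plus one on the leaf team
    assert len(getLearners(root)) == 3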
"""
"""
def outDegree():
pass
"""
"""
def meanLearners():
pass
"""
Returns a dictionary containing counts of each type of instruction and other basic
stats relating to instructions.
"learners" is a list of learners that you want the stats from. "operations" is a
list of strings representing the current operation set, can be obtained from Program.
"""
def learnerInstructionStats(learners, operations):
# stats tracked for each operation and overall
partialStats = {
"total": 0,
"min": float("inf"),
"max": 0,
"avg": 0
}
# dictionary that we put results in and return
results = {"overall": partialStats.copy()}
for op in operations:
results[op] = partialStats.copy()
# get instruction data from all provided learners
for lrnr in learners:
insts = lrnr.program.instructions
results["overall"]["total"] += len(insts)
results["overall"]["min"] = min(len(insts), results["overall"]["min"])
results["overall"]["max"] = max(len(insts), results["overall"]["max"])
results["overall"]["avg"] += len(insts)/len(learners)
for i, op in enumerate(operations):
opCount = np.count_nonzero(insts[:,1]==i)
results[op]["total"] += opCount
results[op]["min"] = min(opCount, results[op]["min"])
results[op]["max"] = max(opCount, results[op]["max"])
results[op]["avg"] += opCount/len(learners)
return results
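
"""
Hedged smoke test (not part of the original module): exercises
learnerInstructionStats with stub learners whose programs are plain numpy
instruction arrays (column 1 holds the operation index).
"""
def _exampleInstructionStats():
    class _StubProgram:
        def __init__(self, insts):
            self.instructions = np.array(insts)

    class _StubProgLearner:
        def __init__(self, insts):
            self.program = _StubProgram(insts)

    lrnrs = [_StubProgLearner([[0, 0, 1], [0, 1, 2]]),
             _StubProgLearner([[0, 1, 0]])]
    stats = learnerInstructionStats(lrnrs, ["ADD", "SUB"])
    assert stats["overall"]["total"] == 3
    assert stats["ADD"]["total"] == 1 and stats["SUB"]["total"] == 2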
"""
Returns a dictionary containing counts of each type of instruction and other basic
stats relating to instructions in action programs.
"learners" is a list of learners that you want the stats from. "operations" is a
list of strings representing the current operation set, can be obtained from Program.
"""
def actionInstructionStats(learners, operations):
# stats tracked for each operation and overall
partialStats = {
"total": 0,
"min": float("inf"),
"max": 0,
"avg": 0
}
# dictionary that we put results in and return
results = {"overall": partialStats.copy()}
for op in operations:
results[op] = partialStats.copy()
results["numActPrograms"] = 0
# get instruction data from all provided real atomic action learners
for lrnr in learners:
if not lrnr.isActionAtomic() or lrnr.actionObj.actionLength == 0:
continue
insts = lrnr.actionObj.program.instructions
results["overall"]["total"] += len(insts)
results["overall"]["min"] = min(len(insts), results["overall"]["min"])
results["overall"]["max"] = max(len(insts), results["overall"]["max"])
results["overall"]["avg"] += len(insts)/len(learners)
for i, op in enumerate(operations):
opCount = np.count_nonzero(insts[:,1]==i)
results[op]["total"] += opCount
results[op]["min"] = min(opCount, results[op]["min"])
results[op]["max"] = max(opCount, results[op]["max"])
results[op]["avg"] += opCount/len(learners)
results["numActPrograms"] += 1
return results
"""
Obtains the longest execution possible in the graph from the starting (root) team.
"""
def pathDepths(team, prevDepth=0, parents=None):
    # use None as the default to avoid the shared-mutable-default pitfall:
    # this function appends to parents in place during the recursion
    if parents is None:
        parents = []
    # depth is one deeper than the last
    myDepth = prevDepth + 1
    depths = [myDepth]
    # don't revisit this team again in this depth first recursion
    parents.append(team.id)
# the teams to visit from the learners that have team actions
nextTeams = [lrn.getActionTeam() for lrn in team.learners
if not lrn.isActionAtomic() and not lrn.getActionTeam().id in parents]
# obtain depths from each child team
for nTeam in nextTeams:
depths.extend(pathDepths(nTeam, myDepth, list(parents)))
return depths
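
"""
Hedged illustration (ad-hoc, reuses the stub classes defined above).
"""
def _examplePathDepths():
    leaf = _StubTeam([_StubLearner()])
    root = _StubTeam([_StubLearner(leaf), _StubLearner()])
    # the longest root-to-leaf execution path visits two teams
    assert max(pathDepths(root)) == 2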
|
Ryan-Amaral/PyTPG
|
tpg/utils.py
|
Python
|
mit
| 7,985
|
[
"VisIt"
] |
e16d7c8d693c217833ba6fe3b25d982908fec251da4bea88e8d519cda58f2e38
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module is used to estimate the cost of various compounds. Costs are taken
from a CostDB instance, for example a CSV file via CostDBCSV.
For compounds with no cost listed, a Phase Diagram style convex hull
optimization is performed to determine a set of compositions that can be mixed
to give the desired compound with lowest total cost.
"""
from __future__ import division, unicode_literals
import abc
from collections import defaultdict
import csv
import os
import itertools
from monty.design_patterns import singleton
from monty.string import unicode2str
import six
from pymatgen import Composition, Element
from pymatgen.core.physical_constants import AVOGADROS_CONST
from pymatgen.matproj.snl import is_valid_bibtex
from pymatgen.phasediagram.entries import PDEntry
from pymatgen.phasediagram.pdanalyzer import PDAnalyzer
from pymatgen.phasediagram.pdmaker import PhaseDiagram
from io import open
__author__ = 'Anubhav Jain'
__copyright__ = 'Copyright 2013, The Materials Project'
__version__ = '0.1'
__maintainer__ = 'Anubhav Jain'
__email__ = 'ajain@lbl.gov'
__date__ = 'Aug 27, 2013'
module_dir = os.path.dirname(os.path.abspath(__file__))
class CostEntry(PDEntry):
"""
Extends PDEntry to include a BibTeX reference and include language about
cost
"""
def __init__(self, composition, cost, name, reference):
"""
Args:
composition:
Composition as a pymatgen.core.structure.Composition
cost:
Cost (per mol, NOT per kg) of the full Composition
name:
Optional parameter to name the entry. Defaults to the reduced
chemical formula as in PDEntry.
reference:
Reference data as BiBTeX string
"""
super(CostEntry, self).__init__(composition, cost, name)
if reference and not is_valid_bibtex(reference):
raise ValueError(
"Invalid format for cost reference! Should be BibTeX string.")
self.reference = reference
def __repr__(self):
return "CostEntry : {} with cost = {:.4f}".format(self.composition,
self.energy)
class CostDB(six.with_metaclass(abc.ABCMeta)):
"""
Abstract class for representing a Cost database.
Can be extended, e.g. for file-based or REST-based databases
"""
@abc.abstractmethod
def get_entries(self, chemsys):
"""
For a given chemical system, return an array of CostEntries
Args:
chemsys:
array of Elements defining the chemical system.
Returns:
array of CostEntries
"""
return
class CostDBCSV(CostDB):
"""
Read a CSV file to get costs
Format is formula,cost_per_kg,name,BibTeX
"""
def __init__(self, filename):
# read in data from file
self._chemsys_entries = defaultdict(list)
filename = os.path.join(os.path.dirname(__file__), filename)
reader = csv.reader(open(filename, "rt"), quotechar=unicode2str("|"))
for row in reader:
comp = Composition(row[0])
cost_per_mol = float(row[1]) * comp.weight.to("kg") * \
AVOGADROS_CONST
pde = CostEntry(comp.formula, cost_per_mol, row[2], row[3])
chemsys = "-".join(sorted([el.symbol
for el in pde.composition.elements]))
self._chemsys_entries[chemsys].append(pde)
def get_entries(self, chemsys):
chemsys = "-".join(sorted([el.symbol for el in chemsys]))
return self._chemsys_entries[chemsys]
@singleton
class CostDBElements(CostDBCSV):
"""
Singleton object that provides the cost data for elements
"""
def __init__(self):
CostDBCSV.__init__(
self, os.path.join(module_dir, "costdb_elements.csv"))
class CostAnalyzer(object):
"""
Given a CostDB, figures out the minimum cost solutions via convex hull
"""
def __init__(self, costdb):
self.costdb = costdb
def get_lowest_decomposition(self, composition):
"""
Get the decomposition leading to lowest cost
Args:
composition:
Composition as a pymatgen.core.structure.Composition
Returns:
Decomposition as a dict of {Entry: amount}
"""
entries_list = []
elements = [e.symbol for e in composition.elements]
for i in range(len(elements)):
for combi in itertools.combinations(elements, i + 1):
chemsys = [Element(e) for e in combi]
x = self.costdb.get_entries(chemsys)
entries_list.extend(x)
try:
pd = PhaseDiagram(entries_list)
return PDAnalyzer(pd).get_decomposition(composition)
except IndexError:
raise ValueError("Error during PD building; most likely, "
"cost data does not exist!")
def get_cost_per_mol(self, comp):
"""
Get best estimate of minimum cost/mol based on known data
Args:
comp:
Composition as a pymatgen.core.structure.Composition
Returns:
float of cost/mol
"""
comp = comp if isinstance(comp, Composition) else Composition(comp)
decomp = self.get_lowest_decomposition(comp)
return sum(k.energy_per_atom * v * comp.num_atoms for k, v in
decomp.items())
def get_cost_per_kg(self, comp):
"""
Get best estimate of minimum cost/kg based on known data
Args:
comp:
Composition as a pymatgen.core.structure.Composition
Returns:
float of cost/kg
"""
comp = comp if isinstance(comp, Composition) else Composition(comp)
return self.get_cost_per_mol(comp) / (
comp.weight.to("kg") * AVOGADROS_CONST)
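

if __name__ == "__main__":
    # Hedged usage sketch (not part of the original module): price a compound
    # from elemental costs via the convex-hull decomposition. The numbers
    # depend entirely on the bundled costdb_elements.csv.
    analyzer = CostAnalyzer(CostDBElements())
    print("Fe2O3 cost/mol ~ {}".format(analyzer.get_cost_per_mol("Fe2O3")))
    print("Fe2O3 cost/kg ~ {}".format(analyzer.get_cost_per_kg("Fe2O3")))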
|
migueldiascosta/pymatgen
|
pymatgen/analysis/cost/cost.py
|
Python
|
mit
| 6,120
|
[
"pymatgen"
] |
cab762e80f40a15453f477a47c21794bb3f8bc1b013a1e171463e4757aa93c56
|
# Orca
# Copyright (C) 2016 UrbanSim Inc.
# See full license in LICENSE.
import pandas as pd
import pytest
from .. import orca
from ..utils.testing import assert_frames_equal
def setup_function(func):
orca.clear_all()
def teardown_function(func):
orca.clear_all()
@pytest.fixture
def dfa():
return orca.DataFrameWrapper('a', pd.DataFrame(
{'a1': [1, 2, 3],
'a2': [4, 5, 6],
'a3': [7, 8, 9]},
index=['aa', 'ab', 'ac']))
@pytest.fixture
def dfz():
return orca.DataFrameWrapper('z', pd.DataFrame(
{'z1': [90, 91],
'z2': [92, 93],
'z3': [94, 95],
'z4': [96, 97],
'z5': [98, 99]},
index=['za', 'zb']))
@pytest.fixture
def dfb():
return orca.DataFrameWrapper('b', pd.DataFrame(
{'b1': range(10, 15),
'b2': range(15, 20),
'a_id': ['ac', 'ac', 'ab', 'aa', 'ab'],
'z_id': ['zb', 'zb', 'za', 'za', 'zb']},
index=['ba', 'bb', 'bc', 'bd', 'be']))
@pytest.fixture
def dfc():
return orca.DataFrameWrapper('c', pd.DataFrame(
{'c1': range(20, 30),
'c2': range(30, 40),
'b_id': ['ba', 'bd', 'bb', 'bc', 'bb', 'ba', 'bb', 'bc', 'bd', 'bb']},
index=['ca', 'cb', 'cc', 'cd', 'ce', 'cf', 'cg', 'ch', 'ci', 'cj']))
@pytest.fixture
def dfg():
return orca.DataFrameWrapper('g', pd.DataFrame(
{'g1': [1, 2, 3]},
index=['ga', 'gb', 'gc']))
@pytest.fixture
def dfh():
return orca.DataFrameWrapper('h', pd.DataFrame(
{'h1': range(10, 15),
'g_id': ['ga', 'gb', 'gc', 'ga', 'gb']},
index=['ha', 'hb', 'hc', 'hd', 'he']))
def all_broadcasts():
orca.broadcast('a', 'b', cast_index=True, onto_on='a_id')
orca.broadcast('z', 'b', cast_index=True, onto_on='z_id')
orca.broadcast('b', 'c', cast_index=True, onto_on='b_id')
orca.broadcast('g', 'h', cast_index=True, onto_on='g_id')
def test_recursive_getitem():
assert orca._recursive_getitem({'a': {}}, 'a') == {'a': {}}
assert orca._recursive_getitem(
{'a': {'b': {'c': {'d': {}, 'e': {}}}}}, 'e') == {'d': {}, 'e': {}}
with pytest.raises(KeyError):
orca._recursive_getitem({'a': {'b': {'c': {'d': {}, 'e': {}}}}}, 'f')
def test_dict_value_to_pairs():
assert sorted(orca._dict_value_to_pairs({'c': {'a': 1, 'b': 2}}),
key=lambda d: next(iter(d))) == \
[{'a': 1}, {'b': 2}]
def test_is_leaf_node():
assert orca._is_leaf_node({'b': {'a': {}}}) is False
assert orca._is_leaf_node({'a': {}}) is True
def test_next_merge():
assert orca._next_merge({'d': {'c': {}, 'b': {'a': {}}}}) == \
{'b': {'a': {}}}
assert orca._next_merge({'b': {'a': {}, 'z': {}}}) == \
{'b': {'a': {}, 'z': {}}}
def test_merge_tables_raises(dfa, dfz, dfb, dfg, dfh):
all_broadcasts()
with pytest.raises(RuntimeError):
orca.merge_tables('b', [dfa, dfb, dfz, dfg, dfh])
def test_merge_tables1(dfa, dfz, dfb):
all_broadcasts()
merged = orca.merge_tables('b', [dfa, dfz, dfb])
expected = pd.merge(
dfa.to_frame(), dfb.to_frame(), left_index=True, right_on='a_id')
expected = pd.merge(
expected, dfz.to_frame(), left_on='z_id', right_index=True)
assert_frames_equal(merged, expected)
def test_merge_tables2(dfa, dfz, dfb, dfc):
all_broadcasts()
merged = orca.merge_tables(dfc, [dfa, dfz, dfb, dfc])
expected = pd.merge(
dfa.to_frame(), dfb.to_frame(), left_index=True, right_on='a_id')
expected = pd.merge(
expected, dfz.to_frame(), left_on='z_id', right_index=True)
expected = pd.merge(
expected, dfc.to_frame(), left_index=True, right_on='b_id')
assert_frames_equal(merged, expected)
def test_merge_tables_cols(dfa, dfz, dfb, dfc):
all_broadcasts()
merged = orca.merge_tables(
'c', [dfa, dfz, dfb, dfc], columns=['a1', 'b1', 'z1', 'c1'])
expected = pd.DataFrame(
{'c1': range(20, 30),
'b1': [10, 13, 11, 12, 11, 10, 11, 12, 13, 11],
'a1': [3, 1, 3, 2, 3, 3, 3, 2, 1, 3],
'z1': [91, 90, 91, 90, 91, 91, 91, 90, 90, 91]},
index=['ca', 'cb', 'cc', 'cd', 'ce', 'cf', 'cg', 'ch', 'ci', 'cj'])
assert_frames_equal(merged, expected)
def test_merge_tables3():
df_a = pd.DataFrame(
{'a': [0, 1]},
index=['a0', 'a1'])
df_b = pd.DataFrame(
{'b': [2, 3, 4, 5, 6],
'a_id': ['a0', 'a1', 'a1', 'a0', 'a1']},
index=['b0', 'b1', 'b2', 'b3', 'b4'])
df_c = pd.DataFrame(
{'c': [7, 8, 9]},
index=['c0', 'c1', 'c2'])
df_d = pd.DataFrame(
{'d': [10, 11, 12, 13, 15, 16, 16, 17, 18, 19],
'b_id': ['b2', 'b0', 'b3', 'b3', 'b1', 'b4', 'b1', 'b4', 'b3', 'b3'],
'c_id': ['c0', 'c1', 'c1', 'c0', 'c0', 'c2', 'c1', 'c2', 'c1', 'c2']},
index=['d0', 'd1', 'd2', 'd3', 'd4', 'd5', 'd6', 'd7', 'd8', 'd9'])
orca.add_table('a', df_a)
orca.add_table('b', df_b)
orca.add_table('c', df_c)
orca.add_table('d', df_d)
orca.broadcast(cast='a', onto='b', cast_index=True, onto_on='a_id')
orca.broadcast(cast='b', onto='d', cast_index=True, onto_on='b_id')
orca.broadcast(cast='c', onto='d', cast_index=True, onto_on='c_id')
df = orca.merge_tables(target='d', tables=['a', 'b', 'c', 'd'])
expected = pd.merge(df_a, df_b, left_index=True, right_on='a_id')
expected = pd.merge(expected, df_d, left_index=True, right_on='b_id')
expected = pd.merge(df_c, expected, left_index=True, right_on='c_id')
assert_frames_equal(df, expected)
def test_merge_tables_dup_columns():
    # Intentionally set the zone-ids to something different in each table;
    # in a real case they'd likely be the same, but the whole point of this
    # test is to check that we can get them back with distinct names tied to
    # each table, so they need to differ for the test to be meaningful.
hh_df = pd.DataFrame({'zone_id': [1, 1, 2], 'building_id': [5, 5, 6]})
orca.add_table('households', hh_df)
bldg_df = pd.DataFrame(
{'zone_id': [2, 3], 'parcel_id': [0, 1]}, index=[5, 6])
orca.add_table('buildings', bldg_df)
parcels_df = pd.DataFrame({'zone_id': [4, 5]}, index=[0, 1])
orca.add_table('parcels', parcels_df)
orca.broadcast(
'buildings', 'households', cast_index=True, onto_on='building_id')
orca.broadcast('parcels', 'buildings', cast_index=True, onto_on='parcel_id')
df = orca.merge_tables(
target='households', tables=['households', 'buildings', 'parcels'])
expected = pd.DataFrame(
{'building_id': [5, 5, 6], 'parcel_id': [0, 0, 1], 'zone_id': [1, 1, 2]})
assert_frames_equal(df, expected)
df = orca.merge_tables(
target='households',
tables=['households', 'buildings', 'parcels'],
drop_intersection=False)
expected = pd.DataFrame({
'building_id': [5, 5, 6],
'parcel_id': [0, 0, 1],
'zone_id_households': [1, 1, 2],
'zone_id_buildings': [2, 2, 3],
'zone_id_parcels': [4, 4, 5]
})
assert_frames_equal(df, expected)
df = orca.merge_tables(
target='households',
tables=['households', 'buildings'],
drop_intersection=False)
expected = pd.DataFrame({
'building_id': [5, 5, 6],
'parcel_id': [0, 0, 1],
'zone_id_households': [1, 1, 2],
'zone_id_buildings': [2, 2, 3]
})
assert_frames_equal(df, expected)
df = orca.merge_tables(
target='households',
tables=['households', 'buildings']
)
expected = pd.DataFrame({
'building_id': [5, 5, 6],
'parcel_id': [0, 0, 1],
'zone_id': [1, 1, 2]
})
assert_frames_equal(df, expected)
|
UDST/orca
|
orca/tests/test_mergetables.py
|
Python
|
bsd-3-clause
| 7,756
|
[
"ORCA"
] |
fef5a125b0150c6a7fae10c63c3a51decc7cac38cf5664a1932ead2a15f8b8f0
|
#!/usr/bin/python2
try:
import gi
gi.require_version('NumCosmo', '1.0')
gi.require_version('NumCosmoMath', '1.0')
except:
pass
import matplotlib as mpl
import matplotlib.pyplot as plot
from scipy.stats import norm
from scipy.stats import beta
import numpy as np
import math
from gi.repository import NumCosmo as Nc
from gi.repository import NumCosmoMath as Ncm
#
# Initializing the library objects, this must be called before
# any other library function.
#
Ncm.cfg_init ()
np.random.seed (0)
#
# n = number of points to reconstruct the distribution
# Sampling from a Gaussian distribution
#
cut_l = -0.4
cut_u = 0.4
peak1 = 0.5
sigma1 = 0.2
peak2 = -0.5
sigma2 = 0.2
# Cumulative distribution function
def true_cdf (x):
return 0.5 * (norm.cdf (x, peak1, sigma1) + norm.cdf (x, peak2, sigma2))
cdf_cut_l = true_cdf (cut_l)
cdf_cut_u = 1.0 - true_cdf (cut_u)
cdf_cut = cdf_cut_l + cdf_cut_u
rbnorm = 1.0 - cdf_cut
# Probability density function
def true_p (x):
return 0.5 * (norm.pdf (x, peak1, sigma1) + norm.pdf (x, peak2, sigma2)) / rbnorm
n = 800000
s = np.concatenate ((np.random.normal (peak1, sigma1, n), np.random.normal (peak2, sigma2, n)), axis = 0)
sa = []
for si in s:
if si >= cut_l and si <= cut_u:
sa.append (si)
s = sa
n = len (s)
print "# Number of points = %u" % (n)
#
# Creating a new Ncm.StatsDist1dEPDF object with
# a maximum of 2000 saved samples (exceeding points are joined with
# their nearest points), a standard deviation scale factor of 0.01
# and a minimum distance scale of 0.001.
#
epdf = Ncm.StatsDist1dEPDF.new_full (2000, Ncm.StatsDist1dEPDFBw.AUTO, 0.01, 0.001)
epdf_rot = Ncm.StatsDist1dEPDF.new_full (2000, Ncm.StatsDist1dEPDFBw.ROT, 0.01, 0.001)
#
# Adding the points to the epdf object.
#
for i in range (n):
epdf.add_obs (s[i])
epdf_rot.add_obs (s[i])
#
# Preparing the object from the given sample.
#
epdf.prepare ()
epdf_rot.prepare ()
#
# Plotting the results.
#
p_a = []
p_rot_a = []
pdf_a = []
pdf_rot_a = []
x_a = []
inv_pdf_a = []
inv_pdf_rot_a = []
u_a = []
for i in range (1000):
x = epdf.xi + (epdf.xf - epdf.xi) / 999.0 * i;
u = 1.0 / 1000.0 * i
x_a.append (x)
u_a.append (u)
p_a.append (epdf.eval_p (x))
p_rot_a.append (epdf_rot.eval_p (x))
pdf_a.append (epdf.eval_pdf (x))
pdf_rot_a.append (epdf_rot.eval_pdf (x))
inv_pdf_a.append (epdf.eval_inv_pdf (u))
inv_pdf_rot_a.append (epdf_rot.eval_inv_pdf (u))
#
# Plotting the probability density function.
#
fig = plot.subplot ()
plot.title ("PDF")
fig.plot (x_a, p_a, label = "auto-bw")
fig.plot (x_a, p_rot_a, label = "RoT-bw")
fig.plot (x_a, true_p (x_a), label = "true dist")
fig.legend(loc = "upper center")
plot.savefig ("epdf1d_pdf.svg")
plot.clf ()
#
# Plotting the relative difference of the reconstructed distributions and the true one.
#
fig = plot.subplot ()
plot.title ("PDF relative difference with respect to the true distribution")
fig.plot (x_a, np.abs (np.array ((p_a - true_p (x_a)) / true_p (x_a))), label = "auto-bw")
fig.plot (x_a, np.abs (np.array ((p_rot_a - true_p (x_a)) / true_p (x_a))), label = "RoT-bw")
fig.set_ylim ([1.0e-6, 1.0e1])
fig.grid ()
fig.legend(loc = "upper right")
fig.set_yscale ("log")
plot.savefig ("epdf1d_pdf_diff.svg")
plot.clf ()
#
# Plotting the cumulative distribution.
#
fig = plot.subplot ()
plot.title ("CDF")
fig.plot (x_a, pdf_a, label = "auto-bw")
fig.plot (x_a, pdf_rot_a, label = "RoT-bw")
fig.plot (x_a, (true_cdf (x_a) - cdf_cut_l) / rbnorm, label = "true dist")
fig.legend(loc = "upper left")
plot.savefig ("epdf1d_cdf.svg")
plot.clf ()
#
# Plotting the relative difference of the reconstructed cumulative distributions and the true one.
#
fig = plot.subplot ()
plot.title ("CDF relative difference with respect to the true distribution")
fig.plot (x_a, np.abs (pdf_a / ( (true_cdf (x_a) - cdf_cut_l) / rbnorm ) - 1.0), label = "auto-bw")
fig.plot (x_a, np.abs (pdf_rot_a / ( (true_cdf (x_a) - cdf_cut_l) / rbnorm ) - 1.0), label = "RoT-bw")
fig.grid ()
fig.legend(loc = "upper right")
fig.set_yscale ("log")
plot.savefig ("epdf1d_cdf_diff.svg")
plot.clf ()
#
# Plotting the inverse cumulative distribution.
#
fig = plot.subplot ()
plot.title ("Inverse CDF")
fig.plot (u_a, inv_pdf_a, label = "auto-bw")
fig.plot (u_a, inv_pdf_rot_a, label = "RoT-bw")
fig.legend(loc = "upper left")
plot.savefig ("epdf1d_invcdf.svg")
plot.clf ()
|
NumCosmo/NumCosmo.github.io
|
examples/example_epdf1d.py
|
Python
|
apache-2.0
| 4,394
|
[
"Gaussian"
] |
3460fc39e6c20bd443a12ffa76a78f0522b5a3bf06f433bc46afd388dd0c73cf
|
# Copyright 2007-2012 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
from nose.tools import assert_true, assert_equal, assert_not_equal
from hyperspy._signals.spectrum import Spectrum
from hyperspy.hspy import create_model
from hyperspy.components import Gaussian
class TestFitOneComponent:
def setUp(self):
g = Gaussian()
g.A.value = 10000.0
g.centre.value = 5000.0
g.sigma.value = 500.0
axis = np.arange(10000)
s = Spectrum(g.function(axis))
m = create_model(s)
self.model = m
self.g = g
self.axis = axis
self.rtol = 0.00
def test_fit_component(self):
m = self.model
axis = self.axis
g1 = Gaussian()
m.append(g1)
m.fit_component(g1, signal_range=(4000, 6000))
assert_true(
np.allclose(
self.g.function(axis),
g1.function(axis),
rtol=self.rtol))
class TestFitSeveralComponent:
def setUp(self):
gs1 = Gaussian()
gs1.A.value = 10000.0
gs1.centre.value = 5000.0
gs1.sigma.value = 500.0
gs2 = Gaussian()
gs2.A.value = 60000.0
gs2.centre.value = 2000.0
gs2.sigma.value = 300.0
gs3 = Gaussian()
gs3.A.value = 20000.0
gs3.centre.value = 6000.0
gs3.sigma.value = 100.0
axis = np.arange(10000)
total_signal = (gs1.function(axis) +
gs2.function(axis) +
gs3.function(axis))
s = Spectrum(total_signal)
m = create_model(s)
g1 = Gaussian()
g2 = Gaussian()
g3 = Gaussian()
m.append(g1)
m.append(g2)
m.append(g3)
self.model = m
self.gs1 = gs1
self.gs2 = gs2
self.gs3 = gs3
self.g1 = g1
self.g2 = g2
self.g3 = g3
self.axis = axis
self.rtol = 0.01
def test_fit_component_active_state(self):
m = self.model
axis = self.axis
g1 = self.g1
g2 = self.g2
g3 = self.g3
g2.active = True
g3.active = False
m.fit_component(g1, signal_range=(4500, 5200), fit_independent=True)
assert_true(
np.allclose(
self.gs1.function(axis),
g1.function(axis),
rtol=self.rtol))
assert_true(g1.active)
assert_true(g2.active)
assert_true(not g3.active)
def test_fit_component_free_state(self):
m = self.model
axis = self.axis
g1 = self.g1
g2 = self.g2
g3 = self.g3
g2.A.free = False
g2.sigma.free = False
m.fit_component(g1, signal_range=(4500, 5200))
assert_true(
np.allclose(
self.gs1.function(axis),
g1.function(axis),
rtol=self.rtol))
assert_true(g1.A.free)
assert_true(g1.sigma.free)
assert_true(g1.centre.free)
assert_true(not g2.A.free)
assert_true(not g2.sigma.free)
assert_true(g2.centre.free)
assert_true(g3.A.free)
assert_true(g3.sigma.free)
assert_true(g3.centre.free)
def test_fit_multiple_component(self):
m = self.model
g1 = self.g1
g2 = self.g2
g3 = self.g3
m.fit_component(g1, signal_range=(4500, 5200))
m.fit_component(g2, signal_range=(1500, 2200))
m.fit_component(g3, signal_range=(5800, 6150))
assert_true(
np.allclose(
self.model.spectrum.data,
m(),
rtol=self.rtol))
|
pburdet/hyperspy
|
hyperspy/tests/model/test_fit_component.py
|
Python
|
gpl-3.0
| 4,321
|
[
"Gaussian"
] |
4b336d804ff596a05fc727d27456893f76a835ba8ad3d8bb5451171e22014565
|
"""
*******************************************************************
Copyright (c) 2013, 2014 IBM Corp.
All rights reserved. This program and the accompanying materials
are made available under the terms of the Eclipse Public License v1.0
and Eclipse Distribution License v1.0 which accompany this distribution.
The Eclipse Public License is available at
http://www.eclipse.org/legal/epl-v10.html
and the Eclipse Distribution License is available at
http://www.eclipse.org/org/documents/edl-v10.php.
Contributors:
Ian Craggs - initial implementation and/or documentation
*******************************************************************
"""
from __future__ import print_function
"""
Assertions are used to validate incoming data, but are omitted from outgoing packets. This is
so that the tests that use this package can send invalid data for error testing.
"""
import logging
logger = logging.getLogger("mqttsas")
# Low-level protocol interface
class MQTTException(Exception):
pass
# Message types
CONNECT, CONNACK, PUBLISH, PUBACK, PUBREC, PUBREL, \
PUBCOMP, SUBSCRIBE, SUBACK, UNSUBSCRIBE, UNSUBACK, \
PINGREQ, PINGRESP, DISCONNECT = range(1, 15)
packetNames = [ "reserved", \
"Connect", "Connack", "Publish", "Puback", "Pubrec", "Pubrel", \
"Pubcomp", "Subscribe", "Suback", "Unsubscribe", "Unsuback", \
"Pingreq", "Pingresp", "Disconnect"]
classNames = [ "reserved", \
"Connects", "Connacks", "Publishes", "Pubacks", "Pubrecs", "Pubrels", \
"Pubcomps", "Subscribes", "Subacks", "Unsubscribes", "Unsubacks", \
"Pingreqs", "Pingresps", "Disconnects"]
def MessageType(byte):
if byte != None:
rc = ord(byte[0]) >> 4
else:
rc = None
return rc
def getPacket(aSocket):
"receive the next packet"
buf = aSocket.recv(1) # get the first byte fixed header
if buf == b"":
return None
if str(aSocket).find("[closed]") != -1:
closed = True
else:
closed = False
if closed:
return None
# now get the remaining length
multiplier = 1
remlength = 0
while 1:
next = aSocket.recv(1)
while len(next) == 0:
next = aSocket.recv(1)
buf += next
digit = ord(buf[-1])
remlength += (digit & 127) * multiplier
if digit & 128 == 0:
break
multiplier *= 128
# receive the remaining length if there is any
rest = ''
if remlength > 0:
while len(rest) < remlength:
rest += aSocket.recv(remlength-len(rest))
assert len(rest) == remlength
return buf + rest
class FixedHeaders:
def __init__(self, aMessageType):
self.MessageType = aMessageType
self.DUP = False
self.QoS = 0
self.RETAIN = False
self.remainingLength = 0
def __eq__(self, fh):
return self.MessageType == fh.MessageType and \
self.DUP == fh.DUP and \
self.QoS == fh.QoS and \
self.RETAIN == fh.RETAIN # and \
# self.remainingLength == fh.remainingLength
def __repr__(self):
"return printable representation of our data"
return classNames[self.MessageType]+'(DUP='+repr(self.DUP)+ \
", QoS="+repr(self.QoS)+", Retain="+repr(self.RETAIN)
def pack(self, length):
"pack data into string buffer ready for transmission down socket"
buffer = bytes([(self.MessageType << 4) | (self.DUP << 3) |\
(self.QoS << 1) | self.RETAIN])
self.remainingLength = length
buffer += self.encode(length)
return buffer
def encode(self, x):
assert 0 <= x <= 268435455
buffer = b''
while 1:
digit = x % 128
x //= 128
if x > 0:
digit |= 0x80
buffer += bytes([digit])
if x == 0:
break
return buffer
def unpack(self, buffer):
"unpack data from string buffer into separate fields"
b0 = ord(buffer[0])
self.MessageType = b0 >> 4
self.DUP = ((b0 >> 3) & 0x01) == 1
self.QoS = (b0 >> 1) & 0x03
self.RETAIN = (b0 & 0x01) == 1
(self.remainingLength, bytes) = self.decode(buffer[1:])
return bytes + 1 # length of fixed header
def decode(self, buffer):
multiplier = 1
value = 0
bytes = 0
while 1:
bytes += 1
digit = ord(buffer[0])
buffer = buffer[1:]
value += (digit & 127) * multiplier
if digit & 128 == 0:
break
multiplier *= 128
return (value, bytes)
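
def _remaining_length_example():
    """Hedged standalone sketch (not part of the original module) of the
    remaining-length varint scheme used by FixedHeaders.encode/decode above:
    7 payload bits per byte, MSB set on every byte except the last. Written
    against Python 3 bytes to sidestep this module's mixed str/bytes handling;
    purely illustrative."""
    def enc(x):
        out = bytearray()
        while True:
            digit = x % 128
            x //= 128
            out.append(digit | 0x80 if x > 0 else digit)
            if x == 0:
                return bytes(out)
    # 321 = 65 + 2*128 -> 0xC1 (65 with the continuation bit set) then 0x02
    assert enc(321) == b'\xc1\x02'
    assert enc(0) == b'\x00'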
def writeInt16(length):
return bytes([length // 256, length % 256])
def readInt16(buf):
return ord(buf[0])*256 + ord(buf[1])
def writeUTF(data):
# data could be a string, or bytes. If string, encode into bytes with utf-8
return writeInt16(len(data)) + (data if type(data) == type(b"") else bytes(data, "utf-8"))
def readUTF(buffer, maxlen):
if maxlen >= 2:
length = readInt16(buffer)
else:
raise MQTTException("Not enough data to read string length")
maxlen -= 2
if length > maxlen:
raise MQTTException("Length delimited string too long")
buf = buffer[2:2+length].decode("utf-8")
logger.info("[MQTT-4.7.3-2] topic names and filters not include null")
zz = buf.find("\x00") # look for null in the UTF string
if zz != -1:
raise MQTTException("[MQTT-1.5.3-2] Null found in UTF data "+buf)
"""for c in range (0xD800, 0xDFFF):
zz = buf.find(chr(c)) # look for D800-DFFF in the UTF string
if zz != -1:
raise MQTTException("[MQTT-1.5.3-1] D800-DFFF found in UTF data "+buf)
"""
if buf.find("\uFEFF") != -1:
logger.info("[MQTT-1.5.3-3] U+FEFF in UTF string")
return buf
def writeBytes(buffer):
return writeInt16(len(buffer)) + buffer
def readBytes(buffer):
length = readInt16(buffer)
return buffer[2:2+length]
class Packets:
def pack(self):
buffer = self.fh.pack(0)
return buffer
def __repr__(self):
return repr(self.fh)
def __eq__(self, packet):
return self.fh == packet.fh if packet else False
class Connects(Packets):
def __init__(self, buffer = None):
self.fh = FixedHeaders(CONNECT)
# variable header
self.ProtocolName = "MQTT"
self.ProtocolVersion = 4
self.CleanSession = True
self.WillFlag = False
self.WillQoS = 0
self.WillRETAIN = 0
self.KeepAliveTimer = 30
self.usernameFlag = False
self.passwordFlag = False
# Payload
self.ClientIdentifier = "" # UTF-8
self.WillTopic = None # UTF-8
self.WillMessage = None # binary
self.username = None # UTF-8
self.password = None # binary
if buffer != None:
self.unpack(buffer)
def pack(self):
connectFlags = bytes([(self.CleanSession << 1) | (self.WillFlag << 2) | \
(self.WillQoS << 3) | (self.WillRETAIN << 5) | \
                       (self.passwordFlag << 6) | (self.usernameFlag << 7)])
buffer = writeUTF(self.ProtocolName) + bytes([self.ProtocolVersion]) + \
connectFlags + writeInt16(self.KeepAliveTimer)
buffer += writeUTF(self.ClientIdentifier)
if self.WillFlag:
buffer += writeUTF(self.WillTopic)
buffer += writeBytes(self.WillMessage)
if self.usernameFlag:
buffer += writeUTF(self.username)
if self.passwordFlag:
buffer += writeBytes(self.password)
buffer = self.fh.pack(len(buffer)) + buffer
return buffer
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == CONNECT
try:
fhlen = self.fh.unpack(buffer)
packlen = fhlen + self.fh.remainingLength
assert len(buffer) >= packlen, "buffer length %d packet length %d" % (len(buffer), packlen)
curlen = fhlen # points to after header + remaining length
assert self.fh.DUP == False, "[MQTT-2.1.2-1]"
assert self.fh.QoS == 0, "[MQTT-2.1.2-1] QoS was not 0, was %d" % self.fh.QoS
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1]"
self.ProtocolName = readUTF(buffer[curlen:], packlen - curlen)
curlen += len(self.ProtocolName) + 2
assert self.ProtocolName == "MQTT", "Wrong protocol name %s" % self.ProtocolName
self.ProtocolVersion = ord(buffer[curlen])
curlen += 1
connectFlags = ord(buffer[curlen])
assert (connectFlags & 0x01) == 0, "[MQTT-3.1.2-3] reserved connect flag must be 0"
self.CleanSession = ((connectFlags >> 1) & 0x01) == 1
self.WillFlag = ((connectFlags >> 2) & 0x01) == 1
self.WillQoS = (connectFlags >> 3) & 0x03
self.WillRETAIN = (connectFlags >> 5) & 0x01
self.passwordFlag = ((connectFlags >> 6) & 0x01) == 1
self.usernameFlag = ((connectFlags >> 7) & 0x01) == 1
            curlen += 1
if self.WillFlag:
assert self.WillQoS in [0, 1, 2], "[MQTT-3.1.2-14] will qos must not be 3"
else:
assert self.WillQoS == 0, "[MQTT-3.1.2-13] will qos must be 0, if will flag is false"
assert self.WillRETAIN == False, "[MQTT-3.1.2-14] will retain must be false, if will flag is false"
self.KeepAliveTimer = readInt16(buffer[curlen:])
curlen += 2
logger.info("[MQTT-3.1.3-3] Clientid must be present, and first field")
logger.info("[MQTT-3.1.3-4] Clientid must be Unicode, and between 0 and 65535 bytes long")
self.ClientIdentifier = readUTF(buffer[curlen:], packlen - curlen)
curlen += len(self.ClientIdentifier) + 2
if self.WillFlag:
self.WillTopic = readUTF(buffer[curlen:], packlen - curlen)
curlen += len(self.WillTopic) + 2
self.WillMessage = readBytes(buffer[curlen:])
curlen += len(self.WillMessage) + 2
logger.info("[[MQTT-3.1.2-9] will topic and will message fields must be present")
else:
self.WillTopic = self.WillMessage = None
if self.usernameFlag:
assert len(buffer) > curlen+2, "Buffer too short to read username length"
self.username = readUTF(buffer[curlen:], packlen - curlen)
curlen += len(self.username) + 2
logger.info("[MQTT-3.1.2-19] username must be in payload if user name flag is 1")
else:
logger.info("[MQTT-3.1.2-18] username must not be in payload if user name flag is 0")
assert self.passwordFlag == False, "[MQTT-3.1.2-22] password flag must be 0 if username flag is 0"
if self.passwordFlag:
assert len(buffer) > curlen+2, "Buffer too short to read password length"
self.password = readBytes(buffer[curlen:])
curlen += len(self.password) + 2
logger.info("[MQTT-3.1.2-21] password must be in payload if password flag is 0")
else:
logger.info("[MQTT-3.1.2-20] password must not be in payload if password flag is 0")
if self.WillFlag and self.usernameFlag and self.passwordFlag:
logger.info("[MQTT-3.1.3-1] clientid, will topic, will message, username and password all present")
            assert curlen == packlen, "Packet is wrong length curlen %d != packlen %d" % (curlen, packlen)
except:
logger.exception("[MQTT-3.1.4-1] server must validate connect packet and close connection without connack if it does not conform")
raise
def __repr__(self):
buf = repr(self.fh)+", ProtocolName="+str(self.ProtocolName)+", ProtocolVersion=" +\
repr(self.ProtocolVersion)+", CleanSession="+repr(self.CleanSession) +\
", WillFlag="+repr(self.WillFlag)+", KeepAliveTimer=" +\
repr(self.KeepAliveTimer)+", ClientId="+str(self.ClientIdentifier) +\
", usernameFlag="+repr(self.usernameFlag)+", passwordFlag="+repr(self.passwordFlag)
if self.WillFlag:
buf += ", WillQoS=" + repr(self.WillQoS) +\
", WillRETAIN=" + repr(self.WillRETAIN) +\
", WillTopic='"+ self.WillTopic +\
"', WillMessage='"+str(self.WillMessage)+"'"
if self.username:
buf += ", username="+self.username
if self.password:
buf += ", password="+str(self.password)
return buf+")"
def __eq__(self, packet):
rc = Packets.__eq__(self, packet) and \
self.ProtocolName == packet.ProtocolName and \
self.ProtocolVersion == packet.ProtocolVersion and \
self.CleanSession == packet.CleanSession and \
self.WillFlag == packet.WillFlag and \
self.KeepAliveTimer == packet.KeepAliveTimer and \
self.ClientIdentifier == packet.ClientIdentifier and \
self.WillFlag == packet.WillFlag
if rc and self.WillFlag:
rc = self.WillQoS == packet.WillQoS and \
self.WillRETAIN == packet.WillRETAIN and \
self.WillTopic == packet.WillTopic and \
self.WillMessage == packet.WillMessage
return rc
class Connacks(Packets):
def __init__(self, buffer=None, DUP=False, QoS=0, Retain=False, ReturnCode=0):
self.fh = FixedHeaders(CONNACK)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
self.flags = 0
self.returnCode = ReturnCode
if buffer != None:
self.unpack(buffer)
def pack(self):
buffer = bytes([self.flags, self.returnCode])
buffer = self.fh.pack(len(buffer)) + buffer
return buffer
def unpack(self, buffer):
assert len(buffer) >= 4
assert MessageType(buffer) == CONNACK
self.fh.unpack(buffer)
assert self.fh.remainingLength == 2, "Connack packet is wrong length %d" % self.fh.remainingLength
assert ord(buffer[2]) in [0, 1], "Connect Acknowledge Flags"
self.returnCode = ord(buffer[3])
assert self.fh.DUP == False, "[MQTT-2.1.2-1]"
assert self.fh.QoS == 0, "[MQTT-2.1.2-1]"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1]"
def __repr__(self):
return repr(self.fh)+", Session present="+str((self.flags & 0x01) == 1)+", ReturnCode="+repr(self.returnCode)+")"
def __eq__(self, packet):
return Packets.__eq__(self, packet) and \
self.returnCode == packet.returnCode
class Disconnects(Packets):
def __init__(self, buffer=None, DUP=False, QoS=0, Retain=False):
self.fh = FixedHeaders(DISCONNECT)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
if buffer != None:
self.unpack(buffer)
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == DISCONNECT
self.fh.unpack(buffer)
assert self.fh.remainingLength == 0, "Disconnect packet is wrong length %d" % self.fh.remainingLength
logger.info("[MQTT-3.14.1-1] disconnect reserved bits must be 0")
assert self.fh.DUP == False, "[MQTT-2.1.2-1]"
assert self.fh.QoS == 0, "[MQTT-2.1.2-1]"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1]"
def __repr__(self):
return repr(self.fh)+")"
class Publishes(Packets):
def __init__(self, buffer=None, DUP=False, QoS=0, Retain=False, MsgId=0, TopicName="", Payload=b""):
self.fh = FixedHeaders(PUBLISH)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
# variable header
self.topicName = TopicName
self.messageIdentifier = MsgId
# payload
self.data = Payload
if buffer != None:
self.unpack(buffer)
def pack(self):
buffer = writeUTF(self.topicName)
if self.fh.QoS != 0:
buffer += writeInt16(self.messageIdentifier)
buffer += self.data
buffer = self.fh.pack(len(buffer)) + buffer
return buffer
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == PUBLISH
fhlen = self.fh.unpack(buffer)
assert self.fh.QoS in [0, 1, 2], "QoS in Publish must be 0, 1, or 2"
packlen = fhlen + self.fh.remainingLength
assert len(buffer) >= packlen
curlen = fhlen
try:
self.topicName = readUTF(buffer[fhlen:], packlen - curlen)
except UnicodeDecodeError:
logger.info("[MQTT-3.3.2-1] topic name in publish must be utf-8")
raise
curlen += len(self.topicName) + 2
if self.fh.QoS != 0:
self.messageIdentifier = readInt16(buffer[curlen:])
logger.info("[MQTT-2.3.1-1] packet indentifier must be in publish if QoS is 1 or 2")
curlen += 2
assert self.messageIdentifier > 0, "[MQTT-2.3.1-1] packet indentifier must be > 0"
else:
logger.info("[MQTT-2.3.1-5] no packet indentifier in publish if QoS is 0")
self.messageIdentifier = 0
self.data = buffer[curlen:fhlen + self.fh.remainingLength]
if self.fh.QoS == 0:
assert self.fh.DUP == False, "[MQTT-2.1.2-4]"
return fhlen + self.fh.remainingLength
def __repr__(self):
rc = repr(self.fh)
if self.fh.QoS != 0:
rc += ", MsgId="+repr(self.messageIdentifier)
rc += ", TopicName="+repr(self.topicName)+", Payload="+repr(self.data)+")"
return rc
def __eq__(self, packet):
rc = Packets.__eq__(self, packet) and \
self.topicName == packet.topicName and \
self.data == packet.data
if rc and self.fh.QoS != 0:
rc = self.messageIdentifier == packet.messageIdentifier
return rc
class Pubacks(Packets):
def __init__(self, buffer=None, DUP=False, QoS=0, Retain=False, MsgId=0):
self.fh = FixedHeaders(PUBACK)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
# variable header
self.messageIdentifier = MsgId
if buffer != None:
self.unpack(buffer)
def pack(self):
buffer = writeInt16(self.messageIdentifier)
buffer = self.fh.pack(len(buffer)) + buffer
return buffer
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == PUBACK
fhlen = self.fh.unpack(buffer)
assert self.fh.remainingLength == 2, "Puback packet is wrong length %d" % self.fh.remainingLength
assert len(buffer) >= fhlen + self.fh.remainingLength
self.messageIdentifier = readInt16(buffer[fhlen:])
assert self.fh.DUP == False, "[MQTT-2.1.2-1] Puback reserved bits must be 0"
assert self.fh.QoS == 0, "[MQTT-2.1.2-1] Puback reserved bits must be 0"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1] Puback reserved bits must be 0"
return fhlen + 2
def __repr__(self):
return repr(self.fh)+", MsgId "+repr(self.messageIdentifier)
def __eq__(self, packet):
return Packets.__eq__(self, packet) and \
self.messageIdentifier == packet.messageIdentifier
class Pubrecs(Packets):
def __init__(self, buffer=None, DUP=False, QoS=0, Retain=False, MsgId=0):
self.fh = FixedHeaders(PUBREC)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
# variable header
self.messageIdentifier = MsgId
if buffer != None:
self.unpack(buffer)
def pack(self):
buffer = writeInt16(self.messageIdentifier)
buffer = self.fh.pack(len(buffer)) + buffer
return buffer
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == PUBREC
fhlen = self.fh.unpack(buffer)
assert self.fh.remainingLength == 2, "Pubrec packet is wrong length %d" % self.fh.remainingLength
assert len(buffer) >= fhlen + self.fh.remainingLength
self.messageIdentifier = readInt16(buffer[fhlen:])
assert self.fh.DUP == False, "[MQTT-2.1.2-1] Pubrec reserved bits must be 0"
assert self.fh.QoS == 0, "[MQTT-2.1.2-1] Pubrec reserved bits must be 0"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1] Pubrec reserved bits must be 0"
return fhlen + 2
def __repr__(self):
return repr(self.fh)+", MsgId="+repr(self.messageIdentifier)+")"
def __eq__(self, packet):
return Packets.__eq__(self, packet) and \
self.messageIdentifier == packet.messageIdentifier
class Pubrels(Packets):
def __init__(self, buffer=None, DUP=False, QoS=1, Retain=False, MsgId=0):
self.fh = FixedHeaders(PUBREL)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
# variable header
self.messageIdentifier = MsgId
if buffer != None:
self.unpack(buffer)
def pack(self):
buffer = writeInt16(self.messageIdentifier)
buffer = self.fh.pack(len(buffer)) + buffer
return buffer
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == PUBREL
fhlen = self.fh.unpack(buffer)
assert self.fh.remainingLength == 2, "Pubrel packet is wrong length %d" % self.fh.remainingLength
assert len(buffer) >= fhlen + self.fh.remainingLength
self.messageIdentifier = readInt16(buffer[fhlen:])
assert self.fh.DUP == False, "[MQTT-2.1.2-1] DUP should be False in PUBREL"
assert self.fh.QoS == 1, "[MQTT-2.1.2-1] QoS should be 1 in PUBREL"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1] RETAIN should be False in PUBREL"
logger.info("[MQTT-3.6.1-1] bits in fixed header for pubrel are ok")
return fhlen + 2
def __repr__(self):
return repr(self.fh)+", MsgId="+repr(self.messageIdentifier)+")"
def __eq__(self, packet):
return Packets.__eq__(self, packet) and \
self.messageIdentifier == packet.messageIdentifier
class Pubcomps(Packets):
def __init__(self, buffer=None, DUP=False, QoS=0, Retain=False, MsgId=0):
self.fh = FixedHeaders(PUBCOMP)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
# variable header
self.messageIdentifier = MsgId
if buffer != None:
self.unpack(buffer)
def pack(self):
buffer = writeInt16(self.messageIdentifier)
buffer = self.fh.pack(len(buffer)) + buffer
return buffer
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == PUBCOMP
fhlen = self.fh.unpack(buffer)
assert len(buffer) >= fhlen + self.fh.remainingLength
assert self.fh.remainingLength == 2, "Pubcomp packet is wrong length %d" % self.fh.remainingLength
self.messageIdentifier = readInt16(buffer[fhlen:])
assert self.fh.DUP == False, "[MQTT-2.1.2-1] DUP should be False in Pubcomp"
assert self.fh.QoS == 0, "[MQTT-2.1.2-1] QoS should be 0 in Pubcomp"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1] Retain should be false in Pubcomp"
return fhlen + 2
def __repr__(self):
return repr(self.fh)+", MsgId="+repr(self.messageIdentifier)+")"
def __eq__(self, packet):
return Packets.__eq__(self, packet) and \
self.messageIdentifier == packet.messageIdentifier
class Subscribes(Packets):
def __init__(self, buffer=None, DUP=False, QoS=1, Retain=False, MsgId=0, Data=[]):
self.fh = FixedHeaders(SUBSCRIBE)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
# variable header
self.messageIdentifier = MsgId
# payload - list of topic, qos pairs
self.data = Data[:]
if buffer != None:
self.unpack(buffer)
def pack(self):
buffer = writeInt16(self.messageIdentifier)
for d in self.data:
buffer += writeUTF(d[0]) + bytes([d[1]])
buffer = self.fh.pack(len(buffer)) + buffer
return buffer
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == SUBSCRIBE
fhlen = self.fh.unpack(buffer)
assert len(buffer) >= fhlen + self.fh.remainingLength
logger.info("[MQTT-2.3.1-1] packet indentifier must be in subscribe")
self.messageIdentifier = readInt16(buffer[fhlen:])
assert self.messageIdentifier > 0, "[MQTT-2.3.1-1] packet indentifier must be > 0"
leftlen = self.fh.remainingLength - 2
self.data = []
while leftlen > 0:
topic = readUTF(buffer[-leftlen:], leftlen)
leftlen -= len(topic) + 2
qos = ord(buffer[-leftlen])
assert qos in [0, 1, 2], "[MQTT-3-8.3-2] reserved bits must be zero"
leftlen -= 1
self.data.append((topic, qos))
assert len(self.data) > 0, "[MQTT-3.8.3-1] at least one topic, qos pair must be in subscribe"
assert leftlen == 0
assert self.fh.DUP == False, "[MQTT-2.1.2-1] DUP must be false in subscribe"
assert self.fh.QoS == 1, "[MQTT-2.1.2-1] QoS must be 1 in subscribe"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1] RETAIN must be false in subscribe"
return fhlen + self.fh.remainingLength
def __repr__(self):
return repr(self.fh)+", MsgId="+repr(self.messageIdentifier)+\
", Data="+repr(self.data)+")"
def __eq__(self, packet):
return Packets.__eq__(self, packet) and \
self.messageIdentifier == packet.messageIdentifier and \
self.data == packet.data
class Subacks(Packets):
def __init__(self, buffer=None, DUP=False, QoS=0, Retain=False, MsgId=0, Data=[]):
self.fh = FixedHeaders(SUBACK)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
# variable header
self.messageIdentifier = MsgId
# payload - list of qos
self.data = Data[:]
if buffer != None:
self.unpack(buffer)
def pack(self):
buffer = writeInt16(self.messageIdentifier)
for d in self.data:
buffer += bytes([d])
buffer = self.fh.pack(len(buffer)) + buffer
return buffer
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == SUBACK
fhlen = self.fh.unpack(buffer)
assert len(buffer) >= fhlen + self.fh.remainingLength
self.messageIdentifier = readInt16(buffer[fhlen:])
leftlen = self.fh.remainingLength - 2
self.data = []
while leftlen > 0:
qos = buffer[-leftlen]
            assert ord(qos) in [0, 1, 2, 0x80], "[MQTT-3.9.3-2] return code in QoS must be 0, 1, 2 or 0x80, was " + str(ord(qos))
leftlen -= 1
self.data.append(qos)
assert leftlen == 0
assert self.fh.DUP == False, "[MQTT-2.1.2-1] DUP should be false in suback"
assert self.fh.QoS == 0, "[MQTT-2.1.2-1] QoS should be 0 in suback"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1] Retain should be false in suback"
return fhlen + self.fh.remainingLength
def __repr__(self):
return repr(self.fh)+", MsgId="+repr(self.messageIdentifier)+\
", Data="+repr(self.data)+")"
def __eq__(self, packet):
return Packets.__eq__(self, packet) and \
self.messageIdentifier == packet.messageIdentifier and \
self.data == packet.data
class Unsubscribes(Packets):
def __init__(self, buffer=None, DUP=False, QoS=1, Retain=False, MsgId=0, Data=[]):
self.fh = FixedHeaders(UNSUBSCRIBE)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
# variable header
self.messageIdentifier = MsgId
# payload - list of topics
self.data = Data[:]
if buffer != None:
self.unpack(buffer)
def pack(self):
buffer = writeInt16(self.messageIdentifier)
for d in self.data:
buffer += writeUTF(d)
buffer = self.fh.pack(len(buffer)) + buffer
return buffer
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == UNSUBSCRIBE
fhlen = self.fh.unpack(buffer)
assert len(buffer) >= fhlen + self.fh.remainingLength
logger.info("[MQTT-2.3.1-1] packet indentifier must be in unsubscribe")
self.messageIdentifier = readInt16(buffer[fhlen:])
assert self.messageIdentifier > 0, "[MQTT-2.3.1-1] packet indentifier must be > 0"
leftlen = self.fh.remainingLength - 2
self.data = []
while leftlen > 0:
topic = readUTF(buffer[-leftlen:], leftlen)
leftlen -= len(topic) + 2
self.data.append(topic)
assert leftlen == 0
assert self.fh.DUP == False, "[MQTT-2.1.2-1]"
assert self.fh.QoS == 1, "[MQTT-2.1.2-1]"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1]"
logger.info("[MQTT-3-10.1-1] fixed header bits are 0,0,1,0")
return fhlen + self.fh.remainingLength
def __repr__(self):
return repr(self.fh)+", MsgId="+repr(self.messageIdentifier)+\
", Data="+repr(self.data)+")"
def __eq__(self, packet):
return Packets.__eq__(self, packet) and \
self.messageIdentifier == packet.messageIdentifier and \
self.data == packet.data
class Unsubacks(Packets):
def __init__(self, buffer=None, DUP=False, QoS=0, Retain=False, MsgId=0):
self.fh = FixedHeaders(UNSUBACK)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
# variable header
self.messageIdentifier = MsgId
if buffer != None:
self.unpack(buffer)
def pack(self):
buffer = writeInt16(self.messageIdentifier)
buffer = self.fh.pack(len(buffer)) + buffer
return buffer
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == UNSUBACK
fhlen = self.fh.unpack(buffer)
assert len(buffer) >= fhlen + self.fh.remainingLength
self.messageIdentifier = readInt16(buffer[fhlen:])
        assert self.messageIdentifier > 0, "[MQTT-2.3.1-1] packet identifier must be > 0"
assert self.fh.DUP == False, "[MQTT-2.1.2-1]"
assert self.fh.QoS == 0, "[MQTT-2.1.2-1]"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1]"
return fhlen + self.fh.remainingLength
def __repr__(self):
return repr(self.fh)+", MsgId="+repr(self.messageIdentifier)+")"
def __eq__(self, packet):
return Packets.__eq__(self, packet) and \
self.messageIdentifier == packet.messageIdentifier
class Pingreqs(Packets):
def __init__(self, buffer=None, DUP=False, QoS=0, Retain=False):
self.fh = FixedHeaders(PINGREQ)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
if buffer != None:
self.unpack(buffer)
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == PINGREQ
fhlen = self.fh.unpack(buffer)
assert self.fh.remainingLength == 0
assert self.fh.DUP == False, "[MQTT-2.1.2-1]"
assert self.fh.QoS == 0, "[MQTT-2.1.2-1]"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1]"
return fhlen
def __repr__(self):
return repr(self.fh)+")"
class Pingresps(Packets):
def __init__(self, buffer=None, DUP=False, QoS=0, Retain=False):
self.fh = FixedHeaders(PINGRESP)
self.fh.DUP = DUP
self.fh.QoS = QoS
self.fh.Retain = Retain
if buffer != None:
self.unpack(buffer)
def unpack(self, buffer):
assert len(buffer) >= 2
assert MessageType(buffer) == PINGRESP
fhlen = self.fh.unpack(buffer)
assert self.fh.remainingLength == 0
assert self.fh.DUP == False, "[MQTT-2.1.2-1]"
assert self.fh.QoS == 0, "[MQTT-2.1.2-1]"
assert self.fh.RETAIN == False, "[MQTT-2.1.2-1]"
return fhlen
def __repr__(self):
return repr(self.fh)+")"
classes = [None, Connects, Connacks, Publishes, Pubacks, Pubrecs,
Pubrels, Pubcomps, Subscribes, Subacks, Unsubscribes,
Unsubacks, Pingreqs, Pingresps, Disconnects]
def unpackPacket(buffer):
if MessageType(buffer) != None:
packet = classes[MessageType(buffer)]()
packet.unpack(buffer)
else:
packet = None
return packet
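# Illustrative round-trip (mirrors the self-test below): any packet object can
# be serialized with pack() and re-parsed with unpackPacket(), e.g.
#   p = Pingreqs()
#   assert str(unpackPacket(p.pack())) == str(p)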
if __name__ == "__main__":
fh = FixedHeaders(CONNECT)
tests = [0, 56, 127, 128, 8888, 16383, 16384, 65535, 2097151, 2097152,
20555666, 268435454, 268435455]
for x in tests:
try:
assert x == fh.decode(fh.encode(x))[0]
except AssertionError:
print("Test failed for x =", x, fh.decode(fh.encode(x)))
try:
fh.decode(fh.encode(268435456))
print("Error")
except AssertionError:
pass
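    # Note on the boundary values above: the remaining-length field is a
    # base-128 varint where each byte carries 7 payload bits and the high bit
    # marks continuation.  So 127 fits in one byte, 128 needs two (0x80 0x01),
    # 16383 is the two-byte maximum, and 268435455 (0xFF 0xFF 0xFF 0x7F) is the
    # four-byte maximum -- which is why encoding 268435456 must raise.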
for packet in classes[1:]:
before = str(packet())
after = str(unpackPacket(packet().pack()))
try:
assert before == after
except:
print("before:", before, "\nafter:", after)
print("End")
|
eclipse/paho.mqtt.embedded-c
|
test/MQTTV3112.py
|
Python
|
epl-1.0
| 31,004
|
[
"FEFF"
] |
52d7262dc11bea958baa6324f5a894b3ac919e0eb1e9de78fb84b0d4d414551d
|
#!/usr/bin/env python
# -*- coding:utf-8 mode:python; tab-width:4; indent-tabs-mode:nil; py-indent-offset:4 -*-
##
"""
test_energy_hf_psi4
~~~~~~~~~~~~~~~~~~~
Test Psi4 implementations for Hartree-Fock energy jobs.
"""
import sys
import geoprep
from adapters import psi4
from tests.common_testcode import runSuite
from tests import energy_hf as eh
class Psi4HFEnergyTestCase(eh.HFEnergyTestCase):
def setUp(self):
self.G = geoprep.Geotool()
self.C = psi4.Psi4()
def runTests():
try:
test_name = sys.argv[1]
except IndexError:
test_name = None
if test_name:
result = runSuite(Psi4HFEnergyTestCase, name=test_name)
else:
result = runSuite(Psi4HFEnergyTestCase)
return result
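# Usage sketch: running this file directly executes the whole suite; an
# optional command-line argument selects a single test case by name (the test
# methods live on HFEnergyTestCase in tests/energy_hf.py).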
if __name__ == '__main__':
runTests()
|
mattbernst/polyhartree
|
tests/test_energy_hf_psi4.py
|
Python
|
gpl-3.0
| 812
|
[
"Psi4"
] |
c476a0d65250b5c0cc9251df5ea0f1dd4bcc917501c74270b4840660e672bd27
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Chris Houseknecht <@chouseknecht>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: k8s_raw
short_description: Manage Kubernetes (K8s) objects
version_added: "2.5"
author: "Chris Houseknecht (@chouseknecht)"
description:
- Use the OpenShift Python client to perform CRUD operations on K8s objects.
- Pass the object definition from a source file or inline. See examples for reading
files and using Jinja templates.
- Access to the full range of K8s APIs.
- Authenticate using either a config file, certificates, password or token.
- Supports check mode.
extends_documentation_fragment:
- k8s_state_options
- k8s_name_options
- k8s_resource_options
- k8s_auth_options
requirements:
- "python >= 2.7"
- "openshift >= 0.3"
- "PyYAML >= 3.11"
'''
EXAMPLES = '''
- name: Create a k8s namespace
k8s_raw:
name: testing
api_version: v1
kind: Namespace
state: present
- name: Create a Service object from an inline definition
k8s_raw:
state: present
definition:
apiVersion: v1
kind: Service
metadata:
name: web
namespace: testing
labels:
app: galaxy
service: web
spec:
selector:
app: galaxy
service: web
ports:
- protocol: TCP
targetPort: 8000
name: port-8000-tcp
port: 8000
- name: Create a Service object by reading the definition from a file
k8s_raw:
state: present
src: /testing/service.yml
- name: Get an existing Service object
k8s_raw:
api_version: v1
kind: Service
name: web
namespace: testing
register: web_service
- name: Get a list of all service objects
k8s_raw:
api_version: v1
kind: ServiceList
namespace: testing
register: service_list
- name: Remove an existing Service object
k8s_raw:
state: absent
api_version: v1
kind: Service
namespace: testing
name: web
# Passing the object definition from a file
- name: Create a Deployment by reading the definition from a local file
k8s_raw:
state: present
src: /testing/deployment.yml
- name: Read definition file from the Ansible controller file system
k8s_raw:
state: present
definition: "{{ lookup('file', '/testing/deployment.yml') | from_yaml }}"
- name: Read definition file from the Ansible controller file system after Jinja templating
k8s_raw:
state: present
definition: "{{ lookup('template', '/testing/deployment.yml') | from_yaml }}"
'''
RETURN = '''
result:
description:
- The created, patched, or otherwise present object. Will be empty in the case of a deletion.
returned: success
type: complex
contains:
api_version:
description: The versioned schema of this representation of an object.
returned: success
type: str
kind:
       description: Identifies the REST resource this object represents.
returned: success
type: str
metadata:
description: Standard object metadata. Includes name, namespace, annotations, labels, etc.
returned: success
type: complex
spec:
description: Specific attributes of the object. Will vary based on the I(api_version) and I(kind).
returned: success
type: complex
status:
description: Current status details for the object.
returned: success
type: complex
items:
description: Returned only when the I(kind) is a List type resource. Contains a set of objects.
returned: when resource is a List
type: list
'''
from ansible.module_utils.k8s.raw import KubernetesRawModule
def main():
KubernetesRawModule().execute_module()
if __name__ == '__main__':
main()
|
haad/ansible
|
lib/ansible/modules/clustering/k8s/k8s_raw.py
|
Python
|
gpl-3.0
| 4,091
|
[
"Galaxy"
] |
55feea5ba0ae91264a8afcecd71b6664464af789ef9c9b49a5e7a505a75ad290
|
'''
Extract virulence genes by genus from the VFDB database at http://www.mgc.ac.cn/VFs/Down/CP_VFs.ffn.gz
'''
import sys, re
from argparse import ArgumentParser
# BioPython modules for reading and writing sequences
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio.Alphabet import IUPAC
def parse_args():
parser = ArgumentParser(description='Extract virulence genes by genus from the VFDB database available at http://www.mgc.ac.cn/VFs/Down/CP_VFs.ffn.gz')
parser.add_argument('--infile',
required = True,
help = 'Raw VFDB sequences file (fasta, e.g. download from http://www.mgc.ac.cn/VFs/Down/CP_VFs.ffn.gz)')
parser.add_argument('--genus',
required = False,
help = 'Genus to extract (if not specified, all genera will be extracted to individual files)')
return parser.parse_args()
def main():
args = parse_args()
db = {} # key = genus, value = list of sequences
for record in SeqIO.parse(open(args.infile, "r"), "fasta"):
full_name = record.description
genus = full_name.split("[")[-1].split()[0]
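        # e.g. a (hypothetical) VFDB-style header such as
        #   "VFG000001 ... cytolysin [CylA (VF0356)] [Enterococcus faecalis V583]"
        # splits on the last "[" and keeps the first token: genus "Enterococcus"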
if (not args.genus) or (genus == args.genus):
if genus in db:
db[genus].append(record)
else:
db[genus] = [record]
# Save all alleles from the same genus to separate fasta files
for genus in db:
records = db[genus] # list of records
SeqIO.write(records, (genus + ".fsa"), "fasta")
if __name__ == '__main__':
sys.exit(main())
|
ingled/srst2
|
database_clustering/VFDBgenus.py
|
Python
|
bsd-3-clause
| 1,482
|
[
"Biopython"
] |
10289bf37af34a0d1314a7775e83b1e14983fdaad7a2e921c01e8c450d287aa8
|
__author__ = 'jiataogu'
import theano
theano.config.exception_verbosity = 'high'
import logging
import copy
import emolga.basic.objectives as objectives
import emolga.basic.optimizers as optimizers
from emolga.layers.recurrent import *
from emolga.layers.ntm_minibatch import Controller, BernoulliController
from emolga.layers.embeddings import *
from core import Model
logger = logging.getLogger(__name__)
RNN = JZS3 # change it here for other RNN models.
class RecurrentBase(Model):
"""
The recurrent base for SimpleRNN, GRU, JZS3, LSTM and Neural Turing Machines
"""
def __init__(self, config, model='RNN', prefix='enc', use_contxt=True, name=None):
super(RecurrentBase, self).__init__()
self.config = config
self.model = model
self.prefix = prefix
self.use_contxt = use_contxt
if not name:
self.name = self.prefix
else:
self.name = name
if self.config['binary']:
NTM = BernoulliController
else:
NTM = Controller
def _build_RNN():
logger.info('BUILD::>>>>>>>> Gated Recurrent Units.')
core = RNN(
self.config['{}_embedd_dim'.format(self.prefix)],
self.config['{}_hidden_dim'.format(self.prefix)],
self.config['{}_contxt_dim'.format(self.prefix)] if use_contxt else None,
name='{}_rnn'.format(self.prefix)
)
if self.config['bias_code']:
init = Dense(
self.config['{}_contxt_dim'.format(self.prefix)],
self.config['{}_hidden_dim'.format(self.prefix)],
activation='tanh',
name='{}_init'.format(self.prefix)
)
else:
init = Zero()
return core, [init]
def _build_NTM():
"""
Build a simple Neural Turing Machine.
We use a feedforward controller here.
"""
logger.info('BUILD::>>>>>>>> Controller Units.')
core = NTM(
self.config['{}_embedd_dim'.format(self.prefix)],
self.config['{}_memory_dim'.format(self.prefix)],
self.config['{}_memory_wdth'.format(self.prefix)],
self.config['{}_hidden_dim'.format(self.prefix)],
self.config['{}_shift_width'.format(self.prefix)],
name="{}_ntm".format(self.prefix),
readonly=self.config['{}_read-only'.format(self.prefix)],
curr_input=self.config['{}_curr_input'.format(self.prefix)],
recurrence=self.config['{}_recurrence'.format(self.prefix)]
)
if self.config['bias_code']:
raise NotImplementedError
else:
init_w = T.nnet.softmax(initializations.get('glorot_uniform')((1, self.config['{}_memory_dim'.format(self.prefix)])))
init_r = T.nnet.softmax(initializations.get('glorot_uniform')((1, self.config['{}_memory_dim'.format(self.prefix)])))
init_c = initializations.get('glorot_uniform')((1, self.config['{}_hidden_dim'.format(self.prefix)]))
return core, [init_w, init_r, init_c]
if model == 'RNN':
self.core, self.init = _build_RNN()
elif model == 'NTM':
self.core, self.init = _build_NTM()
else:
raise NotImplementedError
self._add(self.core)
if model == 'RNN':
for init in self.init:
self._add(init)
self.set_name(name)
# *****************************************************************
# For Theano inputs.
def get_context(self, context):
# get context if "use_context" is True
info = dict()
# if self.use_contxt:
if self.model == 'RNN':
# context is a matrix (nb_samples, context_dim)
info['C'] = context
info['init_h'] = self.init[0](context)
elif self.model == 'NTM':
# context is a tensor (nb_samples, memory_dim, memory_width)
info['M'] = context
if self.config['bias_code']:
raise NotImplementedError
else:
info['init_ww'] = T.repeat(self.init[0], context.shape[0], axis=0)
info['init_wr'] = T.repeat(self.init[1], context.shape[0], axis=0)
info['init_c'] = T.repeat(self.init[2], context.shape[0], axis=0)
else:
raise NotImplementedError
return info
def loop(self, X, X_mask, info=None, return_sequence=False, return_full=False):
if self.model == 'NTM':
info['return_full'] = return_full
Z = self.core(X, X_mask, return_sequence=return_sequence, **info)
self._monitoring()
return Z
def step(self, X, prev_info):
# run one step of the Recurrence
if self.model == 'RNN':
out = self.core(X, one_step=True, **prev_info)
next_state = out
next_info = {'init_h': out, 'C': prev_info['C']}
elif self.model == 'NTM':
out = self.core(X, one_step=True, **prev_info)
next_state = out[3]
next_info = dict()
next_info['M'] = out[0]
next_info['init_ww'] = out[1]
next_info['init_wr'] = out[2]
next_info['init_c'] = out[3]
else:
raise NotImplementedError
return next_state, next_info
def build_(self):
# build a sampler in theano function for sampling.
if self.model == 'RNN':
context = T.matrix() # theano variable.
logger.info('compile the function: get_init_state')
info = self.get_context(context)
self.get_init_state \
= theano.function([context], info['init_h'],
name='get_init_state')
# **************************************************** #
context = T.matrix() # theano variable.
prev_X = T.matrix('prev_X', dtype='float32')
prev_stat = T.matrix('prev_state', dtype='float32')
prev_info = dict()
prev_info['C'] = context
prev_info['init_h'] = prev_stat
next_stat, next_info \
= self.step(prev_X, prev_info)
logger.info('compile the function: sample_next_state')
inputs = [prev_X, prev_stat, context]
outputs = next_stat
self.sample_next_state = theano.function(inputs, outputs, name='sample_next_state')
elif self.model == 'NTM':
memory = T.tensor3() # theano variable
            logger.info('compile the function: get_init_state')
info = self.get_context(memory)
self.get_init_wr = theano.function([memory], info['init_wr'], name='get_init_wr')
self.get_init_ww = theano.function([memory], info['init_ww'], name='get_init_ww')
self.get_init_c = theano.function([memory], info['init_c'], name='get_init_c')
# **************************************************** #
memory = T.tensor3() # theano variable
prev_X = T.matrix('prev_X', dtype='float32')
prev_ww = T.matrix('prev_ww', dtype='float32')
prev_wr = T.matrix('prev_wr', dtype='float32')
prev_stat = T.matrix('prev_stat', dtype='float32')
prev_info = {'M': memory, 'init_ww': prev_ww, 'init_wr': prev_wr, 'init_c': prev_stat}
logger.info('compile the function: sample_next_0123')
next_stat, next_info = self.step(prev_X, prev_info)
inputs = [prev_X, prev_ww, prev_wr, memory, prev_stat]
outputs = [next_info['M'], next_info['init_ww'], next_info['init_wr'], next_stat]
self.sample_next_state = theano.function(inputs, outputs, name='sample_next_state')
else:
raise NotImplementedError
logger.info('done.')
# *****************************************************************
# For Numpy inputs.
def get_init(self, context):
info = dict()
if self.model == 'RNN':
info['init_h'] = self.get_init_state(context)
info['C'] = context
elif self.model == 'NTM':
if hasattr(self, 'get_init_ww'):
info['init_ww'] = self.get_init_ww(context)
if hasattr(self, 'get_init_wr'):
info['init_wr'] = self.get_init_wr(context)
if hasattr(self, 'get_init_c'):
info['init_c'] = self.get_init_c(context)
info['M'] = context
else:
raise NotImplementedError
return info
def get_next_state(self, prev_X, prev_info):
if self.model == 'RNN':
next_state = self.sample_next_state(
prev_X, prev_info['init_h'], prev_info['C'])
next_info = dict()
next_info['C'] = prev_info['C']
next_info['init_h'] = next_state
elif self.model == 'NTM':
next_info = dict()
assert 'init_ww' in prev_info
assert 'init_wr' in prev_info
assert 'init_c' in prev_info
assert 'M' in prev_info
next_info['M'], next_info['init_ww'], \
next_info['init_wr'], next_info['init_c'] = self.sample_next_state(
prev_X, prev_info['init_ww'], prev_info['init_wr'],
prev_info['M'], prev_info['init_c'])
next_state = next_info['init_c']
else:
raise NotImplementedError
return next_state, next_info
class Encoder(Model):
"""
Recurrent Neural Network/Neural Turing Machine-based Encoder
It is used to compute the context vector.
"""
def __init__(self,
config, rng, prefix='enc',
mode='RNN', embed=None):
"""
mode = RNN: use a RNN Encoder
mode = NTM: use a NTM Encoder
"""
super(Encoder, self).__init__()
self.config = config
self.rng = rng
self.prefix = prefix
self.mode = mode
self.name = prefix
"""
Create all elements of the Encoder's Computational graph
"""
# create Embedding layers
logger.info("{}_create embedding layers.".format(self.prefix))
if embed:
self.Embed = embed
else:
self.Embed = Embedding(
self.config['enc_voc_size'],
self.config['enc_embedd_dim'],
name="{}_embed".format(self.prefix))
self._add(self.Embed)
# create Recurrent Base
logger.info("{}_create Recurrent layers.".format(self.prefix))
if self.mode == 'RNN' and self.config['bidirectional']:
self.Forward = RecurrentBase(self.config, model=self.mode, name='forward',
prefix='enc', use_contxt=self.config['enc_use_contxt'])
self.Bakward = RecurrentBase(self.config, model=self.mode, name='backward',
prefix='enc', use_contxt=self.config['enc_use_contxt'])
self._add(self.Forward)
self._add(self.Bakward)
else:
self.Recurrence = RecurrentBase(self.config, model=self.mode, name='encoder',
prefix='enc', use_contxt=self.config['enc_use_contxt'])
self._add(self.Recurrence)
# there is no readout layers for encoder.
def build_encoder(self, source, context=None):
"""
Build the Encoder Computational Graph
"""
if self.mode == 'RNN':
# we use a Recurrent Neural Network Encoder (GRU)
if not self.config['bidirectional']:
X, X_mask = self.Embed(source, True)
info = self.Recurrence.get_context(context)
X_out = self.Recurrence.loop(X, X_mask, info, return_sequence=False)
else:
source_back = source[:, ::-1]
X1, X1_mask = self.Embed(source, True)
X2, X2_mask = self.Embed(source_back, True)
info = self.Forward.get_context(context)
X_out1 = self.Forward.loop(X1, X1_mask, info, return_sequence=False)
info = self.Bakward.get_context(context)
X_out2 = self.Bakward.loop(X2, X2_mask, info, return_sequence=False)
# X_out = T.concatenate([X_out1, X_out2], axis=1)
X_out = 0.5 * X_out1 + 0.5 * X_out2
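                # averaging (rather than the commented-out concatenation)
                # keeps the summary at enc_hidden_dim instead of doubling it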
elif self.mode == 'NTM':
if not self.config['bidirectional']:
X, X_mask = self.Embed(source, True)
else:
source_back = source[:, ::-1]
X1, X1_mask = self.Embed(source, True)
X2, X2_mask = self.Embed(source_back, True)
X = T.concatenate([X1, X2], axis=1)
X_mask = T.concatenate([X1_mask, X2_mask], axis=1)
info = self.Recurrence.get_context(context)
            # X_out here is the extracted memorybook, which can be used as the initial memory of the NTM Decoder.
X_out = self.Recurrence.loop(X, X_mask, info, return_sequence=False, return_full=True)[0]
else:
raise NotImplementedError
self._monitoring()
return X_out
class Decoder(Model):
"""
Recurrent Neural Network-based Decoder.
It is used for:
(1) Evaluation: compute the probability P(Y|X)
(2) Prediction: sample the best result based on P(Y|X)
(3) Beam-search
(4) Scheduled Sampling (how to implement it?)
"""
def __init__(self,
config, rng, prefix='dec',
mode='RNN', embed=None):
"""
mode = RNN: use a RNN Decoder
mode = NTM: use a NTM Decoder (Neural Turing Machine)
"""
super(Decoder, self).__init__()
self.config = config
self.rng = rng
self.prefix = prefix
self.name = prefix
self.mode = mode
"""
Create all elements of the Decoder's computational graph.
"""
# create Embedding layers
logger.info("{}_create embedding layers.".format(self.prefix))
if embed:
self.Embed = embed
else:
self.Embed = Embedding(
self.config['dec_voc_size'],
self.config['dec_embedd_dim'],
name="{}_embed".format(self.prefix))
self._add(self.Embed)
# create Recurrent Base.
logger.info("{}_create Recurrent layers.".format(self.prefix))
self.Recurrence = RecurrentBase(self.config, model=self.mode, name='decoder',
prefix='dec', use_contxt=self.config['dec_use_contxt'])
# create readout layers
logger.info("_create Readout layers")
# 1. hidden layers readout.
self.hidden_readout = Dense(
self.config['dec_hidden_dim'],
self.config['output_dim']
if self.config['deep_out']
else self.config['dec_voc_size'],
activation='linear',
name="{}_hidden_readout".format(self.prefix)
)
# 2. previous word readout
self.prev_word_readout = None
if self.config['bigram_predict']:
self.prev_word_readout = Dense(
self.config['dec_embedd_dim'],
self.config['output_dim']
if self.config['deep_out']
else self.config['dec_voc_size'],
activation='linear',
name="{}_prev_word_readout".format(self.prefix),
learn_bias=False
)
# 3. context readout
self.context_readout = None
if self.config['context_predict']:
self.context_readout = Dense(
self.config['dec_contxt_dim'],
self.config['output_dim']
if self.config['deep_out']
else self.config['dec_voc_size'],
activation='linear',
name="{}_context_readout".format(self.prefix),
learn_bias=False
)
# option: deep output (maxout)
if self.config['deep_out']:
self.activ = Activation(config['deep_out_activ'])
# self.dropout = Dropout(rng=self.rng, p=config['dropout'])
self.output_nonlinear = [self.activ] # , self.dropout]
self.output = Dense(
self.config['output_dim'] / 2
if config['deep_out_activ'] == 'maxout2'
else self.config['output_dim'],
self.config['dec_voc_size'],
activation='softmax',
name="{}_output".format(self.prefix),
learn_bias=False
)
else:
self.output_nonlinear = []
self.output = Activation('softmax')
# registration:
self._add(self.Recurrence)
self._add(self.hidden_readout)
self._add(self.context_readout)
self._add(self.prev_word_readout)
self._add(self.output)
if self.config['deep_out']:
self._add(self.activ)
# self._add(self.dropout)
logger.info("create decoder ok.")
@staticmethod
def _grab_prob(probs, X):
assert probs.ndim == 3
batch_size = probs.shape[0]
max_len = probs.shape[1]
vocab_size = probs.shape[2]
probs = probs.reshape((batch_size * max_len, vocab_size))
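        # e.g. (illustrative) for probs of shape (2, 4, V) and X of shape (2, 4),
        # the fancy index below picks probs[b*4 + t, X[b, t]] for every (b, t)
        # and reshapes back: the probability assigned to each observed token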
return probs[T.arange(batch_size * max_len), X.flatten(1)].reshape(X.shape) # advanced indexing
"""
Build the decoder for evaluation
"""
def prepare_xy(self, target):
# Word embedding
Y, Y_mask = self.Embed(target, True) # (nb_samples, max_len, embedding_dim)
if self.config['use_input']:
X = T.concatenate([alloc_zeros_matrix(Y.shape[0], 1, Y.shape[2]), Y[:, :-1, :]], axis=1)
else:
X = 0 * Y
# option ## drop words.
X_mask = T.concatenate([T.ones((Y.shape[0], 1)), Y_mask[:, :-1]], axis=1)
Count = T.cast(T.sum(X_mask, axis=1), dtype=theano.config.floatX)
return X, X_mask, Y, Y_mask, Count
def build_decoder(self, target, context=None, return_count=False):
"""
Build the Decoder Computational Graph
"""
X, X_mask, Y, Y_mask, Count = self.prepare_xy(target)
info = self.Recurrence.get_context(context)
X_out = self.Recurrence.loop(X, X_mask, info=info, return_sequence=True)
# Readout
readout = self.hidden_readout(X_out)
if self.config['context_predict']:
            # warning: only supports RNN contexts, not NTM memory
            readout += self.context_readout(context).dimshuffle(0, 'x', 1)
        if self.config['bigram_predict']:
readout += self.prev_word_readout(X)
for l in self.output_nonlinear:
readout = l(readout)
prob_dist = self.output(readout) # (nb_samples, max_len, vocab_size)
# log_old = T.sum(T.log(self._grab_prob(prob_dist, target)), axis=1)
log_prob = T.sum(T.log(self._grab_prob(prob_dist, target)) * X_mask, axis=1)
log_ppl = log_prob / Count
self._monitoring()
if return_count:
return log_prob, Count
else:
return log_prob, log_ppl
"""
Sampling Functions.
"""
def _step_embed(self, prev_word):
# word embedding (note that for the first word, embedding should be all zero)
if self.config['use_input']:
X = T.switch(
prev_word[:, None] < 0,
alloc_zeros_matrix(prev_word.shape[0], self.config['dec_embedd_dim']),
self.Embed(prev_word)
)
else:
X = alloc_zeros_matrix(prev_word.shape[0], self.config['dec_embedd_dim'])
return X
def _step_sample(self, X, next_stat, context):
# compute the readout probability distribution and sample it
# here the readout is a matrix, different from the learner.
readout = self.hidden_readout(next_stat)
if context.ndim == 2 and self.config['context_predict']:
            # warning: only supports RNN contexts, not NTM memory
readout += self.context_readout(context)
if self.config['bigram_predict']:
readout += self.prev_word_readout(X)
for l in self.output_nonlinear:
readout = l(readout)
next_prob = self.output(readout)
next_sample = self.rng.multinomial(pvals=next_prob).argmax(1)
return next_prob, next_sample
"""
Build the sampler for sampling/greedy search/beam search
"""
def build_sampler(self):
"""
Build a sampler which only steps once.
        Typically it only works for one word at a time.
"""
prev_word = T.vector('prev_word', dtype='int64')
prev_X = self._step_embed(prev_word)
self.prev_embed = theano.function([prev_word], prev_X)
self.Recurrence.build_()
prev_X = T.matrix('prev_X', dtype='float32')
next_stat = T.matrix('next_state', dtype='float32')
logger.info('compile the function: sample_next')
if self.config['mode'] == 'RNN':
context = T.matrix('context')
else:
context = T.tensor3('memory')
next_prob, next_sample = self._step_sample(prev_X, next_stat, context)
self.sample_next = theano.function([prev_X, next_stat, context],
[next_prob, next_sample],
name='sample_next',
on_unused_input='warn')
logger.info('done')
"""
Generate samples, either with stochastic sampling or beam-search!
"""
def get_sample(self, context, k=1, maxlen=30, stochastic=True, argmax=False):
# beam size
if k > 1:
assert not stochastic, 'Beam search does not support stochastic sampling!!'
# prepare for searching
sample = []
score = []
if stochastic:
score = 0
live_k = 1
dead_k = 0
hyp_samples = [[]] * live_k
hyp_scores = np.zeros(live_k).astype(theano.config.floatX)
hyp_states = []
hyp_infos = []
# get initial state of decoder Recurrence
next_info = self.Recurrence.get_init(context)
# print 'sample with memory:\t', next_info['M'][0]
# next_state = next_info['init_h']
next_word = -1 * np.ones((1,)).astype('int64') # indicator for the first target word (bos target)
print '<0e~k>'
# Start searching!
for ii in xrange(maxlen):
# print next_word
ctx = np.tile(context, [live_k, 1])
next_embedding = self.prev_embed(next_word)
next_state, next_info = self.Recurrence.get_next_state(next_embedding, next_info)
next_prob, next_word = self.sample_next(next_embedding, next_state, ctx) # wtf.
if stochastic:
# using stochastic sampling (or greedy sampling.)
if argmax:
nw = next_prob[0].argmax()
next_word[0] = nw
else:
nw = next_word[0]
sample.append(nw)
score += next_prob[0, nw]
if nw == 0: # sample reached the end
break
else:
# using beam-search
                # we can only compute it in a flattened way!
                # Currently beam-search does not support NTM !!
cand_scores = hyp_scores[:, None] - np.log(next_prob)
cand_flat = cand_scores.flatten()
ranks_flat = cand_flat.argsort()[:(k - dead_k)]
# fetch the best results.
voc_size = next_prob.shape[1]
                trans_index = ranks_flat // voc_size
word_index = ranks_flat % voc_size
costs = cand_flat[ranks_flat]
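                # e.g. (illustrative) with live_k = 2 hypotheses and voc_size = 5,
                # cand_flat has 10 entries; flat rank 7 maps back to hypothesis
                # 7 // 5 = 1 and word 7 % 5 = 2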
# get the new hyp samples
new_hyp_samples = []
new_hyp_scores = np.zeros(k - dead_k).astype(theano.config.floatX)
new_hyp_states = []
new_hyp_infos = {w: [] for w in next_info}
for idx, [ti, wi] in enumerate(zip(trans_index, word_index)):
new_hyp_samples.append(hyp_samples[ti] + [wi])
new_hyp_scores[idx] = copy.copy(costs[idx])
new_hyp_states.append(copy.copy(next_state[ti]))
for w in next_info:
new_hyp_infos[w].append(copy.copy(next_info[w][ti]))
# check the finished samples
new_live_k = 0
hyp_samples = []
hyp_scores = []
hyp_states = []
hyp_infos = {w: [] for w in next_info}
for idx in xrange(len(new_hyp_samples)):
                    if new_hyp_samples[idx][-1] == 0:  # hypothesis ended with EOS (token 0)
sample.append(new_hyp_samples[idx])
score.append(new_hyp_scores[idx])
dead_k += 1
else:
new_live_k += 1
hyp_samples.append(new_hyp_samples[idx])
hyp_scores.append(new_hyp_scores[idx])
hyp_states.append(new_hyp_states[idx])
for w in next_info:
                            hyp_infos[w].append(copy.copy(new_hyp_infos[w][idx]))
hyp_scores = np.array(hyp_scores)
live_k = new_live_k
if new_live_k < 1:
break
if dead_k >= k:
break
next_word = np.array([w[-1] for w in hyp_samples])
next_state = np.array(hyp_states)
for w in hyp_infos:
next_info[w] = np.array(hyp_infos[w])
pass
pass
# end.
if not stochastic:
# dump every remaining one
if live_k > 0:
for idx in xrange(live_k):
sample.append(hyp_samples[idx])
score.append(hyp_scores[idx])
return sample, score
class RNNLM(Model):
"""
RNN-LM, with context vector = 0.
    It is very similar to the implementation of VAE.
"""
def __init__(self,
config, n_rng, rng,
mode='Evaluation'):
super(RNNLM, self).__init__()
self.config = config
self.n_rng = n_rng # numpy random stream
self.rng = rng # Theano random stream
self.mode = mode
self.name = 'rnnlm'
def build_(self):
logger.info("build the RNN/NTM-decoder")
self.decoder = Decoder(self.config, self.rng, prefix='dec', mode=self.mode)
# registration:
self._add(self.decoder)
# objectives and optimizers
self.optimizer = optimizers.get('adadelta')
# saved the initial memories
self.memory = initializations.get('glorot_uniform')(
(self.config['dec_memory_dim'], self.config['dec_memory_wdth']))
logger.info("create the RECURRENT language model. ok")
def compile_(self, mode='train', contrastive=False):
# compile the computational graph.
# INFO: the parameters.
# mode: 'train'/ 'display'/ 'policy' / 'all'
ps = 'params: {\n'
for p in self.params:
ps += '{0}: {1}\n'.format(p.name, p.eval().shape)
ps += '}.'
logger.info(ps)
param_num = np.sum([np.prod(p.shape.eval()) for p in self.params])
logger.info("total number of the parameters of the model: {}".format(param_num))
if mode == 'train' or mode == 'all':
if not contrastive:
self.compile_train()
else:
self.compile_train_CE()
if mode == 'display' or mode == 'all':
self.compile_sample()
if mode == 'inference' or mode == 'all':
self.compile_inference()
def compile_train(self):
# questions (theano variables)
inputs = T.imatrix() # padded input word sequence (for training)
if self.config['mode'] == 'RNN':
context = alloc_zeros_matrix(inputs.shape[0], self.config['dec_contxt_dim'])
elif self.config['mode'] == 'NTM':
context = T.repeat(self.memory[None, :, :], inputs.shape[0], axis=0)
else:
raise NotImplementedError
# decoding.
target = inputs
logPxz, logPPL = self.decoder.build_decoder(target, context)
# reconstruction loss
loss_rec = T.mean(-logPxz)
loss_ppl = T.exp(T.mean(-logPPL))
L1 = T.sum([T.sum(abs(w)) for w in self.params])
loss = loss_rec
updates = self.optimizer.get_updates(self.params, loss)
logger.info("compiling the compuational graph ::training function::")
train_inputs = [inputs]
self.train_ = theano.function(train_inputs,
[loss_rec, loss_ppl],
updates=updates,
name='train_fun')
logger.info("pre-training functions compile done.")
# add monitoring:
self.monitor['context'] = context
self._monitoring()
# compiling monitoring
self.compile_monitoring(train_inputs)
def compile_train_CE(self):
pass
def compile_sample(self):
# context vectors (as)
self.decoder.build_sampler()
logger.info("display functions compile done.")
def compile_inference(self):
pass
def default_context(self):
if self.config['mode'] == 'RNN':
return np.zeros(shape=(1, self.config['dec_contxt_dim']), dtype=theano.config.floatX)
elif self.config['mode'] == 'NTM':
memory = self.memory.get_value()
memory = memory.reshape((1, memory.shape[0], memory.shape[1]))
return memory
def generate_(self, context=None, mode='display', max_len=None):
"""
        :param context: context vector (RNN) or memory (NTM) used to guide generation.
            If None, the model's default context is used.
        :return: generated sentence in natural language.
"""
# assert self.config['sample_stoch'], 'RNNLM sampling must be stochastic'
# assert not self.config['sample_argmax'], 'RNNLM sampling cannot use argmax'
if context is None:
context = self.default_context()
args = dict(k=self.config['sample_beam'],
maxlen=self.config['max_len'] if not max_len else max_len,
stochastic=self.config['sample_stoch'] if mode == 'display' else None,
argmax=self.config['sample_argmax'] if mode == 'display' else None)
sample, score = self.decoder.get_sample(context, **args)
if not args['stochastic']:
score = score / np.array([len(s) for s in sample])
sample = sample[score.argmin()]
score = score.min()
else:
score /= float(len(sample))
return sample, np.exp(score)
class Helmholtz(RNNLM):
"""
Helmholtz Machine as an probabilistic version AutoEncoder
It is very similar with Variational Auto-Encoder
We implement the Helmholtz RNN as well as Helmholtz Turing Machine here.
Reference:
Reweighted Wake-Sleep
http://arxiv.org/abs/1406.2751
"""
def __init__(self,
config, n_rng, rng,
mode='RNN'):
super(RNNLM, self).__init__()
self.config = config
self.n_rng = n_rng # numpy random stream
self.rng = rng # Theano random stream
self.mode = mode
self.name = 'helmholtz'
def build_(self):
logger.info("build the Helmholtz auto-encoder")
if self.mode == 'NTM':
assert self.config['enc_memory_dim'] == self.config['dec_memory_dim']
assert self.config['enc_memory_wdth'] == self.config['dec_memory_wdth']
self.encoder = Encoder(self.config, self.rng, prefix='enc', mode=self.mode)
if self.config['shared_embed']:
self.decoder = Decoder(self.config, self.rng, prefix='dec',
embed=self.encoder.Embed, mode=self.mode)
else:
self.decoder = Decoder(self.config, self.rng, prefix='dec', mode=self.mode)
# registration
self._add(self.encoder)
self._add(self.decoder)
# The main difference between VAE and HM is that we can use
# a more flexible prior instead of Gaussian here.
# for example, we use a sigmoid prior here.
# prior distribution is a bias layer
if self.mode == 'RNN':
            # here we first focus on the Helmholtz Turing Machine
# Thus the RNN version will be copied from Dial-DRL projects.
raise NotImplementedError
elif self.mode == 'NTM':
self.Prior = MemoryLinear(
self.config['enc_memory_dim'],
self.config['enc_memory_wdth'],
activation='sigmoid',
name='prior_proj',
has_input=False
)
self.Post = MemoryLinear(
self.config['enc_memory_dim'],
self.config['enc_memory_wdth'],
activation='sigmoid',
name='post_proj',
has_input=True
)
self.Trans = MemoryLinear(
self.config['enc_memory_dim'],
self.config['enc_memory_wdth'],
activation='linear',
name='trans_proj',
has_input=True
)
# registration
self._add(self.Prior)
self._add(self.Post)
self._add(self.Trans)
else:
raise NotImplementedError
# objectives and optimizers
self.optimizer = optimizers.get(self.config['optimizer'])
# saved the initial memories
self.memory = initializations.get('glorot_uniform')(
(self.config['dec_memory_dim'], self.config['dec_memory_wdth']))
logger.info("create Helmholtz Machine. ok")
def compile_train(self):
# questions (theano variables)
inputs = T.imatrix() # padded input word sequence (for training)
batch_size = inputs.shape[0]
if self.config['mode'] == 'RNN':
context = alloc_zeros_matrix(inputs.shape[0], self.config['enc_contxt_dim'])
elif self.config['mode'] == 'NTM':
context = T.repeat(self.memory[None, :, :], inputs.shape[0], axis=0)
else:
raise NotImplementedError
# encoding
memorybook = self.encoder.build_encoder(inputs, context)
# get Q(a|y) = sigmoid
q_dis = self.Post(memorybook)
# repeats
L = self.config['repeats']
target = T.repeat(inputs[:, None, :],
L,
axis=1).reshape((inputs.shape[0] * L, inputs.shape[1]))
q_dis = T.repeat(q_dis[:, None, :, :],
L,
axis=1).reshape((q_dis.shape[0] * L, q_dis.shape[1], q_dis.shape[2]))
# sample actions
u = self.rng.uniform(q_dis.shape)
action = T.cast(u <= q_dis, dtype=theano.config.floatX)
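        # i.e. an elementwise Bernoulli draw: each memory cell becomes 1 with
        # probability q_dis, via the standard uniform-threshold trick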
# compute the exact probability for actions
logQax = action * T.log(q_dis) + (1 - action) * T.log(1 - q_dis)
logQax = logQax.sum(axis=-1).sum(axis=-1)
# decoding.
memorybook2 = self.Trans(action)
logPxa, count = self.decoder.build_decoder(target, memorybook2, return_count=True)
# prior.
p_dis = self.Prior()
logPa = action * T.log(p_dis) + (1 - action) * T.log(1 - p_dis)
logPa = logPa.sum(axis=-1).sum(axis=-1)
"""
Compute the weights
"""
# reshape
logQax = logQax.reshape((batch_size, L))
logPa = logPa.reshape((batch_size, L))
logPxa = logPxa.reshape((batch_size, L))
logPx_a = logPa + logPxa
# normalizing the weights
log_wk = logPx_a - logQax
log_bpk = logPa - logQax
log_w_sum = logSumExp(log_wk, axis=1)
log_bp_sum = logSumExp(log_bpk, axis=1)
log_wnk = log_wk - log_w_sum
log_bpnk = log_bpk - log_bp_sum
# unbiased log-likelihood estimator
logPx = T.mean(log_w_sum - T.log(L))
perplexity = T.exp(-T.mean((log_w_sum - T.log(L)) / count))
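        # Illustrative numerics (for L = 2): if one row of log_wk is
        # [0, log(3)], then log_w_sum = log(4), the normalized weights
        # exp(log_wnk) are [0.25, 0.75], and logPx contributes
        # log(4) - log(2) = log(2), i.e. the average raw weight (1 + 3) / 2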
"""
Compute the Loss function
"""
# loss = weights * log [p(a)p(x|a)/q(a|x)]
weights = T.exp(log_wnk)
bp = T.exp(log_bpnk)
bq = 1. / L
ess = T.mean(1 / T.sum(weights ** 2, axis=1))
factor = self.config['factor']
if self.config['variant_control']:
lossQ = -T.mean(T.sum(logQax * (weights - bq), axis=1)) # log q(a|x)
lossPa = -T.mean(T.sum(logPa * (weights - bp), axis=1)) # log p(a)
lossPxa = -T.mean(T.sum(logPxa * weights, axis=1)) # log p(x|a)
lossP = lossPxa + lossPa
updates = self.optimizer.get_updates(self.params, [lossP + factor * lossQ, weights, bp])
else:
lossQ = -T.mean(T.sum(logQax * weights, axis=1)) # log q(a|x)
lossPa = -T.mean(T.sum(logPa * weights, axis=1)) # log p(a)
lossPxa = -T.mean(T.sum(logPxa * weights, axis=1)) # log p(x|a)
lossP = lossPxa + lossPa
updates = self.optimizer.get_updates(self.params, [lossP + factor * lossQ, weights])
logger.info("compiling the compuational graph ::training function::")
train_inputs = [inputs]
self.train_ = theano.function(train_inputs,
[lossPa, lossPxa, lossQ, perplexity, ess],
updates=updates,
name='train_fun')
logger.info("pre-training functions compile done.")
def compile_sample(self):
# # for Typical Auto-encoder, only conditional generation is useful.
# inputs = T.imatrix() # padded input word sequence (for training)
# if self.config['mode'] == 'RNN':
# context = alloc_zeros_matrix(inputs.shape[0], self.config['enc_contxt_dim'])
# elif self.config['mode'] == 'NTM':
# context = T.repeat(self.memory[None, :, :], inputs.shape[0], axis=0)
# else:
# raise NotImplementedError
# pass
# sample the memorybook
p_dis = self.Prior()
l = T.iscalar()
u = self.rng.uniform((l, p_dis.shape[-2], p_dis.shape[-1]))
binarybook = T.cast(u <= p_dis, dtype=theano.config.floatX)
memorybook = self.Trans(binarybook)
self.take = theano.function([l], [binarybook, memorybook], name='take_action')
# compile the sampler.
self.decoder.build_sampler()
logger.info('sampler function compile done.')
def compile_inference(self):
"""
build the hidden action prediction.
"""
inputs = T.imatrix() # padded input word sequence (for training)
if self.config['mode'] == 'RNN':
context = alloc_zeros_matrix(inputs.shape[0], self.config['enc_contxt_dim'])
elif self.config['mode'] == 'NTM':
context = T.repeat(self.memory[None, :, :], inputs.shape[0], axis=0)
else:
raise NotImplementedError
# encoding
memorybook = self.encoder.build_encoder(inputs, context)
# get Q(a|y) = sigmoid(.|Posterior * encoded)
q_dis = self.Post(memorybook)
p_dis = self.Prior()
self.inference_ = theano.function([inputs], [memorybook, q_dis, p_dis])
logger.info("inference function compile done.")
def default_context(self):
return self.take(1)[-1]
class BinaryHelmholtz(RNNLM):
"""
    Helmholtz Machine as a probabilistic version of the AutoEncoder.
    It is very similar to the Variational Auto-Encoder.
We implement the Helmholtz RNN as well as Helmholtz Turing Machine here.
Reference:
Reweighted Wake-Sleep
http://arxiv.org/abs/1406.2751
"""
def __init__(self,
config, n_rng, rng,
mode='RNN'):
super(RNNLM, self).__init__()
self.config = config
self.n_rng = n_rng # numpy random stream
self.rng = rng # Theano random stream
self.mode = mode
self.name = 'helmholtz'
def build_(self):
logger.info("build the Binary-Helmholtz auto-encoder")
if self.mode == 'NTM':
assert self.config['enc_memory_dim'] == self.config['dec_memory_dim']
assert self.config['enc_memory_wdth'] == self.config['dec_memory_wdth']
self.encoder = Encoder(self.config, self.rng, prefix='enc', mode=self.mode)
if self.config['shared_embed']:
self.decoder = Decoder(self.config, self.rng, prefix='dec',
embed=self.encoder.Embed, mode=self.mode)
else:
self.decoder = Decoder(self.config, self.rng, prefix='dec', mode=self.mode)
# registration
self._add(self.encoder)
self._add(self.decoder)
# The main difference between VAE and HM is that we can use
# a more flexible prior instead of Gaussian here.
# for example, we use a sigmoid prior here.
# prior distribution is a bias layer
if self.mode == 'RNN':
            # here we first focus on the Helmholtz Turing Machine
# Thus the RNN version will be copied from Dial-DRL projects.
raise NotImplementedError
elif self.mode == 'NTM':
self.Prior = MemoryLinear(
self.config['enc_memory_dim'],
self.config['enc_memory_wdth'],
activation='sigmoid',
name='prior_proj',
has_input=False
)
# registration
self._add(self.Prior)
else:
raise NotImplementedError
# objectives and optimizers
self.optimizer = optimizers.get(self.config['optimizer'])
# saved the initial memories
self.memory = T.nnet.sigmoid(initializations.get('glorot_uniform')(
(self.config['dec_memory_dim'], self.config['dec_memory_wdth'])))
logger.info("create Helmholtz Machine. ok")
def compile_train(self):
# questions (theano variables)
inputs = T.imatrix() # padded input word sequence (for training)
batch_size = inputs.shape[0]
if self.config['mode'] == 'RNN':
context = alloc_zeros_matrix(inputs.shape[0], self.config['enc_contxt_dim'])
elif self.config['mode'] == 'NTM':
context = T.repeat(self.memory[None, :, :], inputs.shape[0], axis=0)
else:
raise NotImplementedError
# encoding
memorybook = self.encoder.build_encoder(inputs, context)
# get Q(a|y) = sigmoid
q_dis = memorybook
# repeats
L = self.config['repeats']
target = T.repeat(inputs[:, None, :],
L,
axis=1).reshape((inputs.shape[0] * L, inputs.shape[1]))
q_dis = T.repeat(q_dis[:, None, :, :],
L,
axis=1).reshape((q_dis.shape[0] * L, q_dis.shape[1], q_dis.shape[2]))
# sample actions
u = self.rng.uniform(q_dis.shape)
action = T.cast(u <= q_dis, dtype=theano.config.floatX)
# compute the exact probability for actions
logQax = action * T.log(q_dis) + (1 - action) * T.log(1 - q_dis)
logQax = logQax.sum(axis=-1).sum(axis=-1)
# decoding.
memorybook2 = action
logPxa, count = self.decoder.build_decoder(target, memorybook2, return_count=True)
# prior.
p_dis = self.Prior()
logPa = action * T.log(p_dis) + (1 - action) * T.log(1 - p_dis)
logPa = logPa.sum(axis=-1).sum(axis=-1)
"""
Compute the weights
"""
# reshape
logQax = logQax.reshape((batch_size, L))
logPa = logPa.reshape((batch_size, L))
logPxa = logPxa.reshape((batch_size, L))
logPx_a = logPa + logPxa
# normalizing the weights
log_wk = logPx_a - logQax
log_bpk = logPa - logQax
log_w_sum = logSumExp(log_wk, axis=1)
log_bp_sum = logSumExp(log_bpk, axis=1)
log_wnk = log_wk - log_w_sum
log_bpnk = log_bpk - log_bp_sum
# unbiased log-likelihood estimator
logPx = T.mean(log_w_sum - T.log(L))
perplexity = T.exp(-T.mean((log_w_sum - T.log(L)) / count))
"""
Compute the Loss function
"""
# loss = weights * log [p(a)p(x|a)/q(a|x)]
weights = T.exp(log_wnk)
bp = T.exp(log_bpnk)
bq = 1. / L
ess = T.mean(1 / T.sum(weights ** 2, axis=1))
factor = self.config['factor']
if self.config['variant_control']:
lossQ = -T.mean(T.sum(logQax * (weights - bq), axis=1)) # log q(a|x)
lossPa = -T.mean(T.sum(logPa * (weights - bp), axis=1)) # log p(a)
lossPxa = -T.mean(T.sum(logPxa * weights, axis=1)) # log p(x|a)
lossP = lossPxa + lossPa
updates = self.optimizer.get_updates(self.params, [lossP + factor * lossQ, weights, bp])
else:
lossQ = -T.mean(T.sum(logQax * weights, axis=1)) # log q(a|x)
lossPa = -T.mean(T.sum(logPa * weights, axis=1)) # log p(a)
lossPxa = -T.mean(T.sum(logPxa * weights, axis=1)) # log p(x|a)
lossP = lossPxa + lossPa
updates = self.optimizer.get_updates(self.params, [lossP + factor * lossQ, weights])
logger.info("compiling the compuational graph ::training function::")
train_inputs = [inputs]
self.train_ = theano.function(train_inputs,
[lossPa, lossPxa, lossQ, perplexity, ess],
updates=updates,
name='train_fun')
logger.info("pre-training functions compile done.")
def compile_sample(self):
# # for Typical Auto-encoder, only conditional generation is useful.
# inputs = T.imatrix() # padded input word sequence (for training)
# if self.config['mode'] == 'RNN':
# context = alloc_zeros_matrix(inputs.shape[0], self.config['enc_contxt_dim'])
# elif self.config['mode'] == 'NTM':
# context = T.repeat(self.memory[None, :, :], inputs.shape[0], axis=0)
# else:
# raise NotImplementedError
# pass
# sample the memorybook
p_dis = self.Prior()
l = T.iscalar()
u = self.rng.uniform((l, p_dis.shape[-2], p_dis.shape[-1]))
binarybook = T.cast(u <= p_dis, dtype=theano.config.floatX)
self.take = theano.function([l], binarybook, name='take_action')
# compile the sampler.
self.decoder.build_sampler()
logger.info('sampler function compile done.')
def compile_inference(self):
"""
build the hidden action prediction.
"""
inputs = T.imatrix() # padded input word sequence (for training)
if self.config['mode'] == 'RNN':
context = alloc_zeros_matrix(inputs.shape[0], self.config['enc_contxt_dim'])
elif self.config['mode'] == 'NTM':
context = T.repeat(self.memory[None, :, :], inputs.shape[0], axis=0)
else:
raise NotImplementedError
# encoding
memorybook = self.encoder.build_encoder(inputs, context)
# get Q(a|y) = sigmoid(.|Posterior * encoded)
q_dis = memorybook
p_dis = self.Prior()
self.inference_ = theano.function([inputs], [memorybook, q_dis, p_dis])
logger.info("inference function compile done.")
def default_context(self):
return self.take(1)
class AutoEncoder(RNNLM):
"""
Regular Auto-Encoder: RNN Encoder/Decoder
Regular Neural Turing Machine
"""
def __init__(self,
config, n_rng, rng,
mode='Evaluation'):
super(RNNLM, self).__init__()
self.config = config
self.n_rng = n_rng # numpy random stream
self.rng = rng # Theano random stream
self.mode = mode
self.name = 'autoencoder'
def build_(self):
logger.info("build the RNN/NTM auto-encoder")
self.encoder = Encoder(self.config, self.rng, prefix='enc', mode=self.mode)
if self.config['shared_embed']:
self.decoder = Decoder(self.config, self.rng, prefix='dec',
embed=self.encoder.Embed, mode=self.mode)
else:
self.decoder = Decoder(self.config, self.rng, prefix='dec', mode=self.mode)
# registration
self._add(self.encoder)
self._add(self.decoder)
# objectives and optimizers
self.optimizer = optimizers.get(self.config['optimizer'])
# saved the initial memories
self.memory = initializations.get('glorot_uniform')(
(self.config['dec_memory_dim'], self.config['dec_memory_wdth']))
logger.info("create Autoencoder Network. ok")
def compile_train(self, mode='train'):
# questions (theano variables)
inputs = T.imatrix() # padded input word sequence (for training)
if self.config['mode'] == 'RNN':
context = alloc_zeros_matrix(inputs.shape[0], self.config['enc_contxt_dim'])
elif self.config['mode'] == 'NTM':
context = T.repeat(self.memory[None, :, :], inputs.shape[0], axis=0)
else:
raise NotImplementedError
# encoding
memorybook = self.encoder.build_encoder(inputs, context)
# decoding.
target = inputs
logPxz, logPPL = self.decoder.build_decoder(target, memorybook)
# reconstruction loss
loss_rec = T.mean(-logPxz)
loss_ppl = T.exp(T.mean(-logPPL))
loss = loss_rec
updates = self.optimizer.get_updates(self.params, loss)
logger.info("compiling the compuational graph ::training function::")
train_inputs = [inputs]
self.train_ = theano.function(train_inputs,
[loss_rec, loss_ppl],
updates=updates,
name='train_fun')
self.test = theano.function(train_inputs,
[loss_rec, loss_ppl],
name='test_fun')
logger.info("pre-training functions compile done.")
def compile_sample(self):
# for Typical Auto-encoder, only conditional generation is useful.
inputs = T.imatrix() # padded input word sequence (for training)
if self.config['mode'] == 'RNN':
context = alloc_zeros_matrix(inputs.shape[0], self.config['enc_contxt_dim'])
elif self.config['mode'] == 'NTM':
context = T.repeat(self.memory[None, :, :], inputs.shape[0], axis=0)
else:
raise NotImplementedError
pass
# encoding
memorybook = self.encoder.build_encoder(inputs, context)
self.memorize = theano.function([inputs], memorybook, name='memorize')
# compile the sampler.
self.decoder.build_sampler()
logger.info('sampler function compile done.')
|
MingyuanXie/CopyNet
|
emolga/models/ntm_encdec.py
|
Python
|
mit
| 53,418
|
[
"Gaussian"
] |
105b953f7b4a6ceb51e94b764b13870b1f39fe1ddfe2af510c49dfe4bdf6ffd0
|
#!/usr/bin/python
import numpy as np
import matplotlib.pyplot as plt
import scipy.special as spc
'''
To evaluate the change of kinetic energy due to orthogonalization (Delta E_kin), which is the valence-bond model for Pauli repulsion
see:
[1] eq.4 in http://aip.scitation.org/doi/10.1063/1.3272671
Jaramillo-Botero, A., Su, J., Qi, A. & Goddard, W. A. Large-scale, long-term nonadiabatic electron molecular dynamics for describing material properties and phenomena in extreme environments. J. Comput. Chem. 32, 497-512 (2011).
[2] eq.2 in http://doi.wiley.com/10.1002/jcc.21637
Su, J. T. & Goddard, W. A. The dynamics of highly excited electronic systems: Applications of the electron force field. J. Chem. Phys. 131, 244501 (2009).
Derivation in Gauss_KineticAndOverlap.wxmx
/SimpleSimulationEngine/cpp/sketches_SDL/Molecular/notes/Gauss_KineticAndOverlap.wxmx
'''
# ==== constants in SI Units
# see https://en.wikipedia.org/wiki/Fine-structure_constant
const_hbar = 1.054571817e-34 # [J.s] #6.582119569e-16 # [eV/s]
const_Me = 9.10938356e-31 # [kg]
const_e = 1.602176620898e-19 # [Coulomb]
const_eps0 = 8.854187812813e-12 # [F.m = Coulomb/(Volt*m)]
const_eV = 1.602176620898e-19 # [J]
const_Angstroem = 1.0e-10
const_K = const_hbar**2/const_Me
const_El = const_e**2/(4.*np.pi*const_eps0)
const_Ry = 0.5 * const_El**2/const_K
const_Ry_eV = 13.6056925944
const_El_eVA = const_El/( const_e*const_Angstroem )
const_K_eVA = (const_El_eVA**2)/(2*const_Ry_eV)
print "const_El, const_El_eVA ", const_El, const_El_eVA
print "const_Ry const_Ry_eV ", const_Ry, const_Ry/const_eV
print "const_K, const_K_eVA ", const_K, const_K_eVA
# ======= Functions
# ToDo : Kinetic and Overlap share much of the calculation => it makes sense to compute them together in one function
def overlap_(r,si,sj):
# NOTE : this gaussian is not normalized !!!
const = (2*np.pi)**(1.5)
s2 = si**2 + sj**2
r2 = r**2
g = np.exp( -r**2/(2*s2) )
E = const * (si*sj)**3/( s2**1.5 ) * g
return E
def kinetic_(r,si,sj):
# NOTE : this gaussian is not normalized !!!
const = (2*np.pi)**(1.5)
s2 = si**2 + sj**2
r2 = r**2
g = np.exp( -r**2/(2*s2) )
tau = -(r2 - 3 * s2)/(s2**2)
E = const * (si*sj)**3/( s2**1.5 ) * g
return E * tau
def overlap(r,si,sj):
# NOTE : this gaussian IS normalized !!!
#const = (2*np.pi)**(1.5) / (np.pi)**(1.5)
const = 2**1.5
si2 = si**2
sj2 = sj**2
s2 = si2 + sj2
r2 = r**2
norm = 1/(si*sj)**(1.5)
g = np.exp( -r**2/(2*s2) )
S = const * norm * (si*sj)**3/( s2**1.5 ) * g
dS_dr = S * -r/s2
    # d/dsi of the *normalized* overlap (ln S = 1.5*ln(si*sj) - 1.5*ln(s2) - r2/(2*s2))
    dS_dsi = S * ( si2*r2 + 1.5*s2*(sj2 - si2) )/( si*s2*s2 )
return S, dS_dr, dS_dsi
def tau(r,si,sj): # Kinetic/Overlap Tij/Sij
s2 = si**2 + sj**2
r2 = r**2
tau = -(r2 - 3 * s2)/(s2**2)
dTau_dr = 2*r/(s2*s2)
dTau_dsi = -2*si*( 2*r2 - 3*s2 )/(s2*s2*s2)
return tau, dTau_dr, dTau_dsi
def kinetic_S(r,si,sj):
    S, dS_dr, dS_dsi = overlap(r,si,sj)
    tau_ij, dTau_dr, dTau_dsi = tau(r,si,sj)
    T = S*tau_ij
    dT_dsi = S*dTau_dsi + tau_ij*dS_dsi
    dT_dr = S*dTau_dr + tau_ij*dS_dr
return T, dT_dr, dT_dsi
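# Closed forms implemented above, with s2 = si^2 + sj^2 (cf. the references in
# the module docstring):
#   S(r)      = 2^(3/2) * (si*sj/s2)^(3/2) * exp(-r^2/(2*s2))
#   T/S = tau = (3*s2 - r^2) / s2^2        (up to the hbar^2/m prefactor)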
'''
def kinetic(r,si,sj):
const = 2**1.5
s2 = si**2 + sj**2
r2 = r**2
norm = 1/(si*sj)**(1.5)
g = np.exp( -r**2/(2*s2) )
tau = -(r2 - 3 * s2)/(s2**2)
E = const * norm * (si*sj)**3/( s2**1.5 ) * g
return E * tau
'''
def processForces( xs,Es,fs, plt=plt, label="" ):
n=len(xs)
dx=xs[1]-xs[0]
fs_num=(Es[2:]-Es[:-2])/(-2*dx)
Err = np.sqrt( ( (fs_num-fs[1:-1])**2 ).sum()/(n-1) )
#print "Error ", err
if(plt):
plt.figure(figsize=(5,5))
plt.plot( xs, Es , label="E" )
plt.plot( xs, fs , label="f_ana" )
plt.plot( xs[1:-1],fs_num, ":", label="f_num" )
plt.grid();plt.legend();
plt.title(label)
return Err
if __name__ == "__main__":
rs = np.arange( 0.0, 6.0, 0.05 )
#ss = [0.25, 1.0, 2.5 ]
#si=1.0; sj=1.0;
#si=0.5; sj=0.5;
#si=2.0; sj=2.0;
si=0.5**0.5; sj=0.5**0.5;
#si=0.5; sj=2.0;
#si=2.0; sj=0.5;
'''
Ss = overlap( rs, si, sj ) ;print( Ss[0] )
Ts = kinetic( rs, si, sj ) ;print( Ts[0] )
taus = tau ( rs, si, sj ) ;print( taus[0] )
plt.plot( rs, Ss, label="Overlap S12" )
plt.plot( rs, Ts, label="Kinetic T12" )
plt.plot( rs, taus, label="tau T12/S12" )
plt.axhline(0,c='k',ls='--',lw=2 )
plt.ylim(-2,6)
plt.grid()
'''
rs = np.arange(0,1,0.02)
sis = np.arange(0.3,5.0,0.1)
E,fr,fs = overlap(rs,si,sj)
    processForces( rs, E, fr, plt=plt, label="overlap S" )
plt.legend()
plt.show()
|
ProkopHapala/SimpleSimulationEngine
|
cpp/sketches_SDL/Molecular/python/eFF_KineticAndOverlap.py
|
Python
|
mit
| 4,811
|
[
"Gaussian"
] |
9ffd408fb28f5e79fc962178a091be1302d5c0a530df409f04ee3c0ac0bd2c82
|
###########################################################################
#
# This program is part of Zenoss Core, an open source monitoring platform.
# Copyright (C) 2011, Zenoss Inc.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 or (at your
# option) any later version as published by the Free Software Foundation.
#
# For complete information please visit: http://www.zenoss.com/oss/
#
###########################################################################
import logging
log = logging.getLogger('zen.CloudStack')
from Products.Five import zcml
from Products.DataCollector.ApplyDataMap import ApplyDataMap
from Products.ZenModel import ZVersion
from Products.ZenTestCase.BaseTestCase import BaseTestCase
from Products.Zuul.interfaces.info import IInfo
from ZenPacks.zenoss.CloudStack.modeler.plugins.zenoss.CloudStack \
import CloudStack as CloudStackModeler
from ZenPacks.zenoss.CloudStack.tests.utils import loadPickle
CLOUDSTACK_ICON = '/++resource++cloudstack/img/cloudstack.png'
class MockJar(object):
"""Mock object for x._p_jar.
Used to trick ApplyDataMap into not aborting transactions after adding
non-persistent objects. Without doing this, all sub-components will cause
ugly tracebacks in modeling tests.
"""
def sync(self):
pass
class TestModel(BaseTestCase):
def afterSetUp(self):
super(TestModel, self).afterSetUp()
dc = self.dmd.Devices.createOrganizer('/CloudStack')
dc.setZenProperty('zPythonClass', 'ZenPacks.zenoss.CloudStack.Cloud')
self.d = dc.createInstance('zenoss.CloudStack.testDevice')
if not ZVersion.VERSION.startswith('3.'):
self.d.dmd._p_jar = MockJar()
self.applyDataMap = ApplyDataMap()._applyDataMap
# Required to prevent erroring out when trying to define viewlets in
# ../browser/configure.zcml.
import zope.viewlet
zcml.load_config('meta.zcml', zope.viewlet)
import ZenPacks.zenoss.CloudStack
zcml.load_config('configure.zcml', ZenPacks.zenoss.CloudStack)
def _loadZenossData(self):
if hasattr(self, '_loaded'):
return
modeler = CloudStackModeler()
modeler_results = loadPickle('cloudstack_results.pickle')
for data_map in modeler.process(self.d, modeler_results, log):
self.applyDataMap(self.d, data_map)
self._loaded = True
def testCloud(self):
self._loadZenossData()
info = IInfo(self.d)
self.assertEquals(info.zone_count, 1)
self.assertEquals(info.pod_count, 2)
self.assertEquals(info.cluster_count, 2)
self.assertEquals(info.host_count, 6)
def testZone(self):
self._loadZenossData()
zone = self.d.zones._getOb('zone1')
self.assertEquals(zone.device().id, 'zenoss.CloudStack.testDevice')
info = IInfo(zone)
self.assertEquals(info.name, 'Demo5')
self.assertEquals(info.icon, CLOUDSTACK_ICON)
self.assertEquals(info.cloudstack_id, 1)
self.assertEquals(info.allocation_state, 'Enabled')
self.assertEquals(info.guest_cidr_address, '10.1.1.0/24')
self.assertEquals(info.dhcp_provider, 'VirtualRouter')
self.assertEquals(info.public_dns, '72.52.126.11, ')
self.assertEquals(info.internal_dns, '72.52.126.12, 72.52.126.12')
self.assertEquals(info.network_type, 'Advanced')
self.assertEquals(info.security_groups_enabled, False)
self.assertEquals(info.vlan, '1000-1200')
self.assertEquals(info.zone_token, 'f0c6542e-7a1a-39b3-8c92-1a1c67cede0b')
self.assertEquals(info.pod_count, 2)
self.assertEquals(info.cluster_count, 2)
self.assertEquals(info.host_count, 6)
def testPod(self):
self._loadZenossData()
zone = self.d.zones._getOb('zone1')
pod = zone.pods._getOb('pod1')
self.assertEquals(pod.device().id, 'zenoss.CloudStack.testDevice')
info = IInfo(pod)
self.assertEquals(info.name, 'Pod-A')
self.assertEquals(info.icon, CLOUDSTACK_ICON)
self.assertEquals(info.cloudstack_id, 1)
self.assertEquals(info.allocation_state, 'Enabled')
self.assertEquals(info.ip_range, '10.208.37.100 - 10.208.37.120')
self.assertEquals(info.netmask, '255.255.255.128')
self.assertEquals(info.gateway, '10.208.37.1')
self.assertEquals(info.zone.id, 'zone1')
self.assertEquals(info.cluster_count, 1)
self.assertEquals(info.host_count, 3)
def testCluster(self):
self._loadZenossData()
zone = self.d.zones._getOb('zone1')
pod = zone.pods._getOb('pod1')
cluster = pod.clusters._getOb('cluster1')
self.assertEquals(cluster.device().id, 'zenoss.CloudStack.testDevice')
info = IInfo(cluster)
self.assertEquals(info.name, 'XenCluster1-D5')
self.assertEquals(info.icon, CLOUDSTACK_ICON)
self.assertEquals(info.cloudstack_id, 1)
self.assertEquals(info.allocation_state, 'Enabled')
self.assertEquals(info.cluster_type, 'CloudManaged')
self.assertEquals(info.hypervisor_type, 'XenServer')
self.assertEquals(info.managed_state, 'Managed')
self.assertEquals(info.zone.id, 'zone1')
self.assertEquals(info.pod.id, 'pod1')
self.assertEquals(info.host_count, 3)
def testHost(self):
self._loadZenossData()
zone = self.d.zones._getOb('zone1')
pod = zone.pods._getOb('pod1')
cluster = pod.clusters._getOb('cluster1')
host = cluster.hosts._getOb('host1')
self.assertEquals(host.device().id, 'zenoss.CloudStack.testDevice')
info = IInfo(host)
self.assertEquals(info.name, 'demo5-xen')
self.assertEquals(info.icon, CLOUDSTACK_ICON)
self.assertEquals(info.cloudstack_id, 1)
self.assertEquals(info.allocation_state, 'Enabled')
self.assertEquals(info.host_type, 'Routing')
self.assertEquals(info.hypervisor, 'XenServer')
self.assertEquals(info.host_version, '2.2.13.20111117191758')
self.assertEquals(info.capabilities, 'xen-3.0-x86_64 , xen-3.0-x86_32p , hvm-3.0-x86_32 , hvm-3.0-x86_32p , hvm-3.0-x86_64')
self.assertEquals(info.host_state, 'Up')
self.assertEquals(info.created, '2011-10-17T21:19:45-0700')
self.assertEquals(info.host_tags, '')
self.assertEquals(info.ip_address, '10.208.37.11')
self.assertEquals(info.host_events, 'ShutdownRequested; AgentDisconnected; AgentConnected; HypervisorVersionChanged; PrepareUnmanaged; HostDown; PingTimeout; ManagementServerDown; StartAgentRebalance; Ping; MaintenanceRequested')
self.assertEquals(info.local_storage_active, False)
self.assertEquals(info.management_server_id, 257544418526661)
self.assertEquals(info.zone.id, 'zone1')
self.assertEquals(info.pod.id, 'pod1')
self.assertEquals(info.cluster.id, 'cluster1')
def test_getManagedDevice(self):
self._loadZenossData()
host_device1 = self.dmd.Devices.createInstance('host_device1')
host_device1.setManageIp('10.208.37.11')
host_device1.setPerformanceMonitor('localhost')
host_device2 = self.dmd.Devices.createInstance('host_device2')
host_device2.setManageIp('12.34.56.78')
host_device2.setPerformanceMonitor('localhost')
from Products.ZenModel.IpInterface import manage_addIpInterface
manage_addIpInterface(host_device2.os.interfaces, 'eth0', False)
eth0 = host_device2.os.interfaces._getOb('eth0')
eth0.setIpAddresses(['10.208.37.12/24'])
zone = self.d.zones._getOb('zone1')
pod = zone.pods._getOb('pod1')
cluster = pod.clusters._getOb('cluster1')
# Test finding host device by manageIp.
info1 = IInfo(cluster.hosts._getOb('host1'))
self.assertEquals(info1.managed_device.id, 'host_device1')
# Test finding host by interface IP.
info2 = IInfo(cluster.hosts._getOb('host5'))
self.assertEquals(info2.managed_device.id, 'host_device2')
def testMissingHostsResponse(self):
modeler = CloudStackModeler()
modeler_results = loadPickle('cloudstack_results_missingHosts.pickle')
maps = modeler.process(self.d, modeler_results, log)
self.assertNotEquals(maps, None)
def testNoZonesResponse(self):
modeler = CloudStackModeler()
modeler_results = loadPickle('cloudstack_results_noZones.pickle')
maps = modeler.process(self.d, modeler_results, log)
self.assertEquals(len(maps), 4)
self.assertEquals(maps[0].relname, 'zones')
self.assertEquals(maps[1].relname, 'pods')
self.assertEquals(maps[2].relname, 'clusters')
self.assertEquals(maps[3].relname, 'hosts')
self.assertEquals(len(maps[0].maps), 0)
def test_suite():
from unittest import TestSuite, makeSuite
suite = TestSuite()
suite.addTest(makeSuite(TestModel))
return suite
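# Illustrative runner (assumption, not part of the original ZenPack): the
# suite above is normally driven by Zenoss' own test harness, but it can
# also be invoked directly with the standard unittest runner.
if __name__ == '__main__':
    import unittest
    unittest.TextTestRunner(verbosity=2).run(test_suite())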
|
zenoss/ZenPacks.zenoss.CloudStack
|
ZenPacks/zenoss/CloudStack/tests/testModel.py
|
Python
|
gpl-2.0
| 9,144
|
[
"VisIt"
] |
1812b034ff9960918fae8c550e02a958268ce6150bed8ea107dffa6c43421eb4
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Configuration file for chios documentation."""
import os
import sys
from datetime import datetime
try:
import sphinx_rtd_theme
except ImportError:
sphinx_rtd_theme = None
try:
from sphinxcontrib import spelling
except ImportError as e:
print(e)
spelling = None
sys.path.insert(0, os.path.abspath(".."))
sys.path.insert(0, os.path.abspath("../chios/"))
try:
from chios import __version__
except ImportError:
    print("Cannot load version.")
    __version__ = "unknown"  # assumed fallback so `version = __version__` below cannot raise NameError
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.doctest",
"sphinx.ext.viewcode",
]
if spelling is not None:
extensions.append("sphinxcontrib.spelling")
source_suffix = ".rst"
master_doc = "index"
project = "chios"
copyright = f"{datetime.now().year}, Brian Moss"
author = "Brian Moss"
version = __version__
language = None
exclude_patterns = ["_build", "README.rst"]
pygments_style = "sphinx"
if sphinx_rtd_theme:
html_theme = "sphinx_rtd_theme"
else:
html_theme = "default"
html_logo = "_static/chios.png"
html_favicon = "_static/chios.ico"
html_static_path = ["_static"]
html_css_files = ["theme_overrides.css"]
html_use_smartypants = False
htmlhelp_basename = "doc"
html_permalinks = True
html_permalinks_icon = "#"
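# Usage sketch (assumption, not taken from this repo): with this file as
# doc/conf.py, the HTML docs would typically be built with
#   sphinx-build -b html doc doc/_build/html
# and, if sphinxcontrib-spelling is installed, spell-checked with
#   sphinx-build -b spelling doc doc/_build/spelling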
|
kallimachos/chios
|
doc/conf.py
|
Python
|
gpl-3.0
| 1,267
|
[
"Brian"
] |
9f67114667d6fc90ee0297e4a55265282a9f1bd6afc89edb459343a27138e261
|
from __future__ import division, print_function
import numpy as np
from .miscellaneous_utilities import BCTParamError
def adjacency_plot_und(A, coor, tube=False):
'''
This function in matlab is a visualization helper which translates an
adjacency matrix and an Nx3 matrix of spatial coordinates, and plots a
3D isometric network connecting the undirected unweighted nodes using a
specific plotting format. Including the formatted output is not useful at
all for bctpy since matplotlib will not be able to plot it in quite the
same way.
Instead of doing this, I have included code that will plot the adjacency
matrix onto nodes at the given spatial coordinates in mayavi
This routine is basically a less featureful version of the 3D brain in
cvu, the connectome visualization utility which I also maintain. cvu uses
freesurfer surfaces and annotations to get the node coordinates (rather
than leaving them up to the user) and has many other interactive
visualization features not included here for the sake of brevity.
There are other similar visualizations in the ConnectomeViewer and the
UCLA multimodal connectivity database.
Note that unlike other bctpy functions, this function depends on mayavi.
    Parameters
----------
A : NxN np.ndarray
adjacency matrix
coor : Nx3 np.ndarray
vector of node coordinates
tube : bool
plots using cylindrical tubes for higher resolution image. If True,
plots cylindrical tube sources. If False, plots line sources. Default
value is False.
Returns
-------
fig : Instance(Scene)
handle to a mayavi figure.
Notes
-----
To display the output interactively, call
fig=adjacency_plot_und(A,coor)
from mayavi import mlab
mlab.show()
Note: Thresholding the matrix is strongly recommended. It is recommended
that the input matrix have fewer than 5000 total connections in order to
achieve reasonable performance and noncluttered visualization.
'''
from mayavi import mlab
n = len(A)
    nr_edges = n * (n - 1) // 2  # edge count of an undirected graph without self-loops
#starts = np.zeros((nr_edges,3))
#vecs = np.zeros((nr_edges,3))
#adjdat = np.zeros((nr_edges,))
ixes, = np.where(np.triu(np.ones((n, n)), 1).flat)
# i=0
# for r2 in xrange(n):
# for r1 in xrange(r2):
# starts[i,:] = coor[r1,:]
# vecs[i,:] = coor[r2,:] - coor[r1,:]
# adjdat[i,:]
# i+=1
adjdat = A.flat[ixes]
A_r = np.tile(coor, (n, 1, 1))
starts = np.reshape(A_r, (n * n, 3))[ixes, :]
vecs = np.reshape(A_r - np.transpose(A_r, (1, 0, 2)), (n * n, 3))[ixes, :]
# plotting
fig = mlab.figure()
nodesource = mlab.pipeline.scalar_scatter(
coor[:, 0], coor[:, 1], coor[:, 2], figure=fig)
nodes = mlab.pipeline.glyph(nodesource, scale_mode='none',
scale_factor=3., mode='sphere', figure=fig)
nodes.glyph.color_mode = 'color_by_scalar'
vectorsrc = mlab.pipeline.vector_scatter(
starts[:, 0], starts[:, 1], starts[
:, 2], vecs[:, 0], vecs[:, 1], vecs[:, 2],
figure=fig)
vectorsrc.mlab_source.dataset.point_data.scalars = adjdat
thres = mlab.pipeline.threshold(vectorsrc,
low=0.0001, up=np.max(A), figure=fig)
vectors = mlab.pipeline.vectors(thres, colormap='YlOrRd',
scale_mode='vector', figure=fig)
vectors.glyph.glyph.clamping = False
vectors.glyph.glyph.color_mode = 'color_by_scalar'
vectors.glyph.color_mode = 'color_by_scalar'
vectors.glyph.glyph_source.glyph_position = 'head'
vectors.actor.property.opacity = .7
if tube:
vectors.glyph.glyph_source.glyph_source = (vectors.glyph.glyph_source.
glyph_dict['cylinder_source'])
vectors.glyph.glyph_source.glyph_source.radius = 0.015
else:
vectors.glyph.glyph_source.glyph_source.glyph_type = 'dash'
return fig
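def _demo_adjacency_plot_und():
    # Hedged usage sketch (not part of bctpy; data is synthetic): plot a
    # small random symmetric adjacency matrix on random 3D coordinates.
    # Requires mayavi, as the docstring above notes.
    A = np.triu((np.random.rand(10, 10) > 0.8).astype(float), 1)
    A = A + A.T  # symmetric with a zero diagonal
    coor = np.random.rand(10, 3) * 50
    fig = adjacency_plot_und(A, coor)
    from mayavi import mlab
    mlab.show()
    return fig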
def align_matrices(m1, m2, dfun='sqrdiff', verbose=False, H=1e6, Texp=1,
T0=1e-3, Hbrk=10):
'''
This function aligns two matrices relative to one another by reordering
the nodes in M2. The function uses a version of simulated annealing.
Parameters
----------
M1 : NxN np.ndarray
first connection matrix
M2 : NxN np.ndarray
second connection matrix
dfun : str
distance metric to use for matching
'absdiff' : absolute difference
'sqrdiff' : squared difference (default)
'cosang' : cosine of vector angle
verbose : bool
print out cost at each iteration. Default False.
H : int
annealing parameter, default value 1e6
Texp : int
annealing parameter, default value 1. Coefficient of H s.t.
Texp0=1-Texp/H
T0 : float
annealing parameter, default value 1e-3
Hbrk : int
annealing parameter, default value = 10. Coefficient of H s.t.
        Hbrk0 = H/Hbrk
Returns
-------
Mreordered : NxN np.ndarray
reordered connection matrix M2
Mindices : Nx1 np.ndarray
reordered indices
cost : float
objective function distance between M1 and Mreordered
Notes
-----
Connection matrices can be weighted or binary, directed or undirected.
They must have the same number of nodes. M1 can be entered in any
node ordering.
Note that in general, the outcome will depend on the initial condition
(the setting of the random number seed). Also, there is no good way to
determine optimal annealing parameters in advance - these parameters
will need to be adjusted "by hand" (particularly H, Texp, T0, and Hbrk).
For large and/or dense matrices, it is highly recommended to perform
exploratory runs varying the settings of 'H' and 'Texp' and then select
the best values.
Based on extensive testing, it appears that T0 and Hbrk can remain
unchanged in most cases. Texp may be varied from 1-1/H to 1-10/H, for
example. H is the most important parameter - set to larger values as
the problem size increases. Good solutions can be obtained for
matrices up to about 100 nodes. It is advisable to run this function
multiple times and select the solution(s) with the lowest 'cost'.
If the two matrices are related it may be very helpful to pre-align them
by reordering along their largest eigenvectors:
[v,~] = eig(M1); v1 = abs(v(:,end)); [a1,b1] = sort(v1);
[v,~] = eig(M2); v2 = abs(v(:,end)); [a2,b2] = sort(v2);
[a,b,c] = overlapMAT2(M1(b1,b1),M2(b2,b2),'dfun',1);
Setting 'Texp' to zero cancels annealing and uses a greedy algorithm
instead.
'''
n = len(m1)
if n < 2:
raise BCTParamError("align_matrix will infinite loop on a singleton "
"or null matrix.")
# define maxcost (greatest possible difference) and lowcost
if dfun in ('absdiff', 'absdff'):
maxcost = np.sum(np.abs(np.sort(m1.flat) - np.sort(m2.flat)[::-1]))
lowcost = np.sum(np.abs(m1 - m2)) / maxcost
elif dfun in ('sqrdiff', 'sqrdff'):
maxcost = np.sum((np.sort(m1.flat) - np.sort(m2.flat)[::-1])**2)
lowcost = np.sum((m1 - m2)**2) / maxcost
elif dfun == 'cosang':
maxcost = np.pi / 2
lowcost = np.arccos(np.dot(m1.flat, m2.flat) /
np.sqrt(np.dot(m1.flat, m1.flat) * np.dot(m2.flat, m2.flat))) / maxcost
else:
raise BCTParamError('dfun must be absdiff or sqrdiff or cosang')
mincost = lowcost
anew = np.arange(n)
amin = np.arange(n)
h = 0
hcnt = 0
# adjust annealing parameters from user provided coefficients
# H determines the maximal number of steps (user-provided)
# Texp determines the steepness of the temperature gradient
Texp = 1 - Texp / H
# T0 sets the initial temperature and scales the energy term (user provided)
    # Hbrk sets a break point for the simulation
Hbrk = H / Hbrk
while h < H:
h += 1
hcnt += 1
# terminate if no new mincost has been found for some time
if hcnt > Hbrk:
break
# current temperature
T = T0 * (Texp**h)
# choose two positions at random and flip them
atmp = anew.copy()
r1, r2 = np.random.randint(n, size=(2,))
while r1 == r2:
r2 = np.random.randint(n)
atmp[r1] = anew[r2]
atmp[r2] = anew[r1]
m2atmp = m2[np.ix_(atmp, atmp)]
if dfun in ('absdiff', 'absdff'):
costnew = np.sum(np.abs(m1 - m2atmp)) / maxcost
elif dfun in ('sqrdiff', 'sqrdff'):
costnew = np.sum((m1 - m2atmp)**2) / maxcost
elif dfun == 'cosang':
costnew = np.arccos(np.dot(m1.flat, m2atmp.flat) / np.sqrt(
np.dot(m1.flat, m1.flat) * np.dot(m2.flat, m2.flat))) / maxcost
if costnew < lowcost or np.random.random() < np.exp(-(costnew - lowcost) / T):
anew = atmp
lowcost = costnew
# is this the absolute best?
if lowcost < mincost:
amin = anew
mincost = lowcost
if verbose:
print('step %i ... current lowest cost = %f' % (h, mincost))
hcnt = 0
# if the cost is 0 we're done
if mincost == 0:
break
if verbose:
print('step %i ... final lowest cost = %f' % (h, mincost))
M_reordered = m2[np.ix_(amin, amin)]
M_indices = amin
cost = mincost
return M_reordered, M_indices, cost
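def _demo_align_matrices():
    # Hedged sketch (not part of bctpy): scramble a matrix with a random
    # permutation and let align_matrices try to recover the ordering.
    # With enough annealing steps the reported cost should approach zero.
    n = 8
    m1 = np.random.rand(n, n)
    perm = np.random.permutation(n)
    m2 = m1[np.ix_(perm, perm)]
    m_re, m_idx, cost = align_matrices(m1, m2, dfun='sqrdiff', H=1e5)
    print('final cost: %f' % cost)
    return m_re, m_idx, cost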
def backbone_wu(CIJ, avgdeg):
'''
The network backbone contains the dominant connections in the network
and may be used to aid network visualization. This function computes
the backbone of a given weighted and undirected connection matrix CIJ,
using a minimum-spanning-tree based algorithm.
Parameters
----------
CIJ : NxN np.ndarray
weighted undirected connection matrix
avgdeg : int
desired average degree of backbone
Returns
-------
CIJtree : NxN np.ndarray
connection matrix of the minimum spanning tree of CIJ
CIJclus : NxN np.ndarray
connection matrix of the minimum spanning tree plus strongest
connections up to some average degree 'avgdeg'. Identical to CIJtree
if the degree requirement is already met.
Notes
-----
NOTE: nodes with zero strength are discarded.
NOTE: CIJclus will have a total average degree exactly equal to
(or very close to) 'avgdeg'.
NOTE: 'avgdeg' backfill is handled slightly differently than in Hagmann
et al 2008.
'''
n = len(CIJ)
if not np.all(CIJ == CIJ.T):
raise BCTParamError('backbone_wu can only be computed for undirected '
                            'matrices. If your matrix has noise, correct it with np.around')
CIJtree = np.zeros((n, n))
# find strongest edge (if multiple edges are tied, use only first one)
i, j = np.where(np.max(CIJ) == CIJ)
im = [i[0], i[1]] # what? why take two values? doesnt that mess up multiples?
jm = [j[0], j[1]]
# copy into tree graph
CIJtree[im, jm] = CIJ[im, jm]
in_ = im
out = np.setdiff1d(range(n), in_)
# repeat n-2 times
for ix in range(n - 2):
CIJ_io = CIJ[np.ix_(in_, out)]
i, j = np.where(np.max(CIJ_io) == CIJ_io)
# i,j=np.where(np.max(CIJ[in_,out])==CIJ[in_,out])
print(i, j)
im = in_[i[0]]
jm = out[j[0]]
# copy into tree graph
CIJtree[im, jm] = CIJ[im, jm]
CIJtree[jm, im] = CIJ[jm, im]
in_ = np.append(in_, jm)
out = np.setdiff1d(range(n), in_)
# now add connections back with the total number of added connections
# determined by the desired avgdeg
CIJnotintree = CIJ * np.logical_not(CIJtree)
ix, = np.where(CIJnotintree.flat)
a = np.sort(CIJnotintree.flat[ix])[::-1]
cutoff = avgdeg * n - 2 * (n - 1) - 1
# if the avgdeg req is already satisfied, skip this
if cutoff >= np.size(a):
CIJclus = CIJtree.copy()
else:
thr = a[cutoff]
CIJclus = CIJtree + CIJnotintree * (CIJnotintree >= thr)
return CIJtree, CIJclus
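def _demo_backbone_wu():
    # Hedged sketch (synthetic data): take the minimum-spanning-tree
    # backbone of a random weighted undirected matrix, then backfill the
    # strongest remaining edges up to an average degree of 4.
    W = np.triu(np.random.rand(20, 20), 1)
    W = W + W.T  # symmetric, as backbone_wu requires
    CIJtree, CIJclus = backbone_wu(W, avgdeg=4)
    return CIJtree, CIJclus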
def grid_communities(c):
'''
(X,Y,INDSORT) = GRID_COMMUNITIES(C) takes a vector of community
assignments C and returns three output arguments for visualizing the
communities. The third is INDSORT, which is an ordering of the vertices
so that nodes with the same community assignment are next to one
another. The first two arguments are vectors that, when overlaid on the
adjacency matrix using the PLOT function, highlight the communities.
Parameters
----------
c : Nx1 np.ndarray
community assignments
Returns
-------
bounds : list
list containing the communities
indsort : np.ndarray
indices
Notes
-----
Note: This function returns considerably different values than in
matlab due to differences between matplotlib and matlab. This function
has been designed to work with matplotlib, as in the following example:
ci,_=modularity_und(adj)
bounds,ixes=grid_communities(ci)
pylab.imshow(adj[np.ix_(ixes,ixes)],interpolation='none',cmap='BuGn')
for b in bounds:
pylab.axvline(x=b,color='red')
pylab.axhline(y=b,color='red')
Note that I adapted the idea from the matlab function of the same name,
and have not tested the functionality extensively.
'''
c = c.copy()
nr_c = np.max(c)
ixes = np.argsort(c)
c = c[ixes]
bounds = []
for i in range(nr_c):
ind = np.where(c == i + 1)
if np.size(ind):
mn = np.min(ind) - .5
mx = np.max(ind) + .5
bounds.extend([mn, mx])
bounds = np.unique(bounds)
return bounds, ixes
def reorderMAT(m, H=5000, cost='line'):
'''
This function reorders the connectivity matrix in order to place more
edges closer to the diagonal. This often helps in displaying community
structure, clusters, etc.
Parameters
----------
MAT : NxN np.ndarray
connection matrix
H : int
number of reordering attempts
cost : str
'line' or 'circ' for shape of lattice (linear or ring lattice).
Default is linear lattice.
Returns
-------
MATreordered : NxN np.ndarray
reordered connection matrix
MATindices : Nx1 np.ndarray
reordered indices
MATcost : float
objective function cost of reordered matrix
Notes
-----
I'm not 100% sure how the algorithms between this and reorder_matrix
differ, but this code looks a ton sketchier and might have had some minor
bugs in it. Considering reorder_matrix() does the same thing using a well
vetted simulated annealing algorithm, just use that. ~rlaplant
'''
from scipy import linalg, stats
m = m.copy()
n = len(m)
np.fill_diagonal(m, 0)
# generate cost function
if cost == 'line':
profile = stats.norm.pdf(range(1, n + 1), 0, n / 2)[::-1]
elif cost == 'circ':
profile = stats.norm.pdf(range(1, n + 1), n / 2, n / 4)[::-1]
else:
        raise BCTParamError('cost must be line or circ')
costf = linalg.toeplitz(profile, r=profile)
lowcost = np.sum(costf * m)
# keep track of starting configuration
m_start = m.copy()
starta = np.arange(n)
# reorder
for h in range(H):
a = np.arange(n)
# choose two positions and flip them
r1, r2 = np.random.randint(n, size=(2,))
a[r1] = r2
a[r2] = r1
costnew = np.sum((m[np.ix_(a, a)]) * costf)
# if this reduced the overall cost
if costnew < lowcost:
m = m[np.ix_(a, a)]
r2_swap = starta[r2]
r1_swap = starta[r1]
starta[r1] = r2_swap
starta[r2] = r1_swap
lowcost = costnew
M_reordered = m_start[np.ix_(starta, starta)]
m_indices = starta
cost = lowcost
return M_reordered, m_indices, cost
def reorder_matrix(m1, cost='line', verbose=False, H=1e4, Texp=10, T0=1e-3, Hbrk=10):
'''
This function rearranges the nodes in matrix M1 such that the matrix
elements are squeezed along the main diagonal. The function uses a
version of simulated annealing.
Parameters
----------
M1 : NxN np.ndarray
connection matrix weighted/binary directed/undirected
cost : str
'line' or 'circ' for shape of lattice (linear or ring lattice).
Default is linear lattice.
verbose : bool
print out cost at each iteration. Default False.
    H : int
        annealing parameter, default value 1e4
    Texp : int
        annealing parameter, default value 10. Coefficient of H s.t.
        Texp0=1-Texp/H
    T0 : float
        annealing parameter, default value 1e-3
    Hbrk : int
        annealing parameter, default value = 10. Coefficient of H s.t.
        Hbrk0 = H/Hbrk
Returns
-------
Mreordered : NxN np.ndarray
reordered connection matrix
Mindices : Nx1 np.ndarray
reordered indices
Mcost : float
objective function cost of reordered matrix
Notes
-----
Note that in general, the outcome will depend on the initial condition
(the setting of the random number seed). Also, there is no good way to
    determine optimal annealing parameters in advance - these parameters
will need to be adjusted "by hand" (particularly H, Texp, and T0).
For large and/or dense matrices, it is highly recommended to perform
exploratory runs varying the settings of 'H' and 'Texp' and then select
the best values.
Based on extensive testing, it appears that T0 and Hbrk can remain
unchanged in most cases. Texp may be varied from 1-1/H to 1-10/H, for
example. H is the most important parameter - set to larger values as
the problem size increases. It is advisable to run this function
multiple times and select the solution(s) with the lowest 'cost'.
Setting 'Texp' to zero cancels annealing and uses a greedy algorithm
instead.
'''
from scipy import linalg, stats
n = len(m1)
if n < 2:
raise BCTParamError("align_matrix will infinite loop on a singleton "
"or null matrix.")
# generate cost function
if cost == 'line':
profile = stats.norm.pdf(range(1, n + 1), loc=0, scale=n / 2)[::-1]
elif cost == 'circ':
profile = stats.norm.pdf(
range(1, n + 1), loc=n / 2, scale=n / 4)[::-1]
else:
raise BCTParamError('cost must be line or circ')
costf = linalg.toeplitz(profile, r=profile) * np.logical_not(np.eye(n))
costf /= np.sum(costf)
# establish maxcost, lowcost, mincost
maxcost = np.sum(np.sort(costf.flat) * np.sort(m1.flat))
lowcost = np.sum(m1 * costf) / maxcost
mincost = lowcost
# initialize
anew = np.arange(n)
amin = np.arange(n)
h = 0
hcnt = 0
# adjust annealing parameters
# H determines the maximal number of steps (user specified)
# Texp determines the steepness of the temperature gradient
Texp = 1 - Texp / H
# T0 sets the initial temperature and scales the energy term (user provided)
    # Hbrk sets a break point for the simulation
Hbrk = H / Hbrk
while h < H:
h += 1
hcnt += 1
# terminate if no new mincost has been found for some time
if hcnt > Hbrk:
break
T = T0 * Texp**h
atmp = anew.copy()
r1, r2 = np.random.randint(n, size=(2,))
while r1 == r2:
r2 = np.random.randint(n)
atmp[r1] = anew[r2]
atmp[r2] = anew[r1]
costnew = np.sum((m1[np.ix_(atmp, atmp)]) * costf) / maxcost
# annealing
if costnew < lowcost or np.random.random() < np.exp(-(costnew - lowcost) / T):
anew = atmp
lowcost = costnew
# is this a new absolute best?
if lowcost < mincost:
amin = anew
mincost = lowcost
if verbose:
print('step %i ... current lowest cost = %f' % (h, mincost))
hcnt = 0
if verbose:
print('step %i ... final lowest cost = %f' % (h, mincost))
M_reordered = m1[np.ix_(amin, amin)]
M_indices = amin
cost = mincost
return M_reordered, M_indices, cost
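def _demo_reorder_matrix():
    # Hedged sketch (synthetic data): squeeze the edges of a random matrix
    # toward the main diagonal with the default linear-lattice cost.
    W = np.random.rand(15, 15)
    W_re, idx, cost = reorder_matrix(W, cost='line', H=1e4)
    return W_re, idx, cost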
def reorder_mod(A, ci):
'''
This function reorders the connectivity matrix by modular structure and
may hence be useful in visualization of modular structure.
Parameters
----------
A : NxN np.ndarray
binary/weighted connectivity matrix
ci : Nx1 np.ndarray
module affiliation vector
Returns
-------
On : Nx1 np.ndarray
new node order
Ar : NxN np.ndarray
reordered connectivity matrix
'''
# TODO update function with 2015 changes
from scipy import stats
_, max_module_size = stats.mode(ci)
u, ci = np.unique(ci, return_inverse=True) # make consecutive
n = np.size(ci) # number of nodes
m = np.size(u) # number of modules
nm = np.zeros((m,)) # number of nodes in modules
knm = np.zeros((n, m)) # degree to other modules
for i in range(m):
nm[i] = np.size(np.where(ci == i))
knm[:, i] = np.sum(A[:, ci == i], axis=1)
am = np.zeros((m, m)) # relative intermodular connectivity
for i in range(m):
am[i, :] = np.sum(knm[ci == i, :], axis=0)
am /= np.outer(nm, nm)
# 1. Arrange densely connected modules together
# symmetrized intermodular connectivity
i, j = np.where(np.tril(am, -1) + 1)
s = (np.tril(am, -1) + 1)[i, j]
ord = np.argsort(s)[::-1] # sort by high relative connectivity
i = i[ord]
j = j[ord]
i += 1
j += 1 # fix off by 1 error so np.where doesnt
om = np.array((i[0], j[0])) # catch module 0
i[0] = 0
j[0] = 0
while len(om) < m: # while not all modules ordered
ui, = np.where(np.logical_and(
i, np.logical_or(j == om[0], j == om[-1])))
uj, = np.where(np.logical_and(
j, np.logical_or(i == om[0], i == om[-1])))
if np.size(ui):
ui = ui[0]
if np.size(uj):
uj = uj[0]
if ui == uj:
i[ui] = 0
j[uj] = 0
continue
if not np.size(ui):
ui = np.inf
if not np.size(uj):
uj = np.inf
if ui < uj:
old = j[ui]
new = i[ui]
if uj < ui:
old = i[uj]
new = j[uj]
if old == om[0]:
om = np.append((new,), om)
if old == om[-1]:
om = np.append(om, (new,))
i[i == old] = 0
j[j == old] = 0
print(om)
# 2. Reorder nodes within modules
on = np.zeros((n,), dtype=int)
for y, x in enumerate(om):
ind, = np.where(ci == x - 1) # indices
pos, = np.where(om == x) # position
# NOT DONE! OE NOES
mod_imp = np.array((om, np.sign(np.arange(m) - pos),
np.abs(np.arange(m) - pos), am[x - 1, om - 1])).T
print(np.shape((mod_imp[:, 3][::-1], mod_imp[:, 2])))
ix = np.lexsort((mod_imp[:, 3][::-1], mod_imp[:, 2]))
mod_imp = mod_imp[ix]
# at this point mod_imp agrees with the matlab version
signs = mod_imp[:, 1]
mod_imp = np.abs(mod_imp[:, 0] * mod_imp[:, 1])
mod_imp = np.append(mod_imp[1:], x)
mod_imp = np.array(mod_imp - 1, dtype=int)
print(mod_imp, signs)
# at this point mod_imp is the absolute value of that in the matlab
# version. this limitation comes from sortrows ability to deal with
# negative indices, which we would have to do manually.
# instead, i punt on its importance; i only bother to order by the
# principal dimension. some within-module orderings
# may potentially be a little bit out of order.
# ksmi=knm[ind,:].T[mod_imp[::-1]]
# reverse mod_imp to sort by the first column first and so on
# print ksmi
# for i,sin in enumerate(signs):
# if sin==-1:
# ksmi[i,:]=ksmi[i,:][::-1]
# print ksmi
# print np.shape(ksmi)
# ^ this is unworkable and wrong, lexsort alone cannot handle the
# negative indices problem of sortrows. you would pretty much need
# to rewrite sortrows to do lexsort plus negative indices; the algorithm
# cant be further simplified.
ord = np.lexsort(knm[np.ix_(ind, mod_imp[::-1])])
# ord=np.lexsort(knm[ind,:].T[mod_imp[::-1]])
if signs[mod_imp[0]] == -1:
ord = ord[::-1]
# reverse just the principal level and punt on the other levels.
# this will basically be fine for most purposes and probably won't
# ever show a difference for weighted graphs.
on[ind[ord]] = y * int(max_module_size) + \
np.arange(nm[x - 1], dtype=int)
on = np.argsort(on)
ar = A[np.ix_(on, on)]
return on, ar
def writetoPAJ(CIJ, fname, directed):
'''
This function writes a Pajek .net file from a numpy matrix
Parameters
----------
CIJ : NxN np.ndarray
adjacency matrix
fname : str
filename
directed : bool
True if the network is directed and False otherwise. The data format
may be required to know this for some reason so I am afraid to just
use directed as the default value.
'''
n = np.size(CIJ, axis=0)
with open(fname, 'w') as fd:
fd.write('*vertices %i \r' % n)
for i in range(1, n + 1):
fd.write('%i "%i" \r' % (i, i))
if directed:
fd.write('*arcs \r')
else:
fd.write('*edges \r')
for i in range(n):
for j in range(n):
if CIJ[i, j] != 0:
fd.write('%i %i %.6f \r' % (i + 1, j + 1, CIJ[i, j]))
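def _demo_writetoPAJ():
    # Hedged sketch: dump a tiny directed matrix to a Pajek .net file.
    CIJ = np.array([[0.0, 1.5, 0.0],
                    [0.0, 0.0, 2.5],
                    [0.5, 0.0, 0.0]])
    writetoPAJ(CIJ, 'example.net', directed=True)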
|
boomsbloom/dtm-fmri
|
DTM/for_gensim/lib/python2.7/site-packages/bct/utils/visualization.py
|
Python
|
mit
| 26,277
|
[
"Mayavi"
] |
79bf72f66f0405284501b96d8a3f9c1dbdfac0579b4c184c36cba97fb827f693
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2017 Romain Boman
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import platform
system, node, release, version, machine, processor = platform.uname()
machine_name = node.split('.')[0].split('-')[0].lower()
print('machine_name =', machine_name)
print('system =', system)
if system == 'Darwin':
mac_release, mac_versioninfo, mac_machine = platform.mac_ver()
print('\tmac_release =', mac_release)
print('\tmac_versioninfo =', mac_versioninfo)
print('\tmac_machine =', mac_machine)
if system == 'Linux':
lin_distname, lin_version, lin_id = platform.linux_distribution()
print('\tlin_distname =', lin_distname)
print('\tlin_version =', lin_version)
print('\tlin_id =', lin_id)
if system == 'Windows':
win_release, win_version, win_csd, win_ptype = platform.win32_ver()
print('\twin_release =', win_release)
print('\twin_version =', win_version)
print('\twin_csd =', win_csd)
print('\twin_ptype =', win_ptype)
# --
import subprocess, re
def gcc_version():
try:
out = subprocess.check_output(['gcc', '--version'])
except OSError:
return 'gcc not found'
out = out.decode() # python 3 returns bytes
m = re.match(r'gcc \(.+\) (\d+\.\d+\.\d+)', out)
if m and len(m.groups()) > 0:
return m.group(1)
else:
return 'cannot read "gcc --version" output'
def cmake_version():
try:
out = subprocess.check_output(['cmake', '--version'])
except OSError:
return 'cmake not found'
out = out.decode() # python 3 returns bytes
m = re.match(r'cmake version (\d+\.\d+\.\d+)', out)
if m and len(m.groups()) > 0:
return m.group(1)
else:
return 'cannot read "cmake --version" output'
def python_version():
#return sys.version
    return f'{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}'
def numpy_version():
try:
import numpy
except:
return 'numpy not installed'
return numpy.version.version
def scipy_version():
try:
import scipy
except:
return 'scipy not installed'
return scipy.version.version
def vtk_version():
try:
import vtk
except:
return 'python-vtk not installed'
return vtk.vtkVersion.GetVTKVersion()
def qt_version():
try:
from PyQt5.QtCore import QT_VERSION_STR
except:
return 'PyQt5 not installed'
return QT_VERSION_STR
def pyqt_version():
try:
from PyQt5.Qt import PYQT_VERSION_STR
except:
return 'PyQt5 not installed'
return PYQT_VERSION_STR
def sip_version():
try:
from sip import SIP_VERSION_STR
except:
return 'sip not installed'
return SIP_VERSION_STR
if __name__=="__main__":
print("gcc:", gcc_version())
print("cmake:", cmake_version())
print("python:", python_version())
print("numpy:", numpy_version())
print("scipy:", numpy_version())
print("VTK:", vtk_version())
print("Qt:", qt_version())
print("PyQt:", pyqt_version())
print("sip:", sip_version())
|
rboman/progs
|
sandbox/versions/versions.py
|
Python
|
apache-2.0
| 3,657
|
[
"VTK"
] |
ee5839e9515db0aaac45aebb4aa3ae6717df2ae826d854bae2fb87d3d4a82619
|
tests=[
("python","testCMIM.py",{}),
]
longTests=[]
if __name__=='__main__':
import sys
from rdkit import TestRunner
failed,tests = TestRunner.RunScript('test_list.py',0,1)
sys.exit(len(failed))
|
rdkit/rdkit-orig
|
Code/ML/FeatureSelect/Wrap/test_list.py
|
Python
|
bsd-3-clause
| 221
|
[
"RDKit"
] |
97ca7f0c049680b6d73832fa11760ca739ff8cdf2b3198f58df2b0ac305de538
|
#!/usr/bin/env python
import os
from math import *
nopyfoam=False
try:
from PyFoam.Applications.Runner import Runner
from PyFoam.Applications.PlotRunner import PlotRunner
from PyFoam.Applications.CloneCase import CloneCase
except:
nopyfoam = True
from aerofoilmesher import *
# clone wing_template case
#CloneCase(args=["wing_template",case])
threed = False # set this to False to generate a two-dimensional aerofoil
blunt_te = True # set this to True to make trailing edge blunt instead of radius
num_c = 5 # number of chord sections for the wing
span = 3.6 # wingspan
c_0 = 0.5 # root chord
th = 0.1 # max thickness-to-chord ratio
alpha = 0.0 # angle of attack
camber = 10.0 # camber angle
stack = 0.25 # stacking line as a fration of chord
sweep = 40.0
span_c = []
chords = []
dihedral = []
# If 3d, generate eliptical wing planform
if(threed):
for i in range(num_c):
span_c.append(span/2.0*i/(num_c-1))
chords.append(max(c_0*sqrt(1.0-(2.0*span_c[i]/span)**2),0.1))
if(span_c[i]<span/6):
dihedral.append(-10.0)
else:
dihedral.append(7.0)
# area beyond wingtip
span_c.append(span/2.0*2)
chords.append(chords[-1])
dihedral.append(0.0)
else:
num_c = 2
chords = [c_0,c_0]
span_c = [0.0,c_0/10]
dihedral = [0.0,0.0]
sweep=0.0
for i in range(0,1):
alpha = float(i)
# generate root aerofoil
#[cline,p,s] = genNACA65(c_0,th,camber,alpha)
[p,s] = read_ps_profile("clarky.csv",c_0,alpha)
farfield = 5.0 #number of chord lengths to farfield boundary
case = "wing2d_a"+str(i) #case directory name
print("Creating case "+case+" from template.")
if(nopyfoam):
os.system("cp -r wing2d "+case)
else:
CloneCase(args=["wing2d",case])
# create blockmesh file for wing
print("Writing blockMeshDict file for "+case+".")
write_blockmesh(case,threed,blunt_te,span_c,chords,stack,sweep,dihedral,p,s,grid_w = 60)
# run blockMesh on the case
print("Running blockMesh on "+case+".")
if(nopyfoam):
os.system("blockMesh -case "+case+" > "+os.path.join(case,case+"_mesh.log")+" 2>&1")
else:
Runner(args=["--clear","blockMesh", "-case",case])
# ask the user if they want to examine the mesh in paraview
viewit = raw_input("Do you want to view the mesh in ParaView? (y/N): ")
if(viewit in ["y","Y","yes","Yes","YES"]):
if(nopyfoam):
os.system("touch "+os.path.join(case,case+".foam"))
os.system("paraview "+os.path.join(case,case+".foam"))
else:
Runner(args=["paraFoam","-case",case])
# run the simulation
print("Running simpleFOAM solver on "+case+".")
if(nopyfoam):
os.system("simpleFoam -case "+case+" > "+os.path.join(case,case+".log")+" 2>&1")
else:
PlotRunner(args=["--progress","simpleFoam","-case",case])
# calculate y+ wall values
print("Calculating Y+ values on "+case+".")
if(nopyfoam):
os.system("yPlus -case "+case+" > "+os.path.join(case,case+"_yplus.log")+" 2>&1")
else:
Runner(args=["yPlus","-case",case])
# uncomment for pre OF-3.0.x : Runner(args=["yPlusRAS","-case",case])
# ask the user if they want to examine the results in paraview
#viewit = raw_input("Do you want to view the results in ParaView? (y/N): ")
#if(viewit in ["y","Y","yes","Yes","YES"]):
# if(nopyfoam):
# os.system("touch "+os.path.join(case,case+".foam"))
# os.system("paraview "+os.path.join(case,case+".foam"))
# else:
# Runner(args=["paraFoam","-case",case])
|
fruitynoodles/pyaeromesh
|
test_afmesher.py
|
Python
|
gpl-3.0
| 3,629
|
[
"ParaView"
] |
596bc3a350249c614af8c95079584275718b49c65748bd8a5b080f515cb40125
|
# ----------------------------------------------------------------------------------------------------
#
# Copyright (c) 2021, 2021, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# ----------------------------------------------------------------------------------------------------
import os
import shutil
import mx
import mx_native
import mx_subst
class CMakeNinjaProject(mx_native.NinjaProject): # pylint: disable=too-many-ancestors
"""A CMake project that is built using Ninja.
Attributes
ninja_targets: list of str, optional
Targets that should be built using Ninja
ninja_install_targets: list of str, optional
Targets that should be executed after a successful build. In contrast to `ninja_targets`, the `ninja_install_targets`
are not considered when deciding whether a project needs to be rebuilt. This is needed because `install`
targets created by CMake are often executed unconditionally, which would cause the project to be always
rebuilt.
cmakeConfig: dict, optional
Additional arguments passed to CMake in the form '-D{key}={value}'.
Path substitution is performed on the values.
"""
def __init__(self, suite, name, deps, workingSets, subDir, ninja_targets=None, ninja_install_targets=None,
cmake_show_warnings=True, results=None, output=None, **args):
projectDir = args.pop('dir', None)
if projectDir:
d_rel = projectDir
elif subDir is None:
d_rel = name
else:
d_rel = os.path.join(subDir, name)
d = os.path.join(suite.dir, d_rel.replace('/', os.sep))
srcDir = args.pop('sourceDir', d)
if not srcDir:
mx.abort("Exactly one 'sourceDir' is required")
srcDir = mx_subst.path_substitutions.substitute(srcDir)
self._install_targets = [mx_subst.path_substitutions.substitute(x) for x in ninja_install_targets or []]
self._ninja_targets = [mx_subst.path_substitutions.substitute(x) for x in ninja_targets or []]
super(CMakeNinjaProject, self).__init__(suite, name, subDir, [srcDir], deps, workingSets, d, results=results, output=output, **args)
self.silent = not cmake_show_warnings
self._cmake_config_raw = args.pop('cmakeConfig', {})
@staticmethod
def config_entry(key, value):
value_substitute = mx_subst.path_substitutions.substitute(value)
if mx.is_windows():
# cmake does not like backslashes
value_substitute = value_substitute.replace("\\", "/")
return '-D{}={}'.format(key, value_substitute)
@staticmethod
def check_cmake():
try:
CMakeNinjaProject.run_cmake(["--version"], silent=False, nonZeroIsFatal=False)
except OSError as e:
mx.abort(str(e) + "\nError executing 'cmake --version'. Are you sure 'cmake' is installed? ")
@staticmethod
def run_cmake(cmdline, silent, *args, **kwargs):
log_error = kwargs.pop("log_error", False)
if mx._opts.verbose:
mx.run(["cmake"] + cmdline, *args, **kwargs)
else:
with open(os.devnull, 'w') as fnull:
err = mx.OutputCapture() if silent else None
try:
mx.run(["cmake"] + cmdline, out=fnull, err=err, *args, **kwargs)
except:
if log_error and err and err.data:
mx.log_error(err.data)
raise
def cmake_config(self):
return [CMakeNinjaProject.config_entry(k, v) for k, v in sorted(self._cmake_config_raw.items())]
def generate_manifest(self, path, extra_cmake_config=None):
source_dir = self.source_dirs()[0]
out_dir = os.path.dirname(path)
cmakefile = os.path.join(out_dir, 'CMakeCache.txt')
if os.path.exists(cmakefile):
# remove cache file if it exist
os.remove(cmakefile)
cmake_config = self.cmake_config()
if extra_cmake_config:
cmake_config.extend(extra_cmake_config)
# explicitly set ninja executable if not on path
cmake_make_program = 'CMAKE_MAKE_PROGRAM'
if cmake_make_program not in cmake_config and mx_native.Ninja.binary != 'ninja':
cmake_config.append(CMakeNinjaProject.config_entry(cmake_make_program, mx_native.Ninja.binary))
# cmake will always create build.ninja - there is nothing we can do about it ATM
cmdline = ["-G", "Ninja", source_dir] + cmake_config
CMakeNinjaProject.check_cmake()
CMakeNinjaProject.run_cmake(cmdline, silent=self.silent, cwd=out_dir, log_error=True)
# move the build.ninja to the temporary path (just move it back later ... *sigh*)
shutil.copyfile(os.path.join(out_dir, mx_native.Ninja.default_manifest), path)
return True
def _build_task(self, target_arch, args):
return CMakeNinjaBuildTask(args, self, target_arch, self._ninja_targets)
def getResults(self, replaceVar=mx_subst.results_substitutions):
return [mx_subst.as_engine(replaceVar).substitute(rt, dependency=self) for rt in self.results]
def _archivable_results(self, target_arch, use_relpath, single):
def result(base_dir, file_path):
assert not mx.isabs(file_path)
archive_path = file_path if use_relpath else mx.basename(file_path)
return mx.join(base_dir, file_path), archive_path
out_dir_arch = mx.join(self.out_dir, target_arch)
for _result in self.getResults():
yield result(out_dir_arch, _result)
class CMakeNinjaBuildTask(mx_native.NinjaBuildTask):
"""A build task which executes Ninja on a project configured by CMake."""
def __init__(self, args, project, *otherargs, **kwargs):
super(CMakeNinjaBuildTask, self).__init__(args, project, *otherargs, **kwargs)
self._cmake_config_file = os.path.join(project.suite.get_mx_output_dir(), 'cmakeConfig',
mx.get_os() + '-' + mx.get_arch() if project.isPlatformDependent() else '',
type(project).__name__,
project._extra_artifact_discriminant(),
self.name)
def needsBuild(self, newestInput):
mx.logv('Checking whether to reconfigure {} with CMake'.format(self.subject.name))
need_configure, reason = self._need_configure()
if need_configure:
return need_configure, "reconfigure needed by CMake ({})".format(reason)
return super(CMakeNinjaBuildTask, self).needsBuild(newestInput)
def build(self):
super(CMakeNinjaBuildTask, self).build()
# write guard file
source_dir = self.subject.source_dirs()[0]
self._write_guard(source_dir, self.subject.cmake_config())
# call install targets
if self.subject._install_targets:
self.ninja._run(*self.subject._install_targets)
def newestOutput(self):
return mx.TimeStampFile.newest([_path for _path, _ in self.subject.getArchivableResults()])
def _write_guard(self, source_dir, cmake_config):
with mx.SafeFileCreation(self.guard_file()) as sfc:
with open(sfc.tmpPath, 'w') as fp:
fp.write(self._guard_data(source_dir, cmake_config))
def _guard_data(self, source_dir, cmake_config):
return source_dir + '\n' + '\n'.join(cmake_config)
def _need_configure(self):
source_dir = self.subject.source_dirs()[0]
cmake_lists = os.path.join(source_dir, "CMakeLists.txt")
guard_file = self.guard_file()
cmake_config = self.subject.cmake_config()
if not os.path.exists(guard_file):
return True, "No CMake configuration found - reconfigure"
if os.path.exists(cmake_lists) and mx.TimeStampFile(cmake_lists).isNewerThan(mx.TimeStampFile(guard_file)):
return True, cmake_lists + " is newer than the configuration - reconfigure"
with open(guard_file, 'r') as fp:
if fp.read() != self._guard_data(source_dir, cmake_config):
return True, "CMake configuration changed - reconfigure"
return False, None
def guard_file(self):
return self._cmake_config_file
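# Hedged sketch (project name, paths and flags invented for illustration):
# a CMakeNinjaProject might be declared in an mx suite.py roughly like
# this, using the attributes documented in the class docstring above.
#
#   "projects": {
#       "com.example.native": {
#           "class": "CMakeNinjaProject",
#           "subDir": "src",
#           "cmakeConfig": {"CMAKE_BUILD_TYPE": "Release"},
#           "ninja_targets": ["all"],
#           "ninja_install_targets": ["install"],
#           "results": ["bin/example"],
#       },
#   }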
|
graalvm/mx
|
mx_cmake.py
|
Python
|
gpl-2.0
| 9,358
|
[
"VisIt"
] |
93762966800fc1c73a25664c3dbbeb30b457bdeb3710a841caa18968dafa71cc
|
from DIRAC.AccountingSystem.Client.Types.BaseAccountingType import BaseAccountingType
import DIRAC
__RCSID__ = "$Id$"
class Job(BaseAccountingType):
def __init__(self):
BaseAccountingType.__init__(self)
self.definitionKeyFields = [('User', 'VARCHAR(32)'),
('UserGroup', 'VARCHAR(32)'),
('JobGroup', "VARCHAR(64)"),
('JobType', 'VARCHAR(32)'),
('JobClass', 'VARCHAR(32)'),
('ProcessingType', 'VARCHAR(256)'),
('Site', 'VARCHAR(32)'),
('FinalMajorStatus', 'VARCHAR(32)'),
('FinalMinorStatus', 'VARCHAR(64)')
]
self.definitionAccountingFields = [('CPUTime', "INT UNSIGNED"),
('NormCPUTime', "INT UNSIGNED"),
('ExecTime', "INT UNSIGNED"),
('InputDataSize', 'BIGINT UNSIGNED'),
('OutputDataSize', 'BIGINT UNSIGNED'),
('InputDataFiles', 'INT UNSIGNED'),
('OutputDataFiles', 'INT UNSIGNED'),
('DiskSpace', 'BIGINT UNSIGNED'),
('InputSandBoxSize', 'BIGINT UNSIGNED'),
('OutputSandBoxSize', 'BIGINT UNSIGNED'),
('ProcessedEvents', 'INT UNSIGNED')
]
self.bucketsLength = [(86400 * 8, 3600), # <1w+1d = 1h
(86400 * 35, 3600 * 4), # <35d = 4h
(86400 * 30 * 6, 86400), # <6m = 1d
(86400 * 365, 86400 * 2), # <1y = 2d
(86400 * 600, 604800), # >1y = 1w
]
self.checkType()
# Fill the site
self.setValueByKey("Site", DIRAC.siteName())
def checkRecord(self):
result = self.getValue("ExecTime")
if not result['OK']:
return result
execTime = result['Value']
result = self.getValue("CPUTime")
if not result['OK']:
return result
cpuTime = result['Value']
if cpuTime > execTime * 100:
return DIRAC.S_ERROR("OOps. CPUTime seems to be more than 100 times the ExecTime. Smells fishy!")
if execTime > 33350400: # 1 year
return DIRAC.S_ERROR("OOps. More than 1 year of cpu time smells fishy!")
return DIRAC.S_OK()
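# Hedged usage sketch (field values illustrative): a record is filled with
# setValueByKey (as done for 'Site' in __init__ above) and sanity-checked
# with checkRecord before being committed to accounting.
#
#   job = Job()
#   job.setValueByKey('User', 'alice')
#   job.setValueByKey('CPUTime', 3600)
#   job.setValueByKey('ExecTime', 4000)
#   print(job.checkRecord())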
|
arrabito/DIRAC
|
AccountingSystem/Client/Types/Job.py
|
Python
|
gpl-3.0
| 2,637
|
[
"DIRAC"
] |
0d565152ee0be7e397da38237a939828b37331f5178cca64fd53e8936346e09c
|
# pylint: disable=R0913,R0914,W0201,W0622,C0302,R0902,R0903,W1001,W0612,W0613
"""Useful functions and objects used more or less everywhere."""
from __future__ import print_function
from builtins import zip
from builtins import str
from builtins import range
from builtins import object
__author__ = 'Lorenzo Bolla'
import numpy
import EMpy.constants
import EMpy.materials
import scipy.linalg
import scipy.interpolate
import scipy.optimize
import time
import sys
class Layer(object):
"""A layer is defined by a material (iso or aniso) and a thickness."""
def __init__(self, mat, thickness):
"""Set the material and the thickness."""
self.mat = mat
self.thickness = thickness
def isIsotropic(self):
"""Return True if the material is isotropic, False if anisotropic."""
return self.mat.isIsotropic()
def getEPSFourierCoeffs(self, wl, n, anisotropic=True):
"""Return the Fourier coefficients of eps and eps**-1, orders [-n,n]."""
nood = 2 * n + 1
hmax = nood - 1
if not anisotropic:
# isotropic
EPS = numpy.zeros(2 * hmax + 1, dtype=complex)
EPS1 = numpy.zeros_like(EPS)
rix = self.mat.n(wl)
EPS[hmax] = rix ** 2
EPS1[hmax] = rix ** -2
return EPS, EPS1
else:
# anisotropic
EPS = numpy.zeros((3, 3, 2 * hmax + 1), dtype=complex)
EPS1 = numpy.zeros_like(EPS)
EPS[:, :, hmax] = numpy.squeeze(
self.mat.epsilonTensor(wl)) / EMpy.constants.eps0
EPS1[:, :, hmax] = scipy.linalg.inv(EPS[:, :, hmax])
return EPS, EPS1
def capacitance(self, area=1., wl=0):
"""Capacitance = eps0 * eps_r * area / thickness."""
if self.isIsotropic():
eps = EMpy.constants.eps0 * numpy.real(self.mat.n(wl).item() ** 2)
else:
# suppose to compute the capacitance along the z-axis
eps = self.mat.epsilonTensor(wl)[2, 2, 0]
return eps * area / self.thickness
def __str__(self):
"""Return the description of a layer."""
return "%s, thickness: %g" % (self.mat, self.thickness)
class BinaryGrating(object):
"""A Binary Grating is defined by two materials (iso or aniso), a
duty cycle, a pitch and a thickness."""
def __init__(self, mat1, mat2, dc, pitch, thickness):
"""Set the materials, the duty cycle and the thickness."""
self.mat1 = mat1
self.mat2 = mat2
self.dc = dc
self.pitch = pitch
self.thickness = thickness
def isIsotropic(self):
"""Return True if both the materials are isotropic, False otherwise."""
return self.mat1.isIsotropic() and self.mat2.isIsotropic()
def getEPSFourierCoeffs(self, wl, n, anisotropic=True):
"""Return the Fourier coefficients of eps and eps**-1, orders [-n,n]."""
nood = 2 * n + 1
hmax = nood - 1
if not anisotropic:
# isotropic
rix1 = self.mat1.n(wl)
rix2 = self.mat2.n(wl)
f = self.dc
h = numpy.arange(-hmax, hmax + 1)
EPS = (rix1 ** 2 - rix2 ** 2) * f * \
numpy.sinc(h * f) + rix2 ** 2 * (h == 0)
EPS1 = (rix1 ** -2 - rix2 ** -2) * f * \
numpy.sinc(h * f) + rix2 ** -2 * (h == 0)
return EPS, EPS1
else:
# anisotropic
EPS = numpy.zeros((3, 3, 2 * hmax + 1), dtype=complex)
EPS1 = numpy.zeros_like(EPS)
eps1 = numpy.squeeze(
self.mat1.epsilonTensor(wl)) / EMpy.constants.eps0
eps2 = numpy.squeeze(
self.mat2.epsilonTensor(wl)) / EMpy.constants.eps0
f = self.dc
h = numpy.arange(-hmax, hmax + 1)
for ih, hh in enumerate(h):
EPS[:, :, ih] = (eps1 - eps2) * f * \
numpy.sinc(hh * f) + eps2 * (hh == 0)
EPS1[:, :, ih] = (
scipy.linalg.inv(eps1) - scipy.linalg.inv(eps2)
) * f * numpy.sinc(hh * f) + scipy.linalg.inv(eps2) * (hh == 0)
return EPS, EPS1
def capacitance(self, area=1., wl=0):
"""Capacitance = eps0 * eps_r * area / thickness."""
if self.isIsotropic():
eps = EMpy.constants.eps0 * numpy.real(
self.mat1.n(wl) ** 2 * self.dc + self.mat2.n(wl) ** 2
* (1 - self.dc))
else:
eps1 = self.mat1.epsilonTensor(wl)[2, 2, 0]
eps2 = self.mat2.epsilonTensor(wl)[2, 2, 0]
eps = numpy.real(eps1 * self.dc + eps2 * (1 - self.dc))
return eps * area / self.thickness
def __str__(self):
"""Return the description of a binary grating."""
return "(%s, %s), dc: %g, pitch: %g, thickness: %g" % (
self.mat1, self.mat2, self.dc, self.pitch, self.thickness)
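# Sanity note (assumption, easily checked by hand): in the isotropic branch
# above, the h = 0 Fourier coefficient reduces to the area-weighted average
# permittivity over one pitch,
#   EPS[h = 0] = f * n1**2 + (1 - f) * n2**2,
# as expected for the DC term of a two-material grating.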
class SymmetricDoubleGrating(object):
"""A Symmetric Double Grating is defined by three materials (iso
or aniso), two duty cycles, a pitch and a thickness.
Inside the pitch there are two rect of width dc1*pitch of mat1 and
dc2*pitch of mat2, with a spacer of fixed width made of mat3 between them.
"""
def __init__(self, mat1, mat2, mat3, dc1, dc2, pitch, thickness):
"""Set the materials, the duty cycle and the thickness."""
self.mat1 = mat1
self.mat2 = mat2
self.mat3 = mat3
self.dc1 = dc1
self.dc2 = dc2
self.pitch = pitch
self.thickness = thickness
def isIsotropic(self):
"""Return True if all the materials are isotropic, False otherwise."""
return (self.mat1.isIsotropic() and
self.mat2.isIsotropic() and
self.mat3.isIsotropic())
def getEPSFourierCoeffs(self, wl, n, anisotropic=True):
"""Return the Fourier coefficients of eps and eps**-1, orders [-n,n]."""
nood = 2 * n + 1
hmax = nood - 1
if not anisotropic:
# isotropic
rix1 = self.mat1.n(wl)
rix2 = self.mat2.n(wl)
rix3 = self.mat3.n(wl)
f1 = self.dc1
f2 = self.dc2
h = numpy.arange(-hmax, hmax + 1)
N = len(h)
A = -N*f1 / 2.
B = N*f2 / 2.
EPS = (
rix3 ** 2 * (h == 0) + (rix1 ** 2 - rix3 ** 2) * f1 *
numpy.sinc(h * f1) * numpy.exp(2j * numpy.pi * h / N * A) +
(rix2 ** 2 - rix3 ** 2) * f2 * numpy.sinc(h * f2) *
numpy.exp(2j * numpy.pi * h / N * B)
)
EPS1 = (
rix3 ** -2 * (h == 0) + (rix1 ** -2 - rix3 ** -2) * f1 *
numpy.sinc(h * f1) * numpy.exp(2j * numpy.pi * h / N * A) +
(rix2 ** -2 - rix3 ** -2) * f2 * numpy.sinc(h * f2) *
numpy.exp(2j * numpy.pi * h / N * B)
)
return EPS, EPS1
else:
# anisotropic
EPS = numpy.zeros((3, 3, 2 * hmax + 1), dtype=complex)
EPS1 = numpy.zeros_like(EPS)
eps1 = numpy.squeeze(
self.mat1.epsilonTensor(wl)) / EMpy.constants.eps0
eps2 = numpy.squeeze(
self.mat2.epsilonTensor(wl)) / EMpy.constants.eps0
eps3 = numpy.squeeze(
self.mat3.epsilonTensor(wl)) / EMpy.constants.eps0
f1 = self.dc1
f2 = self.dc2
h = numpy.arange(-hmax, hmax + 1)
N = len(h)
A = -N*f1 / 2.
B = N*f2 / 2.
for ih, hh in enumerate(h):
EPS[:, :, ih] = (
(eps1 - eps3) * f1 * numpy.sinc(hh * f1) *
numpy.exp(2j * numpy.pi * hh / N * A) +
(eps2 - eps3) * f2 * numpy.sinc(hh * f2) *
numpy.exp(2j * numpy.pi * hh / N * B) +
eps3 * (hh == 0)
)
EPS1[:, :, ih] = (
(scipy.linalg.inv(eps1) - scipy.linalg.inv(eps3)) * f1 *
numpy.sinc(hh * f1) *
numpy.exp(2j * numpy.pi * hh / N * A) +
(scipy.linalg.inv(eps2) - scipy.linalg.inv(eps3)) * f2 *
numpy.sinc(hh * f2) *
numpy.exp(2j * numpy.pi * hh / N * B) +
scipy.linalg.inv(eps3) * (hh == 0)
)
return EPS, EPS1
def capacitance(self, area=1., wl=0):
"""Capacitance = eps0 * eps_r * area / thickness."""
if self.isIsotropic():
eps = EMpy.constants.eps0 * numpy.real(
self.mat1.n(wl) ** 2 * self.dc1 + self.mat2.n(wl) ** 2
* self.dc2 + self.mat3.n(wl) ** 2 * (1 - self.dc1 - self.dc2))
else:
eps1 = self.mat1.epsilonTensor(wl)[2, 2, 0]
eps2 = self.mat2.epsilonTensor(wl)[2, 2, 0]
eps3 = self.mat3.epsilonTensor(wl)[2, 2, 0]
eps = numpy.real(
eps1 * self.dc1 + eps2 * self.dc2 +
eps3 * (1 - self.dc1 - self.dc2))
return eps * area / self.thickness
def __str__(self):
"""Return the description of a binary grating."""
return "(%s, %s, %s), dc1: %g, dc2: %g, pitch: %g, thickness: %g" % (
self.mat1, self.mat2, self.mat3, self.dc1, self.dc2, self.pitch,
self.thickness)
class AsymmetricDoubleGrating(SymmetricDoubleGrating):
"""An Asymmetric Double Grating is defined by three materials (iso
or aniso), three duty cycles, a pitch and a thickness.
Inside the pitch there are two rect of width dc1*pitch of mat1 and
dc2*pitch of mat2, separated by dcM*pitch mat3 (between mat1 e
mat2, not between mat2 and mat1!).
"""
def __init__(self, mat1, mat2, mat3, dc1, dc2, dcM, pitch, thickness):
SymmetricDoubleGrating.__init__(
self, mat1, mat2, mat3, dc1, dc2, pitch, thickness)
self.dcM = dcM
def getEPSFourierCoeffs(self, wl, n, anisotropic=True):
"""Return the Fourier coefficients of eps and eps**-1, orders [-n,n]."""
nood = 2 * n + 1
hmax = nood - 1
if not anisotropic:
# isotropic
rix1 = self.mat1.n(wl)
rix2 = self.mat2.n(wl)
rix3 = self.mat3.n(wl)
f1 = self.dc1
f2 = self.dc2
fM = self.dcM
h = numpy.arange(-hmax, hmax + 1)
N = len(h)
A = -N * (f1 + fM) / 2.
B = N * (f2 + fM) / 2.
EPS = (
rix3 ** 2 * (h == 0) + (rix1 ** 2 - rix3 ** 2) * f1 *
numpy.sinc(h * f1) * numpy.exp(2j * numpy.pi * h / N * A) +
(rix2 ** 2 - rix3 ** 2) * f2 *
numpy.sinc(h * f2) * numpy.exp(2j * numpy.pi * h / N * B)
)
EPS1 = (
rix3 ** -2 * (h == 0) + (rix1 ** -2 - rix3 ** -2) * f1 *
numpy.sinc(h * f1) * numpy.exp(2j * numpy.pi * h / N * A) +
(rix2 ** -2 - rix3 ** -2) * f2 * numpy.sinc(h * f2) *
numpy.exp(2j * numpy.pi * h / N * B)
)
return EPS, EPS1
else:
# anisotropic
EPS = numpy.zeros((3, 3, 2 * hmax + 1), dtype=complex)
EPS1 = numpy.zeros_like(EPS)
eps1 = numpy.squeeze(
self.mat1.epsilonTensor(wl)) / EMpy.constants.eps0
eps2 = numpy.squeeze(
self.mat2.epsilonTensor(wl)) / EMpy.constants.eps0
eps3 = numpy.squeeze(
self.mat3.epsilonTensor(wl)) / EMpy.constants.eps0
f1 = self.dc1
f2 = self.dc2
fM = self.dcM
h = numpy.arange(-hmax, hmax + 1)
N = len(h)
A = -N * (f1 + fM) / 2.
B = N * (f2 + fM) / 2.
for ih, hh in enumerate(h):
EPS[:, :, ih] = (
(eps1 - eps3) * f1 * numpy.sinc(hh * f1) *
numpy.exp(2j * numpy.pi * hh / N * A) +
(eps2 - eps3) * f2 * numpy.sinc(hh * f2) *
numpy.exp(2j * numpy.pi * hh / N * B) +
eps3 * (hh == 0)
)
EPS1[:, :, ih] = (
(scipy.linalg.inv(eps1) - scipy.linalg.inv(eps3)) * f1 *
numpy.sinc(hh * f1) *
numpy.exp(2j * numpy.pi * hh / N * A) +
(scipy.linalg.inv(eps2) - scipy.linalg.inv(eps3)) * f2 *
numpy.sinc(hh * f2) *
numpy.exp(2j * numpy.pi * hh / N * B) +
scipy.linalg.inv(eps3) * (hh == 0)
)
return EPS, EPS1
def capacitance(self, area=1., wl=0):
"""Capacitance = eps0 * eps_r * area / thickness."""
if self.isIsotropic():
eps = EMpy.constants.eps0 * numpy.real(
self.mat1.n(wl) ** 2 * self.dc1 + self.mat2.n(wl) ** 2 *
self.dc2 + self.mat3.n(wl) ** 2 * (1 - self.dc1 - self.dc2))
else:
eps1 = self.mat1.epsilonTensor(wl)[2, 2, 0]
eps2 = self.mat2.epsilonTensor(wl)[2, 2, 0]
eps3 = self.mat3.epsilonTensor(wl)[2, 2, 0]
eps = numpy.real(
eps1 * self.dc1 + eps2 * self.dc2 +
eps3 * (1 - self.dc1 - self.dc2))
return eps * area / self.thickness
def __str__(self):
"""Return the description of a binary grating."""
return ("(%s, %s, %s), dc1: %g, dc2: %g, dcM: %g, "
"pitch: %g, thickness: %g") % (
self.mat1, self.mat2, self.mat3, self.dc1, self.dc2,
self.dcM, self.pitch, self.thickness)
class LiquidCrystalCell(object):
"""Liquid Crystal Cell.
A liquid crystal cell is determined by a liquid crystal, a voltage
applied to it, a total thickness, an anchoring thickness. The
liquid crystal molecules are anchored to the cell with a given
pretilt angle (that, at zero volts, is constant throughout all the LC cell).
The cell is decomposed in nlayers homogeneous layers. The LC
characteristics in each layer is either read from file or deduced
by the LC physical parameters solving a boundary value problem
(bvp).
Inspiration from:
U{http://www.ee.ucl.ac.uk/~rjames/modelling/constant-order/oned/}.
@ivar lc: Liquid Crystal.
@ivar voltage: voltage applied.
@ivar t_tot: total thickness.
@ivar t_anchoring: anchoring thickness.
@ivar pretilt: LC angle pretilt.
@ivar totaltwist: LC angle total twist between the anchoring layers.
@ivar nlayers: number of layers to subdived the cell.
@ivar data_file: file with the angles for voltages applid to the cell.
"""
def __init__(self, lc, voltage, t_tot, t_anchoring, pretilt=0,
totaltwist=0, nlayers=100, data_file=None):
self.lc = lc
self.t_tot = t_tot
self.t_anchoring = t_anchoring
self.pretilt = pretilt
self.totaltwist = totaltwist
self.nlayers = nlayers
self.data_file = data_file
# thicknesses of internal layers
tlc_internal = (self.t_tot - 2. * self.t_anchoring) / \
(self.nlayers - 2.) * numpy.ones(self.nlayers - 2)
# thicknesses of layers
self.tlc = numpy.r_[self.t_anchoring, tlc_internal, self.t_anchoring]
# internal sample points
lhs = numpy.r_[0, numpy.cumsum(tlc_internal)]
# normalized sample points: at the center of internal layers, plus the
# boundaries (i.e. the anchoring layers)
self.normalized_sample_points = numpy.r_[
0, (lhs[1:] + lhs[:-1]) / 2. / (self.t_tot - 2 * self.t_anchoring),
1]
tmp = numpy.r_[0, numpy.cumsum(self.tlc)]
self.sample_points = .5 * (tmp[1:] + tmp[:-1])
# finally, apply voltage
self.voltage = voltage
def getvoltage(self):
return self.__voltage
def setvoltage(self, v):
self.__voltage = v
if self.data_file is not None:
self.__angles = self._get_angles_from_file()
else:
self.__angles = self._get_angles_from_bvp()
voltage = property(fget=getvoltage, fset=setvoltage)
def getangles(self):
return self.__angles
angles = property(fget=getangles)
def __ode_3k(self, z, f):
"""Inspiration from:
U{http://www.ee.ucl.ac.uk/~rjames/modelling/constant-order/oned/}."""
# ------------------------------------------------------------
# minimise Oseen Frank free energy and solve Laplace equation
# ------------------------------------------------------------
# [f(1..6)] = [theta theta' phi phi' u u']
theta2, dtheta2dz, phi2, dphi2dz, u2, du2dz = f
K11 = self.lc.K11
K22 = self.lc.K22
K33 = self.lc.K33
q0 = self.lc.q0
epslow = self.lc.epslow
deleps = self.lc.deleps
e0 = EMpy.constants.eps0
K1122 = K11 - K22
K3322 = K33 - K22
costheta1 = numpy.cos(theta2)
sintheta1 = numpy.sin(theta2)
ezz = e0 * (epslow + deleps * sintheta1 ** 2)
# maple generated (see lc3k.mws)
ddtheta2dz = costheta1 * sintheta1 * (
K1122 * dtheta2dz ** 2 +
2 * K3322 * costheta1 ** 2 * dphi2dz ** 2 -
K3322 * dtheta2dz ** 2 -
K22 * dphi2dz ** 2 -
e0 * deleps * du2dz ** 2 +
2 * q0 * K22 * dphi2dz -
K3322 * dphi2dz ** 2
) / (
K1122 * costheta1 ** 2 -
K3322 * costheta1 ** 2 +
K22 + K3322
)
ddphi2dz = 2 * sintheta1 * dtheta2dz * (
2 * K3322 * costheta1 ** 2 * dphi2dz -
K22 * dphi2dz +
q0 * K22 -
K3322 * dphi2dz
) / costheta1 / (K3322 * costheta1 ** 2 - K22 - K3322)
ddu2dz = -2 * e0 * deleps * sintheta1 * \
costheta1 * dtheta2dz * du2dz / ezz
return numpy.array([ddtheta2dz, ddphi2dz, ddu2dz])
def __bc_nosplay(self, f):
"""Inspiration from:
U{http://www.ee.ucl.ac.uk/~rjames/modelling/constant-order/oned/}."""
theta2, dtheta2dz, phi2, dphi2dz, u2, du2dz = f
return numpy.array([theta2[0] - self.pretilt,
phi2[1] - 0,
u2[2] - 0,
theta2[3] - self.pretilt,
phi2[4] - self.totaltwist,
u2[5] - self.voltage])
def __ic_nosplay(self, z):
"""Inspiration from:
U{http://www.ee.ucl.ac.uk/~rjames/modelling/constant-order/oned/}."""
self.maxtilt = 90 * numpy.pi / 180 - self.pretilt
init = numpy.array([self.pretilt + self.maxtilt * 4 * z * (1 - z),
self.maxtilt * 4 * (1 - 2 * z),
self.totaltwist * z,
self.totaltwist * numpy.ones_like(z),
self.voltage * z,
self.voltage * numpy.ones_like(z)])
return init, self.__ode_3k(z, init)
def __apply_tension(self):
"""Inspiration from:
U{http://www.ee.ucl.ac.uk/~rjames/modelling/constant-order/oned/}."""
try:
from scikits.bvp1lg import colnew
except ImportError:
warning("bvp module not found.")
raise
boundary_points = numpy.array([0, 0, 0, 1, 1, 1])
tol = 1e-6 * numpy.ones_like(boundary_points)
degrees = numpy.array([2, 2, 2])
solution = colnew.solve(
boundary_points, degrees, self.__ode_3k, self.__bc_nosplay,
is_linear=False, initial_guess=self.__ic_nosplay,
tolerances=tol, vectorized=True,
maximum_mesh_size=1000)
self.bvp_solution = solution
def get_parameters(self, z=None):
"""Inspiration from:
U{http://www.ee.ucl.ac.uk/~rjames/modelling/constant-order/oned/}."""
if z is None:
z = self.bvp_solution.mesh
data = self.bvp_solution(z)
theta = EMpy.utils.rad2deg(numpy.pi / 2. - data[:, 0])
phi = EMpy.utils.rad2deg(data[:, 2])
u = data[:, 4]
return z, theta, phi, u
def _get_angles_from_file(self):
# interpolate data file
data = numpy.loadtxt(self.data_file)
data_x = numpy.linspace(0, 1, data.shape[0] - 1)
data_y = data[0, :]
x = self.normalized_sample_points
y = [self.voltage]
angles = interp2(x, y, data_x, data_y, data[1:, :])
return angles.squeeze()
def _get_angles_from_bvp(self):
# solve bvp
self.__apply_tension()
z_ = self.normalized_sample_points
z, theta, phi, u = self.get_parameters(z_)
return theta
def createMultilayer(self):
"""Split the cell in nlayers homogeneous layers."""
m = []
for a, t in zip(EMpy.utils.deg2rad(self.angles), self.tlc):
epsT = EMpy.materials.EpsilonTensor(
epsilon_tensor_const=EMpy.utils.euler_rotate(
numpy.diag([self.lc.nE,
self.lc.nO,
self.lc.nO]) ** 2,
0., numpy.pi / 2., numpy.pi / 2. - a) * EMpy.constants.eps0,
epsilon_tensor_known={
0: EMpy.utils.euler_rotate(
numpy.diag([self.lc.nE_electrical,
self.lc.nO_electrical,
self.lc.nO_electrical]) ** 2,
0., numpy.pi / 2.,
numpy.pi / 2. - a) * EMpy.constants.eps0,
}
)
m.append(
Layer(EMpy.materials.AnisotropicMaterial(
'LC', epsilon_tensor=epsT), t))
return Multilayer(m)
def capacitance(self, area=1., wl=0):
"""Capacitance = eps0 * eps_r * area / thickness."""
return self.createMultilayer().capacitance(area, wl)
@staticmethod
def isIsotropic():
"""Return False."""
return False
def __str__(self):
"""Return the description of a LiquidCrystal."""
return ("datafile: %s, voltage: %g, t_tot: %g, "
"t_anchoring: %g, (nO, nE) = (%g, %g)") % (
self.data_file, self.voltage, self.t_tot, self.t_anchoring,
self.lc.nO, self.lc.nE)
class Multilayer(object):
"""A Multilayer is a list of layers with some more methods."""
def __init__(self, data=None):
"""Initialize the data list."""
if data is None:
data = []
self.data = data[:]
def __delitem__(self, i):
"""Delete an item from list."""
del self.data[i]
def __getitem__(self, i):
"""Get an item of the list of layers."""
return self.data[i]
def __getslice__(self, i, j):
"""Get a Multilayer from a slice of layers."""
return Multilayer(self.data[i:j])
def __len__(self):
"""Return the number of layers."""
return len(self.data)
def __setitem__(self, i, item):
"""Set an item of the list of layers."""
self.data[i] = item
def __setslice__(self, i, j, other):
"""Set a slice of layers."""
self.data[i:j] = other
def append(self, item):
"""Append a layer to the layers list."""
self.data.append(item)
def extend(self, other):
"""Extend the layers list with other layers."""
self.data.extend(other)
def insert(self, i, item):
"""Insert a new layer in the layers list at the position i."""
self.data.insert(i, item)
def remove(self, item):
"""Remove item from layers list."""
self.data.remove(item)
def pop(self, i=-1):
return self.data.pop(i)
def isIsotropic(self):
"""Return True if all the layers of the multilayers are
isotropic, False otherwise."""
return numpy.asarray([m.isIsotropic() for m in self.data]).all()
def simplify(self):
"""Return a new flatten Multilayer, with expanded LiquidCrystalCells."""
# make a tmp list, copy of self, to work with
tmp = self.data[:]
# expand the liquid crystals
for il, l in enumerate(tmp):
if isinstance(l, LiquidCrystalCell):
tmp[il] = l.createMultilayer()
# flatten the tmp list
def helper(multilayer):
"""Recurse to flatten all the nested Multilayers."""
ret = []
for layer in multilayer:
if not isinstance(layer, Multilayer):
ret.append(layer)
else:
ret.extend(helper(layer[:]))
return ret
return Multilayer(helper(tmp))
def capacitance(self, area=1., wl=0):
"""Capacitance = eps0 * eps_r * area / thickness."""
m = self.simplify()
ctot_1 = 0.
for l in m:
if numpy.isfinite(l.thickness):
ctot_1 += 1. / l.capacitance(area, wl)
return 1. / ctot_1
def __str__(self):
"""Return a description of the Multilayer."""
if self.__len__() == 0:
list_str = "<emtpy>"
else:
list_str = '\n'.join([
'%d: %s' % (il, l.__str__()) for il, l in enumerate(self.data)
])
return 'Multilayer\n----------\n' + list_str
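# Minimal usage sketch (illustrative, not part of the original EMpy API):
# the series-capacitance rule used by Multilayer.capacitance above,
# 1/C_tot = sum_i 1/C_i with C_i = eps0 * eps_r_i * area / thickness_i,
# reproduced with plain floats for two hypothetical layers.
def _example_series_capacitance():
    eps0 = 8.854187817e-12  # F/m
    layers = [(3.9, 100e-9), (7.5, 50e-9)]  # hypothetical (eps_r, thickness)
    area = 1e-12  # 1 um^2, in m^2
    ctot_inv = sum(t / (eps0 * er * area) for er, t in layers)
    return 1. / ctot_inv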
class Slice(Multilayer):
def __init__(self, width, *argv):
Multilayer.__init__(self, *argv)
self.width = width
def heights(self):
return numpy.array([l.thickness for l in self])
def ys(self):
return numpy.r_[0., self.heights().cumsum()]
def height(self):
return self.heights().sum()
def find_layer(self, y):
l = numpy.where(self.ys() <= y)[0]
if len(l) > 0:
return self[min(l[-1], len(self) - 1)]
else:
return self[0]
def plot(self, x0, x1, nmin, nmax, wl=1.55e-6):
try:
import pylab
except ImportError:
warning('no pylab installed')
return
y0 = 0
# ytot = sum([l.thickness for l in self])
for l in self:
y1 = y0 + l.thickness
n = l.mat.n(wl)
r = 1. - (1. * (n - nmin) / (nmax - nmin))
pylab.fill(
[x0, x1, x1, x0], [y0, y0, y1, y1], ec='yellow', fc=(r, r, r),
alpha=.5)
y0 = y1
pylab.axis('image')
def __str__(self):
return 'width = %e\n%s' % (self.width, Multilayer.__str__(self))
class CrossSection(list):
def __str__(self):
return '\n'.join('%s' % s for s in self)
def widths(self):
return numpy.array([s.width for s in self])
def xs(self):
return numpy.r_[0., self.widths().cumsum()]
def ys(self):
tmp = numpy.concatenate([s.ys() for s in self])
# get rid of numerical errors
tmp = numpy.round(tmp * 1e10) * 1e-10
return numpy.unique(tmp)
def width(self):
return self.widths().sum()
def grid(self, nx_per_region, ny_per_region):
xs = self.xs()
ys = self.ys()
nxregions = len(xs) - 1
nyregions = len(ys) - 1
if numpy.isscalar(nx_per_region):
nx = (nx_per_region,) * nxregions
elif len(nx_per_region) != nxregions:
raise ValueError('wrong nx_per_region dim')
else:
nx = nx_per_region
if numpy.isscalar(ny_per_region):
ny = (ny_per_region,) * nyregions
elif len(ny_per_region) != nyregions:
raise ValueError('wrong ny_per_region dim')
else:
ny = ny_per_region
X = []
x0 = xs[0]
for x, n in zip(xs[1:], nx):
X.append(numpy.linspace(x0, x, n + 1)[:-1])
x0 = x
X = numpy.concatenate(X)
X = numpy.r_[X, x0]
Y = []
y0 = ys[0]
for y, n in zip(ys[1:], ny):
Y.append(numpy.linspace(y0, y, n + 1)[:-1])
y0 = y
Y = numpy.concatenate(Y)
Y = numpy.r_[Y, y0]
return X, Y
def find_slice(self, x):
s = numpy.where(self.xs() <= x)[0]
if len(s) > 0:
return self[min(s[-1], len(self) - 1)]
else:
return self[0]
def _epsfunc(self, x, y, wl):
if numpy.isscalar(x) and numpy.isscalar(y):
return self.find_slice(x).find_layer(y).mat.n(wl) ** 2
else:
raise ValueError('only scalars, please!')
def epsfunc(self, x, y, wl):
eps = numpy.ones((len(x), len(y)), dtype=complex)
for ix, xx in enumerate(x):
for iy, yy in enumerate(y):
eps[ix, iy] = self._epsfunc(xx, yy, wl)
return eps
def plot(self, wl=1.55e-6):
try:
import pylab
except ImportError:
warning('no pylab installed')
return
x0 = 0
        ns = [[l.mat.n(wl) for l in s] for s in self]
        # flatten before taking extrema: max(max(ns)) would pick the maximum
        # of the lexicographically largest row, not the global maximum
        nmax = max(max(n) for n in ns)
        nmin = min(min(n) for n in ns)
for s in self:
x1 = x0 + s.width
s.plot(x0, x1, nmin, nmax, wl=wl)
x0 = x1
pylab.axis('image')
class Peak(object):
def __init__(self, x, y, idx, x0, y0, xFWHM_1, xFWHM_2):
self.x = x
self.y = y
self.idx = idx
self.x0 = x0
self.y0 = y0
self.xFWHM_1 = xFWHM_1
self.xFWHM_2 = xFWHM_2
self.FWHM = numpy.abs(xFWHM_2 - xFWHM_1)
def __str__(self):
return '(%g, %g) [%d, (%g, %g)] FWHM = %s' % (
self.x, self.y, self.idx, self.x0, self.y0, self.FWHM)
def deg2rad(x):
"""Convert from deg to rad."""
return x / 180. * numpy.pi
def rad2deg(x):
"""Convert from rad to deg."""
return x / numpy.pi * 180.
def norm(x):
"""Return the norm of a 1D vector."""
return numpy.sqrt(numpy.vdot(x, x))
def normalize(x):
"""Return a normalized 1D vector."""
return x / norm(x)
def euler_rotate(X, phi, theta, psi):
"""Euler rotate.
Rotate the matrix X by the angles phi, theta, psi.
INPUT
X = 2d numpy.array.
phi, theta, psi = rotation angles.
OUTPUT
Rotated matrix = 2d numpy.array.
NOTE
see http://mathworld.wolfram.com/EulerAngles.html
"""
A = numpy.array([
[numpy.cos(psi) * numpy.cos(phi) -
numpy.cos(theta) * numpy.sin(phi) * numpy.sin(psi),
-numpy.sin(psi) * numpy.cos(phi) -
numpy.cos(theta) * numpy.sin(phi) * numpy.cos(psi),
numpy.sin(theta) * numpy.sin(phi)],
[numpy.cos(psi) * numpy.sin(phi) +
numpy.cos(theta) * numpy.cos(phi) * numpy.sin(psi),
-numpy.sin(psi) * numpy.sin(phi) +
numpy.cos(theta) * numpy.cos(phi) * numpy.cos(psi),
-numpy.sin(theta) * numpy.cos(phi)],
[numpy.sin(theta) * numpy.sin(psi),
numpy.sin(theta) * numpy.cos(psi), numpy.cos(theta)]
])
return numpy.dot(A, numpy.dot(X, scipy.linalg.inv(A)))
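# Minimal usage sketch (illustrative, not part of the original API):
# rotating a uniaxial permittivity tensor with euler_rotate. A rotation by
# phi = pi/2 about z (theta = psi = 0) swaps the xx and yy entries of the
# diagonal tensor.
def _example_euler_rotate():
    eps = numpy.diag([2.25, 2.10, 2.10])
    return euler_rotate(eps, numpy.pi / 2., 0., 0.)  # ~diag(2.10, 2.25, 2.10)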
def snell(theta_inc, n):
"""Snell law.
INPUT
theta_inc = angle of incidence.
n = 1D numpy.array of refractive indices.
OUTPUT
theta = 1D numpy.array.
"""
theta = numpy.zeros_like(n)
theta[0] = theta_inc
for i in range(1, n.size):
theta[i] = numpy.arcsin(n[i - 1] / n[i] * numpy.sin(theta[i - 1]))
return theta
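# Minimal usage sketch (illustrative, not part of the original API):
# refraction angles through an air/glass/air stack at 30 degrees incidence;
# the exit angle recovers the incidence angle, as expected for parallel
# interfaces.
def _example_snell():
    n = numpy.array([1.0, 1.5, 1.0])
    return snell(numpy.pi / 6., n)  # ~[0.5236, 0.3398, 0.5236] rad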
def group_delay_and_dispersion(wls, y):
"""Compute group delay and dispersion.
INPUT
wls = wavelengths (ndarray).
y = function (ndarray).
OUTPUT
phi = phase of function in rad.
tau = group delay in ps.
Dpsnm = dispersion in ps/nm.
NOTE
wls and y must have the same shape.
phi has the same shape as wls.
tau has wls.shape - (..., 1)
Dpsnm has wls.shape - (..., 2)
"""
# transform the input in ndarrays
wls = numpy.asarray(wls)
y = numpy.asarray(y)
# check for good input
if wls.shape != y.shape:
raise ValueError('wls and y must have the same shape.')
f = EMpy.constants.c / wls
df = numpy.diff(f)
toPSNM = 1E12 / 1E9
cnmps = EMpy.constants.c / toPSNM
# phase
phi = numpy.unwrap(4. * numpy.angle(y)) / 4.
# group delay
tau = -.5 / numpy.pi * numpy.diff(phi) / df * 1E12
# dispersion in ps/nm
Dpsnm = -.5 / numpy.pi / cnmps * \
f[1:-1] ** 2 * numpy.diff(phi, 2) / df[0:-1] ** 2
return phi, tau, Dpsnm
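# Minimal usage sketch (illustrative, not part of the original API): for a
# pure propagation phase y = exp(-2j*pi*f*t0) the recovered group delay is
# the constant t0 (in ps) and the dispersion Dpsnm is ~0.
def _example_group_delay():
    wls = numpy.linspace(1.54e-6, 1.56e-6, 1000)
    t0 = 1e-12  # 1 ps delay
    f = EMpy.constants.c / wls
    y = numpy.exp(-2j * numpy.pi * f * t0)
    phi, tau, Dpsnm = group_delay_and_dispersion(wls, y)
    return tau  # ~1 ps across the band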
def rix2losses(n, wl):
"""Return real(n), imag(n), alpha, alpha_cm1, alpha_dBcm1, given a
complex refractive index. Power goes as: P = P0 exp(-alpha*z)."""
nr = numpy.real(n)
ni = numpy.imag(n)
alpha = 4 * numpy.pi * ni / wl
alpha_cm1 = alpha / 100.
alpha_dBcm1 = 10 * numpy.log10(numpy.exp(1)) * alpha_cm1
return nr, ni, alpha, alpha_cm1, alpha_dBcm1
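# Worked example (illustrative, not part of the original API): for
# n = 3.5 + 1e-4j at 1.55 um, alpha = 4*pi*ni/wl ~ 8.1e2 1/m, i.e.
# ~8.1 1/cm and ~35 dB/cm.
def _example_rix2losses():
    return rix2losses(3.5 + 1e-4j, 1.55e-6)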
def wl2f(wl0, dwl):
"""Convert a central wavelength and an interval in frequency."""
wl1 = wl0 - dwl / 2.
wl2 = wl0 + dwl / 2.
f1 = EMpy.constants.c / wl2
f2 = EMpy.constants.c / wl1
f0 = (f1 + f2) / 2.
df = (f2 - f1)
return f0, df
def f2wl(f0, df):
"""Convert a central frequency and an interval in wavelength."""
return wl2f(f0, df)
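# Minimal usage sketch (illustrative, not part of the original API): a 1 nm
# band centered at 1550 nm maps to roughly 125 GHz around 193.4 THz; f2wl
# simply inverts the mapping.
def _example_wl2f():
    return wl2f(1550e-9, 1e-9)  # (~193.41e12, ~124.9e9)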
def find_peaks(x, y, threshold=1e-6):
# find peaks' candidates
dy = numpy.diff(y)
ddy = numpy.diff(numpy.sign(dy))
    # numpy.where returns a tuple of index arrays: take the first (only)
    # axis and shift by one so each index points at a candidate maximum of y
    idxs = numpy.where(ddy < 0)[0] + 1
if len(idxs) == 0:
# there is only 1 min in f, so the max is on either boundary
# get the max and set FWHM = 0
idx = numpy.argmax(y)
p = Peak(x[idx], y[idx], idx, x[idx], y[idx], x[idx], x[idx])
# return a list of one element
return [p]
# refine search with splines
tck = scipy.interpolate.splrep(x, y)
# look for zero derivative
absdy = lambda x_: numpy.abs(scipy.interpolate.splev(x_, tck, der=1))
peaks = []
for idx in idxs:
# look around the candidate
xtol = (x.max() - x.min()) * 1e-6
xopt = scipy.optimize.fminbound(
absdy, x[idx - 1], x[idx + 1], xtol=xtol, disp=False)
yopt = scipy.interpolate.splev(xopt, tck)
if yopt > threshold * y.max():
# FWHM
tckFWHM = scipy.interpolate.splrep(x, y - 0.5 * yopt)
roots = scipy.interpolate.sproot(tckFWHM)
idxFWHM = numpy.searchsorted(roots, xopt)
if idxFWHM <= 0:
xFWHM_1 = x[0]
else:
xFWHM_1 = roots[idxFWHM - 1]
if idxFWHM >= len(roots):
xFWHM_2 = x[-1]
else:
xFWHM_2 = roots[idxFWHM]
p = Peak(xopt, yopt, idx, x[idx], y[idx], xFWHM_1, xFWHM_2)
peaks.append(p)
    # sort peaks in descending height; key= replaces the Python-2-only
    # cmp= comparator, so this also runs on Python 3
    peaks.sort(key=lambda p: p.y, reverse=True)
return peaks
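# Minimal usage sketch (illustrative, not part of the original API):
# locating the maximum of a Lorentzian. find_peaks refines each candidate
# with a spline fit and reports the FWHM, which for the curve below should
# be close to the analytic width 2*g.
def _example_find_peaks():
    x = numpy.linspace(-5., 5., 501)
    g = 0.5
    y = 1. / (1. + ((x - 1.) / g) ** 2)
    return find_peaks(x, y)[0]  # peak near x = 1, FWHM ~ 1.0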
def cond(M):
"""Return the condition number of the 2D array M."""
svdv = scipy.linalg.svdvals(M)
return svdv.max() / svdv.min()
def interp2(x, y, xp, yp, fp):
"""Interpolate a 2D complex array.
:rtype : numpy.array
"""
f1r = numpy.zeros((len(xp), len(y)))
f1i = numpy.zeros((len(xp), len(y)))
for ixp in range(len(xp)):
f1r[ixp, :] = numpy.interp(y, yp, numpy.real(fp[ixp, :]))
f1i[ixp, :] = numpy.interp(y, yp, numpy.imag(fp[ixp, :]))
fr = numpy.zeros((len(x), len(y)))
fi = numpy.zeros((len(x), len(y)))
for iy in range(len(y)):
fr[:, iy] = numpy.interp(x, xp, f1r[:, iy])
fi[:, iy] = numpy.interp(x, xp, f1i[:, iy])
return fr + 1j * fi
def trapz2(f, x=None, y=None, dx=1.0, dy=1.0):
"""Double integrate."""
return numpy.trapz(numpy.trapz(f, x=y, dx=dy), x=x, dx=dx)
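# Minimal usage sketch (illustrative, not part of the original API): double
# integral of f(x, y) = x * y over the unit square, which evaluates to 1/4.
def _example_trapz2():
    x = numpy.linspace(0., 1., 101)
    y = numpy.linspace(0., 1., 101)
    f = numpy.outer(x, y)
    return trapz2(f, x=x, y=y)  # ~0.25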
def centered1d(x):
return (x[1:] + x[:-1]) / 2.
def centered2d(x):
return (x[1:, 1:] + x[1:, :-1] + x[:-1, 1:] + x[:-1, :-1]) / 4.
def blackbody(f, T):
return 2 * EMpy.constants.h * f ** 3 / (EMpy.constants.c ** 2) * 1. / (
numpy.exp(EMpy.constants.h * f / (EMpy.constants.k * T)) - 1)
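# Minimal usage sketch (illustrative, not part of the original API):
# spectral radiance of a 5800 K blackbody near the visible peak of the
# solar spectrum (~540 THz, i.e. ~555 nm).
def _example_blackbody():
    return blackbody(540e12, 5800.)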
def warning(s):
"""Print a warning on the stdout.
:param s: warning message
:type s: str
:rtype : str
"""
print('WARNING --- {}'.format(s))
class ProgressBar(object):
""" Creates a text-based progress bar. Call the object with the `print'
command to see the progress bar, which looks something like this:
[=======> 22% ]
You may specify the progress bar's width, min and max values on init.
"""
def __init__(self, minValue=0, maxValue=100, totalWidth=80):
self.progBar = "[]" # This holds the progress bar string
self.min = minValue
self.max = maxValue
self.span = maxValue - minValue
self.width = totalWidth
self.reset()
def reset(self):
self.start_time = time.time()
self.amount = 0 # When amount == max, we are 100% done
self.updateAmount(0) # Build progress bar string
def updateAmount(self, newAmount=0):
""" Update the progress bar with the new amount (with min and max
values set at initialization; if it is over or under, it takes the
min or max value as a default. """
if newAmount < self.min:
newAmount = self.min
if newAmount > self.max:
newAmount = self.max
self.amount = newAmount
# Figure out the new percent done, round to an integer
diffFromMin = float(self.amount - self.min)
percentDone = (diffFromMin / float(self.span)) * 100.0
percentDone = int(round(percentDone))
# Figure out how many hash bars the percentage should be
allFull = self.width - 2 - 18
numHashes = (percentDone / 100.0) * allFull
numHashes = int(round(numHashes))
# Build a progress bar with an arrow of equal signs; special cases for
# empty and full
if numHashes == 0:
self.progBar = '[>%s]' % (' ' * (allFull - 1))
elif numHashes == allFull:
self.progBar = '[%s]' % ('=' * allFull)
else:
self.progBar = '[%s>%s]' % ('=' * (numHashes - 1),
' ' * (allFull - numHashes))
# figure out where to put the percentage, roughly centered
        # integer division so the slice index stays an int on Python 3
        percentPlace = (len(self.progBar) // 2) - len(str(percentDone))
percentString = ' ' + str(percentDone) + '% '
elapsed_time = time.time() - self.start_time
# slice the percentage into the bar
self.progBar = ''.join([self.progBar[0:percentPlace], percentString,
self.progBar[
percentPlace + len(percentString):],
])
if percentDone > 0:
self.progBar += ' %6ds / %6ds' % (
int(elapsed_time), int(elapsed_time * (100. / percentDone - 1)))
def update(self, value, every=1):
""" Updates the amount, and writes to stdout. Prints a carriage return
first, so it will overwrite the current line in stdout."""
if value % every == 0 or value >= self.max:
print('\r', end=' ')
self.updateAmount(value)
sys.stdout.write(self.progBar)
sys.stdout.flush()
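# Minimal usage sketch (illustrative, not part of the original API): driving
# the ProgressBar from a loop; update() redraws the bar in place every
# `every` iterations.
def _example_progress_bar(n=100):
    bar = ProgressBar(0, n)
    for i in range(n + 1):
        bar.update(i, every=5)
    print('')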
|
DavidRimel/EMpy
|
EMpy/utils.py
|
Python
|
mit
| 39,651
|
[
"CRYSTAL"
] |
c9987b89a8b23230e605946ea933f694c27e72fb5b1108a679115b2e4b9459b8
|
import sys
import gzip
from glob import glob
from io import BytesIO
from .. import backends, conventions
from .common import ArrayWriter
from ..core.alignment import auto_combine
from ..core.utils import close_on_error, is_remote_uri
from ..core.pycompat import basestring, OrderedDict, range
def _get_default_engine(path, allow_remote=False):
if allow_remote and is_remote_uri(path): # pragma: no cover
try:
import netCDF4
engine = 'netcdf4'
except ImportError:
try:
import pydap
engine = 'pydap'
except ImportError:
raise ValueError('netCDF4 or pydap is required for accessing '
'remote datasets via OPeNDAP')
else:
try:
import netCDF4
engine = 'netcdf4'
except ImportError: # pragma: no cover
try:
import scipy.io.netcdf
engine = 'scipy'
except ImportError:
raise ValueError('cannot read or write netCDF files without '
'netCDF4-python or scipy installed')
return engine
def open_dataset(filename_or_obj, group=None, decode_cf=True,
mask_and_scale=True, decode_times=True,
concat_characters=True, decode_coords=True, engine=None,
chunks=None, lock=True):
"""Load and decode a dataset from a file or file-like object.
Parameters
----------
filename_or_obj : str, file or xray.backends.*DataStore
        Strings are interpreted as a path to a netCDF file or an OPeNDAP URL
and opened with python-netCDF4, unless the filename ends with .gz, in
which case the file is gunzipped and opened with scipy.io.netcdf (only
netCDF3 supported). File-like objects are opened with scipy.io.netcdf
(only netCDF3 supported).
group : str, optional
Path to the netCDF4 group in the given file to open (only works for
netCDF4 files).
decode_cf : bool, optional
Whether to decode these variables, assuming they were saved according
to CF conventions.
mask_and_scale : bool, optional
If True, replace array values equal to `_FillValue` with NA and scale
values according to the formula `original_values * scale_factor +
add_offset`, where `_FillValue`, `scale_factor` and `add_offset` are
taken from variable attributes (if they exist).
decode_times : bool, optional
If True, decode times encoded in the standard NetCDF datetime format
into datetime objects. Otherwise, leave them encoded as numbers.
concat_characters : bool, optional
If True, concatenate along the last dimension of character arrays to
form string arrays. Dimensions will only be concatenated over (and
removed) if they have no corresponding variable and if they are only
used as the last dimension of character arrays.
decode_coords : bool, optional
If True, decode the 'coordinates' attribute to identify coordinates in
the resulting dataset.
engine : {'netcdf4', 'scipy', 'pydap', 'h5netcdf'}, optional
Engine to use when reading netCDF files. If not provided, the default
engine is chosen based on available dependencies, with a preference for
'netcdf4'.
chunks : dict, optional
        If chunks is provided, it is used to load the new dataset into dask
arrays. This is an experimental feature; see the documentation for more
details.
lock : optional
If chunks is provided, this argument is passed on to
:py:func:`dask.array.from_array`. By default, a lock is used to avoid
issues with concurrent access with dask's multithreaded backend.
Returns
-------
dataset : Dataset
The newly created dataset.
See Also
--------
open_mfdataset
"""
if not decode_cf:
mask_and_scale = False
decode_times = False
concat_characters = False
decode_coords = False
def maybe_decode_store(store):
ds = conventions.decode_cf(
store, mask_and_scale=mask_and_scale, decode_times=decode_times,
concat_characters=concat_characters, decode_coords=decode_coords)
if chunks is not None:
ds = ds.chunk(chunks, lock=lock)
return ds
if isinstance(filename_or_obj, backends.AbstractDataStore):
store = filename_or_obj
elif isinstance(filename_or_obj, basestring):
if filename_or_obj.endswith('.gz'):
if engine is not None and engine != 'scipy':
raise ValueError('can only read gzipped netCDF files with '
"default engine or engine='scipy'")
# if the string ends with .gz, then gunzip and open as netcdf file
if sys.version_info[:2] < (2, 7):
raise ValueError('reading a gzipped netCDF not '
'supported on Python 2.6')
try:
store = backends.ScipyDataStore(gzip.open(filename_or_obj))
except TypeError as e:
# TODO: gzipped loading only works with NetCDF3 files.
                if 'is not a valid NetCDF 3 file' in str(e):
raise ValueError('gzipped file loading only supports '
'NetCDF 3 files.')
else:
raise
else:
# TODO: automatically fall back to using pydap if given a URL and
# netCDF4 is not available
if engine is None:
engine = _get_default_engine(filename_or_obj,
allow_remote=True)
if engine == 'netcdf4':
store = backends.NetCDF4DataStore(filename_or_obj, group=group)
elif engine == 'scipy':
store = backends.ScipyDataStore(filename_or_obj)
elif engine == 'pydap':
store = backends.PydapDataStore(filename_or_obj)
elif engine == 'h5netcdf':
store = backends.H5NetCDFStore(filename_or_obj, group=group)
else:
raise ValueError('unrecognized engine for open_dataset: %r'
% engine)
with close_on_error(store):
return maybe_decode_store(store)
else:
if engine is not None and engine != 'scipy':
raise ValueError('can only read file-like objects with '
"default engine or engine='scipy'")
# assume filename_or_obj is a file-like object
store = backends.ScipyDataStore(filename_or_obj)
return maybe_decode_store(store)
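# Minimal usage sketch (the file names below are hypothetical):
#
#     ds = open_dataset('observations.nc', chunks={'time': 100})
#     raw = open_dataset('observations.nc', decode_cf=False)
#
# The first call lazily loads the variables as dask arrays; the second
# returns the raw, undecoded variables.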
class _MultiFileCloser(object):
def __init__(self, file_objs):
self.file_objs = file_objs
def close(self):
for f in self.file_objs:
f.close()
def open_mfdataset(paths, chunks=None, concat_dim=None, preprocess=None,
lock=True, **kwargs):
"""Open multiple files as a single dataset.
Experimental. Requires dask to be installed.
Parameters
----------
paths : str or sequence
Either a string glob in the form "path/to/my/files/*.nc" or an explicit
list of files to open.
chunks : dict, optional
Dictionary with keys given by dimension names and values given by chunk
sizes. In general, these should divide the dimensions of each dataset.
By default, chunks will be chosen to load entire input files into
memory at once. This has a major impact on performance: please see the
full documentation for more details.
concat_dim : str or DataArray or Index, optional
Dimension to concatenate files along. This argument is passed on to
:py:func:`xray.auto_combine` along with the dataset objects. You only
need to provide this argument if the dimension along which you want to
concatenate is not a dimension in the original datasets, e.g., if you
want to stack a collection of 2D arrays along a third dimension.
preprocess : callable, optional
If provided, call this function on each dataset prior to concatenation.
lock : optional
This argument is passed on to :py:func:`dask.array.from_array`. By
default, a lock is used to avoid issues with concurrent access with
dask's multithreaded backend.
**kwargs : optional
Additional arguments passed on to :py:func:`xray.open_dataset`.
Returns
-------
xray.Dataset
See Also
--------
auto_combine
open_dataset
"""
if isinstance(paths, basestring):
paths = sorted(glob(paths))
if not paths:
raise IOError('no files to open')
datasets = [open_dataset(p, **kwargs) for p in paths]
file_objs = [ds._file_obj for ds in datasets]
datasets = [ds.chunk(chunks, lock=lock) for ds in datasets]
if preprocess is not None:
datasets = [preprocess(ds) for ds in datasets]
combined = auto_combine(datasets, concat_dim=concat_dim)
combined._file_obj = _MultiFileCloser(file_objs)
return combined
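# Minimal usage sketch (the glob pattern below is hypothetical):
#
#     ds = open_mfdataset('data/daily-*.nc', chunks={'time': 10},
#                         concat_dim='time')
#
# Each file is opened with open_dataset, chunked with dask, optionally
# preprocessed, and then combined along 'time' by auto_combine.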
WRITEABLE_STORES = {'netcdf4': backends.NetCDF4DataStore,
'scipy': backends.ScipyDataStore,
'h5netcdf': backends.H5NetCDFStore}
def to_netcdf(dataset, path=None, mode='w', format=None, group=None,
engine=None, writer=None):
"""This function creates an appropriate datastore for writing a dataset to
disk as a netCDF file
See `Dataset.to_netcdf` for full API docs.
The ``writer`` argument is only for the private use of save_mfdataset.
"""
if path is None:
path = BytesIO()
if engine is None:
engine = 'scipy'
        else:
raise ValueError('invalid engine for creating bytes with '
'to_netcdf: %r. Only the default engine '
"or engine='scipy' is supported" % engine)
elif engine is None:
engine = _get_default_engine(path)
try:
store_cls = WRITEABLE_STORES[engine]
except KeyError:
raise ValueError('unrecognized engine for to_netcdf: %r' % engine)
if format is not None:
format = format.upper()
# if a writer is provided, store asynchronously
sync = writer is None
store = store_cls(path, mode, format, group, writer)
try:
dataset.dump_to_store(store, sync=sync)
if isinstance(path, BytesIO):
return path.getvalue()
finally:
if sync:
store.close()
if not sync:
return store
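# Sketch of the in-memory path (illustrative): with path=None and the
# default scipy engine, to_netcdf serializes the dataset to a netCDF3 byte
# string:
#
#     raw_bytes = to_netcdf(ds)  # same as ds.to_netcdf() with no path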
def save_mfdataset(datasets, paths, mode='w', format=None, groups=None,
engine=None):
"""Write multiple datasets to disk as netCDF files simultaneously.
This function is intended for use with datasets consisting of dask.array
objects, in which case it can write the multiple datasets to disk
simultaneously using a shared thread pool.
When not using dask, it is no different than calling ``to_netcdf``
repeatedly.
Parameters
----------
datasets : list of xray.Dataset
List of datasets to save.
paths : list of str
List of paths to which to save each corresponding dataset.
mode : {'w', 'a'}, optional
Write ('w') or append ('a') mode. If mode='w', any existing file at
these locations will be overwritten.
format : {'NETCDF4', 'NETCDF4_CLASSIC', 'NETCDF3_64BIT', 'NETCDF3_CLASSIC'}, optional
File format for the resulting netCDF file:
* NETCDF4: Data is stored in an HDF5 file, using netCDF4 API
features.
* NETCDF4_CLASSIC: Data is stored in an HDF5 file, using only
            netCDF 3 compatible API features.
* NETCDF3_64BIT: 64-bit offset version of the netCDF 3 file format,
which fully supports 2+ GB files, but is only compatible with
clients linked against netCDF version 3.6.0 or later.
* NETCDF3_CLASSIC: The classic netCDF 3 file format. It does not
handle 2+ GB files very well.
All formats are supported by the netCDF4-python library.
scipy.io.netcdf only supports the last two formats.
The default format is NETCDF4 if you are saving a file to disk and
have the netCDF4-python library available. Otherwise, xray falls
back to using scipy to write netCDF files and defaults to the
NETCDF3_64BIT format (scipy does not support netCDF4).
groups : list of str, optional
Paths to the netCDF4 group in each corresponding file to which to save
datasets (only works for format='NETCDF4'). The groups will be created
if necessary.
engine : {'netcdf4', 'scipy', 'h5netcdf'}, optional
Engine to use when writing netCDF files. If not provided, the
default engine is chosen based on available dependencies, with a
preference for 'netcdf4' if writing to a file on disk.
Examples
--------
Save a dataset into one netCDF per year of data:
>>> years, datasets = zip(*ds.groupby('time.year'))
>>> paths = ['%s.nc' % y for y in years]
>>> xray.save_mfdataset(datasets, paths)
"""
if mode == 'w' and len(set(paths)) < len(paths):
raise ValueError("cannot use mode='w' when writing multiple "
'datasets to the same path')
if groups is None:
groups = [None] * len(datasets)
if len(set([len(datasets), len(paths), len(groups)])) > 1:
raise ValueError('must supply lists of the same length for the '
'datasets, paths and groups arguments to '
'save_mfdataset')
writer = ArrayWriter()
stores = [to_netcdf(ds, path, mode, format, group, engine, writer)
for ds, path, group in zip(datasets, paths, groups)]
try:
writer.sync()
for store in stores:
store.sync()
finally:
for store in stores:
store.close()
|
hetland/xray
|
xray/backends/api.py
|
Python
|
apache-2.0
| 14,163
|
[
"NetCDF"
] |
5440a0e13b112644c812013d22c83a601833f79ecdf596d875e652743db769ad
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['stableinterface'],
'supported_by': 'committer',
'version': '1.0'}
DOCUMENTATION = """
---
module: ec2_elb_lb
description:
- Returns information about the load balancer.
- Will be marked changed when called only if state is changed.
short_description: Creates or destroys Amazon ELB.
version_added: "1.5"
author:
- "Jim Dalton (@jsdalton)"
options:
state:
description:
- Create or destroy the ELB
choices: ["present", "absent"]
required: true
name:
description:
- The name of the ELB
required: true
listeners:
description:
- List of ports/protocols for this ELB to listen on (see example)
required: false
purge_listeners:
description:
- Purge existing listeners on ELB that are not found in listeners
required: false
default: true
instance_ids:
description:
- List of instance ids to attach to this ELB
required: false
default: false
version_added: "2.1"
purge_instance_ids:
description:
- Purge existing instance ids on ELB that are not found in instance_ids
required: false
default: false
version_added: "2.1"
zones:
description:
- List of availability zones to enable on this ELB
required: false
purge_zones:
description:
- Purge existing availability zones on ELB that are not found in zones
required: false
default: false
security_group_ids:
description:
- A list of security groups to apply to the elb
    required: false
default: None
version_added: "1.6"
security_group_names:
description:
- A list of security group names to apply to the elb
    required: false
default: None
version_added: "2.0"
health_check:
description:
- An associative array of health check configuration settings (see example)
    required: false
default: None
access_logs:
description:
- An associative array of access logs configuration settings (see example)
    required: false
default: None
version_added: "2.0"
subnets:
description:
- A list of VPC subnets to use when creating ELB. Zones should be empty if using this.
required: false
default: None
aliases: []
version_added: "1.7"
purge_subnets:
description:
- Purge existing subnet on ELB that are not found in subnets
required: false
default: false
version_added: "1.7"
scheme:
description:
- The scheme to use when creating the ELB. For a private VPC-visible ELB use 'internal'.
required: false
default: 'internet-facing'
version_added: "1.7"
validate_certs:
description:
- When set to "no", SSL certificates will not be validated for boto versions >= 2.6.0.
required: false
default: "yes"
choices: ["yes", "no"]
aliases: []
version_added: "1.5"
connection_draining_timeout:
description:
- Wait a specified timeout allowing connections to drain before terminating an instance
required: false
aliases: []
version_added: "1.8"
idle_timeout:
description:
- ELB connections from clients and to servers are timed out after this amount of time
required: false
version_added: "2.0"
cross_az_load_balancing:
description:
- Distribute load across all configured Availability Zones
required: false
default: "no"
choices: ["yes", "no"]
aliases: []
version_added: "1.8"
stickiness:
description:
- An associative array of stickiness policy settings. Policy will be applied to all listeners ( see example )
required: false
version_added: "2.0"
wait:
description:
- When specified, Ansible will check the status of the load balancer to ensure it has been successfully
removed from AWS.
required: false
default: no
choices: ["yes", "no"]
version_added: "2.1"
wait_timeout:
description:
- Used in conjunction with wait. Number of seconds to wait for the elb to be terminated.
A maximum of 600 seconds (10 minutes) is allowed.
required: false
default: 60
version_added: "2.1"
tags:
description:
- An associative array of tags. To delete all tags, supply an empty dict.
required: false
version_added: "2.1"
extends_documentation_fragment:
- aws
- ec2
"""
EXAMPLES = """
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.
# Basic provisioning example (non-VPC)
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http # options are http, https, ssl, tcp
load_balancer_port: 80
instance_port: 80
proxy_protocol: True
- protocol: https
load_balancer_port: 443
instance_protocol: http # optional, defaults to value of protocol setting
instance_port: 80
# ssl certificate required for https or ssl
ssl_certificate_id: "arn:aws:iam::123456789012:server-certificate/company/servercerts/ProdServerCert"
# Internal ELB example
- local_action:
module: ec2_elb_lb
name: "test-vpc"
scheme: internal
state: present
instance_ids:
- i-abcd1234
purge_instance_ids: true
subnets:
- subnet-abcd1234
- subnet-1a2b3c4d
listeners:
- protocol: http # options are http, https, ssl, tcp
load_balancer_port: 80
instance_port: 80
# Configure a health check and the access logs
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
health_check:
ping_protocol: http # options are http, https, ssl, tcp
ping_port: 80
ping_path: "/index.html" # not required for tcp or ssl
response_timeout: 5 # seconds
interval: 30 # seconds
unhealthy_threshold: 2
healthy_threshold: 10
access_logs:
interval: 5 # minutes (defaults to 60)
s3_location: "my-bucket" # This value is required if access_logs is set
s3_prefix: "logs"
# Ensure ELB is gone
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: absent
# Ensure ELB is gone and wait for check (for default timeout)
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: absent
wait: yes
# Ensure ELB is gone and wait for check with timeout value
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: absent
wait: yes
wait_timeout: 600
# Normally, this module will purge any listeners that exist on the ELB
# but aren't specified in the listeners parameter. If purge_listeners is
# false it leaves them alone
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
purge_listeners: no
# Normally, this module will leave availability zones that are enabled
# on the ELB alone. If purge_zones is true, then any extraneous zones
# will be removed
- local_action:
module: ec2_elb_lb
name: "test-please-delete"
state: present
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
purge_zones: yes
# Creates a ELB and assigns a list of subnets to it.
- local_action:
module: ec2_elb_lb
state: present
name: 'New ELB'
security_group_ids: 'sg-123456, sg-67890'
region: us-west-2
subnets: 'subnet-123456,subnet-67890'
purge_subnets: yes
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
# Create an ELB with connection draining, increased idle timeout and cross availability
# zone load balancing
- local_action:
module: ec2_elb_lb
name: "New ELB"
state: present
connection_draining_timeout: 60
idle_timeout: 300
cross_az_load_balancing: "yes"
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
# Create an ELB with load balancer stickiness enabled
- local_action:
module: ec2_elb_lb
name: "New ELB"
state: present
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
stickiness:
type: loadbalancer
enabled: yes
expiration: 300
# Create an ELB with application stickiness enabled
- local_action:
module: ec2_elb_lb
name: "New ELB"
state: present
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
stickiness:
type: application
enabled: yes
cookie: SESSIONID
# Create an ELB and add tags
- local_action:
module: ec2_elb_lb
name: "New ELB"
state: present
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
tags:
Name: "New ELB"
stack: "production"
client: "Bob"
# Delete all tags from an ELB
- local_action:
module: ec2_elb_lb
name: "New ELB"
state: present
region: us-east-1
zones:
- us-east-1a
- us-east-1d
listeners:
- protocol: http
load_balancer_port: 80
instance_port: 80
tags: {}
"""
try:
import boto
import boto.ec2.elb
import boto.ec2.elb.attributes
import boto.vpc
from boto.ec2.elb.healthcheck import HealthCheck
from boto.ec2.tag import Tag
from boto.regioninfo import RegionInfo
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
import time
import random
def _throttleable_operation(max_retries):
def _operation_wrapper(op):
def _do_op(*args, **kwargs):
retry = 0
while True:
try:
return op(*args, **kwargs)
except boto.exception.BotoServerError as e:
if retry < max_retries and e.code in \
("Throttling", "RequestLimitExceeded"):
retry = retry + 1
time.sleep(min(random.random() * (2 ** retry), 300))
continue
else:
raise
return _do_op
return _operation_wrapper
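# Usage sketch (illustrative; the helper below is hypothetical):
# _throttleable_operation retries a boto call on "Throttling" or
# "RequestLimitExceeded" errors with randomized exponential backoff capped
# at 300 s:
#
#     @_throttleable_operation(5)
#     def _describe_elb(conn, name):
#         return conn.get_all_load_balancers(load_balancer_names=[name])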
def _get_vpc_connection(module, region, aws_connect_params):
try:
return connect_to_aws(boto.vpc, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
_THROTTLING_RETRIES = 5
class ElbManager(object):
"""Handles ELB creation and destruction"""
def __init__(self, module, name, listeners=None, purge_listeners=None,
zones=None, purge_zones=None, security_group_ids=None,
health_check=None, subnets=None, purge_subnets=None,
scheme="internet-facing", connection_draining_timeout=None,
idle_timeout=None,
cross_az_load_balancing=None, access_logs=None,
stickiness=None, wait=None, wait_timeout=None, tags=None,
region=None,
instance_ids=None, purge_instance_ids=None, **aws_connect_params):
self.module = module
self.name = name
self.listeners = listeners
self.purge_listeners = purge_listeners
self.instance_ids = instance_ids
self.purge_instance_ids = purge_instance_ids
self.zones = zones
self.purge_zones = purge_zones
self.security_group_ids = security_group_ids
self.health_check = health_check
self.subnets = subnets
self.purge_subnets = purge_subnets
self.scheme = scheme
self.connection_draining_timeout = connection_draining_timeout
self.idle_timeout = idle_timeout
self.cross_az_load_balancing = cross_az_load_balancing
self.access_logs = access_logs
self.stickiness = stickiness
self.wait = wait
self.wait_timeout = wait_timeout
self.tags = tags
self.aws_connect_params = aws_connect_params
self.region = region
self.changed = False
self.status = 'gone'
self.elb_conn = self._get_elb_connection()
self.elb = self._get_elb()
self.ec2_conn = self._get_ec2_connection()
@_throttleable_operation(_THROTTLING_RETRIES)
def ensure_ok(self):
"""Create the ELB"""
if not self.elb:
# Zones and listeners will be added at creation
self._create_elb()
else:
self._set_zones()
self._set_security_groups()
self._set_elb_listeners()
self._set_subnets()
self._set_health_check()
# boto has introduced support for some ELB attributes in
# different versions, so we check first before trying to
# set them to avoid errors
if self._check_attribute_support('connection_draining'):
self._set_connection_draining_timeout()
if self._check_attribute_support('connecting_settings'):
self._set_idle_timeout()
if self._check_attribute_support('cross_zone_load_balancing'):
self._set_cross_az_load_balancing()
if self._check_attribute_support('access_log'):
self._set_access_log()
        # add stickiness policy options
self.select_stickiness_policy()
# ensure backend server policies are correct
self._set_backend_policies()
# set/remove instance ids
self._set_instance_ids()
self._set_tags()
def ensure_gone(self):
"""Destroy the ELB"""
if self.elb:
self._delete_elb()
if self.wait:
elb_removed = self._wait_for_elb_removed()
# Unfortunately even though the ELB itself is removed quickly
                # the interfaces take longer, so dependent security groups cannot
# be deleted until the interface has registered as removed.
elb_interface_removed = self._wait_for_elb_interface_removed()
if not (elb_removed and elb_interface_removed):
self.module.fail_json(msg='Timed out waiting for removal of load balancer.')
def get_info(self):
try:
check_elb = self.elb_conn.get_all_load_balancers(self.name)[0]
        except Exception:
check_elb = None
if not check_elb:
info = {
'name': self.name,
'status': self.status,
'region': self.region
}
else:
try:
lb_cookie_policy = check_elb.policies.lb_cookie_stickiness_policies[0].__dict__['policy_name']
            except Exception:
lb_cookie_policy = None
try:
app_cookie_policy = check_elb.policies.app_cookie_stickiness_policies[0].__dict__['policy_name']
            except Exception:
app_cookie_policy = None
info = {
'name': check_elb.name,
'dns_name': check_elb.dns_name,
'zones': check_elb.availability_zones,
'security_group_ids': check_elb.security_groups,
'status': self.status,
'subnets': self.subnets,
'scheme': check_elb.scheme,
'hosted_zone_name': check_elb.canonical_hosted_zone_name,
'hosted_zone_id': check_elb.canonical_hosted_zone_name_id,
'lb_cookie_policy': lb_cookie_policy,
'app_cookie_policy': app_cookie_policy,
'proxy_policy': self._get_proxy_protocol_policy(),
'backends': self._get_backend_policies(),
'instances': [instance.id for instance in check_elb.instances],
'out_of_service_count': 0,
'in_service_count': 0,
'unknown_instance_state_count': 0,
'region': self.region
}
# status of instances behind the ELB
if info['instances']:
info['instance_health'] = [ dict(
instance_id = instance_state.instance_id,
reason_code = instance_state.reason_code,
state = instance_state.state
) for instance_state in self.elb_conn.describe_instance_health(self.name)]
else:
info['instance_health'] = []
# instance state counts: InService or OutOfService
if info['instance_health']:
for instance_state in info['instance_health']:
if instance_state['state'] == "InService":
info['in_service_count'] += 1
elif instance_state['state'] == "OutOfService":
info['out_of_service_count'] += 1
else:
info['unknown_instance_state_count'] += 1
if check_elb.health_check:
info['health_check'] = {
'target': check_elb.health_check.target,
'interval': check_elb.health_check.interval,
'timeout': check_elb.health_check.timeout,
'healthy_threshold': check_elb.health_check.healthy_threshold,
'unhealthy_threshold': check_elb.health_check.unhealthy_threshold,
}
if check_elb.listeners:
info['listeners'] = [self._api_listener_as_tuple(l)
for l in check_elb.listeners]
elif self.status == 'created':
# When creating a new ELB, listeners don't show in the
# immediately returned result, so just include the
# ones that were added
info['listeners'] = [self._listener_as_tuple(l)
for l in self.listeners]
else:
info['listeners'] = []
if self._check_attribute_support('connection_draining'):
info['connection_draining_timeout'] = self.elb_conn.get_lb_attribute(self.name, 'ConnectionDraining').timeout
if self._check_attribute_support('connecting_settings'):
info['idle_timeout'] = self.elb_conn.get_lb_attribute(self.name, 'ConnectingSettings').idle_timeout
if self._check_attribute_support('cross_zone_load_balancing'):
is_cross_az_lb_enabled = self.elb_conn.get_lb_attribute(self.name, 'CrossZoneLoadBalancing')
if is_cross_az_lb_enabled:
info['cross_az_load_balancing'] = 'yes'
else:
info['cross_az_load_balancing'] = 'no'
# return stickiness info?
info['tags'] = self.tags
return info
@_throttleable_operation(_THROTTLING_RETRIES)
def _wait_for_elb_removed(self):
polling_increment_secs = 15
max_retries = (self.wait_timeout / polling_increment_secs)
status_achieved = False
for x in range(0, max_retries):
try:
result = self.elb_conn.get_all_lb_attributes(self.name)
except (boto.exception.BotoServerError, StandardError) as e:
if "LoadBalancerNotFound" in e.code:
status_achieved = True
break
else:
time.sleep(polling_increment_secs)
return status_achieved
@_throttleable_operation(_THROTTLING_RETRIES)
def _wait_for_elb_interface_removed(self):
polling_increment_secs = 15
max_retries = (self.wait_timeout / polling_increment_secs)
status_achieved = False
elb_interfaces = self.ec2_conn.get_all_network_interfaces(
filters={'attachment.instance-owner-id': 'amazon-elb',
'description': 'ELB {0}'.format(self.name) })
for x in range(0, max_retries):
for interface in elb_interfaces:
try:
result = self.ec2_conn.get_all_network_interfaces(interface.id)
if result == []:
status_achieved = True
break
else:
time.sleep(polling_increment_secs)
except (boto.exception.BotoServerError, StandardError) as e:
if 'InvalidNetworkInterfaceID' in e.code:
status_achieved = True
break
else:
self.module.fail_json(msg=str(e))
return status_achieved
@_throttleable_operation(_THROTTLING_RETRIES)
def _get_elb(self):
elbs = self.elb_conn.get_all_load_balancers()
for elb in elbs:
if self.name == elb.name:
self.status = 'ok'
return elb
def _get_elb_connection(self):
try:
return connect_to_aws(boto.ec2.elb, self.region,
**self.aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
self.module.fail_json(msg=str(e))
def _get_ec2_connection(self):
try:
return connect_to_aws(boto.ec2, self.region,
**self.aws_connect_params)
except (boto.exception.NoAuthHandlerFound, StandardError) as e:
self.module.fail_json(msg=str(e))
@_throttleable_operation(_THROTTLING_RETRIES)
def _delete_elb(self):
# True if succeeds, exception raised if not
result = self.elb_conn.delete_load_balancer(name=self.name)
if result:
self.changed = True
self.status = 'deleted'
def _create_elb(self):
listeners = [self._listener_as_tuple(l) for l in self.listeners]
self.elb = self.elb_conn.create_load_balancer(name=self.name,
zones=self.zones,
security_groups=self.security_group_ids,
complex_listeners=listeners,
subnets=self.subnets,
scheme=self.scheme)
if self.elb:
# HACK: Work around a boto bug in which the listeners attribute is
# always set to the listeners argument to create_load_balancer, and
# not the complex_listeners
# We're not doing a self.elb = self._get_elb here because there
# might be eventual consistency issues and it doesn't necessarily
# make sense to wait until the ELB gets returned from the EC2 API.
# This is necessary in the event we hit the throttling errors and
# need to retry ensure_ok
# See https://github.com/boto/boto/issues/3526
self.elb.listeners = self.listeners
self.changed = True
self.status = 'created'
def _create_elb_listeners(self, listeners):
"""Takes a list of listener tuples and creates them"""
# True if succeeds, exception raised if not
self.changed = self.elb_conn.create_load_balancer_listeners(self.name,
complex_listeners=listeners)
def _delete_elb_listeners(self, listeners):
"""Takes a list of listener tuples and deletes them from the elb"""
ports = [l[0] for l in listeners]
# True if succeeds, exception raised if not
self.changed = self.elb_conn.delete_load_balancer_listeners(self.name,
ports)
def _set_elb_listeners(self):
"""
Creates listeners specified by self.listeners; overwrites existing
listeners on these ports; removes extraneous listeners
"""
listeners_to_add = []
listeners_to_remove = []
listeners_to_keep = []
# Check for any listeners we need to create or overwrite
for listener in self.listeners:
listener_as_tuple = self._listener_as_tuple(listener)
# First we loop through existing listeners to see if one is
# already specified for this port
existing_listener_found = None
for existing_listener in self.elb.listeners:
# Since ELB allows only one listener on each incoming port, a
# single match on the incoming port is all we're looking for
if existing_listener[0] == int(listener['load_balancer_port']):
existing_listener_found = self._api_listener_as_tuple(existing_listener)
break
if existing_listener_found:
# Does it match exactly?
if listener_as_tuple != existing_listener_found:
# The ports are the same but something else is different,
# so we'll remove the existing one and add the new one
listeners_to_remove.append(existing_listener_found)
listeners_to_add.append(listener_as_tuple)
else:
# We already have this listener, so we're going to keep it
listeners_to_keep.append(existing_listener_found)
else:
# We didn't find an existing listener, so just add the new one
listeners_to_add.append(listener_as_tuple)
# Check for any extraneous listeners we need to remove, if desired
if self.purge_listeners:
for existing_listener in self.elb.listeners:
existing_listener_tuple = self._api_listener_as_tuple(existing_listener)
if existing_listener_tuple in listeners_to_remove:
# Already queued for removal
continue
if existing_listener_tuple in listeners_to_keep:
# Keep this one around
continue
# Since we're not already removing it and we don't need to keep
# it, let's get rid of it
listeners_to_remove.append(existing_listener_tuple)
if listeners_to_remove:
self._delete_elb_listeners(listeners_to_remove)
if listeners_to_add:
self._create_elb_listeners(listeners_to_add)
def _api_listener_as_tuple(self, listener):
"""Adds ssl_certificate_id to ELB API tuple if present"""
base_tuple = listener.get_complex_tuple()
if listener.ssl_certificate_id and len(base_tuple) < 5:
return base_tuple + (listener.ssl_certificate_id,)
return base_tuple
def _listener_as_tuple(self, listener):
"""Formats listener as a 4- or 5-tuples, in the order specified by the
ELB API"""
# N.B. string manipulations on protocols below (str(), upper()) is to
# ensure format matches output from ELB API
listener_list = [
int(listener['load_balancer_port']),
int(listener['instance_port']),
str(listener['protocol'].upper()),
]
# Instance protocol is not required by ELB API; it defaults to match
# load balancer protocol. We'll mimic that behavior here
if 'instance_protocol' in listener:
listener_list.append(str(listener['instance_protocol'].upper()))
else:
listener_list.append(str(listener['protocol'].upper()))
if 'ssl_certificate_id' in listener:
listener_list.append(str(listener['ssl_certificate_id']))
return tuple(listener_list)
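    # Illustrative example (the certificate ARN is hypothetical): a listener
    # dict such as
    #     {'load_balancer_port': 443, 'instance_port': 80,
    #      'protocol': 'https', 'instance_protocol': 'http',
    #      'ssl_certificate_id': 'arn:aws:iam::123456789012:server-certificate/x'}
    # maps to the 5-tuple
    #     (443, 80, 'HTTPS', 'HTTP',
    #      'arn:aws:iam::123456789012:server-certificate/x')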
def _enable_zones(self, zones):
try:
self.elb.enable_zones(zones)
except boto.exception.BotoServerError as e:
if "Invalid Availability Zone" in e.error_message:
self.module.fail_json(msg=e.error_message)
else:
self.module.fail_json(msg="an unknown server error occurred, please try again later")
self.changed = True
def _disable_zones(self, zones):
try:
self.elb.disable_zones(zones)
except boto.exception.BotoServerError as e:
if "Invalid Availability Zone" in e.error_message:
self.module.fail_json(msg=e.error_message)
else:
self.module.fail_json(msg="an unknown server error occurred, please try again later")
self.changed = True
def _attach_subnets(self, subnets):
self.elb_conn.attach_lb_to_subnets(self.name, subnets)
self.changed = True
def _detach_subnets(self, subnets):
self.elb_conn.detach_lb_from_subnets(self.name, subnets)
self.changed = True
def _set_subnets(self):
"""Determine which subnets need to be attached or detached on the ELB"""
if self.subnets:
if self.purge_subnets:
subnets_to_detach = list(set(self.elb.subnets) - set(self.subnets))
subnets_to_attach = list(set(self.subnets) - set(self.elb.subnets))
else:
subnets_to_detach = None
subnets_to_attach = list(set(self.subnets) - set(self.elb.subnets))
if subnets_to_attach:
self._attach_subnets(subnets_to_attach)
if subnets_to_detach:
self._detach_subnets(subnets_to_detach)
def _set_zones(self):
"""Determine which zones need to be enabled or disabled on the ELB"""
if self.zones:
if self.purge_zones:
zones_to_disable = list(set(self.elb.availability_zones) -
set(self.zones))
zones_to_enable = list(set(self.zones) -
set(self.elb.availability_zones))
else:
zones_to_disable = None
zones_to_enable = list(set(self.zones) -
set(self.elb.availability_zones))
if zones_to_enable:
self._enable_zones(zones_to_enable)
# N.B. This must come second, in case it would have removed all zones
if zones_to_disable:
self._disable_zones(zones_to_disable)
def _set_security_groups(self):
if self.security_group_ids is not None and set(self.elb.security_groups) != set(self.security_group_ids):
self.elb_conn.apply_security_groups_to_lb(self.name, self.security_group_ids)
self.changed = True
def _set_health_check(self):
"""Set health check values on ELB as needed"""
if self.health_check:
# This just makes it easier to compare each of the attributes
# and look for changes. Keys are attributes of the current
# health_check; values are desired values of new health_check
health_check_config = {
"target": self._get_health_check_target(),
"timeout": self.health_check['response_timeout'],
"interval": self.health_check['interval'],
"unhealthy_threshold": self.health_check['unhealthy_threshold'],
"healthy_threshold": self.health_check['healthy_threshold'],
}
update_health_check = False
# The health_check attribute is *not* set on newly created
# ELBs! So we have to create our own.
if not self.elb.health_check:
self.elb.health_check = HealthCheck()
for attr, desired_value in health_check_config.items():
if getattr(self.elb.health_check, attr) != desired_value:
setattr(self.elb.health_check, attr, desired_value)
update_health_check = True
if update_health_check:
self.elb.configure_health_check(self.elb.health_check)
self.changed = True
def _check_attribute_support(self, attr):
return hasattr(boto.ec2.elb.attributes.LbAttributes(), attr)
def _set_cross_az_load_balancing(self):
attributes = self.elb.get_attributes()
if self.cross_az_load_balancing:
if not attributes.cross_zone_load_balancing.enabled:
self.changed = True
attributes.cross_zone_load_balancing.enabled = True
else:
if attributes.cross_zone_load_balancing.enabled:
self.changed = True
attributes.cross_zone_load_balancing.enabled = False
self.elb_conn.modify_lb_attribute(self.name, 'CrossZoneLoadBalancing',
attributes.cross_zone_load_balancing.enabled)
def _set_access_log(self):
attributes = self.elb.get_attributes()
if self.access_logs:
if 's3_location' not in self.access_logs:
self.module.fail_json(msg='s3_location information required')
access_logs_config = {
"enabled": True,
"s3_bucket_name": self.access_logs['s3_location'],
"s3_bucket_prefix": self.access_logs.get('s3_prefix', ''),
"emit_interval": self.access_logs.get('interval', 60),
}
update_access_logs_config = False
for attr, desired_value in access_logs_config.items():
if getattr(attributes.access_log, attr) != desired_value:
setattr(attributes.access_log, attr, desired_value)
update_access_logs_config = True
if update_access_logs_config:
self.elb_conn.modify_lb_attribute(self.name, 'AccessLog', attributes.access_log)
self.changed = True
elif attributes.access_log.enabled:
attributes.access_log.enabled = False
self.changed = True
self.elb_conn.modify_lb_attribute(self.name, 'AccessLog', attributes.access_log)
def _set_connection_draining_timeout(self):
attributes = self.elb.get_attributes()
if self.connection_draining_timeout is not None:
if not attributes.connection_draining.enabled or \
attributes.connection_draining.timeout != self.connection_draining_timeout:
self.changed = True
attributes.connection_draining.enabled = True
attributes.connection_draining.timeout = self.connection_draining_timeout
self.elb_conn.modify_lb_attribute(self.name, 'ConnectionDraining', attributes.connection_draining)
else:
if attributes.connection_draining.enabled:
self.changed = True
attributes.connection_draining.enabled = False
self.elb_conn.modify_lb_attribute(self.name, 'ConnectionDraining', attributes.connection_draining)
def _set_idle_timeout(self):
attributes = self.elb.get_attributes()
if self.idle_timeout is not None:
if attributes.connecting_settings.idle_timeout != self.idle_timeout:
self.changed = True
attributes.connecting_settings.idle_timeout = self.idle_timeout
self.elb_conn.modify_lb_attribute(self.name, 'ConnectingSettings', attributes.connecting_settings)
def _policy_name(self, policy_type):
return __file__.split('/')[-1].split('.')[0].replace('_', '-') + '-' + policy_type
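    # For example, for this module file ('ec2_elb_lb.py') and policy_type
    # 'LBCookieStickinessPolicyType', _policy_name() yields
    # 'ec2-elb-lb-LBCookieStickinessPolicyType'.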
def _create_policy(self, policy_param, policy_meth, policy):
getattr(self.elb_conn, policy_meth )(policy_param, self.elb.name, policy)
def _delete_policy(self, elb_name, policy):
self.elb_conn.delete_lb_policy(elb_name, policy)
def _update_policy(self, policy_param, policy_meth, policy_attr, policy):
self._delete_policy(self.elb.name, policy)
self._create_policy(policy_param, policy_meth, policy)
    def _set_listener_policy(self, listeners_dict, policy=None):
        # Avoid a mutable default argument; treat None as "no policies"
        policy = policy if policy is not None else []
        for listener_port in listeners_dict:
            if listeners_dict[listener_port].startswith('HTTP'):
                self.elb_conn.set_lb_policies_of_listener(self.elb.name, listener_port, policy)
def _set_stickiness_policy(self, elb_info, listeners_dict, policy, **policy_attrs):
for p in getattr(elb_info.policies, policy_attrs['attr']):
if str(p.__dict__['policy_name']) == str(policy[0]):
if str(p.__dict__[policy_attrs['dict_key']]) != str(policy_attrs['param_value'] or 0):
self._set_listener_policy(listeners_dict)
self._update_policy(policy_attrs['param_value'], policy_attrs['method'], policy_attrs['attr'], policy[0])
self.changed = True
break
else:
self._create_policy(policy_attrs['param_value'], policy_attrs['method'], policy[0])
self.changed = True
self._set_listener_policy(listeners_dict, policy)
def select_stickiness_policy(self):
if self.stickiness:
if 'cookie' in self.stickiness and 'expiration' in self.stickiness:
self.module.fail_json(msg='\'cookie\' and \'expiration\' can not be set at the same time')
elb_info = self.elb_conn.get_all_load_balancers(self.elb.name)[0]
d = {}
for listener in elb_info.listeners:
d[listener[0]] = listener[2]
listeners_dict = d
if self.stickiness['type'] == 'loadbalancer':
policy = []
policy_type = 'LBCookieStickinessPolicyType'
if self.module.boolean(self.stickiness['enabled']) is True:
if 'expiration' not in self.stickiness:
self.module.fail_json(msg='expiration must be set when type is loadbalancer')
                    expiration = self.stickiness['expiration'] if self.stickiness['expiration'] != 0 else None
policy_attrs = {
'type': policy_type,
'attr': 'lb_cookie_stickiness_policies',
'method': 'create_lb_cookie_stickiness_policy',
'dict_key': 'cookie_expiration_period',
'param_value': expiration
}
policy.append(self._policy_name(policy_attrs['type']))
self._set_stickiness_policy(elb_info, listeners_dict, policy, **policy_attrs)
elif self.module.boolean(self.stickiness['enabled']) is False:
if len(elb_info.policies.lb_cookie_stickiness_policies):
if elb_info.policies.lb_cookie_stickiness_policies[0].policy_name == self._policy_name(policy_type):
self.changed = True
else:
self.changed = False
self._set_listener_policy(listeners_dict)
self._delete_policy(self.elb.name, self._policy_name(policy_type))
elif self.stickiness['type'] == 'application':
policy = []
policy_type = 'AppCookieStickinessPolicyType'
if self.module.boolean(self.stickiness['enabled']) is True:
if 'cookie' not in self.stickiness:
self.module.fail_json(msg='cookie must be set when type is application')
policy_attrs = {
'type': policy_type,
'attr': 'app_cookie_stickiness_policies',
'method': 'create_app_cookie_stickiness_policy',
'dict_key': 'cookie_name',
'param_value': self.stickiness['cookie']
}
policy.append(self._policy_name(policy_attrs['type']))
self._set_stickiness_policy(elb_info, listeners_dict, policy, **policy_attrs)
elif self.module.boolean(self.stickiness['enabled']) is False:
if len(elb_info.policies.app_cookie_stickiness_policies):
if elb_info.policies.app_cookie_stickiness_policies[0].policy_name == self._policy_name(policy_type):
self.changed = True
self._set_listener_policy(listeners_dict)
self._delete_policy(self.elb.name, self._policy_name(policy_type))
else:
self._set_listener_policy(listeners_dict)
def _get_backend_policies(self):
"""Get a list of backend policies"""
policies = []
if self.elb.backends is not None:
for backend in self.elb.backends:
if backend.policies is not None:
for policy in backend.policies:
policies.append(str(backend.instance_port) + ':' + policy.policy_name)
return policies
def _set_backend_policies(self):
"""Sets policies for all backends"""
ensure_proxy_protocol = False
replace = []
backend_policies = self._get_backend_policies()
# Find out what needs to be changed
for listener in self.listeners:
want = False
if 'proxy_protocol' in listener and listener['proxy_protocol']:
ensure_proxy_protocol = True
want = True
if str(listener['instance_port']) + ':ProxyProtocol-policy' in backend_policies:
if not want:
replace.append({'port': listener['instance_port'], 'policies': []})
elif want:
replace.append({'port': listener['instance_port'], 'policies': ['ProxyProtocol-policy']})
# enable or disable proxy protocol
if ensure_proxy_protocol:
self._set_proxy_protocol_policy()
# Make the backend policies so
for item in replace:
self.elb_conn.set_lb_policies_of_backend_server(self.elb.name, item['port'], item['policies'])
self.changed = True
def _get_proxy_protocol_policy(self):
"""Find out if the elb has a proxy protocol enabled"""
if self.elb.policies is not None and self.elb.policies.other_policies is not None:
for policy in self.elb.policies.other_policies:
if policy.policy_name == 'ProxyProtocol-policy':
return policy.policy_name
return None
def _set_proxy_protocol_policy(self):
"""Install a proxy protocol policy if needed"""
proxy_policy = self._get_proxy_protocol_policy()
if proxy_policy is None:
self.elb_conn.create_lb_policy(
self.elb.name, 'ProxyProtocol-policy', 'ProxyProtocolPolicyType', {'ProxyProtocol': True}
)
self.changed = True
# TODO: remove proxy protocol policy if not needed anymore? There is no side effect to leaving it there
def _diff_list(self, a, b):
"""Find the entries in list a that are not in list b"""
b = set(b)
return [aa for aa in a if aa not in b]
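    # Example: _diff_list(['i-1', 'i-2'], ['i-2']) -> ['i-1']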
def _get_instance_ids(self):
"""Get the current list of instance ids installed in the elb"""
instances = []
if self.elb.instances is not None:
for instance in self.elb.instances:
instances.append(instance.id)
return instances
def _set_instance_ids(self):
"""Register or deregister instances from an lb instance"""
assert_instances = self.instance_ids or []
has_instances = self._get_instance_ids()
add_instances = self._diff_list(assert_instances, has_instances)
if add_instances:
self.elb_conn.register_instances(self.elb.name, add_instances)
self.changed = True
if self.purge_instance_ids:
remove_instances = self._diff_list(has_instances, assert_instances)
if remove_instances:
self.elb_conn.deregister_instances(self.elb.name, remove_instances)
self.changed = True
def _set_tags(self):
"""Add/Delete tags"""
if self.tags is None:
return
params = {'LoadBalancerNames.member.1': self.name}
tagdict = dict()
# get the current list of tags from the ELB, if ELB exists
if self.elb:
current_tags = self.elb_conn.get_list('DescribeTags', params,
[('member', Tag)])
tagdict = dict((tag.Key, tag.Value) for tag in current_tags
if hasattr(tag, 'Key'))
# Add missing tags
dictact = dict(set(self.tags.items()) - set(tagdict.items()))
if dictact:
for i, key in enumerate(dictact):
params['Tags.member.%d.Key' % (i + 1)] = key
params['Tags.member.%d.Value' % (i + 1)] = dictact[key]
self.elb_conn.make_request('AddTags', params)
            self.changed = True
# Remove extra tags
dictact = dict(set(tagdict.items()) - set(self.tags.items()))
if dictact:
for i, key in enumerate(dictact):
params['Tags.member.%d.Key' % (i + 1)] = key
self.elb_conn.make_request('RemoveTags', params)
            self.changed = True
def _get_health_check_target(self):
"""Compose target string from healthcheck parameters"""
protocol = self.health_check['ping_protocol'].upper()
path = ""
if protocol in ['HTTP', 'HTTPS'] and 'ping_path' in self.health_check:
path = self.health_check['ping_path']
return "%s:%s%s" % (protocol, self.health_check['ping_port'], path)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state={'required': True, 'choices': ['present', 'absent']},
name={'required': True},
listeners={'default': None, 'required': False, 'type': 'list'},
purge_listeners={'default': True, 'required': False, 'type': 'bool'},
instance_ids={'default': None, 'required': False, 'type': 'list'},
purge_instance_ids={'default': False, 'required': False, 'type': 'bool'},
zones={'default': None, 'required': False, 'type': 'list'},
purge_zones={'default': False, 'required': False, 'type': 'bool'},
security_group_ids={'default': None, 'required': False, 'type': 'list'},
security_group_names={'default': None, 'required': False, 'type': 'list'},
health_check={'default': None, 'required': False, 'type': 'dict'},
subnets={'default': None, 'required': False, 'type': 'list'},
purge_subnets={'default': False, 'required': False, 'type': 'bool'},
scheme={'default': 'internet-facing', 'required': False},
connection_draining_timeout={'default': None, 'required': False},
idle_timeout={'default': None, 'required': False},
cross_az_load_balancing={'default': None, 'required': False},
stickiness={'default': None, 'required': False, 'type': 'dict'},
access_logs={'default': None, 'required': False, 'type': 'dict'},
wait={'default': False, 'type': 'bool', 'required': False},
wait_timeout={'default': 60, 'type': 'int', 'required': False},
tags={'default': None, 'required': False, 'type': 'dict'}
)
)
module = AnsibleModule(
argument_spec=argument_spec,
mutually_exclusive = [['security_group_ids', 'security_group_names']]
)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if not region:
module.fail_json(msg="Region must be specified as a parameter, in EC2_REGION or AWS_REGION environment variables or in boto configuration file")
name = module.params['name']
state = module.params['state']
listeners = module.params['listeners']
purge_listeners = module.params['purge_listeners']
instance_ids = module.params['instance_ids']
purge_instance_ids = module.params['purge_instance_ids']
zones = module.params['zones']
purge_zones = module.params['purge_zones']
security_group_ids = module.params['security_group_ids']
security_group_names = module.params['security_group_names']
health_check = module.params['health_check']
access_logs = module.params['access_logs']
subnets = module.params['subnets']
purge_subnets = module.params['purge_subnets']
scheme = module.params['scheme']
connection_draining_timeout = module.params['connection_draining_timeout']
idle_timeout = module.params['idle_timeout']
cross_az_load_balancing = module.params['cross_az_load_balancing']
stickiness = module.params['stickiness']
wait = module.params['wait']
wait_timeout = module.params['wait_timeout']
tags = module.params['tags']
if state == 'present' and not listeners:
module.fail_json(msg="At least one listener is required for ELB creation")
if state == 'present' and not (zones or subnets):
module.fail_json(msg="At least one availability zone or subnet is required for ELB creation")
if wait_timeout > 600:
module.fail_json(msg='wait_timeout maximum is 600 seconds')
if security_group_names:
security_group_ids = []
try:
ec2 = ec2_connect(module)
if subnets: # We have at least one subnet, ergo this is a VPC
vpc_conn = _get_vpc_connection(module=module, region=region, aws_connect_params=aws_connect_params)
vpc_id = vpc_conn.get_all_subnets([subnets[0]])[0].vpc_id
filters = {'vpc_id': vpc_id}
else:
filters = None
grp_details = ec2.get_all_security_groups(filters=filters)
for group_name in security_group_names:
if isinstance(group_name, basestring):
group_name = [group_name]
group_id = [ str(grp.id) for grp in grp_details if str(grp.name) in group_name ]
security_group_ids.extend(group_id)
except boto.exception.NoAuthHandlerFound as e:
module.fail_json(msg = str(e))
elb_man = ElbManager(module, name, listeners, purge_listeners, zones,
purge_zones, security_group_ids, health_check,
subnets, purge_subnets, scheme,
connection_draining_timeout, idle_timeout,
cross_az_load_balancing,
access_logs, stickiness, wait, wait_timeout, tags,
region=region, instance_ids=instance_ids, purge_instance_ids=purge_instance_ids,
**aws_connect_params)
# check for unsupported attributes for this version of boto
if cross_az_load_balancing and not elb_man._check_attribute_support('cross_zone_load_balancing'):
module.fail_json(msg="You must install boto >= 2.18.0 to use the cross_az_load_balancing attribute")
if connection_draining_timeout and not elb_man._check_attribute_support('connection_draining'):
module.fail_json(msg="You must install boto >= 2.28.0 to use the connection_draining_timeout attribute")
if idle_timeout and not elb_man._check_attribute_support('connecting_settings'):
module.fail_json(msg="You must install boto >= 2.33.0 to use the idle_timeout attribute")
if state == 'present':
elb_man.ensure_ok()
elif state == 'absent':
elb_man.ensure_gone()
ansible_facts = {'ec2_elb': 'info'}
ec2_facts_result = dict(changed=elb_man.changed,
elb=elb_man.get_info(),
ansible_facts=ansible_facts)
module.exit_json(**ec2_facts_result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
|
adityacs/ansible
|
lib/ansible/modules/cloud/amazon/ec2_elb_lb.py
|
Python
|
gpl-3.0
| 53,190
|
[
"Dalton"
] |
f2084f88f7f1308d994e89dacb4f55644db6d57f1cd09dba5250256f815190b4
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Chris Caron <lead2gold@gmail.com>
# All rights reserved.
#
# This code is licensed under the MIT License.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files(the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions :
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Once you visit: https://developer.gitter.im/apps you'll get a personal
# access token that will look something like this:
# b5647881d563fm846dfbb2c27d1fe8f669b8f026
# Don't worry about generating an app; this token is all you need to form
# your URL with. The syntax is as follows:
# gitter://{token}/{channel}
# Hence a URL might look like the following:
# gitter://b5647881d563fm846dfbb2c27d1fe8f669b8f026/apprise
# Note: You must have joined the channel to send a message to it!
# Official API reference: https://developer.gitter.im/docs/user-resource
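# A minimal, hedged usage sketch (the token/room below are placeholders):
#
#   import apprise
#   apobj = apprise.Apprise()
#   apobj.add('gitter://{token}/{room}')
#   apobj.notify(body='Hello from Apprise!')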
import re
import requests
from json import loads
from json import dumps
from datetime import datetime
from .NotifyBase import NotifyBase
from ..common import NotifyImageSize
from ..common import NotifyFormat
from ..common import NotifyType
from ..utils import parse_list
from ..utils import parse_bool
from ..utils import validate_regex
from ..AppriseLocale import gettext_lazy as _
# API Gitter URL
GITTER_API_URL = 'https://api.gitter.im/v1'
# Used to break path apart into list of targets
TARGET_LIST_DELIM = re.compile(r'[ \t\r\n,\\/]+')
class NotifyGitter(NotifyBase):
"""
A wrapper for Gitter Notifications
"""
# The default descriptive name associated with the Notification
service_name = 'Gitter'
# The services URL
service_url = 'https://gitter.im/'
# All notification requests are secure
secure_protocol = 'gitter'
# A URL that takes you to the setup/help of the specific protocol
setup_url = 'https://github.com/caronc/apprise/wiki/Notify_gitter'
# Allows the user to specify the NotifyImageSize object
image_size = NotifyImageSize.XY_32
# Gitter does not support a title
title_maxlen = 0
    # Gitter is kind enough to return how many more requests we're allowed to
    # continue to make within its header response as:
    # X-RateLimit-Reset: The epoch time (in seconds) we can expect our
    # rate-limit to be reset.
    # X-RateLimit-Remaining: an integer identifying how many requests we're
    # still allowed to make.
request_rate_per_sec = 0
# For Tracking Purposes
ratelimit_reset = datetime.utcnow()
# Default to 1
ratelimit_remaining = 1
# Default Notification Format
notify_format = NotifyFormat.MARKDOWN
# Define object templates
templates = (
'{schema}://{token}/{targets}/',
)
# Define our template tokens
template_tokens = dict(NotifyBase.template_tokens, **{
'token': {
'name': _('Token'),
'type': 'string',
'private': True,
'required': True,
'regex': (r'^[a-z0-9]{40}$', 'i'),
},
'targets': {
'name': _('Rooms'),
'type': 'list:string',
},
})
# Define our template arguments
template_args = dict(NotifyBase.template_args, **{
'image': {
'name': _('Include Image'),
'type': 'bool',
'default': False,
'map_to': 'include_image',
},
'to': {
'alias_of': 'targets',
},
})
def __init__(self, token, targets, include_image=False, **kwargs):
"""
Initialize Gitter Object
"""
super(NotifyGitter, self).__init__(**kwargs)
# Secret Key (associated with project)
self.token = validate_regex(
token, *self.template_tokens['token']['regex'])
if not self.token:
msg = 'An invalid Gitter API Token ' \
'({}) was specified.'.format(token)
self.logger.warning(msg)
raise TypeError(msg)
# Parse our targets
self.targets = parse_list(targets)
if not self.targets:
msg = 'There are no valid Gitter targets to notify.'
self.logger.warning(msg)
raise TypeError(msg)
        # Used to track mapping of rooms to their numeric id lookup for
# messaging
self._room_mapping = None
        # Track whether or not to send an image with our notification.
self.include_image = include_image
return
def send(self, body, title='', notify_type=NotifyType.INFO, **kwargs):
"""
Perform Gitter Notification
"""
# error tracking (used for function return)
has_error = False
# Set up our image for display if configured to do so
image_url = None if not self.include_image \
else self.image_url(notify_type)
if image_url:
            # Prepend the image as markdown so the original body is preserved
            body = '![alt]({})\n{}'.format(image_url, body)
if self._room_mapping is None:
# Populate our room mapping
self._room_mapping = {}
postokay, response = self._fetch(url='rooms')
if not postokay:
return False
# Response generally looks like this:
# [
# {
# noindex: False,
# oneToOne: False,
# avatarUrl: 'https://path/to/avatar/url',
# url: '/apprise-notifications/community',
# public: True,
# tags: [],
# lurk: False,
# uri: 'apprise-notifications/community',
# lastAccessTime: '2019-03-25T00:12:28.144Z',
# topic: '',
# roomMember: True,
# groupId: '5c981cecd73408ce4fbbad2f',
# githubType: 'REPO_CHANNEL',
# unreadItems: 0,
# mentions: 0,
# security: 'PUBLIC',
# userCount: 1,
# id: '5c981cecd73408ce4fbbad31',
# name: 'apprise/community'
# }
# ]
for entry in response:
self._room_mapping[entry['name'].lower().split('/')[0]] = {
# The ID of the room
'id': entry['id'],
# A descriptive name (useful for logging)
'uri': entry['uri'],
}
# Create a copy of the targets list
targets = list(self.targets)
while len(targets):
target = targets.pop(0).lower()
if target not in self._room_mapping:
self.logger.warning(
'Failed to locate Gitter room {}'.format(target))
# Flag our error
has_error = True
continue
# prepare our payload
payload = {
'text': body,
}
# Our Notification URL
notify_url = 'rooms/{}/chatMessages'.format(
self._room_mapping[target]['id'])
# Perform our query
postokay, response = self._fetch(
notify_url, payload=dumps(payload), method='POST')
if not postokay:
# Flag our error
has_error = True
return not has_error
def _fetch(self, url, payload=None, method='GET'):
"""
Wrapper to request object
"""
# Prepare our headers:
headers = {
'User-Agent': self.app_id,
'Accept': 'application/json',
'Authorization': 'Bearer ' + self.token,
}
if payload:
# Only set our header payload if it's defined
headers['Content-Type'] = 'application/json'
# Default content response object
content = {}
# Update our URL
url = '{}/{}'.format(GITTER_API_URL, url)
# Some Debug Logging
self.logger.debug('Gitter {} URL: {} (cert_verify={})'.format(
method,
url, self.verify_certificate))
if payload:
self.logger.debug('Gitter Payload: {}' .format(payload))
# By default set wait to None
wait = None
if self.ratelimit_remaining == 0:
# Determine how long we should wait for or if we should wait at
# all. This isn't fool-proof because we can't be sure the client
# time (calling this script) is completely synced up with the
# Gitter server. One would hope we're on NTP and our clocks are
            # the same allowing this to roll smoothly:
now = datetime.utcnow()
if now < self.ratelimit_reset:
# We need to throttle for the difference in seconds
# We add 0.5 seconds to the end just to allow a grace
# period.
wait = (self.ratelimit_reset - now).total_seconds() + 0.5
# Always call throttle before any remote server i/o is made
self.throttle(wait=wait)
# fetch function
fn = requests.post if method == 'POST' else requests.get
try:
r = fn(
url,
data=payload,
headers=headers,
verify=self.verify_certificate,
)
if r.status_code != requests.codes.ok:
# We had a problem
status_str = \
NotifyGitter.http_response_code_lookup(r.status_code)
self.logger.warning(
'Failed to send Gitter {} to {}: '
'{}error={}.'.format(
method,
url,
', ' if status_str else '',
r.status_code))
self.logger.debug(
'Response Details:\r\n{}'.format(r.content))
# Mark our failure
return (False, content)
try:
content = loads(r.content)
except (AttributeError, TypeError, ValueError):
# ValueError = r.content is Unparsable
# TypeError = r.content is None
# AttributeError = r is None
content = {}
try:
self.ratelimit_remaining = \
int(r.headers.get('X-RateLimit-Remaining'))
self.ratelimit_reset = datetime.utcfromtimestamp(
int(r.headers.get('X-RateLimit-Reset')))
except (TypeError, ValueError):
# This is returned if we could not retrieve this information
# gracefully accept this state and move on
pass
except requests.RequestException as e:
self.logger.warning(
'Exception received when sending Gitter {} to {}: '.
format(method, url))
self.logger.debug('Socket Exception: %s' % str(e))
# Mark our failure
return (False, content)
return (True, content)
def url(self, privacy=False, *args, **kwargs):
"""
Returns the URL built dynamically based on specified arguments.
"""
# Define any arguments set
args = {
'format': self.notify_format,
'overflow': self.overflow_mode,
'image': 'yes' if self.include_image else 'no',
'verify': 'yes' if self.verify_certificate else 'no',
}
return '{schema}://{token}/{targets}/?{args}'.format(
schema=self.secure_protocol,
token=self.pprint(self.token, privacy, safe=''),
targets='/'.join(
[NotifyGitter.quote(x, safe='') for x in self.targets]),
args=NotifyGitter.urlencode(args))
@staticmethod
def parse_url(url):
"""
Parses the URL and returns enough arguments that can allow
us to substantiate this object.
"""
results = NotifyBase.parse_url(url)
if not results:
# We're done early as we couldn't load the results
return results
results['token'] = NotifyGitter.unquote(results['host'])
# Get our entries; split_path() looks after unquoting content for us
# by default
results['targets'] = NotifyGitter.split_path(results['fullpath'])
# Support the 'to' variable so that we can support targets this way too
# The 'to' makes it easier to use yaml configuration
if 'to' in results['qsd'] and len(results['qsd']['to']):
results['targets'] += NotifyGitter.parse_list(results['qsd']['to'])
# Include images with our message
results['include_image'] = \
parse_bool(results['qsd'].get('image', False))
return results
|
SickGear/SickGear
|
lib/apprise/plugins/NotifyGitter.py
|
Python
|
gpl-3.0
| 13,774
|
[
"VisIt"
] |
41f4ff78a19a7266685cc489f1ae85568211a63ee1603e1e62d4ef8f0208bbc4
|
../../../../share/pyshared/orca/orca_platform.py
|
Alberto-Beralix/Beralix
|
i386-squashfs-root/usr/lib/python2.7/dist-packages/orca/orca_platform.py
|
Python
|
gpl-3.0
| 48
|
[
"ORCA"
] |
e5ac06c4513d0fc7c85aba880086262adf9994a37795120694b088ba8be0cd06
|
import numpy as np
import cv2
import sys
from self_driving.optical_flow.python import video
from scipy import misc
def color_seg(img_raw, red_thresh=0, green_thresh=0, blue_thresh=0):
img_color_mask = np.copy(img_raw)
red_mask = img_raw[:,:,0] < red_thresh
green_mask = img_raw[:,:,1] < green_thresh
rgb_mask = np.logical_or(red_mask, green_mask)
img_color_mask[rgb_mask] = [0,0,0]
return img_color_mask
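# NOTE: blue_thresh is accepted but currently unused above; only the red and
# green channels are thresholded. Typical call (values as used in __main__):
#   masked = color_seg(rgb_frame, red_thresh=200, green_thresh=150)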
def draw_lines_extrapolate(img, lines, color=[255, 0, 0], thickness=2):
# Assume lines on left and right have opposite signed slopes
left_xs = []
left_ys = []
right_xs = []
right_ys = []
for line in lines:
for x1, y1, x2, y2 in line:
if x2 - x1 == 0: continue; # Infinite slope
slope = float(y2-y1) / float(x2-x1)
if .5 <= abs(slope) < 1.0: # Discard unlikely slopes
if slope > 0:
left_xs.extend([x1, x2])
left_ys.extend([y1, y2])
else:
right_xs.extend([x1, x2])
right_ys.extend([y1, y2])
    if not left_xs or not right_xs:
        # Not enough segments on one side to fit a lane line
        return
    left_fit = np.polyfit(left_xs, left_ys, 1)
    right_fit = np.polyfit(right_xs, right_ys, 1)
y1 = img.shape[0] # Bottom of image
    y2 = img.shape[0] / 2 + 50  # Middle of view
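    # np.polyfit returns (slope, intercept); invert y = m*x + b to x = (y - b) / m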
x1_left = (y1 - left_fit[1]) / left_fit[0]
x2_left = (y2 - left_fit[1]) / left_fit[0]
x1_right = (y1 - right_fit[1]) / right_fit[0]
x2_right = (y2 - right_fit[1]) / right_fit[0]
    y1 = int(y1)
    y2 = int(y2)
    x1_left = int(x1_left)
    x2_left = int(x2_left)
    x1_right = int(x1_right)
    x2_right = int(x2_right)
cv2.line(img, (x1_left, y1), (x2_left, y2), color, thickness)
cv2.line(img, (x1_right, y1), (x2_right, y2), color, thickness)
if __name__ == '__main__':
try:
fn = sys.argv[1]
except IndexError:
fn = 0
cam = video.create_capture(fn)
index = 0
while True:
ret, img = cam.read()
if img is None:
break
rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
masked_img = color_seg(rgb, red_thresh=200, green_thresh=150, blue_thresh=0)
gray = cv2.cvtColor(masked_img, cv2.COLOR_RGB2GRAY)
# Define a kernel size and apply Gaussian smoothing
kernel_size = 5
blur_gray = cv2.GaussianBlur(gray, (kernel_size, kernel_size), 0)
# Define our parameters for Canny and apply
low_threshold = 50
high_threshold = 150
edges = cv2.Canny(blur_gray, low_threshold, high_threshold)
# Next we'll create a masked edges image using cv2.fillPoly()
mask = np.zeros_like(edges)
ignore_mask_color = 255
# This time we are defining a four sided polygon to mask
imshape = img.shape
vertices = np.array([[(0 + 120, imshape[0]),
(imshape[1] / 2 - 15, imshape[0] / 2 + 40),
(imshape[1] / 2 + 15, imshape[0] / 2 + 40),
(imshape[1] - 50, imshape[0])]],
dtype=np.int32)
cv2.fillPoly(mask, vertices, ignore_mask_color)
masked_edges = cv2.bitwise_and(edges, mask)
# Define the Hough transform parameters
# Make a blank the same size as our image to draw on
rho = 1 # distance resolution in pixels of the Hough grid
theta = np.pi / 180 # angular resolution in radians of the Hough grid
threshold = 5 # minimum number of votes (intersections in Hough grid cell)
min_line_length = 10 # minimum number of pixels making up a line
max_line_gap = 2 # maximum gap in pixels between connectable line segments
line_image = np.copy(img) * 0 # creating a blank to draw lines on
# Run Hough on edge detected image
# Output "lines" is an array containing endpoints of detected line segments
lines = cv2.HoughLinesP(masked_edges, rho, theta, threshold, np.array([]),
min_line_length, max_line_gap)
if lines is None:
continue
draw_lines_extrapolate(line_image, lines, thickness=8)
# Draw the lines on the edge image
lines_edges = cv2.addWeighted(rgb, 1, line_image, 1, 0)
misc.imsave(sys.argv[2] + 'frame_%d.png' % index, lines_edges)
index += 1
cv2.destroyAllWindows()
|
mengli/MachineLearning
|
self_driving/lane_detect/udacity_lane_detect.py
|
Python
|
apache-2.0
| 4,363
|
[
"Gaussian"
] |
6081a5be8f40e7807d1dba5174cad5dedf1f183d9e1e3bd57b020012c98b57ff
|
# Copyright 2015 Planet Labs, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
from setuptools import setup, find_packages
from setuptools import distutils
import os
import versioneer
def get_version():
if os.path.exists("PKG-INFO"):
metadata = distutils.dist.DistributionMetadata("PKG-INFO")
return metadata.version
else:
return versioneer.get_version()
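# PKG-INFO only exists when building from an sdist/tarball, in which case the
# version recorded at packaging time is reused; in a git checkout, versioneer
# derives the version from the repository metadata instead.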
setup(name='datalake-common',
url='https://github.com/planetlabs/datalake-common',
version=get_version(),
cmdclass=versioneer.get_cmdclass(),
description='common datalake parts',
author='Brian Cavagnolo',
author_email='brian@planet.com',
packages=find_packages(),
install_requires=[
'python-dateutil>=2.4.2',
'pytz>=2015.4',
'simplejson>=3.3.1',
'python-dotenv>=0.1.3',
'six>=1.10.0'
],
extras_require={
'test': [
'pytest==3.0.2',
'pip==7.1.0',
'wheel==0.24.0',
'flake8==2.5.0',
],
's3': [
'boto>=2.38.0',
],
'test_s3': [
'moto==0.4.25',
]
})
|
planetlabs/datalake-common
|
setup.py
|
Python
|
apache-2.0
| 1,698
|
[
"Brian"
] |
c86dcb6cb777c0fca5b14d7af9f058b78ea7732662c04e04847ac20e51fc8805
|
from datetime import timedelta
from random import randint
from jarr.controllers import ArticleController, FeedController
from jarr.controllers.article_clusterizer import Clusterizer
from jarr.controllers.cluster import ClusterController
from jarr.lib.clustering_af.grouper import get_best_match_and_score
from jarr.lib.clustering_af.postgres_casting import to_vector
from tests.base import BaseJarrTest
from tests.utils import update_on_all_objs
class ClusterControllerTest(BaseJarrTest):
_contr_cls = ClusterController
def test_delete(self):
clu_ctrl = ClusterController()
for cluster in clu_ctrl.read():
clu_ctrl.delete(cluster.id)
self.assertEqual(0, ClusterController(2).read().count())
self.assertEqual(0, ArticleController(2).read().count())
@staticmethod
def _clone_article(acontr, article, feed):
# making sure collision will happen with this article
for art_to_del in acontr.read(link=article.link, id__ne=article.id):
acontr.delete(art_to_del.id)
suffix = str(randint(0, 9999))
return acontr.create(feed_id=feed.id,
entry_id=article.entry_id + suffix,
link=article.link,
title=article.title + suffix,
content=article.content + suffix,
date=article.date + timedelta(1),
retrieved_date=article.retrieved_date + timedelta(1))
def test_article_get_unread(self):
self.assertEqual({1: 3, 2: 3, 3: 3, 7: 3, 8: 3, 9: 3},
ClusterController(2).count_by_feed(read=False))
self.assertEqual({4: 3, 5: 3, 6: 3, 10: 3, 11: 3, 12: 3},
ClusterController(3).count_by_feed(read=False))
def _test_unread_on_cluster(self, read_reason):
ccontr = ClusterController()
fcontr = FeedController()
cluster = ccontr.read().first()
clusterizer = Clusterizer()
self.assertFalse(clusterizer.get_config(cluster, 'cluster_enabled'))
self.assertTrue(clusterizer.get_config(cluster, 'cluster_wake_up'))
ccontr.update({'id': cluster.id}, {'read': True,
'read_reason': read_reason})
target_feed = fcontr.read(id__ne=cluster.main_article.feed_id,
user_id=cluster.user_id).first()
clusterizer = Clusterizer()
self.assertFalse(clusterizer.get_config(
target_feed, 'cluster_enabled'))
fcontr.update({'id__in': [f.id for f in cluster.feeds]
+ [target_feed.id]},
{'cluster_wake_up': True, 'cluster_enabled': True})
clusterizer = Clusterizer()
self.assertTrue(clusterizer.get_config(cluster, 'cluster_enabled'))
target_feed = fcontr.read(id__ne=cluster.main_article.feed_id,
user_id=cluster.user_id).first()
article = self._clone_article(ArticleController(),
cluster.main_article, target_feed)
clusterizer = Clusterizer()
self.assertTrue(clusterizer.get_config(article, 'cluster_wake_up'))
ClusterController(cluster.user_id).clusterize_pending_articles()
self.assertEqual(2, len(article.cluster.articles))
self.assertInCluster(article, cluster)
return ccontr.get(id=cluster.id)
def test_no_unread_on_cluster(self):
self.assertTrue(self._test_unread_on_cluster('consulted').read)
def test_unread_on_cluster(self):
self.assertFalse(self._test_unread_on_cluster('marked').read)
def test_adding_to_cluster_by_link(self):
ccontr = ClusterController()
cluster = ccontr.read().first()
ccontr.update({'id': cluster.id}, {'read': True,
'read_reason': 'marked'})
cluster = ccontr.get(id=cluster.id)
self.assertTrue(cluster.read)
article = cluster.articles[0]
articles_count = len(cluster.articles)
fcontr = FeedController(cluster.user_id)
acontr = ArticleController(cluster.user_id)
fcontr.update({'id': article.feed_id}, {'cluster_wake_up': True})
feed = fcontr.read(id__ne=article.feed_id).first()
update_on_all_objs(articles=[article], feeds=[feed],
cluster_enabled=True)
self._clone_article(acontr, article, feed)
ccontr.clusterize_pending_articles()
cluster = ccontr.get(id=cluster.id)
self.assertEqual(articles_count + 1, len(cluster.articles))
self.assertFalse(cluster.read)
def test_similarity_clustering(self):
words = 'Monthi Python Shrubberi Holi Graal life Brian'.split()
words2 = 'And now for something completely different'.split()
simple_vector = {word.lower(): i for i, word in enumerate(words, 1)}
content = ' '.join([(w + ' ') * i for i, w in enumerate(words, 1)])
actrl = ArticleController(2)
actrl.update({}, {'vector': to_vector({'content': content})})
for art in actrl.read():
self.assertEqual(art.simple_vector, simple_vector)
art1, art2, art3 = actrl.read().limit(3)
match, score = get_best_match_and_score(art1, [art2])
self.assertEqual(1, round(score, 10))
self.assertEqual(match, art2)
content = ' '.join([(w + ' ') * i for i, w in enumerate(words2, 1)])
actrl.update({'id': art2.id},
{'vector': to_vector({'content': content})})
art2 = actrl.get(id=art2.id)
self.assertNotEqual(art2.simple_vector, art1.simple_vector)
truncated_content = ' '.join([(w + ' ') * i
for i, w in enumerate(words[:-2], 1)])
actrl.update({'id__nin': [art1.id, art2.id, art3.id]},
{'vector': to_vector({'content': truncated_content})})
match, score = get_best_match_and_score(art1, list(actrl.read()))
self.assertEqual(1, round(score, 10))
self.assertNotEqual(match, art2)
self.assertEqual(match, art3)
match, score = get_best_match_and_score(art1, [art2])
self.assertEqual(0, score)
self.assertEqual(match, art2)
def test_no_mixup(self):
acontr = ArticleController()
ccontr = ClusterController()
total_clusters = len(list(ccontr.read()))
total_articles = len(list(acontr.read()))
for cluster in ccontr.read():
self.assertEqual(1, len(cluster.articles))
for article in acontr.read():
acontr.create(
entry_id=article.entry_id,
feed_id=article.feed_id,
title=article.title,
content=article.content,
link=article.link)
for user_id in ArticleController.get_user_id_with_pending_articles():
ClusterController(user_id).clusterize_pending_articles()
self.assertEqual(2 * total_articles, len(list(acontr.read())))
self.assertEqual(2 * total_clusters, len(list(ccontr.read())))
for cluster in ccontr.read():
self.assertEqual(1, len(cluster.articles))
self.assertEqual(1, len({a.user_id for a in cluster.articles}))
main_article = acontr.read().first()
for article in acontr.read():
acontr.create(
user_id=main_article.user_id,
feed_id=main_article.feed_id,
entry_id=article.entry_id,
title=article.title,
content=article.content,
link=article.link)
for cluster in ccontr.read():
self.assertEqual(1, len({a.user_id for a in cluster.articles}))
|
jaesivsm/pyAggr3g470r
|
tests/controllers/cluster_test.py
|
Python
|
agpl-3.0
| 7,816
|
[
"Brian"
] |
d010c0425362092089c944dcb9638f62e0b8a8e800ca360417d3aa87d2eaf13e
|
from ..core import cmutinf, ncmutinf
from .base import (AlphaAngleBaseMetric, ContactBaseMetric, DihedralBaseMetric,
BaseMetric)
import numpy as np
from itertools import product
from multiprocessing import Pool
from contextlib import closing
__all__ = ['AlphaAngleTransferEntropy', 'ContactTransferEntropy',
'DihedralTransferEntropy']
class TransferEntropyBase(BaseMetric):
"""Base transfer entropy object"""
def _partial_tent(self, p):
i, j = p
return self._est(self.n_bins,
self.data[j].values,
self.shuffled_data[i].values,
self.shuffled_data[j].values,
rng=self.rng,
method=self.method)
def _exec(self):
with closing(Pool(processes=self.n_threads)) as pool:
CMI = list(pool.map(self._partial_tent,
product(self.labels, self.labels)))
pool.terminate()
return np.reshape(CMI, (self.labels.size, self.labels.size)).T
def _before_exec(self, traj):
traj1, traj2 = traj
self.data = self._extract_data(traj2)
self.shuffled_data = self._extract_data(traj1)
self.labels = np.unique(self.data.columns.levels[0])
def __init__(self, normed=True, **kwargs):
self._est = ncmutinf if normed else cmutinf
self.partial_transform.__func__.__doc__ = """
Partial transform a mdtraj.Trajectory into an n_residue by n_residue
matrix of transfer entropy scores.
Parameters
----------
traj : tuple
Pair of trajectories to transform (state0, state1)
shuffle : int
Number of shuffle iterations (default: 0)
verbose : bool
Whether to display performance
Returns
-------
result : np.ndarray, shape = (n_residue, n_residue)
Transfer entropy matrix
"""
super(TransferEntropyBase, self).__init__(**kwargs)
class AlphaAngleTransferEntropy(AlphaAngleBaseMetric, TransferEntropyBase):
"""Transfer entropy calculations for alpha angles"""
class ContactTransferEntropy(ContactBaseMetric, TransferEntropyBase):
"""Transfer entropy calculations for contacts"""
class DihedralTransferEntropy(DihedralBaseMetric, TransferEntropyBase):
"""Transfer entropy calculations for dihedral angles"""
|
msmbuilder/mdentropy
|
mdentropy/metrics/tent.py
|
Python
|
mit
| 2,499
|
[
"MDTraj"
] |
76c0ab305d99c34ccbb8b84a783bcb15c03af2dff5d8f9621738132f5807b582
|
import numpy as np
__all__ = ['Linear', 'Sine', 'Cosine', 'Square', 'Cube', 'Powerlaw',
'Exponential', 'Lorentzian', 'Gaussian', 'LogNormal']
def Linear(x, a, b):
"""First-order polynomial
Inputs:
-------
``x``: independent variable
``a``: slope
``b``: offset
Formula:
--------
``a*x+b``
"""
return a * x + b
def Sine(x, a, omega, phi, y0):
"""Sine function
Inputs:
-------
``x``: independent variable
``a``: amplitude
``omega``: circular frequency
``phi``: phase
``y0``: offset
Formula:
--------
``a*sin(x*omega + phi)+y0``
"""
return a * np.sin(x * omega + phi) + y0
def Cosine(x, a, omega, phi, y0):
"""Cosine function
Inputs:
-------
``x``: independent variable
``a``: amplitude
``omega``: circular frequency
``phi``: phase
``y0``: offset
Formula:
--------
``a*cos(x*omega + phi)+y0``
"""
return a * np.cos(x * omega + phi) + y0
def Square(x, a, b, c):
"""Second order polynomial
Inputs:
-------
``x``: independent variable
``a``: coefficient of the second-order term
``b``: coefficient of the first-order term
``c``: additive constant
Formula:
--------
``a*x^2 + b*x + c``
"""
return a * x ** 2 + b * x + c
def Cube(x, a, b, c, d):
"""Third order polynomial
Inputs:
-------
``x``: independent variable
``a``: coefficient of the third-order term
``b``: coefficient of the second-order term
``c``: coefficient of the first-order term
``d``: additive constant
Formula:
--------
``a*x^3 + b*x^2 + c*x + d``
"""
return a * x ** 3 + b * x ** 2 + c * x + d
def Powerlaw(x, a, alpha):
"""Power-law function
Inputs:
-------
``x``: independent variable
``a``: scaling factor
``alpha``: exponent
Formula:
--------
``a*x^alpha``
"""
return a * x ** alpha
def Exponential(x, a, tau, y0):
"""Exponential function
Inputs:
-------
``x``: independent variable
``a``: scaling factor
``tau``: time constant
``y0``: additive constant
Formula:
--------
``a*exp(x/tau)+y0``
"""
return np.exp(x / tau) * a + y0
def Lorentzian(x, a, x0, sigma, y0):
"""Lorentzian peak
Inputs:
-------
``x``: independent variable
``a``: scaling factor (extremal value)
``x0``: center
``sigma``: half width at half maximum
``y0``: additive constant
Formula:
--------
``a/(1+((x-x0)/sigma)^2)+y0``
"""
return a / (1 + ((x - x0) / sigma) ** 2) + y0
def Gaussian(x, a, x0, sigma, y0):
"""Gaussian peak
Inputs:
-------
``x``: independent variable
``a``: scaling factor (extremal value)
``x0``: center
    ``sigma``: width parameter (standard deviation)
``y0``: additive constant
Formula:
--------
    ``a*exp(-(x-x0)^2/(2*sigma^2))+y0``
"""
return a * np.exp(-(x - x0) ** 2 / (2 * sigma ** 2)) + y0
def LogNormal(x, a, mu, sigma):
"""PDF of a log-normal distribution
Inputs:
-------
``x``: independent variable
``a``: amplitude
``mu``: center parameter
``sigma``: width parameter
Formula:
--------
    ``a/(2*pi*sigma^2*x^2)^0.5 * exp(-(log(x)-mu)^2/(2*sigma^2))``
"""
return a / np.sqrt(2 * np.pi * sigma ** 2 * x ** 2) *\
np.exp(-(np.log(x) - mu) ** 2 / (2 * sigma ** 2))
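# A minimal, hedged usage sketch (not part of the original module): fitting
# noisy data with ``Gaussian`` via scipy.optimize.curve_fit. scipy is assumed
# to be available; all values below are illustrative.
if __name__ == '__main__':
    from scipy.optimize import curve_fit
    rng = np.random.RandomState(0)
    x = np.linspace(-5, 5, 200)
    y = Gaussian(x, 2.0, 0.5, 1.2, 0.1) + 0.05 * rng.randn(x.size)
    popt, pcov = curve_fit(Gaussian, x, y, p0=(1.0, 0.0, 1.0, 0.0))
    print('Fitted (a, x0, sigma, y0):', popt)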
|
awacha/sastool
|
sastool/fitting/fitfunctions/basic.py
|
Python
|
bsd-3-clause
| 3,673
|
[
"Gaussian"
] |
e83e09ecf186bcb45879531865cbfde54f80df3aea36214322fbaa7a217041d5
|
#!/usr/bin/env python
# =============================================================================
# GLOBAL IMPORTS
# =============================================================================
import os
import time
import json
import glob
import logging
import numpy as np
import pandas as pd
from simtk import unit
from pkganalysis.submission import load_submissions
from pkganalysis.unbiasedanalyzer import analyze_directory, BARAnalyzer, InstantaneousWorkAnalyzer
from pkganalysis.sampling import (SamplingSubmission, energy_evaluations_iteration_cutoffs,
cpu_time_iteration_cutoffs, get_iteration_cutoffs,
YANK_N_ITERATIONS)
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)
# =============================================================================
# CONSTANTS
# =============================================================================
# Paths.
SCRIPT_DIR_PATH = os.path.dirname(os.path.realpath(__file__))
# Paths to submissions.
SUBMISSIONS_DIR_PATH = os.path.join(os.path.dirname(SCRIPT_DIR_PATH), 'Submissions')
SAMPLING_SUBMISSIONS_DIR_PATH = os.path.join(SUBMISSIONS_DIR_PATH, '975')
SUBMISSIONS_USER_MAP_FILE_PATH = os.path.join(SUBMISSIONS_DIR_PATH, 'SAMPL6_user_map.csv')
# The directory containing YANK output data.
YANK_SAMPLING_DIR_PATH = os.path.join(os.path.dirname(SCRIPT_DIR_PATH), 'SAMPLing')
# Output paths of the files generated by this analysis.
ANALYSIS_DIR_PATH = os.path.join(SCRIPT_DIR_PATH, 'YankAnalysis')
SAMPLING_ANALYSIS_DIR_PATH = os.path.join(ANALYSIS_DIR_PATH, 'Sampling')
YANK_CPU_TIMES_FILE_PATH = os.path.join(SAMPLING_ANALYSIS_DIR_PATH, 'yank_cpu_times.json')
# This was estimated from the average timings of 10 non-checkpoint iterations of a
# YANK simulation using the mixing replicas and I/O writing as serial bottlenecks.
YANK_PARALLEL_EFFICIENCY = {'complex': 0.87, 'solvent': 0.92}
# All system ids.
SYSTEM_IDS = [
'CB8-G3-0', 'CB8-G3-1', 'CB8-G3-2', 'CB8-G3-3', 'CB8-G3-4',
'OA-G3-0', 'OA-G3-1', 'OA-G3-2', 'OA-G3-3', 'OA-G3-4',
'OA-G6-0', 'OA-G6-1', 'OA-G6-2', 'OA-G6-3', 'OA-G6-4'
]
# Systems, phases, and first iteration that used 8 GPUs instead of 4.
YANK_8GPUS = {
'CB8-G3-0': {'complex': 0, 'solvent': 20001},
'CB8-G3-1': {'complex': 0, 'solvent': 20001},
'CB8-G3-2': {'complex': 0},
'CB8-G3-3': {'complex': 0, 'solvent': 20001},
'CB8-G3-4': {'complex': 0, 'solvent': 20001},
'OA-G3-1': {'complex': 0},
'OA-G6-2': {'complex': 0},
}
# Restraint distance cutoff that defines the binding site. These were obtained
# as the 99.99-percentile of the restraint distances in the bound state. The
# maximum across the 5 replicates was taken.
RESTRAINT_DISTANCE_CUTOFFS = {
'CB8-G3': 0.458306729794 * unit.nanometers,
'OA-G3': 0.5773037076 * unit.nanometers,
'OA-G6': 0.606282174587 * unit.nanometers,
}
# =============================================================================
# UTILITY FUNCTIONS
# =============================================================================
def get_system_experiment_dir(system_id):
"""Return the path to YANK output directory for a system."""
subsubdir = system_id.replace('-', '')
subdir = 'experiment-' + subsubdir[:-1]
return os.path.join(YANK_SAMPLING_DIR_PATH, 'experiments', subdir, subsubdir)
# =============================================================================
# EXTRACT YANK CPU TIME
# =============================================================================
def diff_timestamps(timestamp1, timestamp2):
"""Compute time delta in seconds from the two string timestamps."""
t1 = time.mktime(time.strptime(timestamp1))
t2 = time.mktime(time.strptime(timestamp2))
return t2 - t1
def extract_yank_cpu_time(system_id):
"""Extract the total CPU/GPU simulation time from a YANK simulation NC file."""
from yank.repex import Reporter
def time_str(time_in_seconds):
return '{:.2f}s ({:.2f}h, {:.2f}d)'.format(time_in_seconds, time_in_seconds/3600,
time_in_seconds/3600/24)
total_time = 0.0
output_dir_path = get_system_experiment_dir(system_id)
for phase_name in ['complex', 'solvent']:
phase_time = 0.0
nc_file_path = os.path.join(output_dir_path, phase_name + '.nc')
# Read all the timestamps.
reporter = None
try:
reporter = Reporter(nc_file_path, open_mode='r')
timestamps = reporter.read_timestamp()
finally:
if reporter is not None:
reporter.close()
timestamps = timestamps.tolist()
assert len(timestamps) == YANK_N_ITERATIONS + 1
# Separate 4 GPUs from 8 GPUs calculations.
try:
switch_iteration = YANK_8GPUS[system_id][phase_name]
except KeyError:
# All the calculation with 4 GPUs.
timestamps = {4: timestamps, 8: []}
else:
timestamps = {4: timestamps[:switch_iteration],
8: timestamps[switch_iteration:]}
# Compute the average mean duration ignoring interruptions.
mean_iteration_durations = {}
for n_gpus, stamps in timestamps.items():
if len(stamps) == 0:
continue
# Compute time deltas.
timedeltas = [diff_timestamps(stamps[i-1], stamps[i]) for i in range(1, len(stamps))]
            # Detect interruptions by finding iterations that took > 10 times the median.
median_delta = np.median(timedeltas)
interruption_iterations = [i for i in range(len(timedeltas))
if timedeltas[i] > 10 * median_delta]
logger.info('Detected interruptions for {}/{} at '
'iterations {}'.format(system_id, phase_name, interruption_iterations))
# Compute average iteration duration.
mean_iteration_durations[n_gpus] = np.mean([t for i, t in enumerate(timedeltas)
if i not in interruption_iterations])
# Compute total wallclock time.
interruption_iterations = interruption_iterations + [len(stamps)-1]
for interruption_idx, interruption_iteration in enumerate(interruption_iterations):
if interruption_idx == 0:
timestamp1 = stamps[0]
else:
# Use mean iteration duration to fill the previous interruption.
phase_time += n_gpus * mean_iteration_durations[n_gpus]
timestamp1 = stamps[interruption_iterations[interruption_idx-1] + 1]
timestamp2 = stamps[interruption_iteration]
phase_time += n_gpus * diff_timestamps(timestamp1, timestamp2)
# Add extra iteration for calculations that switched from 4 to 8 GPUs.
if len(mean_iteration_durations) == 2:
phase_time += 8 * mean_iteration_durations[8]
# Take into account parallelization efficiency.
phase_time *= YANK_PARALLEL_EFFICIENCY[phase_name]
logger.info('CPU time for phase {}/{}: {}'.format(system_id, phase_name, time_str(phase_time)))
total_time += phase_time
logger.info('Total CPU time for {}: {}'.format(system_id, time_str(total_time)))
return total_time
def extract_all_yank_cpu_times():
"""Extract the total times of all YANK simulations.
Returns
-------
total_times : dict
The dictionary system_id: total_time (in seconds).
"""
# Check if we have already calculated it.
try:
with open(YANK_CPU_TIMES_FILE_PATH, 'r') as f:
total_times = json.load(f)
logger.info('Found pre-computed YANK total times '
'in {}'.format(YANK_CPU_TIMES_FILE_PATH))
return total_times
except FileNotFoundError:
pass
# Extract all total times.
logger.info('Computing YANK total times')
total_times = {system_id: extract_yank_cpu_time(system_id) for system_id in SYSTEM_IDS}
# Save the result.
with open(YANK_CPU_TIMES_FILE_PATH, 'w') as f:
json.dump(total_times, f, indent=4, sort_keys=True)
return total_times
# =============================================================================
# GENERATE ANALYSIS ITERATION CUTOFFS
# =============================================================================
def generate_iteration_cutoffs():
"""Read all the submissions costs and determine the corresponding YANK iterations."""
# Import user map and submissions.
with open(SUBMISSIONS_USER_MAP_FILE_PATH, 'r') as f:
user_map = pd.read_csv(f)
submissions = load_submissions(SamplingSubmission, SAMPLING_SUBMISSIONS_DIR_PATH, user_map)
# Extract YANK CPU times.
yank_cpu_times = extract_all_yank_cpu_times()
# Generate all iterations that need to be analyzed.
systems_iteration_cutoffs = {system_id: set() for system_id in SYSTEM_IDS}
for submission in submissions:
for system_id, row in submission.cost.iterrows():
system_name = system_id[:-2]
# Generate the 100 iteration cutoffs splitting the YANK calculation by energy evaluations.
n_energy_evaluations = row['N energy evaluations']
iteration_cutoffs = energy_evaluations_iteration_cutoffs(n_energy_evaluations, system_name)
systems_iteration_cutoffs[system_id].update(iteration_cutoffs)
# For GROMACS-EE, analyze also the iterations corresponding to energy evaluations
# resulting from adding the full cost of the equilibration stage to each replica.
if submission.paper_name == 'GROMACS/EE' and system_name != 'CB8-G3':
mean_free_energies = submission.mean_free_energies()
mean_data = mean_free_energies[mean_free_energies['System name'] == system_name]
first_nonzero_idx = np.nonzero(mean_data['$\Delta$G [kcal/mol]'].values)[0][0]
calibration_cost = mean_data['N energy evaluations'].values[first_nonzero_idx] * 4
n_energy_evaluations += calibration_cost
iteration_cutoffs = energy_evaluations_iteration_cutoffs(n_energy_evaluations, system_name,
start=calibration_cost)
systems_iteration_cutoffs[system_id].update(iteration_cutoffs)
# Use CPU time when available and fallback on wall-clock time if not.
tot_time = row['CPU time'] if not np.isnan(row['CPU time']) else row['Wall clock time']
# Generate the 100 iteration cutoffs splitting the YANK calculation by CPU time.
try:
iteration_cutoffs = cpu_time_iteration_cutoffs(tot_time, system_id, yank_cpu_times)
except AssertionError:
print('Skipping iterations corresponding to CPU time for submission {}, system {} '
'with total CPU time {}'.format(submission.receipt_id, system_id, tot_time))
else:
systems_iteration_cutoffs[system_id].update(iteration_cutoffs)
# Add iteration cutoffs for all 40000 iterations.
for system_id, iteration_cutoffs in systems_iteration_cutoffs.items():
iteration_cutoffs.update(get_iteration_cutoffs(YANK_N_ITERATIONS))
# Convert sets to ordered lists.
for system_id, iteration_cutoffs in systems_iteration_cutoffs.items():
systems_iteration_cutoffs[system_id] = sorted(iteration_cutoffs)
return systems_iteration_cutoffs
def get_computed_free_energies(system_id, dir_path=SAMPLING_ANALYSIS_DIR_PATH, file_prefix='yank', cleanup=False):
"""Read data and cleanup jobid files.
Parameters
----------
file_prefix: str, optional
The prefix of the job id files without system name and jobid.
"""
# Read all free energies.
file_base_path = os.path.join(dir_path, '{}-{}'.format(file_prefix, system_id))
output_file_path = file_base_path + '.json'
# Read current merged file.
free_energies = {}
if os.path.exists(output_file_path):
with open(output_file_path, 'r') as f:
free_energies.update(json.load(f))
# Merge parallel jobs output.
for file_path in glob.glob(file_base_path + '-*.json'):
with open(file_path, 'r') as f:
free_energies.update(json.load(f))
# Delete parallel job files.
if cleanup:
# Write free energies merged result.
with open(output_file_path, 'w') as f:
json.dump(free_energies, f, indent=4, sort_keys=True)
# Remove jobid files.
for file_path in glob.glob(file_base_path + '-*.json'):
os.remove(file_path)
# Convert iterations from string to integers.
try:
        # If the keys are plain iteration numbers, convert the strings to integers.
return {int(k): v for k, v in free_energies.items()}
except ValueError:
# In this case, free_energies is the decomposition data.
return free_energies
# =============================================================================
# MAIN
# =============================================================================
def analyze_yank(jobid, jobspersystem, cleanup=False, dry_run=False, analyzer_class=None,
output_dir_path=SAMPLING_ANALYSIS_DIR_PATH, file_prefix='yank',
iteration_cutoffs=None, print_iterations_to_compute=True, **analyzer_kwargs):
"""Analyze YANK at all the submitted wall-clock times/number of energy evaluations."""
# Determine the iterations at which to analyze the YANK calculations.
if iteration_cutoffs is None:
systems_iteration_cutoffs = generate_iteration_cutoffs()
else:
systems_iteration_cutoffs = {system_id: iteration_cutoffs for system_id in SYSTEM_IDS}
# Load iterations that have been already calculated.
for system_id, iteration_cutoffs in systems_iteration_cutoffs.items():
computed_iterations = get_computed_free_energies(system_id, dir_path=output_dir_path,
file_prefix=file_prefix, cleanup=cleanup)
# Compute only the iterations that we haven't computed.
iterations_to_compute = set(iteration_cutoffs) - set(computed_iterations)
systems_iteration_cutoffs[system_id] = sorted(iterations_to_compute)
# Remove System IDs with no iterations to compute.
systems_iteration_cutoffs = {k: v for k, v in systems_iteration_cutoffs.items() if len(v) > 0}
# If this is a dry run, just print how many iterations we need to analyze for each system.
if dry_run:
if print_iterations_to_compute:
for k, v in systems_iteration_cutoffs.items():
print(k, len(v))
return systems_iteration_cutoffs
# Split all iteration cutoffs for jobs.
jobs_systems_iteration_cutoffs = []
for system_id, iteration_cutoffs in systems_iteration_cutoffs.items():
for i, cutoffs in enumerate(np.array_split(iteration_cutoffs, jobspersystem)):
system_job_id = system_id + '-' + str(i)
jobs_systems_iteration_cutoffs.append((system_id, system_job_id, cutoffs.tolist()))
# Determine iterations assigned to this job.
if jobid:
n_jobs = len(jobs_systems_iteration_cutoffs)
jobs_to_run = [jobs_systems_iteration_cutoffs[jobid - 1]]
logger.info('Running job id {}/{} for a total of {} iterations.'
''.format(jobid, n_jobs, len(jobs_to_run[0][-1])))
else:
jobs_to_run = jobs_systems_iteration_cutoffs
# Run analyzer.
for system_id, system_job_id, iteration_cutoffs in jobs_to_run:
logger.info('Computing free energy profile of {}'.format(system_job_id))
system_name = system_id[:-2]
experiment_directory = get_system_experiment_dir(system_id)
distance_cutoff = RESTRAINT_DISTANCE_CUTOFFS[system_name]
# Analyze all iterations.
free_energies = analyze_directory(experiment_directory,
distance_cutoffs=distance_cutoff,
iteration_cutoffs=iteration_cutoffs,
analyzer_class=analyzer_class,
**analyzer_kwargs)
# Store analysis results.
free_energies = {i: f for i, f in zip(iteration_cutoffs, free_energies)}
output_file_path = os.path.join(output_dir_path, '{}-{}.json'.format(file_prefix, system_job_id))
with open(output_file_path, 'w') as f:
json.dump(free_energies, f, indent=4, sort_keys=True)
return systems_iteration_cutoffs
def analyze_yank_restraint(system_id):
"""Analyze YANK at different restraint distance cutoffs."""
    # Infer the experiment directory. The folder names have no dashes.
system_id = system_id.replace('-', '')
store_dir_path = os.path.join('..', 'SAMPLing', 'experiments',
'experiment-'+system_id[:-1], system_id)
# We recompute the free energy at intervals of 1A from 3 to 15A.
restraint_distance_cutoffs = np.linspace(3.0, 15.0, num=13) * unit.angstrom
free_energies = analyze_directory(store_dir_path, distance_cutoffs=restraint_distance_cutoffs)
# Store analysis results.
free_energies = {c: f for c, f in zip(restraint_distance_cutoffs / unit.angstrom, free_energies)}
restraint_analysis_dir_path = os.path.join(ANALYSIS_DIR_PATH, 'RestraintAnalysis')
os.makedirs(restraint_analysis_dir_path, exist_ok=True)
output_file_path = os.path.join(restraint_analysis_dir_path, '{}.json'.format(system_id))
with open(output_file_path, 'w') as f:
json.dump(free_energies, f, indent=4, sort_keys=True)
def analyze_yank_bias(jobid, jobspersystem, cleanup=False, dry_run=False, analyzer_class=None):
"""Analyze YANK discarding the initial iterations to see if it helps with the bias."""
# starting_iterations = [250, 500, 1000] + list(range(2000, 20000, 2000))
starting_iterations = [0, 5, 10, 25, 50, 100, 250, 500, 1000, 2000, 4000, 8000, 16000, 24000, 32000]
# Obtain the iterations that have been already analyzed by YANK.
    # Using only these allows us to compare the subset to the full simulation later.
systems_iteration_cutoffs = generate_iteration_cutoffs()
# Save bias analysis with BAR in a separate directory.
if analyzer_class is not None and analyzer_class.__name__ == 'BARAnalyzer':
subdir_name = 'BiasAnalysis-BAR'
else:
subdir_name = 'BiasAnalysis'
# Analyze YANK free energy trajectory discarding progressively more and more data.
# Here we accumulate the jobs that we need to run before distributing them.
iterations_to_analyze = {}
for starting_iteration in starting_iterations:
# Determine the 100 iterations to analyze discarding the initial starting_iteration.
target_iterations = np.linspace(starting_iteration, YANK_N_ITERATIONS, 101)
target_iterations = [int(x) for x in target_iterations[1:]] # Convert to integers.
# Create the analysis directory if it doesn't exist.
bias_analysis_dir_path = os.path.join(ANALYSIS_DIR_PATH, subdir_name, 'iter' + str(starting_iteration))
os.makedirs(bias_analysis_dir_path, exist_ok=True)
# Determine 100 iterations starting from starting_iteration that have been already analyzed.
for system_id, iteration_cutoffs in systems_iteration_cutoffs.items():
# Retrieve free energies that have been already computed and cleanup.
analyzed_iterations = get_computed_free_energies(system_id, dir_path=bias_analysis_dir_path,
cleanup=cleanup)
# If we cleanup, cleanup also decomposition files.
if cleanup:
get_computed_free_energies(system_id, dir_path=bias_analysis_dir_path,
file_prefix='fe-decomposition', cleanup=True)
iteration_indices = np.searchsorted(iteration_cutoffs, target_iterations)
# Transform to a set to make sure we don't have duplicate iterations.
iterations = sorted(set([int(iteration_cutoffs[i]) for i in iteration_indices]))
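            # e.g. with stored cutoffs [100, 200, 300] and targets [150, 250],
            # searchsorted returns indices [1, 2], i.e. the first stored
            # cutoffs >= each target: {200, 300}.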
# Don't recompute iterations that we have already analyzed.
iterations = [i for i in iterations if i not in analyzed_iterations]
if len(iterations) > 0:
iterations_to_analyze[(starting_iteration, system_id)] = iterations
# If this is a dry run, just print how many iterations we need to analyze for each system.
if dry_run:
for k, v in iterations_to_analyze.items():
print(k, len(v))
return
# Split all iteration cutoffs for jobs.
jobs_iterations_to_analyze = []
for (starting_iteration, system_id), iteration_cutoffs in iterations_to_analyze.items():
for i, cutoffs in enumerate(np.array_split(iteration_cutoffs, jobspersystem)):
system_job_id = system_id + '-' + str(i)
jobs_iterations_to_analyze.append((system_id, starting_iteration, system_job_id, cutoffs.tolist()))
# Determine iterations assigned to this job.
if jobid:
n_jobs = len(jobs_iterations_to_analyze)
jobs_to_run = [jobs_iterations_to_analyze[jobid - 1]]
logger.info('Running job id {}/{} for a total of {} iterations.'
''.format(jobid, n_jobs, len(jobs_to_run[0][-1])))
else:
jobs_to_run = jobs_iterations_to_analyze
# Run analyzer.
for system_id, starting_iteration, system_job_id, iteration_cutoffs in jobs_to_run:
logger.info('Computing free energy profile of {} starting from iteration {}'.format(
system_job_id, starting_iteration))
system_name = system_id[:-2]
experiment_directory = get_system_experiment_dir(system_id)
distance_cutoff = RESTRAINT_DISTANCE_CUTOFFS[system_name]
# Analyze all iterations.
free_energies, decomposition = analyze_directory(experiment_directory,
n_discarded_initial_iterations=starting_iteration,
distance_cutoffs=distance_cutoff,
iteration_cutoffs=iteration_cutoffs,
analyzer_class=analyzer_class,
return_decomposition=True)
# Store analysis results.
free_energies = {i: f for i, f in zip(iteration_cutoffs, free_energies)}
bias_analysis_dir_path = os.path.join(ANALYSIS_DIR_PATH, subdir_name, 'iter' + str(starting_iteration))
output_file_path = os.path.join(bias_analysis_dir_path, 'yank-{}.json'.format(system_job_id))
with open(output_file_path, 'w') as f:
json.dump(free_energies, f, indent=4, sort_keys=True)
# Store free energy decomposition data.
output_file_path = os.path.join(bias_analysis_dir_path, 'fe-decomposition-{}.json'.format(system_job_id))
with open(output_file_path, 'w') as f:
json.dump(decomposition, f, indent=4, sort_keys=True)
def analyze_yank_correlation(jobid, jobspersystem, cleanup=False, dry_run=False):
"""Analyze YANK discarding the initial iterations to see if it helps with the bias."""
statistical_inefficiencies = [2, 5, 10, 20, 50, 100, 200]
output_dir_pattern = os.path.join(ANALYSIS_DIR_PATH, 'CorrelationAnalysis', 'statineff-{}')
# Analyze only 100 iterations for each statistical inefficiency.
iteration_cutoffs = get_iteration_cutoffs(YANK_N_ITERATIONS)
# First run a dry run to figure out how many systems we need to compute for each statistical inefficiency.
n_jobs_per_ineff = [0]
for stat_inefficiency in statistical_inefficiencies:
output_dir_path = output_dir_pattern.format(stat_inefficiency)
os.makedirs(output_dir_path, exist_ok=True)
systems_iteration_cutoffs = analyze_yank(jobid, jobspersystem, cleanup=cleanup, dry_run=True,
output_dir_path=output_dir_path, file_prefix='yank',
iteration_cutoffs=iteration_cutoffs,
print_iterations_to_compute=False,
fixed_statistical_inefficiency=stat_inefficiency)
if dry_run:
for k, v in systems_iteration_cutoffs.items():
print(stat_inefficiency, k, len(v))
n_jobs_per_ineff.append(len(systems_iteration_cutoffs) * jobspersystem)
# Compute the cumulative number of jobs to identify which one we need to run.
cum_n_jobs_per_ineff = np.array(n_jobs_per_ineff).cumsum()
print('Total number of jobs:', sum(n_jobs_per_ineff))
# If this is a dry run, there's nothing else to do.
if dry_run:
return
# Identify statistical efficiency and jobid.
for i, cum_n_jobs in enumerate(cum_n_jobs_per_ineff):
if cum_n_jobs >= jobid:
break
stat_inefficiency = statistical_inefficiencies[i-1]
jobid -= cum_n_jobs_per_ineff[i-1]
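    # Worked example of the index arithmetic above (hypothetical counts): with
    # n_jobs_per_ineff = [0, 4, 6] the cumulative sums are [0, 4, 10], so
    # jobid = 5 breaks at i = 2, selects statistical_inefficiencies[1] and is
    # remapped to the local jobid 5 - 4 = 1 within that inefficiency.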
output_dir_path = output_dir_pattern.format(stat_inefficiency)
analyze_yank(jobid, jobspersystem, cleanup=cleanup, dry_run=dry_run,
output_dir_path=output_dir_path, file_prefix='yank',
iteration_cutoffs=iteration_cutoffs,
fixed_statistical_inefficiency=stat_inefficiency)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--jobid', type=int)
parser.add_argument('--jobspersystem', type=int, default=1)
args = parser.parse_args()
# Run YANK analysis.
analyze_yank(args.jobid, args.jobspersystem, cleanup=True, dry_run=True)
# analyze_yank(args.jobid, args.jobspersystem, cleanup=True, dry_run=True,
# analyzer_class=InstantaneousWorkAnalyzer)
# Collect data for YANK restraint sensitivity analysis.
# system_id = SYSTEM_IDS[args.jobid-1]
# analyze_yank_restraint(system_id)
# Collect data for YANK bias analysis.
# analyze_yank_bias(args.jobid, args.jobspersystem, cleanup=True, dry_run=True)
# Collect data for YANK correlation analysis.
# analyze_yank_correlation(args.jobid, args.jobspersystem, cleanup=True, dry_run=True)
|
MobleyLab/SAMPL6
|
host_guest/Analysis/Scripts/analyze_yank.py
|
Python
|
mit
| 26,710
|
[
"Gromacs"
] |
3bdc4a58bb1071aa01a551d4d70541806b3948cfb2d2fc3462a29d1638871aea
|
#!/usr/bin/env python
from sklearn.mixture import GaussianMixture
from scipy import stats
import numpy as np
import sys
from math import sqrt, log, exp, pi
"""
https://www.kaggle.com/charel/learn-by-example-expectation-maximization
The way to estimate a single Gaussian from the dataset is to take the mean and
standard deviation of the data and feed those parameters into a Gaussian
distribution to obtain a probability density function.
For mixtures of several components, use the expectation-maximization (EM)
algorithm to estimate the parameters.
"""
class Gaussian:
def __init__(self, mu, sigma):
self.mu = mu
self.sigma = sigma
def pdf(self, datum):
        u = (datum - self.mu) / abs(self.sigma)
y = (1 / (sqrt(2 * pi) * abs(self.sigma))) * exp(-u * u / 2)
return y
def __repr__(self):
return 'Gaussian({0:4.6}, {1:4.6})'.format(self.mu, self.sigma)
def main():
if len(sys.argv) < 3:
print("usage: file components")
exit(0)
filename = sys.argv[1]
components = int(sys.argv[2])
data = np.genfromtxt(filename, comments='#', delimiter='\n')
best_single = Gaussian(np.mean(data), np.std(data))
print("Best single estimate")
print(best_single)
print()
print("Mixture estimate")
gmm = GaussianMixture(n_components=components, tol=0.000001, max_iter=1000)
gmm.fit(np.expand_dims(data, 1))
n = 0
for mu, sd, p in zip(gmm.means_.flatten(), np.sqrt(gmm.covariances_.flatten()), gmm.weights_):
print('Gaussian {:}: weight = {:.2f} μ = {:.2f} σ = {:.2f}'.format(n, p, mu, sd))
n += 1
if __name__ == '__main__':
    main()
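# Example invocation (hypothetical data file with one numeric sample per line):
#   python estimate-gaussian-mixture.py samples.txt 2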
|
qeedquan/misc_utilities
|
math/probability/mixtures/estimate-gaussian-mixture.py
|
Python
|
mit
| 1,634
|
[
"Gaussian"
] |
2d46f44771a28be9da330a1a173f9fd5b34249cd6dd7125e4415c7512cd08c75
|
## thermo.py
## I implement symbolic computation in thermodynamics to solve examples and problems
##
############################################################################
## Copyleft 2015, Ernest Yeung <ernestyalumni@gmail.com>
##
## 20151019
##
## This program, along with all its code, is free software; you can redistribute
## it and/or modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software Foundation, Inc.,
## 51 Franklin Street, Fifth Floor, Boston, MA
## 02110-1301, USA
##
## Governing the ethics of using this program, I default to the Caltech Honor Code:
## ``No member of the Caltech community shall take unfair advantage of
## any other member of the Caltech community.''
##
## Donate, and support my other scientific and engineering endeavors at
## ernestyalumni.tilt.com
##
## Facebook : ernestyalumni
## linkedin : ernestyalumni
## Tilt/Open : ernestyalumni
## twitter : ernestyalumni
## youtube : ernestyalumni
## wordpress : ernestyalumni
##
############################################################################
import sympy
from sympy import Symbol, Eq, N
from sympy.solvers import solve
from sympy.abc import gamma
from sympy import Rational as Rat
import decimal
from decimal import Decimal
import Physique
from Physique import FCconv, KCconv, FundConst, conv, T_C, T_K, T_F
#########################
##### Physical Constants
#########################
N_A = FundConst[ FundConst["Quantity"].str.contains("Avogadro")].loc[42,:]
k_BOLTZ = FundConst[ FundConst["Quantity"].str.contains("Boltzmann") ].loc[49,:]
P_frmATM = conv[ conv["Toconvertfrom"].str.contains("atm") ].loc[15,:] # 1 atm to Pascal conversion for pressure
###########################################################################
##### Kittel, Kroemer. Thermal Physics
###########################################################################
##################################################
#### Chapter 10: Phase Transformation
##################################################
#############################################
## 2. Calculation of $dT/dp$ for water
#############################################
L = 2260 # J g^{-1}
boilingwatertemp_K = KCconv.subs(T_C,100).rhs # boiling point of water in Kelvin
tau_boilingwater = boilingwatertemp_K * k_BOLTZ.Value
P = 1 # atm
dpdtau_1002 = L*18.0153/float(N_A.Value)/( tau_boilingwater**2 )
dtaudp_1002 = 1./ dpdtau_1002 # in J/Pascal
dTdp_1002 = dtaudp_1002 / (k_BOLTZ.Value ) # 28.4348535111262 K/atm
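# The computation above is Clausius-Clapeyron with an ideal-gas vapour phase,
# dp/dtau = l*p/tau**2, where l = L * 18.0153 g/mol / N_A is the latent heat
# per molecule and p = 1 atm is left implicit (so dpdtau_1002 is in atm/J);
# dT/dp then follows from tau = k_B * T.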
#############################################
## 3. Heat of vaporization of ice
#############################################
L1003 = ( 4.58 - 3.88 )/(0 - (-2.) )*((KCconv.subs(T_C,1.).rhs)**2)/( (4.58-3.88)/(0 - (-2)) * ( 1. ) + 3.88 ) * k_BOLTZ.Value*N_A.Value # 51705.6757640485
###########################################################################
##### Ralph Baierlein, Thermal Physics, Cambridge University Press, 1999
###########################################################################
########################################
#### 1 Background
### Problems
########################################
###################################
## 4. Adiabatic compression
###################################
p_i = Symbol("p_i", positive=True)
V_i = Symbol("V_i", positive=True)
tau_i = Symbol("tau_i", positive=True)
N_i = Symbol("N_i", positive=True)
idealgaslaw_i = Eq( p_i*V_i, N_i*tau_i)
p_f = Symbol("p_f", positive=True)
V_f = Symbol("V_f", positive=True)
tau_f = Symbol("tau_f", positive=True)
idealgaslaw_f = Eq( p_f*V_f, N_i*tau_f)
adia_tV = Eq( tau_i*V_i**(gamma-1) , tau_f*V_f**(gamma-1) )
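# For a reversible adiabatic ideal-gas process p*V**gamma = const; eliminating
# p with the ideal gas law p*V = N*tau gives tau*V**(gamma - 1) = const, which
# is the relation adia_tV above.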
##############################
# (a)
##############################
roomtemp_K = KCconv.subs(T_C,20).rhs # room temperature in Kelvin
Prob0104ans = adia_tV.subs(gamma,1.4).subs(V_f,1).subs(V_i,15).subs(tau_i, roomtemp_K) # answer to Problem 4 of Chapter 1
Prob0104ans = N( Prob0104ans.lhs) # 866.016969686253 K
Prob0104ansC = solve( KCconv.subs( T_K, Prob0104ans), T_C )[0] # 592.866969686253 C
solve( FCconv.subs( T_C, Prob0104ansC ), T_F)[0] # 1099.16054543526 F
##############################
# (b)
##############################
15*( Prob0104ans / roomtemp_K )
|
ernestyalumni/Propulsion
|
thermo.py
|
Python
|
gpl-2.0
| 5,854
|
[
"Avogadro"
] |
0c1cdce723f121c0ab9c1197ee71112315f632fad5ddf0eb28e113b6d1fd67db
|
# $HeadURL: $
''' Synchronizer
Module that updates the RSS database ( ResourceStatusDB ) with the information
in the Resources section. If there are additions in the CS, those are incorporated
to the DB. If there are deletions, entries in RSS tables for those elements are
deleted ( except the Logs table ).
'''
__RCSID__ = '$Id: $'
from DIRAC import gConfig, gLogger, S_OK
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import Resources, RESOURCE_NODE_MAPPING
from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
from DIRAC.ResourceStatusSystem.Client import ResourceStatusClient
from DIRAC.ResourceStatusSystem.Utilities.RssConfiguration import RssConfiguration
class Synchronizer( object ):
'''
Every time there is a successful write on the CS, Synchronizer().sync() is
executed. It updates the database with the values on the CS.
'''
def __init__( self ):
"""
Constructor.
examples:
>>> s = Synchronizer()
"""
self.log = gLogger.getSubLogger( self.__class__.__name__ )
self.operations = Operations()
self.resources = Resources()
self.rStatus = ResourceStatusClient.ResourceStatusClient()
self.rssConfig = RssConfiguration()
self.diracAdmin = DiracAdmin()
def sync( self, _eventName, _params ):
'''
Main synchronizer method. It synchronizes the three types of elements: Sites,
Resources and Nodes. Each _syncX method returns a dictionary with the additions
and deletions.
examples:
>>> s.sync( None, None )
S_OK()
:Parameters:
**_eventName** - any
this parameter is ignored, but needed by caller function.
**_params** - any
this parameter is ignored, but needed by caller function.
:return: S_OK
'''
defSyncResult = { 'added' : [], 'deleted' : [] }
# Sites
syncSites = self._syncSites()
if not syncSites[ 'OK' ]:
self.log.error( syncSites[ 'Message' ] )
syncSites = ( syncSites[ 'OK' ] and syncSites[ 'Value' ] ) or defSyncResult
# Resources
syncResources = self._syncResources()
if not syncResources[ 'OK' ]:
self.log.error( syncResources[ 'Message' ] )
syncResources = ( syncResources[ 'OK' ] and syncResources[ 'Value' ] ) or defSyncResult
# Nodes
syncNodes = self._syncNodes()
if not syncNodes[ 'OK' ]:
self.log.error( syncNodes[ 'Message' ] )
syncNodes = ( syncNodes[ 'OK' ] and syncNodes[ 'Value' ] ) or defSyncResult
# Notify via email to :
self.notify( syncSites, syncResources, syncNodes )
return S_OK()
def notify( self, syncSites, syncResources, syncNodes ):
"""
Method sending email notification with the result of the synchronization. Email
is sent to Operations( EMail/Production ) email address.
examples:
>>> s.notify( {}, {}, {} )
    >>> s.notify( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } }, {}, {} )
    >>> s.notify( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } },
                  { 'Computing' : { 'added' : [ 'newCE01', 'newCE02' ], 'deleted' : [] } }, {} )
:Parameters:
**syncSites** - dict() ( keys: added, deleted )
dictionary with the sites added and deleted from the DB
**syncResources** - dict() ( keys: added, deleted )
dictionary with the resources added and deleted from the DB
**syncNodes** - dict() ( keys: added, deleted )
dictionary with the nodes added and deleted from the DB
:return: S_OK
"""
# Human readable summary
msgBody = self.getBody( syncSites, syncResources, syncNodes )
self.log.info( msgBody )
# Email addresses
toAddress = self.operations.getValue( 'EMail/Production', '' )
fromAddress = self.rssConfig.getConfigFromAddress( '' )
if toAddress and fromAddress and msgBody:
# Subject of the email
setup = gConfig.getValue( 'DIRAC/Setup' )
subject = '[RSS](%s) CS Synchronization' % setup
self.diracAdmin.sendMail( toAddress, subject, msgBody, fromAddress = fromAddress )
def getBody( self, syncSites, syncResources, syncNodes ):
"""
Method that given the outputs of the three synchronization methods builds a
human readable string.
examples:
>>> s.getBody( {}, {}, {} )
''
    >>> s.getBody( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } }, {}, {} )
'''
SITES:
Site:
deleted:1
RubbishSite
'''
    >>> s.getBody( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } },
                   { 'Computing' : { 'added' : [ 'newCE01', 'newCE02' ], 'deleted' : [] } }, {} )
'''
SITES:
Site:
deleted:1
RubbishSite
RESOURCES:
Computing:
added:2
newCE01
newCE02
'''
:Parameters:
**syncSites** - dict() ( keys: added, deleted )
dictionary with the sites added and deleted from the DB
**syncResources** - dict() ( keys: added, deleted )
dictionary with the resources added and deleted from the DB
**syncNodes** - dict() ( keys: added, deleted )
dictionary with the nodes added and deleted from the DB
:return: str
"""
syncMsg = ''
for element, syncResult in [ ( 'SITES', syncSites ), ( 'RESOURCES', syncResources ),
( 'NODES', syncNodes ) ]:
elementsMsg = ''
for elementType, elements in syncResult.items():
elementMsg = ''
if elements[ 'added' ]:
elementMsg += '\n %s added: %d \n' % ( elementType, len( elements[ 'added' ] ) )
elementMsg += ' ' + '\n '.join( elements[ 'added' ] )
if elements[ 'deleted' ]:
elementMsg += '\n %s deleted: %d \n' % ( elementType, len( elements[ 'deleted' ] ) )
elementMsg += ' ' + '\n '.join( elements[ 'deleted' ] )
if elementMsg:
elementsMsg += '\n\n%s:\n' % elementType
elementsMsg += elementMsg
if elementsMsg:
syncMsg += '\n\n%s:' % element + elementsMsg
return syncMsg
#.............................................................................
# Sync methods: Site, Resource & Node
def _syncSites( self ):
"""
Method that synchronizes sites ( using their canonical name: CERN.ch ) with
elementType = 'Site'. It gets from the CS the eligible site names and then
synchronizes them with the DB. If not on the DB, they are added. If in the DB
but not on the CS, they are deleted.
examples:
>> s._syncSites()
S_OK( { 'Site' : { 'added' : [], 'deleted' : [ 'RubbishSite' ] } } )
:return: S_OK( { 'Site' : { 'added' : [], 'deleted' : [] }} ) | S_ERROR
"""
# Get site names from the CS
foundSites = self.resources.getEligibleSites()
if not foundSites[ 'OK' ]:
return foundSites
sites = {}
# Synchronize with the DB
resSync = self.__dbSync( 'Site', 'Site', foundSites[ 'Value' ] )
if not resSync[ 'OK' ]:
self.log.error( 'Error synchronizing Sites' )
self.log.error( resSync[ 'Message' ] )
else:
sites = resSync[ 'Value' ]
return S_OK( { 'Site' : sites } )
def _syncResources( self ):
"""
Method that synchronizes resources as defined on RESOURCE_NODE_MAPPING dictionary
keys. It makes one sync round per key ( elementType ). Gets from the CS the
eligible Resource/<elementType> names and then synchronizes them with the DB.
If not on the DB, they are added. If in the DB but not on the CS, they are deleted.
examples:
>>> s._syncResources()
S_OK( { 'Computing' : { 'added' : [ 'newCE01', 'newCE02' ], 'deleted' : [] },
'Storage' : { 'added' : [], 'deleted' : [] },
... } )
:return: S_OK( { 'RESOURCE_NODE_MAPPINGKey1' : { 'added' : [], 'deleted' : [] }, ...} )
"""
resources = {}
# Iterate over the different elementTypes for Resource ( Computing, Storage... )
for elementType in RESOURCE_NODE_MAPPING.keys():
# Get Resource / <elementType> names from CS
foundResources = self.resources.getEligibleResources( elementType )
if not foundResources[ 'OK' ]:
self.log.error( foundResources[ 'Message' ] )
continue
# Translate CS result into a list
foundResources = foundResources[ 'Value' ]
# Synchronize with the DB
resSync = self.__dbSync( 'Resource', elementType, foundResources )
if not resSync[ 'OK' ]:
self.log.error( 'Error synchronizing %s %s' % ( 'Resource', elementType ) )
self.log.error( resSync[ 'Message' ] )
else:
resources[ elementType ] = resSync[ 'Value' ]
return S_OK( resources )
def _syncNodes( self ):
"""
Method that synchronizes resources as defined on RESOURCE_NODE_MAPPING dictionary
values. It makes one sync round per key ( elementType ). Gets from the CS the
eligible Node/<elementType> names and then synchronizes them with the DB.
If not on the DB, they are added. If in the DB but not on the CS, they are deleted.
examples:
>>> s._syncNodes()
S_OK( { 'Queue' : { 'added' : [], 'deleted' : [] },
... } )
:return: S_OK( { 'RESOURCE_NODE_MAPPINGValue1' : { 'added' : [], 'deleted' : [] }, ...} )
"""
nodes = {}
# Iterate over the different elementTypes for Node ( Queue, AccessProtocol... )
for elementType in RESOURCE_NODE_MAPPING.values():
# Get Node / <elementType> names from CS
foundNodes = self.resources.getEligibleNodes( elementType )
if not foundNodes[ 'OK' ]:
        self.log.error( foundNodes[ 'Message' ] )
continue
# Translate CS result into a list : maps NodeName to SiteName<>NodeName to
# avoid duplicates
# Looong list comprehension, sorry !
foundNodes = [ '%s<>%s' % ( key, item ) for key, subDict in foundNodes[ 'Value' ].items()
for subList in subDict.values() for item in subList ]
# Synchronize with the DB
resSync = self.__dbSync( 'Node', elementType, foundNodes )
if not resSync[ 'OK' ]:
self.log.error( 'Error synchronizing %s %s' % ( 'Node', elementType ) )
self.log.error( resSync[ 'Message' ] )
else:
nodes[ elementType ] = resSync[ 'Value' ]
return S_OK( nodes )
#.............................................................................
# DB sync actions
def __dbSync( self, elementFamily, elementType, elementsCS ):
"""
Method synchronizing CS and DB. Compares <elementsCS> with <elementsDB>
given the elementFamily and elementType ( e.g. Resource / Computing ).
    Elements missing from the DB are inserted; elements no longer present in
    the CS are deleted from the DB. Note that the logs from the RSS DB
    are kept ! ( just in case ).
:Parameters:
**elementFamily** - str
any of the valid element families : Site, Resource, Node
**elementType** - str
any of the valid element types for <elementFamily>
**elementsCS** - list
list with the elements for <elementFamily>/<elementType> found in the CS
:return: S_OK( { 'added' : [], 'deleted' : [] } ) | S_ERROR
"""
# deleted, added default response
syncRes = {
'deleted' : [],
'added' : [],
}
# Gets <elementFamily>/<elementType> elements from DB
elementsDB = self.rStatus.selectStatusElement( elementFamily, 'Status',
elementType = elementType,
meta = { 'columns' : [ 'name' ] } )
if not elementsDB[ 'OK' ]:
return elementsDB
elementsDB = [ elementDB[ 0 ] for elementDB in elementsDB[ 'Value' ] ]
# Elements in DB but not in CS -> to be deleted
toBeDeleted = list( set( elementsDB ).difference( set( elementsCS ) ) )
if toBeDeleted:
resDelete = self.__dbDelete( elementFamily, elementType, toBeDeleted )
if not resDelete[ 'OK' ]:
return resDelete
else:
syncRes[ 'deleted' ] = toBeDeleted
# Elements in CS but not in DB -> to be added
toBeAdded = list( set( elementsCS ).difference( set( elementsDB ) ) )
if toBeAdded:
resInsert = self.__dbInsert( elementFamily, elementType, toBeAdded )
if not resInsert[ 'OK' ]:
return resInsert
else:
syncRes[ 'added' ] = toBeAdded
return S_OK( syncRes )
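  # Example of the set logic in __dbSync above (hypothetical element names):
  #   elementsDB = [ 'CERN.ch', 'RubbishSite' ], elementsCS = [ 'CERN.ch', 'NewSite' ]
  #   -> toBeDeleted = [ 'RubbishSite' ] and toBeAdded = [ 'NewSite' ]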
def __dbDelete( self, elementFamily, elementType, toBeDeleted ):
"""
Method that given the elementFamily and elementType, deletes all entries
in the History and Status tables for the given elements in toBeDeleted ( all
their status Types ).
:Parameters:
**elementFamily** - str
any of the valid element families : Site, Resource, Node
**elementType** - str
any of the valid element types for <elementFamily>, just used for logging
purposes.
**toBeDeleted** - list
list with the elements to be deleted
:return: S_OK | S_ERROR
"""
self.log.info( 'Deleting %s %s:' % ( elementFamily, elementType ) )
self.log.info( toBeDeleted )
return self.rStatus._extermineStatusElement( elementFamily, toBeDeleted )
def __dbInsert( self, elementFamily, elementType, toBeAdded ):
"""
Method that given the elementFamily and elementType, adds all elements in
toBeAdded with their respective statusTypes, obtained from the CS. They
are synchronized with status 'Unknown' and reason 'Synchronized'.
:Parameters:
**elementFamily** - str
any of the valid element families : Site, Resource, Node
**elementType** - str
any of the valid element types for <elementFamily>
      **toBeAdded** - list
list with the elements to be added
:return: S_OK | S_ERROR
"""
self.log.info( 'Adding %s %s:' % ( elementFamily, elementType ) )
self.log.info( toBeAdded )
statusTypes = self.rssConfig.getConfigStatusType( elementType )
for element in toBeAdded:
for statusType in statusTypes:
resInsert = self.rStatus.addIfNotThereStatusElement( elementFamily, 'Status',
name = element,
statusType = statusType,
status = 'Unknown',
elementType = elementType,
reason = 'Synchronized')
if not resInsert[ 'OK' ]:
return resInsert
return S_OK()
#...............................................................................
#
# def _syncUsers( self ):
# '''
# Sync Users: compares CS with DB and does the necessary modifications.
# '''
#
# gLogger.verbose( '-- Synchronizing users --')
#
# usersCS = CSHelpers.getRegistryUsers()
# if not usersCS[ 'OK' ]:
# return usersCS
# usersCS = usersCS[ 'Value' ]
#
# gLogger.verbose( '%s users found in CS' % len( usersCS ) )
#
# usersDB = self.rManagement.selectUserRegistryCache( meta = { 'columns' : [ 'login' ] } )
# if not usersDB[ 'OK' ]:
# return usersDB
# usersDB = [ userDB[0] for userDB in usersDB[ 'Value' ] ]
#
# # Users that are in DB but not in CS
# toBeDeleted = list( set( usersDB ).difference( set( usersCS.keys() ) ) )
# gLogger.verbose( '%s users to be deleted' % len( toBeDeleted ) )
#
# # Delete users
# # FIXME: probably it is not needed since there is a DatabaseCleanerAgent
# for userLogin in toBeDeleted:
#
# deleteQuery = self.rManagement.deleteUserRegistryCache( login = userLogin )
#
# gLogger.verbose( '... %s' % userLogin )
# if not deleteQuery[ 'OK' ]:
# return deleteQuery
#
# # AddOrModify Users
# for userLogin, userDict in usersCS.items():
#
# _name = userDict[ 'DN' ].split( '=' )[ -1 ]
# _email = userDict[ 'Email' ]
#
# query = self.rManagement.addOrModifyUserRegistryCache( userLogin, _name, _email )
# gLogger.verbose( '-> %s' % userLogin )
# if not query[ 'OK' ]:
# return query
#
# return S_OK()
################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
|
sposs/DIRAC
|
ResourceStatusSystem/Utilities/Synchronizer.py
|
Python
|
gpl-3.0
| 17,289
|
[
"DIRAC"
] |
47ee71504fe6a5c9a4f6e14de6ff733cc279a55532b2021eb2c240142863eaee
|
# Copyright 2008 Brian Boyer, Ryan Mark, Angela Nitzke, Joshua Pollock,
# Stuart Tiffen, Kayla Webley and the Medill School of Journalism, Northwestern
# University.
#
# This file is part of Crunchberry Pie.
#
# Crunchberry Pie is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Crunchberry Pie is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Crunchberry Pie. If not, see <http://www.gnu.org/licenses/>.
import datetime
import unittest
from bartender.models import Article
from django.contrib.auth.models import User
from questions.helpers import get_featured_question
from questions.models import Question, Answer
class HelpersTestCase(unittest.TestCase):
def setUp(self):
self.test_user = User.objects.create(username='4321', password='test')
self.test_article = Article.objects.create(headline='headline',body='body')
#out of range question, five days ago
five_days_ago = datetime.datetime.today() - datetime.timedelta(days=5)
self.test_question = Question.objects.create(text='five days ago',user=self.test_user,article=self.test_article)
self.test_question.created = five_days_ago
self.test_question.save()
def test_get_featured_question(self):
#no questions in range
fq = get_featured_question()
self.assertEqual(None, fq['hot_question'])
self.assertEqual(None, fq['answer_me'])
#no answered question, so prompting for an answer
unanswered_question = Question.objects.create(text='boring question',user=self.test_user,article=self.test_article)
fq = get_featured_question()
self.assertEqual(None, fq['hot_question'])
self.assertEqual(unanswered_question, fq['answer_me'])
#an answered question! we have a hot one!
hot_question = Question.objects.create(text='exciting question',user=self.test_user,article=self.test_article)
Answer.objects.create(text='answer to exciting question',question=hot_question,user=self.test_user)
fq = get_featured_question()
self.assertEqual(hot_question, fq['hot_question'])
self.assertEqual(None, fq['answer_me'])
#a question with more answers, hotter stuff
hotter_question = Question.objects.create(text='hotter question today',user=self.test_user,article=self.test_article)
Answer.objects.create(text='hotter answer one',question=hotter_question,user=self.test_user)
Answer.objects.create(text='hotter answer two',question=hotter_question,user=self.test_user)
fq = get_featured_question()
self.assertEqual(hotter_question, fq['hot_question'])
self.assertEqual(None, fq['answer_me'])
def tearDown(self):
pass
|
brianboyer/newsmixer
|
social/questions/tests.py
|
Python
|
gpl-3.0
| 3,210
|
[
"Brian",
"exciting"
] |
400bdf88ced2f22c685fbd6d331bcfd41c77cbbcd166b5d14161405deb933836
|
import numpy as np
from scipy.interpolate import CubicSpline
from ..numerics.utilities import chol_inv
class VarGP(object):
"""
Variational Gaussian Process Approximation class.
"""
__slots__ = ("model", "fwd_ode", "bwd_ode", "kl0", "likelihood",
"obs_y", "obs_t", "dt", "dim_n", "dim_d", "dim_tot",
"output")
def __init__(self, model, m0, s0, fwd_ode, bwd_ode, likelihood,
kl0, obs_y, obs_t):
"""
Default constructor of VGPA object.
:param model: objects represents the dynamical system.
:param m0: initial marginal mean m(t=0). For the moment
this is kept fixed, but it can also be optimized.
:param s0: initial marginal co-variance s(t=0). For the
moment this is kept fixed, but it can also be optimized.
:param fwd_ode: forward ode solver.
:param bwd_ode: backward ode solver.
:param likelihood: likelihood object.
        :param kl0: Kullback-Leibler divergence at the initial moment KL(t=0).
:param obs_y: observation values (including noise).
:param obs_t: observation times (discrete).
"""
# Stochastic model.
self.model = model
# Forward / backward integrators.
self.fwd_ode = fwd_ode
self.bwd_ode = bwd_ode
# Prior / likelihood functions.
self.kl0 = kl0
self.likelihood = likelihood
# Observations / times.
self.obs_y = obs_y
self.obs_t = obs_t
# Extract auxiliary variables.
self.dt = self.model.time_step
# Get the dimensions.
if self.model.single_dim:
self.dim_n, self.dim_d = self.model.sample_path.size, 1
else:
self.dim_n, self.dim_d = self.model.sample_path.shape
# _end_if_
# Total number of linear variables: a(t).
self.dim_tot = self.dim_n * self.dim_d * self.dim_d
# Output variables.
self.output = {"m0": m0, "s0": s0}
# _end_def_
def initialization(self):
"""
This function initializes the variational parameters A(t) and b(t).
This is done with a simple interpolation technique (Cubic Splines).
        When there is more than one dimension, the interpolation is performed
        on each dimension separately.
:return: a single array containing all the variational parameters.
"""
# Time window of inference.
time_window = self.model.time_window
# Replicate the first and last time points.
time_x = [time_window[0], *time_window[self.obs_t], time_window[-1]]
# Switch according to the dimensionality.
if self.model.single_dim:
# Replicate the first and last observations.
obs_z = np.hstack((self.obs_y[0], self.obs_y, self.obs_y[-1]))
# Linear variational parameters.
a0 = 0.5 * (self.model.sigma / 0.25) * np.ones(self.dim_n)
# Build a uni-variate extrapolation
# (with cubic splines).
fb0 = CubicSpline(time_x, obs_z)
# Generate the offset parameters on the whole time window.
b0 = fb0(time_window)
else:
# Replicate the first and last observations.
obs_z = np.vstack((self.obs_y[0], self.obs_y, self.obs_y[-1]))
# Cubic spline extrapolation for
# each dimension separately.
fb0 = CubicSpline(time_x, obs_z)
mt0 = fb0(time_window)
# Preallocate variational parameters.
a0 = np.zeros((self.dim_n, self.dim_d, self.dim_d))
b0 = np.zeros((self.dim_n, self.dim_d))
# Initial covariance matrix S(t=0)
s0 = 0.25 * np.eye(self.dim_d)
# Compute the discrete differences
# (approximation of Dm(t)/Dt).
dmt0 = np.diff(mt0, axis=0) / self.dt
# System Noise / S(t=0).
diag_k = np.diag(self.model.sigma.diagonal() / s0.diagonal())
# Construct a0(t) and b0(t) assuming a0(t) and s(t) are diagonal.
for k in range(self.dim_n - 1):
a0[k] = 0.5 * diag_k
b0[k] = dmt0[k] + a0[k].diagonal() * mt0[k]
# _end_for_
# At the last point (t=tf) we assume the gradient Dmt0 is zero.
a0[-1] = 0.5 * diag_k
b0[-1] = a0[-1].diagonal() * mt0[-1]
# _end_if_
# Concatenate the results into one (big) array.
return np.concatenate((a0.ravel(), b0.ravel()))
# _end_def_
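    # A minimal sketch of the interpolation used above, with hypothetical
    # scalar observations at three times (illustration only):
    #   fb0 = CubicSpline([0.0, 0.5, 1.0], [1.2, 0.7, 0.9])
    #   b0 = fb0(np.linspace(0.0, 1.0, 11))   # offset b(t) on the full grid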
def free_energy(self, x):
"""
Computes the variational free energy, along with parameters
related to the variational posterior process defined by the
linear / offset parameters a(t) and b(t).
:param x: initial variational linear and offset parameters.
:return: E0 + Esde + Eobs (scalar).
"""
# Switch according to dim_d.
if self.dim_d == 1:
# Extract a(t) and b(t).
linear_a = x[:self.dim_tot]
offset_b = x[self.dim_tot:]
else:
# Extract a(t) and b(t).
linear_a = x[:self.dim_tot].reshape(self.dim_n,
self.dim_d, self.dim_d)
offset_b = x[self.dim_tot:].reshape(self.dim_n, self.dim_d)
# _end_if_
# Initial posterior moments.
m0 = self.output["m0"]
s0 = self.output["s0"]
# Forward sweep to get consistent 'm(t)' and 's(t)'.
mt, st = self.fwd_ode(linear_a, offset_b, m0, s0, self.model.sigma)
# Energy from the observations (Likelihood).
Eobs = self.likelihood(mt, st)
# Energy from the SDE, along with expectations and gradients.
Esde, (Efx, Edf), (dEsde_dm, dEsde_ds, *_) = self.model.energy(linear_a, offset_b,
mt, st, self.obs_t)
# Compute the required gradients from the Eobs.
dEobs_dm, dEobs_ds, *_ = self.likelihood.gradients(mt, st)
# Backward sweep to ensure constraints are satisfied.
lamt, psit = self.bwd_ode(linear_a, dEsde_dm, dEsde_ds, dEobs_dm, dEobs_ds)
# Energy from the initial moment (t=0). If "m0" and "s0" are
# not optimized, this value is going to be constant in time.
E0 = self.kl0(m0, s0)
# Store the parameters that will be
# used later in the gradient method.
self.output["mt"] = mt
self.output["st"] = st
self.output["Efx"] = Efx
self.output["Edf"] = Edf
self.output["lamt"] = lamt
self.output["psit"] = psit
# Total free energy value.
return np.asscalar(E0 + Esde + Eobs)
# _end_def_
def gradient(self, x, eval_fun=False):
"""
Returns the gradient of the Lagrangian w.r.t.
the variational parameters a(t) (linear) and
b(t) (offset).
:param x: variational linear + offset parameters
(dim_n * dim_d * (dim_d + 1)).
:param eval_fun: it determines whether we have to
evaluate first the variational free energy to update
the parameters before the gradients.
:return: grouped gradient of the Lagrangian w.r.t.
the variational linear parameters 'a(t)' (dim_n x dim_d x dim_d)
and the variational offset parameters 'b(t)' (dim_n x dim_d).
"""
# Occasionally we have to evaluate the gradient
# at different input parameters. In this case we
# need to make sure that all the marginal and
# Lagrangian parameters are consistent.
if eval_fun:
_ = self.free_energy(x)
# _end_if_
# Switch to single dimension.
if self.model.single_dim:
# Unpack data.
at = x[:self.dim_tot]
bt = x[self.dim_tot:]
# Preallocate the return arrays.
gLa = np.zeros(self.dim_n)
gLb = np.zeros(self.dim_n)
else:
# Unpack data.
at = x[:self.dim_tot].reshape(self.dim_n, self.dim_d, self.dim_d)
bt = x[self.dim_tot:].reshape(self.dim_n, self.dim_d)
# Preallocate the return arrays.
gLa = np.zeros((self.dim_n, self.dim_d, self.dim_d))
gLb = np.zeros((self.dim_n, self.dim_d))
# _end_if_
# Posterior moments: m(t), S(t).
mt = self.output["mt"]
st = self.output["st"]
# Lagrange multipliers: lam(t), psi(t).
lamt = self.output["lamt"]
psit = self.output["psit"]
# Expectation values.
Efx = self.output["Efx"]
Edf = self.output["Edf"]
# Inverse of Sigma noise.
inv_sigma = self.model.inverse_sigma
# Main loop.
for k in range(self.dim_n):
# Get the values at time 'tk'.
ak = at[k]
sk = st[k]
mk = mt[k]
lamk = lamt[k]
# Gradient of Esde w.r.t. 'b' -Eq(29)-
dEsde_dbt = self._dEsde_db(inv_sigma, Efx[k], mk, ak, bt[k])
# Gradient of Esde w.r.t. 'A' -Eq(28)-
dEsde_dat = self._dEsde_da(inv_sigma, ak, mk, sk, Edf[k], dEsde_dbt)
# Gradient of Lagrangian w.r.t. 'a(t)' -Eq(12)-
gLa[k] = self._grad_at(dEsde_dat, lamk, mk, psit[k], sk)
# Gradient of Lagrangian w.r.t. 'b(t)' -Eq(13)-
gLb[k] = dEsde_dbt + lamk
# _end_for_
# Scale the results with the time increment.
gLa = self.dt * gLa
gLb = self.dt * gLb
# Group the gradients together and exit.
return np.concatenate((gLa.flatten(), gLb.flatten()))
# _end_def_
@property
def arg_out(self):
"""
Accessor for the output dictionary.
:return: the output dictionary.
"""
return self.output
# _end_def_
def _grad_at(self, dEsde_dak, lamk, mk, psik, sk):
"""
Auxiliary function. Return automatically
the 1D or nD version of the calculation.
"""
if self.model.single_dim:
return dEsde_dak - (lamk * mk) - (2.0 * psik * sk)
else:
return dEsde_dak - np.outer(lamk, mk) - 2.0 * psik.dot(sk)
# _end_if_
# _end_def_
def _dEsde_da(self, inv_sigma, at, mt, st, Edf, dEsde_dbt):
"""
Auxiliary function. Return automatically
the 1D or nD version of the calculation.
"""
if self.model.single_dim:
return inv_sigma * (Edf + at) * st - (dEsde_dbt * mt)
else:
return inv_sigma.dot(Edf + at).dot(st) - np.outer(dEsde_dbt, mt)
# _end_if_
# _end_def_
def _dEsde_db(self, inv_sigma, Efx, mt, at, bt):
"""
Auxiliary function. Return automatically
the 1D or nD version of the calculation.
"""
if self.model.single_dim:
return inv_sigma * (-Efx - (at * mt) + bt)
else:
return inv_sigma.dot(-Efx - at.dot(mt) + bt)
# _end_if_
# _end_def_
# _end_class_
# Auxiliary function.
def grad_Esde_dm_ds(x, fun, mt, st, at, bt, diag_inv_sigma):
"""
Returns the gradient of the -SDE- energy function with respect
to the marginal means and variances. This method is used when
the analytic expressions for the gradients are difficult to be
computed, hence we use approximations such as the unscented
transformation.
:param x: input state samples (dim_n x dim_d).
:param fun: drift function.
:param mt: marginal mean at time 't' (dim_d).
:param st: marginal covar. at time 't' (dim_d x dim_d).
:param at: linear parameter (dim_d x dim_d).
:param bt: offset parameter (dim_d).
:param diag_inv_sigma: diagonal elements of inverse system
noise (dim_d)
:return: gradients w.r.t. to 'mt' and 'st' with dimensions:
[dim_n x dim_d * (dim_d + 1)].
"""
# Get the dimensions of the input array.
dim_n, dim_d = x.shape
# Preallocate array: [dim_n x dim_d^2].
dst = np.zeros((dim_n, dim_d * dim_d))
# Compute auxiliary quantity:
x_mat = (fun(x) + x.dot(at.T) - np.tile(bt, (dim_n, 1))) ** 2
var = diag_inv_sigma.dot(x_mat.T)
# Gradient w.r.t. 'mt': [dim_n x dim_d]
dmt = np.linalg.solve(st, (np.tile(var, (dim_d, 1)) * x.T)).T
# Inverse of marginal covariance.
inv_st, _ = chol_inv(st)
# Calculate the gradients w.r.t. 'st'.
for k in range(dim_n):
# Take the values at sample 'k'.
zt = x[k] - mt
# Square matrix.
zk = np.outer(zt, zt)
# Gradient w.r.t. 'st'.
dst[k] = var[k] * np.linalg.solve(st, zk).dot(inv_st).ravel()
# _end_for_
# Scale the results.
dmt = 0.5 * dmt
dst = 0.5 * dst
# Group the gradients together and exit.
return np.concatenate((dmt, dst), axis=1)
# _end_def_
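# A hedged usage note for grad_Esde_dm_ds above: in an unscented-transform
# setting 'x' would hold the sigma points drawn around m(t) (e.g. 2*dim_d + 1
# of them), and the returned [dmt | dst] rows are then combined with the
# transform weights by the caller. The exact sampling scheme is up to the
# calling code and is only assumed here.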
|
vrettasm/VGPA
|
code/src/variational.py
|
Python
|
gpl-3.0
| 12,977
|
[
"Gaussian"
] |
a66b062030143715bbe5dfa09d012854f9fa69f205f504b8967b84e361721e87
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from pyspark import since, keyword_only
from pyspark.ml.param.shared import *
from pyspark.ml.util import *
from pyspark.ml.wrapper import JavaEstimator, JavaModel, JavaWrapper
from pyspark.ml.common import inherit_doc
from pyspark.sql import DataFrame
__all__ = ['AFTSurvivalRegression', 'AFTSurvivalRegressionModel',
'DecisionTreeRegressor', 'DecisionTreeRegressionModel',
'GBTRegressor', 'GBTRegressionModel',
'GeneralizedLinearRegression', 'GeneralizedLinearRegressionModel',
'GeneralizedLinearRegressionSummary', 'GeneralizedLinearRegressionTrainingSummary',
'IsotonicRegression', 'IsotonicRegressionModel',
'LinearRegression', 'LinearRegressionModel',
'LinearRegressionSummary', 'LinearRegressionTrainingSummary',
'RandomForestRegressor', 'RandomForestRegressionModel']
@inherit_doc
class LinearRegression(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, HasMaxIter,
HasRegParam, HasTol, HasElasticNetParam, HasFitIntercept,
HasStandardization, HasSolver, HasWeightCol, HasAggregationDepth,
JavaMLWritable, JavaMLReadable):
"""
Linear regression.
The learning objective is to minimize the squared error, with regularization.
The specific squared error loss function used is: L = 1/2n ||A coefficients - y||^2^
This supports multiple types of regularization:
* none (a.k.a. ordinary least squares)
* L2 (ridge regression)
* L1 (Lasso)
* L2 + L1 (elastic net)
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, 2.0, Vectors.dense(1.0)),
... (0.0, 2.0, Vectors.sparse(1, [], []))], ["label", "weight", "features"])
>>> lr = LinearRegression(maxIter=5, regParam=0.0, solver="normal", weightCol="weight")
>>> model = lr.fit(df)
>>> test0 = spark.createDataFrame([(Vectors.dense(-1.0),)], ["features"])
>>> abs(model.transform(test0).head().prediction - (-1.0)) < 0.001
True
>>> abs(model.coefficients[0] - 1.0) < 0.001
True
>>> abs(model.intercept - 0.0) < 0.001
True
>>> test1 = spark.createDataFrame([(Vectors.sparse(1, [0], [1.0]),)], ["features"])
>>> abs(model.transform(test1).head().prediction - 1.0) < 0.001
True
>>> lr.setParams("vector")
Traceback (most recent call last):
...
TypeError: Method setParams forces keyword arguments.
>>> lr_path = temp_path + "/lr"
>>> lr.save(lr_path)
>>> lr2 = LinearRegression.load(lr_path)
>>> lr2.getMaxIter()
5
>>> model_path = temp_path + "/lr_model"
>>> model.save(model_path)
>>> model2 = LinearRegressionModel.load(model_path)
>>> model.coefficients[0] == model2.coefficients[0]
True
>>> model.intercept == model2.intercept
True
>>> model.numFeatures
1
.. versionadded:: 1.4.0
"""
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True,
standardization=True, solver="auto", weightCol=None, aggregationDepth=2):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True, \
standardization=True, solver="auto", weightCol=None, aggregationDepth=2)
"""
super(LinearRegression, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.LinearRegression", self.uid)
self._setDefault(maxIter=100, regParam=0.0, tol=1e-6)
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("1.4.0")
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True,
standardization=True, solver="auto", weightCol=None, aggregationDepth=2):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxIter=100, regParam=0.0, elasticNetParam=0.0, tol=1e-6, fitIntercept=True, \
standardization=True, solver="auto", weightCol=None, aggregationDepth=2)
Sets params for linear regression.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return LinearRegressionModel(java_model)
class LinearRegressionModel(JavaModel, JavaPredictionModel, JavaMLWritable, JavaMLReadable):
"""
Model fitted by :class:`LinearRegression`.
.. versionadded:: 1.4.0
"""
@property
@since("2.0.0")
def coefficients(self):
"""
Model coefficients.
"""
return self._call_java("coefficients")
@property
@since("1.4.0")
def intercept(self):
"""
Model intercept.
"""
return self._call_java("intercept")
@property
@since("2.0.0")
def summary(self):
"""
Gets summary (e.g. residuals, mse, r-squared ) of model on
training set. An exception is thrown if
`trainingSummary is None`.
"""
if self.hasSummary:
java_lrt_summary = self._call_java("summary")
return LinearRegressionTrainingSummary(java_lrt_summary)
else:
raise RuntimeError("No training summary available for this %s" %
self.__class__.__name__)
@property
@since("2.0.0")
def hasSummary(self):
"""
Indicates whether a training summary exists for this model
instance.
"""
return self._call_java("hasSummary")
@since("2.0.0")
def evaluate(self, dataset):
"""
Evaluates the model on a test dataset.
:param dataset:
Test dataset to evaluate model on, where dataset is an
instance of :py:class:`pyspark.sql.DataFrame`
"""
if not isinstance(dataset, DataFrame):
raise ValueError("dataset must be a DataFrame but got %s." % type(dataset))
java_lr_summary = self._call_java("evaluate", dataset)
return LinearRegressionSummary(java_lr_summary)
class LinearRegressionSummary(JavaWrapper):
"""
.. note:: Experimental
Linear regression results evaluated on a dataset.
.. versionadded:: 2.0.0
"""
@property
@since("2.0.0")
def predictions(self):
"""
Dataframe outputted by the model's `transform` method.
"""
return self._call_java("predictions")
@property
@since("2.0.0")
def predictionCol(self):
"""
Field in "predictions" which gives the predicted value of
the label at each instance.
"""
return self._call_java("predictionCol")
@property
@since("2.0.0")
def labelCol(self):
"""
Field in "predictions" which gives the true label of each
instance.
"""
return self._call_java("labelCol")
@property
@since("2.0.0")
def featuresCol(self):
"""
Field in "predictions" which gives the features of each instance
as a vector.
"""
return self._call_java("featuresCol")
@property
@since("2.0.0")
def explainedVariance(self):
"""
Returns the explained variance regression score.
explainedVariance = 1 - variance(y - \hat{y}) / variance(y)
.. seealso:: `Wikipedia explain variation \
<http://en.wikipedia.org/wiki/Explained_variation>`_
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions.
"""
return self._call_java("explainedVariance")
@property
@since("2.0.0")
def meanAbsoluteError(self):
"""
Returns the mean absolute error, which is a risk function
corresponding to the expected value of the absolute error
loss or l1-norm loss.
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions.
"""
return self._call_java("meanAbsoluteError")
@property
@since("2.0.0")
def meanSquaredError(self):
"""
Returns the mean squared error, which is a risk function
corresponding to the expected value of the squared error
loss or quadratic loss.
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions.
"""
return self._call_java("meanSquaredError")
@property
@since("2.0.0")
def rootMeanSquaredError(self):
"""
Returns the root mean squared error, which is defined as the
square root of the mean squared error.
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions.
"""
return self._call_java("rootMeanSquaredError")
@property
@since("2.0.0")
def r2(self):
"""
Returns R^2^, the coefficient of determination.
.. seealso:: `Wikipedia coefficient of determination \
<http://en.wikipedia.org/wiki/Coefficient_of_determination>`
.. note:: This ignores instance weights (setting all to 1.0) from
`LinearRegression.weightCol`. This will change in later Spark
versions.
"""
return self._call_java("r2")
@property
@since("2.0.0")
def residuals(self):
"""
Residuals (label - predicted value)
"""
return self._call_java("residuals")
@property
@since("2.0.0")
def numInstances(self):
"""
Number of instances in DataFrame predictions
"""
return self._call_java("numInstances")
@property
@since("2.0.0")
def devianceResiduals(self):
"""
The weighted residuals, the usual residuals rescaled by the
square root of the instance weights.
"""
return self._call_java("devianceResiduals")
@property
@since("2.0.0")
def coefficientStandardErrors(self):
"""
Standard error of estimated coefficients and intercept.
This value is only available when using the "normal" solver.
If :py:attr:`LinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
.. seealso:: :py:attr:`LinearRegression.solver`
"""
return self._call_java("coefficientStandardErrors")
@property
@since("2.0.0")
def tValues(self):
"""
T-statistic of estimated coefficients and intercept.
This value is only available when using the "normal" solver.
If :py:attr:`LinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
.. seealso:: :py:attr:`LinearRegression.solver`
"""
return self._call_java("tValues")
@property
@since("2.0.0")
def pValues(self):
"""
Two-sided p-value of estimated coefficients and intercept.
This value is only available when using the "normal" solver.
If :py:attr:`LinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
.. seealso:: :py:attr:`LinearRegression.solver`
"""
return self._call_java("pValues")
@inherit_doc
class LinearRegressionTrainingSummary(LinearRegressionSummary):
"""
.. note:: Experimental
Linear regression training results. Currently, the training summary ignores the
training weights except for the objective trace.
.. versionadded:: 2.0.0
"""
@property
@since("2.0.0")
def objectiveHistory(self):
"""
Objective function (scaled loss + regularization) at each
iteration.
This value is only available when using the "l-bfgs" solver.
.. seealso:: :py:attr:`LinearRegression.solver`
"""
return self._call_java("objectiveHistory")
@property
@since("2.0.0")
def totalIterations(self):
"""
Number of training iterations until termination.
This value is only available when using the "l-bfgs" solver.
.. seealso:: :py:attr:`LinearRegression.solver`
"""
return self._call_java("totalIterations")
@inherit_doc
class IsotonicRegression(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol,
HasWeightCol, JavaMLWritable, JavaMLReadable):
"""
Currently implemented using parallelized pool adjacent violators algorithm.
Only univariate (single feature) algorithm supported.
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(1.0)),
... (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
>>> ir = IsotonicRegression()
>>> model = ir.fit(df)
>>> test0 = spark.createDataFrame([(Vectors.dense(-1.0),)], ["features"])
>>> model.transform(test0).head().prediction
0.0
>>> model.boundaries
DenseVector([0.0, 1.0])
>>> ir_path = temp_path + "/ir"
>>> ir.save(ir_path)
>>> ir2 = IsotonicRegression.load(ir_path)
>>> ir2.getIsotonic()
True
>>> model_path = temp_path + "/ir_model"
>>> model.save(model_path)
>>> model2 = IsotonicRegressionModel.load(model_path)
>>> model.boundaries == model2.boundaries
True
>>> model.predictions == model2.predictions
True
.. versionadded:: 1.6.0
"""
isotonic = \
Param(Params._dummy(), "isotonic",
"whether the output sequence should be isotonic/increasing (true) or" +
"antitonic/decreasing (false).", typeConverter=TypeConverters.toBoolean)
featureIndex = \
Param(Params._dummy(), "featureIndex",
"The index of the feature if featuresCol is a vector column, no effect otherwise.",
typeConverter=TypeConverters.toInt)
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
weightCol=None, isotonic=True, featureIndex=0):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
weightCol=None, isotonic=True, featureIndex=0):
"""
super(IsotonicRegression, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.IsotonicRegression", self.uid)
self._setDefault(isotonic=True, featureIndex=0)
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
weightCol=None, isotonic=True, featureIndex=0):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
weightCol=None, isotonic=True, featureIndex=0):
Set the params for IsotonicRegression.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return IsotonicRegressionModel(java_model)
def setIsotonic(self, value):
"""
Sets the value of :py:attr:`isotonic`.
"""
return self._set(isotonic=value)
def getIsotonic(self):
"""
Gets the value of isotonic or its default value.
"""
return self.getOrDefault(self.isotonic)
def setFeatureIndex(self, value):
"""
Sets the value of :py:attr:`featureIndex`.
"""
return self._set(featureIndex=value)
def getFeatureIndex(self):
"""
Gets the value of featureIndex or its default value.
"""
return self.getOrDefault(self.featureIndex)
class IsotonicRegressionModel(JavaModel, JavaMLWritable, JavaMLReadable):
"""
Model fitted by :class:`IsotonicRegression`.
.. versionadded:: 1.6.0
"""
@property
@since("1.6.0")
def boundaries(self):
"""
Boundaries in increasing order for which predictions are known.
"""
return self._call_java("boundaries")
@property
@since("1.6.0")
def predictions(self):
"""
Predictions associated with the boundaries at the same index, monotone because of isotonic
regression.
"""
return self._call_java("predictions")
class TreeEnsembleParams(DecisionTreeParams):
"""
Mixin for Decision Tree-based ensemble algorithms parameters.
"""
subsamplingRate = Param(Params._dummy(), "subsamplingRate", "Fraction of the training data " +
"used for learning each decision tree, in range (0, 1].",
typeConverter=TypeConverters.toFloat)
def __init__(self):
super(TreeEnsembleParams, self).__init__()
@since("1.4.0")
def setSubsamplingRate(self, value):
"""
Sets the value of :py:attr:`subsamplingRate`.
"""
return self._set(subsamplingRate=value)
@since("1.4.0")
def getSubsamplingRate(self):
"""
Gets the value of subsamplingRate or its default value.
"""
return self.getOrDefault(self.subsamplingRate)
class TreeRegressorParams(Params):
"""
Private class to track supported impurity measures.
"""
supportedImpurities = ["variance"]
impurity = Param(Params._dummy(), "impurity",
"Criterion used for information gain calculation (case-insensitive). " +
"Supported options: " +
", ".join(supportedImpurities), typeConverter=TypeConverters.toString)
def __init__(self):
super(TreeRegressorParams, self).__init__()
@since("1.4.0")
def setImpurity(self, value):
"""
Sets the value of :py:attr:`impurity`.
"""
return self._set(impurity=value)
@since("1.4.0")
def getImpurity(self):
"""
Gets the value of impurity or its default value.
"""
return self.getOrDefault(self.impurity)
class RandomForestParams(TreeEnsembleParams):
"""
Private class to track supported random forest parameters.
"""
supportedFeatureSubsetStrategies = ["auto", "all", "onethird", "sqrt", "log2"]
numTrees = Param(Params._dummy(), "numTrees", "Number of trees to train (>= 1).",
typeConverter=TypeConverters.toInt)
featureSubsetStrategy = \
Param(Params._dummy(), "featureSubsetStrategy",
"The number of features to consider for splits at each tree node. Supported " +
"options: " + ", ".join(supportedFeatureSubsetStrategies) + ", (0.0-1.0], [1-n].",
typeConverter=TypeConverters.toString)
def __init__(self):
super(RandomForestParams, self).__init__()
@since("1.4.0")
def setNumTrees(self, value):
"""
Sets the value of :py:attr:`numTrees`.
"""
return self._set(numTrees=value)
@since("1.4.0")
def getNumTrees(self):
"""
Gets the value of numTrees or its default value.
"""
return self.getOrDefault(self.numTrees)
@since("1.4.0")
def setFeatureSubsetStrategy(self, value):
"""
Sets the value of :py:attr:`featureSubsetStrategy`.
"""
return self._set(featureSubsetStrategy=value)
@since("1.4.0")
def getFeatureSubsetStrategy(self):
"""
Gets the value of featureSubsetStrategy or its default value.
"""
return self.getOrDefault(self.featureSubsetStrategy)
class GBTParams(TreeEnsembleParams):
"""
Private class to track supported GBT params.
"""
supportedLossTypes = ["squared", "absolute"]
@inherit_doc
class DecisionTreeRegressor(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol,
DecisionTreeParams, TreeRegressorParams, HasCheckpointInterval,
HasSeed, JavaMLWritable, JavaMLReadable, HasVarianceCol):
"""
`Decision tree <http://en.wikipedia.org/wiki/Decision_tree_learning>`_
learning algorithm for regression.
It supports both continuous and categorical features.
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(1.0)),
... (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
>>> dt = DecisionTreeRegressor(maxDepth=2, varianceCol="variance")
>>> model = dt.fit(df)
>>> model.depth
1
>>> model.numNodes
3
>>> model.featureImportances
SparseVector(1, {0: 1.0})
>>> model.numFeatures
1
>>> test0 = spark.createDataFrame([(Vectors.dense(-1.0),)], ["features"])
>>> model.transform(test0).head().prediction
0.0
>>> test1 = spark.createDataFrame([(Vectors.sparse(1, [0], [1.0]),)], ["features"])
>>> model.transform(test1).head().prediction
1.0
>>> dtr_path = temp_path + "/dtr"
>>> dt.save(dtr_path)
>>> dt2 = DecisionTreeRegressor.load(dtr_path)
>>> dt2.getMaxDepth()
2
>>> model_path = temp_path + "/dtr_model"
>>> model.save(model_path)
>>> model2 = DecisionTreeRegressionModel.load(model_path)
>>> model.numNodes == model2.numNodes
True
>>> model.depth == model2.depth
True
>>> model.transform(test1).head().variance
0.0
.. versionadded:: 1.4.0
"""
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, impurity="variance",
seed=None, varianceCol=None):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, \
impurity="variance", seed=None, varianceCol=None)
"""
super(DecisionTreeRegressor, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.DecisionTreeRegressor", self.uid)
self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
impurity="variance")
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("1.4.0")
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
impurity="variance", seed=None, varianceCol=None):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, \
impurity="variance", seed=None, varianceCol=None)
Sets params for the DecisionTreeRegressor.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return DecisionTreeRegressionModel(java_model)
@inherit_doc
class DecisionTreeModel(JavaModel, JavaPredictionModel):
"""
Abstraction for Decision Tree models.
.. versionadded:: 1.5.0
"""
@property
@since("1.5.0")
def numNodes(self):
"""Return number of nodes of the decision tree."""
return self._call_java("numNodes")
@property
@since("1.5.0")
def depth(self):
"""Return depth of the decision tree."""
return self._call_java("depth")
@property
@since("2.0.0")
def toDebugString(self):
"""Full description of model."""
return self._call_java("toDebugString")
def __repr__(self):
return self._call_java("toString")
@inherit_doc
class TreeEnsembleModel(JavaModel):
"""
(private abstraction)
Represents a tree ensemble model.
"""
@property
@since("2.0.0")
def trees(self):
"""Trees in this ensemble. Warning: These have null parent Estimators."""
return [DecisionTreeModel(m) for m in list(self._call_java("trees"))]
@property
@since("2.0.0")
def getNumTrees(self):
"""Number of trees in ensemble."""
return self._call_java("getNumTrees")
@property
@since("1.5.0")
def treeWeights(self):
"""Return the weights for each tree"""
return list(self._call_java("javaTreeWeights"))
@property
@since("2.0.0")
def totalNumNodes(self):
"""Total number of nodes, summed over all trees in the ensemble."""
return self._call_java("totalNumNodes")
@property
@since("2.0.0")
def toDebugString(self):
"""Full description of model."""
return self._call_java("toDebugString")
def __repr__(self):
return self._call_java("toString")
@inherit_doc
class DecisionTreeRegressionModel(DecisionTreeModel, JavaMLWritable, JavaMLReadable):
"""
Model fitted by :class:`DecisionTreeRegressor`.
.. versionadded:: 1.4.0
"""
@property
@since("2.0.0")
def featureImportances(self):
"""
Estimate of the importance of each feature.
This generalizes the idea of "Gini" importance to other losses,
following the explanation of Gini importance from "Random Forests" documentation
by Leo Breiman and Adele Cutler, and following the implementation from scikit-learn.
This feature importance is calculated as follows:
- importance(feature j) = sum (over nodes which split on feature j) of the gain,
where gain is scaled by the number of instances passing through node
- Normalize importances for tree to sum to 1.
.. note:: Feature importance for single decision trees can have high variance due to
correlated predictor variables. Consider using a :py:class:`RandomForestRegressor`
to determine feature importance instead.
"""
return self._call_java("featureImportances")
@inherit_doc
class RandomForestRegressor(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, HasSeed,
RandomForestParams, TreeRegressorParams, HasCheckpointInterval,
JavaMLWritable, JavaMLReadable):
"""
`Random Forest <http://en.wikipedia.org/wiki/Random_forest>`_
learning algorithm for regression.
It supports both continuous and categorical features.
>>> from numpy import allclose
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(1.0)),
... (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
>>> rf = RandomForestRegressor(numTrees=2, maxDepth=2, seed=42)
>>> model = rf.fit(df)
>>> model.featureImportances
SparseVector(1, {0: 1.0})
>>> allclose(model.treeWeights, [1.0, 1.0])
True
>>> test0 = spark.createDataFrame([(Vectors.dense(-1.0),)], ["features"])
>>> model.transform(test0).head().prediction
0.0
>>> model.numFeatures
1
>>> model.trees
[DecisionTreeRegressionModel (uid=...) of depth..., DecisionTreeRegressionModel...]
>>> model.getNumTrees
2
>>> test1 = spark.createDataFrame([(Vectors.sparse(1, [0], [1.0]),)], ["features"])
>>> model.transform(test1).head().prediction
0.5
>>> rfr_path = temp_path + "/rfr"
>>> rf.save(rfr_path)
>>> rf2 = RandomForestRegressor.load(rfr_path)
>>> rf2.getNumTrees()
2
>>> model_path = temp_path + "/rfr_model"
>>> model.save(model_path)
>>> model2 = RandomForestRegressionModel.load(model_path)
>>> model.featureImportances == model2.featureImportances
True
.. versionadded:: 1.4.0
"""
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
impurity="variance", subsamplingRate=1.0, seed=None, numTrees=20,
featureSubsetStrategy="auto"):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, \
impurity="variance", subsamplingRate=1.0, seed=None, numTrees=20, \
featureSubsetStrategy="auto")
"""
super(RandomForestRegressor, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.RandomForestRegressor", self.uid)
self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
impurity="variance", subsamplingRate=1.0, numTrees=20,
featureSubsetStrategy="auto")
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("1.4.0")
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10,
impurity="variance", subsamplingRate=1.0, seed=None, numTrees=20,
featureSubsetStrategy="auto"):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, checkpointInterval=10, \
impurity="variance", subsamplingRate=1.0, seed=None, numTrees=20, \
featureSubsetStrategy="auto")
        Sets params for random forest regression.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return RandomForestRegressionModel(java_model)
class RandomForestRegressionModel(TreeEnsembleModel, JavaPredictionModel, JavaMLWritable,
JavaMLReadable):
"""
Model fitted by :class:`RandomForestRegressor`.
.. versionadded:: 1.4.0
"""
@property
@since("2.0.0")
def trees(self):
"""Trees in this ensemble. Warning: These have null parent Estimators."""
return [DecisionTreeRegressionModel(m) for m in list(self._call_java("trees"))]
@property
@since("2.0.0")
def featureImportances(self):
"""
Estimate of the importance of each feature.
        Each feature's importance is the average of its importance across all trees in the ensemble.
The importance vector is normalized to sum to 1. This method is suggested by Hastie et al.
(Hastie, Tibshirani, Friedman. "The Elements of Statistical Learning, 2nd Edition." 2001.)
and follows the implementation from scikit-learn.
.. seealso:: :py:attr:`DecisionTreeRegressionModel.featureImportances`
"""
return self._call_java("featureImportances")
@inherit_doc
class GBTRegressor(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol, HasMaxIter,
GBTParams, HasCheckpointInterval, HasStepSize, HasSeed, JavaMLWritable,
JavaMLReadable, TreeRegressorParams):
"""
`Gradient-Boosted Trees (GBTs) <http://en.wikipedia.org/wiki/Gradient_boosting>`_
learning algorithm for regression.
It supports both continuous and categorical features.
>>> from numpy import allclose
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(1.0)),
... (0.0, Vectors.sparse(1, [], []))], ["label", "features"])
>>> gbt = GBTRegressor(maxIter=5, maxDepth=2, seed=42)
>>> print(gbt.getImpurity())
variance
>>> model = gbt.fit(df)
>>> model.featureImportances
SparseVector(1, {0: 1.0})
>>> model.numFeatures
1
>>> allclose(model.treeWeights, [1.0, 0.1, 0.1, 0.1, 0.1])
True
>>> test0 = spark.createDataFrame([(Vectors.dense(-1.0),)], ["features"])
>>> model.transform(test0).head().prediction
0.0
>>> test1 = spark.createDataFrame([(Vectors.sparse(1, [0], [1.0]),)], ["features"])
>>> model.transform(test1).head().prediction
1.0
>>> gbtr_path = temp_path + "gbtr"
>>> gbt.save(gbtr_path)
>>> gbt2 = GBTRegressor.load(gbtr_path)
>>> gbt2.getMaxDepth()
2
>>> model_path = temp_path + "gbtr_model"
>>> model.save(model_path)
>>> model2 = GBTRegressionModel.load(model_path)
>>> model.featureImportances == model2.featureImportances
True
>>> model.treeWeights == model2.treeWeights
True
>>> model.trees
[DecisionTreeRegressionModel (uid=...) of depth..., DecisionTreeRegressionModel...]
.. versionadded:: 1.4.0
"""
lossType = Param(Params._dummy(), "lossType",
"Loss function which GBT tries to minimize (case-insensitive). " +
"Supported options: " + ", ".join(GBTParams.supportedLossTypes),
typeConverter=TypeConverters.toString)
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, subsamplingRate=1.0,
checkpointInterval=10, lossType="squared", maxIter=20, stepSize=0.1, seed=None,
impurity="variance"):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, subsamplingRate=1.0, \
checkpointInterval=10, lossType="squared", maxIter=20, stepSize=0.1, seed=None, \
impurity="variance")
"""
super(GBTRegressor, self).__init__()
self._java_obj = self._new_java_obj("org.apache.spark.ml.regression.GBTRegressor", self.uid)
self._setDefault(maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, subsamplingRate=1.0,
checkpointInterval=10, lossType="squared", maxIter=20, stepSize=0.1,
impurity="variance")
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("1.4.0")
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0,
maxMemoryInMB=256, cacheNodeIds=False, subsamplingRate=1.0,
checkpointInterval=10, lossType="squared", maxIter=20, stepSize=0.1, seed=None,
impuriy="variance"):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
maxDepth=5, maxBins=32, minInstancesPerNode=1, minInfoGain=0.0, \
maxMemoryInMB=256, cacheNodeIds=False, subsamplingRate=1.0, \
checkpointInterval=10, lossType="squared", maxIter=20, stepSize=0.1, seed=None, \
impurity="variance")
Sets params for Gradient Boosted Tree Regression.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return GBTRegressionModel(java_model)
@since("1.4.0")
def setLossType(self, value):
"""
Sets the value of :py:attr:`lossType`.
"""
return self._set(lossType=value)
@since("1.4.0")
def getLossType(self):
"""
Gets the value of lossType or its default value.
"""
return self.getOrDefault(self.lossType)
class GBTRegressionModel(TreeEnsembleModel, JavaPredictionModel, JavaMLWritable, JavaMLReadable):
"""
Model fitted by :class:`GBTRegressor`.
.. versionadded:: 1.4.0
"""
@property
@since("2.0.0")
def featureImportances(self):
"""
Estimate of the importance of each feature.
        Each feature's importance is the average of its importance across all trees in the ensemble.
The importance vector is normalized to sum to 1. This method is suggested by Hastie et al.
(Hastie, Tibshirani, Friedman. "The Elements of Statistical Learning, 2nd Edition." 2001.)
and follows the implementation from scikit-learn.
.. seealso:: :py:attr:`DecisionTreeRegressionModel.featureImportances`
"""
return self._call_java("featureImportances")
@property
@since("2.0.0")
def trees(self):
"""Trees in this ensemble. Warning: These have null parent Estimators."""
return [DecisionTreeRegressionModel(m) for m in list(self._call_java("trees"))]
@inherit_doc
class AFTSurvivalRegression(JavaEstimator, HasFeaturesCol, HasLabelCol, HasPredictionCol,
HasFitIntercept, HasMaxIter, HasTol, HasAggregationDepth,
JavaMLWritable, JavaMLReadable):
"""
.. note:: Experimental
Accelerated Failure Time (AFT) Model Survival Regression
Fit a parametric AFT survival regression model based on the Weibull distribution
of the survival time.
.. seealso:: `AFT Model <https://en.wikipedia.org/wiki/Accelerated_failure_time_model>`_
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(1.0), 1.0),
... (1e-40, Vectors.sparse(1, [], []), 0.0)], ["label", "features", "censor"])
>>> aftsr = AFTSurvivalRegression()
>>> model = aftsr.fit(df)
>>> model.predict(Vectors.dense(6.3))
1.0
>>> model.predictQuantiles(Vectors.dense(6.3))
DenseVector([0.0101, 0.0513, 0.1054, 0.2877, 0.6931, 1.3863, 2.3026, 2.9957, 4.6052])
>>> model.transform(df).show()
+-------+---------+------+----------+
| label| features|censor|prediction|
+-------+---------+------+----------+
| 1.0| [1.0]| 1.0| 1.0|
|1.0E-40|(1,[],[])| 0.0| 1.0|
+-------+---------+------+----------+
...
>>> aftsr_path = temp_path + "/aftsr"
>>> aftsr.save(aftsr_path)
>>> aftsr2 = AFTSurvivalRegression.load(aftsr_path)
>>> aftsr2.getMaxIter()
100
>>> model_path = temp_path + "/aftsr_model"
>>> model.save(model_path)
>>> model2 = AFTSurvivalRegressionModel.load(model_path)
>>> model.coefficients == model2.coefficients
True
>>> model.intercept == model2.intercept
True
>>> model.scale == model2.scale
True
.. versionadded:: 1.6.0
"""
censorCol = Param(Params._dummy(), "censorCol",
"censor column name. The value of this column could be 0 or 1. " +
"If the value is 1, it means the event has occurred i.e. " +
"uncensored; otherwise censored.", typeConverter=TypeConverters.toString)
quantileProbabilities = \
Param(Params._dummy(), "quantileProbabilities",
"quantile probabilities array. Values of the quantile probabilities array " +
"should be in the range (0, 1) and the array should be non-empty.",
typeConverter=TypeConverters.toListFloat)
quantilesCol = Param(Params._dummy(), "quantilesCol",
"quantiles column name. This column will output quantiles of " +
"corresponding quantileProbabilities if it is set.",
typeConverter=TypeConverters.toString)
@keyword_only
def __init__(self, featuresCol="features", labelCol="label", predictionCol="prediction",
fitIntercept=True, maxIter=100, tol=1E-6, censorCol="censor",
quantileProbabilities=list([0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99]),
quantilesCol=None, aggregationDepth=2):
"""
__init__(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
fitIntercept=True, maxIter=100, tol=1E-6, censorCol="censor", \
quantileProbabilities=[0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99], \
quantilesCol=None, aggregationDepth=2)
"""
super(AFTSurvivalRegression, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.AFTSurvivalRegression", self.uid)
self._setDefault(censorCol="censor",
quantileProbabilities=[0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99],
maxIter=100, tol=1E-6)
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("1.6.0")
def setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction",
fitIntercept=True, maxIter=100, tol=1E-6, censorCol="censor",
quantileProbabilities=list([0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99]),
quantilesCol=None, aggregationDepth=2):
"""
setParams(self, featuresCol="features", labelCol="label", predictionCol="prediction", \
fitIntercept=True, maxIter=100, tol=1E-6, censorCol="censor", \
quantileProbabilities=[0.01, 0.05, 0.1, 0.25, 0.5, 0.75, 0.9, 0.95, 0.99], \
                  quantilesCol=None, aggregationDepth=2)
        Sets params for AFTSurvivalRegression.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return AFTSurvivalRegressionModel(java_model)
@since("1.6.0")
def setCensorCol(self, value):
"""
Sets the value of :py:attr:`censorCol`.
"""
return self._set(censorCol=value)
@since("1.6.0")
def getCensorCol(self):
"""
Gets the value of censorCol or its default value.
"""
return self.getOrDefault(self.censorCol)
@since("1.6.0")
def setQuantileProbabilities(self, value):
"""
Sets the value of :py:attr:`quantileProbabilities`.
"""
return self._set(quantileProbabilities=value)
@since("1.6.0")
def getQuantileProbabilities(self):
"""
Gets the value of quantileProbabilities or its default value.
"""
return self.getOrDefault(self.quantileProbabilities)
@since("1.6.0")
def setQuantilesCol(self, value):
"""
Sets the value of :py:attr:`quantilesCol`.
"""
return self._set(quantilesCol=value)
@since("1.6.0")
def getQuantilesCol(self):
"""
Gets the value of quantilesCol or its default value.
"""
return self.getOrDefault(self.quantilesCol)
class AFTSurvivalRegressionModel(JavaModel, JavaMLWritable, JavaMLReadable):
"""
.. note:: Experimental
Model fitted by :class:`AFTSurvivalRegression`.
.. versionadded:: 1.6.0
"""
@property
@since("2.0.0")
def coefficients(self):
"""
Model coefficients.
"""
return self._call_java("coefficients")
@property
@since("1.6.0")
def intercept(self):
"""
Model intercept.
"""
return self._call_java("intercept")
@property
@since("1.6.0")
def scale(self):
"""
        Model scale parameter.
"""
return self._call_java("scale")
@since("2.0.0")
def predictQuantiles(self, features):
"""
        Predicted quantiles.
"""
return self._call_java("predictQuantiles", features)
@since("2.0.0")
def predict(self, features):
"""
        Predicted value.
"""
return self._call_java("predict", features)
@inherit_doc
class GeneralizedLinearRegression(JavaEstimator, HasLabelCol, HasFeaturesCol, HasPredictionCol,
HasFitIntercept, HasMaxIter, HasTol, HasRegParam, HasWeightCol,
HasSolver, JavaMLWritable, JavaMLReadable):
"""
.. note:: Experimental
Generalized Linear Regression.
Fit a Generalized Linear Model specified by giving a symbolic description of the linear
predictor (link function) and a description of the error distribution (family). It supports
"gaussian", "binomial", "poisson" and "gamma" as family. Valid link functions for each family
is listed below. The first link function of each family is the default one.
* "gaussian" -> "identity", "log", "inverse"
* "binomial" -> "logit", "probit", "cloglog"
* "poisson" -> "log", "identity", "sqrt"
* "gamma" -> "inverse", "identity", "log"
.. seealso:: `GLM <https://en.wikipedia.org/wiki/Generalized_linear_model>`_
>>> from pyspark.ml.linalg import Vectors
>>> df = spark.createDataFrame([
... (1.0, Vectors.dense(0.0, 0.0)),
... (1.0, Vectors.dense(1.0, 2.0)),
... (2.0, Vectors.dense(0.0, 0.0)),
... (2.0, Vectors.dense(1.0, 1.0)),], ["label", "features"])
>>> glr = GeneralizedLinearRegression(family="gaussian", link="identity", linkPredictionCol="p")
>>> model = glr.fit(df)
>>> transformed = model.transform(df)
>>> abs(transformed.head().prediction - 1.5) < 0.001
True
>>> abs(transformed.head().p - 1.5) < 0.001
True
>>> model.coefficients
DenseVector([1.5..., -1.0...])
>>> model.numFeatures
2
>>> abs(model.intercept - 1.5) < 0.001
True
>>> glr_path = temp_path + "/glr"
>>> glr.save(glr_path)
>>> glr2 = GeneralizedLinearRegression.load(glr_path)
>>> glr.getFamily() == glr2.getFamily()
True
>>> model_path = temp_path + "/glr_model"
>>> model.save(model_path)
>>> model2 = GeneralizedLinearRegressionModel.load(model_path)
>>> model.intercept == model2.intercept
True
>>> model.coefficients[0] == model2.coefficients[0]
True
.. versionadded:: 2.0.0
"""
family = Param(Params._dummy(), "family", "The name of family which is a description of " +
"the error distribution to be used in the model. Supported options: " +
"gaussian (default), binomial, poisson and gamma.",
typeConverter=TypeConverters.toString)
link = Param(Params._dummy(), "link", "The name of link function which provides the " +
"relationship between the linear predictor and the mean of the distribution " +
"function. Supported options: identity, log, inverse, logit, probit, cloglog " +
"and sqrt.", typeConverter=TypeConverters.toString)
linkPredictionCol = Param(Params._dummy(), "linkPredictionCol", "link prediction (linear " +
"predictor) column name", typeConverter=TypeConverters.toString)
@keyword_only
def __init__(self, labelCol="label", featuresCol="features", predictionCol="prediction",
family="gaussian", link=None, fitIntercept=True, maxIter=25, tol=1e-6,
regParam=0.0, weightCol=None, solver="irls", linkPredictionCol=None):
"""
__init__(self, labelCol="label", featuresCol="features", predictionCol="prediction", \
family="gaussian", link=None, fitIntercept=True, maxIter=25, tol=1e-6, \
regParam=0.0, weightCol=None, solver="irls", linkPredictionCol=None)
"""
super(GeneralizedLinearRegression, self).__init__()
self._java_obj = self._new_java_obj(
"org.apache.spark.ml.regression.GeneralizedLinearRegression", self.uid)
self._setDefault(family="gaussian", maxIter=25, tol=1e-6, regParam=0.0, solver="irls")
kwargs = self._input_kwargs
self.setParams(**kwargs)
@keyword_only
@since("2.0.0")
def setParams(self, labelCol="label", featuresCol="features", predictionCol="prediction",
family="gaussian", link=None, fitIntercept=True, maxIter=25, tol=1e-6,
regParam=0.0, weightCol=None, solver="irls", linkPredictionCol=None):
"""
setParams(self, labelCol="label", featuresCol="features", predictionCol="prediction", \
family="gaussian", link=None, fitIntercept=True, maxIter=25, tol=1e-6, \
regParam=0.0, weightCol=None, solver="irls", linkPredictionCol=None)
Sets params for generalized linear regression.
"""
kwargs = self._input_kwargs
return self._set(**kwargs)
def _create_model(self, java_model):
return GeneralizedLinearRegressionModel(java_model)
@since("2.0.0")
def setFamily(self, value):
"""
Sets the value of :py:attr:`family`.
"""
return self._set(family=value)
@since("2.0.0")
def getFamily(self):
"""
Gets the value of family or its default value.
"""
return self.getOrDefault(self.family)
@since("2.0.0")
def setLinkPredictionCol(self, value):
"""
Sets the value of :py:attr:`linkPredictionCol`.
"""
return self._set(linkPredictionCol=value)
@since("2.0.0")
def getLinkPredictionCol(self):
"""
Gets the value of linkPredictionCol or its default value.
"""
return self.getOrDefault(self.linkPredictionCol)
@since("2.0.0")
def setLink(self, value):
"""
Sets the value of :py:attr:`link`.
"""
return self._set(link=value)
@since("2.0.0")
def getLink(self):
"""
Gets the value of link or its default value.
"""
return self.getOrDefault(self.link)
class GeneralizedLinearRegressionModel(JavaModel, JavaPredictionModel, JavaMLWritable,
JavaMLReadable):
"""
.. note:: Experimental
Model fitted by :class:`GeneralizedLinearRegression`.
.. versionadded:: 2.0.0
"""
@property
@since("2.0.0")
def coefficients(self):
"""
Model coefficients.
"""
return self._call_java("coefficients")
@property
@since("2.0.0")
def intercept(self):
"""
Model intercept.
"""
return self._call_java("intercept")
@property
@since("2.0.0")
def summary(self):
"""
Gets summary (e.g. residuals, deviance, pValues) of model on
training set. An exception is thrown if
`trainingSummary is None`.
"""
if self.hasSummary:
java_glrt_summary = self._call_java("summary")
return GeneralizedLinearRegressionTrainingSummary(java_glrt_summary)
else:
raise RuntimeError("No training summary available for this %s" %
self.__class__.__name__)
@property
@since("2.0.0")
def hasSummary(self):
"""
Indicates whether a training summary exists for this model
instance.
"""
return self._call_java("hasSummary")
@since("2.0.0")
def evaluate(self, dataset):
"""
Evaluates the model on a test dataset.
:param dataset:
Test dataset to evaluate model on, where dataset is an
instance of :py:class:`pyspark.sql.DataFrame`
"""
if not isinstance(dataset, DataFrame):
raise ValueError("dataset must be a DataFrame but got %s." % type(dataset))
java_glr_summary = self._call_java("evaluate", dataset)
return GeneralizedLinearRegressionSummary(java_glr_summary)
class GeneralizedLinearRegressionSummary(JavaWrapper):
"""
.. note:: Experimental
Generalized linear regression results evaluated on a dataset.
.. versionadded:: 2.0.0
"""
@property
@since("2.0.0")
def predictions(self):
"""
Predictions output by the model's `transform` method.
"""
return self._call_java("predictions")
@property
@since("2.0.0")
def predictionCol(self):
"""
Field in :py:attr:`predictions` which gives the predicted value of each instance.
This is set to a new column name if the original model's `predictionCol` is not set.
"""
return self._call_java("predictionCol")
@property
@since("2.0.0")
def rank(self):
"""
The numeric rank of the fitted linear model.
"""
return self._call_java("rank")
@property
@since("2.0.0")
def degreesOfFreedom(self):
"""
Degrees of freedom.
"""
return self._call_java("degreesOfFreedom")
@property
@since("2.0.0")
def residualDegreeOfFreedom(self):
"""
The residual degrees of freedom.
"""
return self._call_java("residualDegreeOfFreedom")
@property
@since("2.0.0")
def residualDegreeOfFreedomNull(self):
"""
The residual degrees of freedom for the null model.
"""
return self._call_java("residualDegreeOfFreedomNull")
@since("2.0.0")
def residuals(self, residualsType="deviance"):
"""
Get the residuals of the fitted model by type.
:param residualsType: The type of residuals which should be returned.
Supported options: deviance (default), pearson, working, and response.
"""
return self._call_java("residuals", residualsType)
@property
@since("2.0.0")
def nullDeviance(self):
"""
The deviance for the null model.
"""
return self._call_java("nullDeviance")
@property
@since("2.0.0")
def deviance(self):
"""
The deviance for the fitted model.
"""
return self._call_java("deviance")
@property
@since("2.0.0")
def dispersion(self):
"""
The dispersion of the fitted model.
It is taken as 1.0 for the "binomial" and "poisson" families, and otherwise
estimated by the residual Pearson's Chi-Squared statistic (which is defined as
sum of the squares of the Pearson residuals) divided by the residual degrees of freedom.
"""
return self._call_java("dispersion")
@property
@since("2.0.0")
def aic(self):
"""
Akaike's "An Information Criterion"(AIC) for the fitted model.
"""
return self._call_java("aic")
@inherit_doc
class GeneralizedLinearRegressionTrainingSummary(GeneralizedLinearRegressionSummary):
"""
.. note:: Experimental
Generalized linear regression training results.
.. versionadded:: 2.0.0
"""
@property
@since("2.0.0")
def numIterations(self):
"""
Number of training iterations.
"""
return self._call_java("numIterations")
@property
@since("2.0.0")
def solver(self):
"""
The numeric solver used for training.
"""
return self._call_java("solver")
@property
@since("2.0.0")
def coefficientStandardErrors(self):
"""
Standard error of estimated coefficients and intercept.
If :py:attr:`GeneralizedLinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
"""
return self._call_java("coefficientStandardErrors")
@property
@since("2.0.0")
def tValues(self):
"""
T-statistic of estimated coefficients and intercept.
If :py:attr:`GeneralizedLinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
"""
return self._call_java("tValues")
@property
@since("2.0.0")
def pValues(self):
"""
Two-sided p-value of estimated coefficients and intercept.
If :py:attr:`GeneralizedLinearRegression.fitIntercept` is set to True,
then the last element returned corresponds to the intercept.
"""
return self._call_java("pValues")
if __name__ == "__main__":
import doctest
import pyspark.ml.regression
from pyspark.sql import SparkSession
globs = pyspark.ml.regression.__dict__.copy()
# The small batch size here ensures that we see multiple batches,
# even in these small test examples:
spark = SparkSession.builder\
.master("local[2]")\
.appName("ml.regression tests")\
.getOrCreate()
sc = spark.sparkContext
globs['sc'] = sc
globs['spark'] = spark
import tempfile
temp_path = tempfile.mkdtemp()
globs['temp_path'] = temp_path
try:
(failure_count, test_count) = doctest.testmod(globs=globs, optionflags=doctest.ELLIPSIS)
spark.stop()
finally:
from shutil import rmtree
try:
rmtree(temp_path)
except OSError:
pass
if failure_count:
exit(-1)
|
SnappyDataInc/spark
|
python/pyspark/ml/regression.py
|
Python
|
apache-2.0
| 59,072
|
[
"Gaussian"
] |
df8c00d5431945011993ed5ddadc439b2aea71ea7973cd1bfb752d6665a99f0a
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (division, print_function, absolute_import,
unicode_literals)
__all__ = ["Sampler", "default_beta_ladder"]
import numpy as np
import multiprocessing as multi
from . import util
def default_beta_ladder(ndim, ntemps=None, Tmax=None):
"""
    Returns a ladder of :math:`\\beta \\equiv 1/T` under a geometric spacing that
is determined by the arguments ``ntemps`` and ``Tmax``. The temperature
selection algorithm works as follows:
Ideally, ``Tmax`` should be specified such that the tempered posterior
looks like the prior at this temperature. If using adaptive parallel
    tempering, per `arXiv:1501.05823 <http://arxiv.org/abs/1501.05823>`_,
choosing ``Tmax = inf`` is a safe bet, so long as ``ntemps`` is also
specified.
:param ndim:
The number of dimensions in the parameter space.
:param ntemps: (optional)
If set, the number of temperatures to generate.
:param Tmax: (optional)
If set, the maximum temperature for the ladder.
Temperatures are chosen according to the following algorithm:
* If neither ``ntemps`` nor ``Tmax`` is specified, raise an exception
(insufficient information).
* If ``ntemps`` is specified but not ``Tmax``, return a ladder spaced so
that a Gaussian posterior would have a 25% temperature swap acceptance
ratio.
* If ``Tmax`` is specified but not ``ntemps``:
* If ``Tmax = inf``, raise an exception (insufficient information).
* Else, space chains geometrically as above (for 25% acceptance) until
``Tmax`` is reached.
* If ``Tmax`` and ``ntemps`` are specified:
* If ``Tmax = inf``, place one chain at ``inf`` and ``ntemps-1`` in a
25% geometric spacing.
* Else, use the unique geometric spacing defined by ``ntemps`` and
``Tmax``.
"""
if type(ndim) != int or ndim < 1:
raise ValueError('Invalid number of dimensions specified.')
if ntemps is None and Tmax is None:
raise ValueError('Must specify one of ``ntemps`` and ``Tmax``.')
if Tmax is not None and Tmax <= 1:
raise ValueError('``Tmax`` must be greater than 1.')
if ntemps is not None and (type(ntemps) != int or ntemps < 1):
raise ValueError('Invalid number of temperatures specified.')
tstep = np.array([25.2741, 7., 4.47502, 3.5236, 3.0232,
2.71225, 2.49879, 2.34226, 2.22198, 2.12628,
2.04807, 1.98276, 1.92728, 1.87946, 1.83774,
1.80096, 1.76826, 1.73895, 1.7125, 1.68849,
1.66657, 1.64647, 1.62795, 1.61083, 1.59494,
1.58014, 1.56632, 1.55338, 1.54123, 1.5298,
1.51901, 1.50881, 1.49916, 1.49, 1.4813,
1.47302, 1.46512, 1.45759, 1.45039, 1.4435,
1.4369, 1.43056, 1.42448, 1.41864, 1.41302,
1.40761, 1.40239, 1.39736, 1.3925, 1.38781,
1.38327, 1.37888, 1.37463, 1.37051, 1.36652,
1.36265, 1.35889, 1.35524, 1.3517, 1.34825,
1.3449, 1.34164, 1.33847, 1.33538, 1.33236,
1.32943, 1.32656, 1.32377, 1.32104, 1.31838,
1.31578, 1.31325, 1.31076, 1.30834, 1.30596,
1.30364, 1.30137, 1.29915, 1.29697, 1.29484,
1.29275, 1.29071, 1.2887, 1.28673, 1.2848,
1.28291, 1.28106, 1.27923, 1.27745, 1.27569,
1.27397, 1.27227, 1.27061, 1.26898, 1.26737,
1.26579, 1.26424, 1.26271, 1.26121,
1.25973])
if ndim > tstep.shape[0]:
# An approximation to the temperature step at large
# dimension
tstep = 1.0 + 2.0 * np.sqrt(np.log(4.0)) / np.sqrt(ndim)
else:
tstep = tstep[ndim - 1]
appendInf = False
if Tmax == np.inf:
appendInf = True
Tmax = None
ntemps = ntemps - 1
if ntemps is not None:
if Tmax is None:
# Determine Tmax from ntemps.
Tmax = tstep ** (ntemps - 1)
else:
if Tmax is None:
            raise ValueError('Must specify at least one of ``ntemps`` and '
                             'finite ``Tmax``.')
# Determine ntemps from Tmax.
ntemps = int(np.log(Tmax) / np.log(tstep) + 2)
betas = np.logspace(0, -np.log10(Tmax), ntemps)
if appendInf:
# Use a geometric spacing, but replace the top-most temperature with
# infinity.
betas = np.concatenate((betas, [0]))
return betas
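# A minimal usage sketch of the rules above (approximate values for ndim=2,
# where the tabulated step is tstep = 7.0):
#
#   default_beta_ladder(2, ntemps=4)
#   # -> array([1., 0.1429, 0.0204, 0.0029])   geometric, ratio 1/7
#   default_beta_ladder(2, ntemps=4, Tmax=np.inf)
#   # -> array([1., 0.1429, 0.0204, 0.])       hottest chain samples the prior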
class LikePriorEvaluator(object):
"""
Wrapper class for logl and logp.
"""
def __init__(self, logl, logp,
loglargs=[], logpargs=[],
loglkwargs={}, logpkwargs={}):
self.logl = logl
self.logp = logp
self.loglargs = loglargs
self.logpargs = logpargs
self.loglkwargs = loglkwargs
self.logpkwargs = logpkwargs
def __call__(self, x):
lp = self.logp(x, *self.logpargs, **self.logpkwargs)
if np.isnan(lp):
raise ValueError('Prior function returned NaN.')
if lp == float('-inf'):
# Can't return -inf, since this messes with beta=0 behaviour.
ll = 0
else:
ll = self.logl(x, *self.loglargs, **self.loglkwargs)
if np.isnan(ll).any():
raise ValueError('Log likelihood function returned NaN.')
return ll, lp
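# Sketch of the wrapper in isolation (toy functions; nothing here is part of
# the class API):
#
#   logl = lambda x: -0.5 * np.dot(x, x)    # standard-Gaussian log-likelihood
#   logp = lambda x: 0.0                    # flat (improper) log-prior
#   LikePriorEvaluator(logl, logp)(np.zeros(3))   # -> (0.0, 0.0)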
class Sampler(object):
"""
A parallel-tempered ensemble sampler, using :class:`EnsembleSampler`
for sampling within each parallel chain.
:param nwalkers:
The number of ensemble walkers at each temperature.
:param dim:
The dimension of parameter space.
:param betas: (optional)
Array giving the inverse temperatures, :math:`\\beta=1/T`, used in the
ladder. The default is chosen according to
        :func:`default_beta_ladder` using ``ntemps`` and ``Tmax``.
:param ntemps: (optional)
If set, the number of temperatures to use.
:param Tmax: (optional)
If set, the maximum temperature for the ladder.
:param logl:
The log-likelihood function.
:param logp:
The log-prior function.
:param threads: (optional)
The number of parallel threads to use in sampling.
:param pool: (optional)
Alternative to ``threads``. Any object that implements a
``map`` method compatible with the built-in ``map`` will do
here. For example, :class:`multi.Pool` will do.
:param a: (optional)
Proposal scale factor.
:param loglargs: (optional)
Positional arguments for the log-likelihood function.
:param logpargs: (optional)
Positional arguments for the log-prior function.
:param loglkwargs: (optional)
Keyword arguments for the log-likelihood function.
:param logpkwargs: (optional)
Keyword arguments for the log-prior function.
:param adaptation_lag: (optional)
Time lag for temperature dynamics decay. Default: 10000.
:param adaptation_time: (optional)
Time-scale for temperature dynamics. Default: 100.
"""
def __init__(self, nwalkers, dim, logl, logp,
ntemps=None, Tmax=None, betas=None,
threads=1, pool=None, a=2.0,
loglargs=[], logpargs=[],
loglkwargs={}, logpkwargs={},
adaptation_lag=10000, adaptation_time=100,
random=None):
if random is None:
self._random = np.random.mtrand.RandomState()
else:
self._random = random
self._likeprior = LikePriorEvaluator(logl, logp,
loglargs, logpargs,
loglkwargs, logpkwargs)
self.a = a
self.nwalkers = nwalkers
self.dim = dim
self.adaptation_time = adaptation_time
self.adaptation_lag = adaptation_lag
# Set temperature ladder. Append beta=0 to generated ladder.
if betas is not None:
self._betas = np.array(betas).copy()
else:
self._betas = default_beta_ladder(self.dim,
ntemps=ntemps,
Tmax=Tmax)
# Make sure ladder is ascending in temperature.
self._betas[::-1].sort()
if self.nwalkers % 2 != 0:
raise ValueError('The number of walkers must be even.')
if self.nwalkers < 2 * self.dim:
raise ValueError('The number of walkers must be greater than '
'``2*dimension``.')
self.pool = pool
if threads > 1 and pool is None:
self.pool = multi.Pool(threads)
self.reset()
def reset(self, random=None, betas=None, time=None):
"""
Clear the ``time``, ``chain``, ``logposterior``,
``loglikelihood``, ``acceptance_fraction``,
``tswap_acceptance_fraction`` stored properties.
"""
# Reset chain.
self._chain = None
self._logposterior = None
self._loglikelihood = None
self._beta_history = None
# Reset sampler state.
self._time = 0
self._p0 = None
self._logposterior0 = None
self._loglikelihood0 = None
if betas is not None:
self._betas = betas
self.nswap = np.zeros(self.ntemps,
dtype=np.float)
self.nswap_accepted = np.zeros(self.ntemps,
dtype=np.float)
self.nprop = np.zeros((self.ntemps, self.nwalkers),
dtype=np.float)
self.nprop_accepted = np.zeros((self.ntemps, self.nwalkers),
dtype=np.float)
if random is not None:
self._random = random
if time is not None:
self._time = time
def run_mcmc(self, *args, **kwargs):
"""
Identical to ``sample``, but returns the final ensemble and discards
intermediate ensembles.
"""
for x in self.sample(*args, **kwargs):
pass
return x
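    # End-to-end sketch (all names hypothetical): sample a 3-d target with
    # 4 temperatures and 10 walkers, then read the cold (beta = 1) chain.
    #
    #   sampler = Sampler(nwalkers=10, dim=3, logl=logl, logp=logp, ntemps=4)
    #   p0 = np.random.randn(4, 10, 3)            # (ntemps, nwalkers, dim)
    #   p, logpost, loglike = sampler.run_mcmc(p0, iterations=100)
    #   cold = sampler.chain[0]                   # (nwalkers, nsteps, dim)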
def sample(self, p0=None,
iterations=1, thin=1,
storechain=True, adapt=False,
swap_ratios=False):
"""
Advance the chains ``iterations`` steps as a generator.
:param p0:
The initial positions of the walkers. Shape should be
``(ntemps, nwalkers, dim)``. Can be omitted if resuming
from a previous run.
:param iterations: (optional)
The number of iterations to perform.
:param thin: (optional)
The number of iterations to perform between saving the
state to the internal chain.
:param storechain: (optional)
If ``True`` store the iterations in the ``chain``
property.
:param adapt: (optional)
If ``True``, the temperature ladder is dynamically adapted as the
sampler runs to achieve uniform swap acceptance ratios between
adjacent chains. See
            `arXiv:1501.05823 <http://arxiv.org/abs/1501.05823>`_ for details.
:param swap_ratios: (optional)
If ``True``, also yield the instantaneous inter-chain acceptance
ratios in the fourth element of the yielded tuple.
At each iteration, this generator yields
* ``p``, the current position of the walkers.
* ``logpost``, the current posterior values for the walkers.
* ``loglike``, the current likelihood values for the walkers.
* ``ratios``, the instantaneous inter-chain acceptance ratios (if
requested).
"""
# Set initial walker positions.
if p0 is not None:
# Start anew.
self._p0 = p = np.array(p0).copy()
self._logposterior0 = None
self._loglikelihood0 = None
elif self._p0 is not None:
# Now, where were we?
p = self._p0
else:
raise ValueError('Initial walker positions not specified.')
# Check for dodgy inputs.
if np.any(np.isinf(p)):
raise ValueError('At least one parameter value was infinite.')
if np.any(np.isnan(p)):
raise ValueError('At least one parameter value was NaN.')
# If we have no likelihood or prior values, compute them.
if self._logposterior0 is None or self._loglikelihood0 is None:
logl, logp = self._evaluate(p)
logpost = self._tempered_likelihood(logl) + logp
self._loglikelihood0 = logl
self._logposterior0 = logpost
else:
logl = self._loglikelihood0
logpost = self._logposterior0
if (logpost == -np.inf).any():
raise ValueError('Attempting to start with samples outside '
'posterior support.')
# Expand the chain in advance of the iterations
if storechain:
isave = self._expand_chain(iterations // thin)
for i in range(iterations):
self._stretch(p, logpost, logl)
p, ratios = self._temperature_swaps(self._betas, p, logpost, logl)
# TODO Should the notion of a "complete" iteration really include
# the temperature adjustment?
if adapt and self.ntemps > 1:
dbetas = self._get_ladder_adjustment(self._time,
self._betas,
ratios)
self._betas += dbetas
logpost += self._tempered_likelihood(logl, betas=dbetas)
if (self._time + 1) % thin == 0:
if storechain:
self._chain[:, :, isave, :] = p
self._logposterior[:, :, isave] = logpost
self._loglikelihood[:, :, isave] = logl
self._beta_history[:, isave] = self._betas
isave += 1
self._time += 1
if swap_ratios:
yield p, logpost, logl, ratios
else:
yield p, logpost, logl
def _stretch(self, p, logpost, logl):
"""
Perform the stretch-move proposal on each ensemble.
"""
for j in [0, 1]:
# Get positions of walkers to be updated and walker to be sampled.
jupdate = j
jsample = (j + 1) % 2
pupdate = p[:, jupdate::2, :]
psample = p[:, jsample::2, :]
zs = np.exp(self._random.uniform(low=-np.log(self.a),
high=np.log(self.a),
size=(self.ntemps,
self.nwalkers // 2)))
qs = np.zeros((self.ntemps, self.nwalkers // 2, self.dim))
for k in range(self.ntemps):
js = self._random.randint(0, high=self.nwalkers // 2,
size=self.nwalkers // 2)
qs[k, :, :] = psample[k, js, :] + zs[k, :].reshape(
(self.nwalkers // 2, 1)) * (pupdate[k, :, :] -
psample[k, js, :])
qslogl, qslogp = self._evaluate(qs)
qslogpost = self._tempered_likelihood(qslogl) + qslogp
logpaccept = self.dim * np.log(zs) + qslogpost \
- logpost[:, jupdate::2]
logr = np.log(self._random.uniform(low=0.0, high=1.0,
size=(self.ntemps,
self.nwalkers // 2)))
accepts = logr < logpaccept
accepts = accepts.flatten()
pupdate.reshape((-1, self.dim))[accepts, :] = \
qs.reshape((-1, self.dim))[accepts, :]
logpost[:, jupdate::2].reshape((-1,))[accepts] = \
qslogpost.reshape((-1,))[accepts]
logl[:, jupdate::2].reshape((-1,))[accepts] = \
qslogl.reshape((-1,))[accepts]
accepts = accepts.reshape((self.ntemps, self.nwalkers // 2))
self.nprop[:, jupdate::2] += 1.0
self.nprop_accepted[:, jupdate::2] += accepts
def _evaluate(self, ps):
mapf = map if self.pool is None else self.pool.map
results = list(mapf(self._likeprior, ps.reshape((-1, self.dim))))
logl = np.fromiter((r[0] for r in results), np.float,
count=len(results)).reshape((self.ntemps, -1))
logp = np.fromiter((r[1] for r in results), np.float,
count=len(results)).reshape((self.ntemps, -1))
return logl, logp
def _tempered_likelihood(self, logl, betas=None):
"""
Compute tempered log likelihood. This is usually a mundane
multiplication, except for the special case where beta == 0 *and*
we're outside the likelihood support.
Here, we find a singularity that demands more careful attention; we
allow the likelihood to dominate the temperature, since wandering
outside the likelihood support causes a discontinuity.
"""
if betas is None:
betas = self._betas
betas = betas.reshape((-1, 1))
with np.errstate(invalid='ignore'):
loglT = logl * betas
loglT[np.isnan(loglT)] = -np.inf
return loglT
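    # Concretely: loglT[t, w] = betas[t] * logl[t, w], except that the
    # 0 * (-inf) = nan case is mapped back to -inf, so even a beta = 0 chain
    # still rejects proposals outside the likelihood support.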
def _temperature_swaps(self, betas, p, logpost, logl):
"""
Perform parallel-tempering temperature swaps on the state
in ``p`` with associated ``logpost`` and ``logl``.
"""
ntemps = len(betas)
ratios = np.zeros(ntemps - 1)
for i in range(ntemps - 1, 0, -1):
bi = betas[i]
bi1 = betas[i - 1]
dbeta = bi1 - bi
iperm = self._random.permutation(self.nwalkers)
i1perm = self._random.permutation(self.nwalkers)
raccept = np.log(self._random.uniform(size=self.nwalkers))
paccept = dbeta * (logl[i, iperm] - logl[i - 1, i1perm])
self.nswap[i] += self.nwalkers
self.nswap[i - 1] += self.nwalkers
asel = (paccept > raccept)
nacc = np.sum(asel)
self.nswap_accepted[i] += nacc
self.nswap_accepted[i - 1] += nacc
ratios[i - 1] = nacc / self.nwalkers
ptemp = np.copy(p[i, iperm[asel], :])
logltemp = np.copy(logl[i, iperm[asel]])
logprtemp = np.copy(logpost[i, iperm[asel]])
p[i, iperm[asel], :] = p[i - 1, i1perm[asel], :]
logl[i, iperm[asel]] = logl[i - 1, i1perm[asel]]
logpost[i, iperm[asel]] = logpost[i - 1, i1perm[asel]] \
- dbeta * logl[i - 1, i1perm[asel]]
p[i - 1, i1perm[asel], :] = ptemp
logl[i - 1, i1perm[asel]] = logltemp
logpost[i - 1, i1perm[asel]] = logprtemp + dbeta * logltemp
return p, ratios
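    # The test above is the standard parallel-tempering swap rule: a swap
    # between rungs i and i-1 is accepted with probability
    #   min(1, exp((beta_{i-1} - beta_i) * (logl_i - logl_{i-1})))
    # which is exactly the ``paccept > raccept`` comparison, done in log space.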
def _get_ladder_adjustment(self, time, betas0, ratios):
"""
Execute temperature adjustment according to dynamics outlined in
`arXiv:1501.05823 <http://arxiv.org/abs/1501.05823>`_.
"""
betas = betas0.copy()
# Modulate temperature adjustments with a hyperbolic decay.
decay = self.adaptation_lag / (time + self.adaptation_lag)
kappa = decay / self.adaptation_time
# Construct temperature adjustments.
dSs = kappa * (ratios[:-1] - ratios[1:])
# Compute new ladder (hottest and coldest chains don't move).
deltaTs = np.diff(1 / betas[:-1])
deltaTs *= np.exp(dSs)
betas[1:-1] = 1 / (np.cumsum(deltaTs) + 1 / betas[0])
# Don't mutate the ladder here; let the client code do that.
return betas - betas0
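    # In equation form: writing S_i = log(T_i - T_{i-1}) for the interior
    # temperature gaps, each gap is nudged by dS_i = kappa(t) * (A_i - A_{i+1}),
    # where A_i is the swap acceptance ratio below rung i and
    # kappa(t) = (lag / (t + lag)) / adaptation_time decays hyperbolically.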
def _expand_chain(self, nsave):
"""
Expand ``self._chain``, ``self._logposterior``,
``self._loglikelihood``, and ``self._beta_history``
ahead of run to make room for new samples.
:param nsave:
The number of additional iterations for which to make room.
:return ``isave``:
Returns the index at which to begin inserting new entries.
"""
if self._chain is None:
isave = 0
self._chain = np.zeros((self.ntemps, self.nwalkers, nsave,
self.dim))
self._logposterior = np.zeros((self.ntemps, self.nwalkers, nsave))
self._loglikelihood = np.zeros((self.ntemps, self.nwalkers,
nsave))
self._beta_history = np.zeros((self.ntemps, nsave))
else:
isave = self._chain.shape[2]
self._chain = np.concatenate((self._chain,
np.zeros((self.ntemps,
self.nwalkers,
nsave, self.dim))),
axis=2)
self._logposterior = np.concatenate((self._logposterior,
np.zeros((self.ntemps,
self.nwalkers,
nsave))),
axis=2)
self._loglikelihood = np.concatenate((self._loglikelihood,
np.zeros((self.ntemps,
self.nwalkers,
nsave))),
axis=2)
self._beta_history = np.concatenate((self._beta_history,
np.zeros((self.ntemps,
nsave))),
axis=1)
return isave
def log_evidence_estimate(self, logls=None, fburnin=0.1):
"""
Thermodynamic integration estimate of the evidence for the sampler.
:param logls: (optional) The log-likelihoods to use for
computing the thermodynamic evidence. If ``None`` (the
default), use the stored log-likelihoods in the sampler.
Should be of shape ``(Ntemps, Nwalkers, Nsamples)``.
:param fburnin: (optional)
The fraction of the chain to discard as burnin samples; only the
final ``1-fburnin`` fraction of the samples will be used to
compute the evidence; the default is ``fburnin = 0.1``.
:return ``(logZ, dlogZ)``: Returns an estimate of the
log-evidence and the error associated with the finite
number of temperatures at which the posterior has been
sampled.
For details, see ``thermodynamic_integration_log_evidence``.
"""
if logls is None:
if self.loglikelihood is not None:
logls = self.loglikelihood
else:
raise ValueError('No log likelihood values available.')
istart = int(logls.shape[2] * fburnin + 0.5)
mean_logls = np.mean(np.mean(logls, axis=1)[:, istart:], axis=1)
return util.thermodynamic_integration_log_evidence(self._betas,
mean_logls)
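    # Thermodynamic integration in one line: log Z = integral over beta in
    # [0, 1] of <log L>_beta.  The per-rung means computed above are handed to
    # ``util.thermodynamic_integration_log_evidence``, which performs the
    # quadrature and estimates the discretization error.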
@property
def random(self):
"""
Returns the random number generator for the sampler.
"""
return self._random
@property
def betas(self):
"""
Returns the current inverse temperature ladder of the sampler.
"""
return self._betas
@property
def time(self):
"""
Returns the current time, in iterations, of the sampler.
"""
return self._time
@property
def chain(self):
"""
Returns the stored chain of samples; shape ``(Ntemps,
Nwalkers, Nsteps, Ndim)``.
"""
return self._chain
@property
def flatchain(self):
"""Returns the stored chain, but flattened along the walker axis, so
of shape ``(Ntemps, Nwalkers*Nsteps, Ndim)``.
"""
s = self.chain.shape
return self._chain.reshape((s[0], -1, s[3]))
@property
def logprobability(self):
"""
Matrix of logprobability values; shape ``(Ntemps, Nwalkers, Nsteps)``.
"""
return self._logposterior
@property
def loglikelihood(self):
"""
Matrix of log-likelihood values; shape ``(Ntemps, Nwalkers, Nsteps)``.
"""
return self._loglikelihood
@property
def beta_history(self):
"""
Matrix of inverse temperatures; shape ``(Ntemps, Nsteps)``.
"""
return self._beta_history
@property
def tswap_acceptance_fraction(self):
"""
Returns an array of accepted temperature swap fractions for
each temperature; shape ``(ntemps, )``.
"""
return self.nswap_accepted / self.nswap
@property
def ntemps(self):
"""
The number of temperature chains.
"""
return len(self._betas)
@property
def acceptance_fraction(self):
"""
Matrix of shape ``(Ntemps, Nwalkers)`` detailing the
acceptance fraction for each walker.
"""
return self.nprop_accepted / self.nprop
@property
def acor(self):
"""
Returns a matrix of autocorrelation lengths for each
parameter in each temperature of shape ``(Ntemps, Ndim)``.
"""
return self.get_autocorr_time()
def get_autocorr_time(self, window=50):
"""
Returns a matrix of autocorrelation lengths for each
parameter in each temperature of shape ``(Ntemps, Ndim)``.
:param window: (optional)
The size of the windowing function. This is equivalent to the
maximum number of lags to use. (default: 50)
"""
acors = np.zeros((self.ntemps, self.dim))
for i in range(self.ntemps):
x = np.mean(self._chain[i, :, :, :], axis=0)
acors[i, :] = util.autocorr_integrated_time(x, window=window)
return acors
|
asteca/ASteCA
|
packages/best_fit/ptemcee/sampler.py
|
Python
|
gpl-3.0
| 26,776
|
[
"Gaussian"
] |
c8f6cb2215ef15fea499ae6b21736deabc67350edd42ed393f28cabce1781e7a
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
COUNTRIES_LIST = [(u'Afghanistan', u'AF', u'AFG', 4, u'ISO 3166-2:AF'),
(u'\xc5land Islands', u'AX', u'ALA', 248, u'ISO 3166-2:AX'),
(u'Albania', u'AL', u'ALB', 8, u'ISO 3166-2:AL'),
(u'Algeria', u'DZ', u'DZA', 12, u'ISO 3166-2:DZ'),
(u'American Samoa', u'AS', u'ASM', 16, u'ISO 3166-2:AS'),
(u'Andorra', u'AD', u'AND', 20, u'ISO 3166-2:AD'),
(u'Angola', u'AO', u'AGO', 24, u'ISO 3166-2:AO'),
(u'Anguilla', u'AI', u'AIA', 660, u'ISO 3166-2:AI'),
(u'Antarctica', u'AQ', u'ATA', 10, u'ISO 3166-2:AQ'),
(u'Antigua and Barbuda', u'AG', u'ATG', 28, u'ISO 3166-2:AG'),
(u'Argentina', u'AR', u'ARG', 32, u'ISO 3166-2:AR'),
(u'Armenia', u'AM', u'ARM', 51, u'ISO 3166-2:AM'),
(u'Aruba', u'AW', u'ABW', 533, u'ISO 3166-2:AW'),
(u'Australia', u'AU', u'AUS', 36, u'ISO 3166-2:AU'),
(u'Austria', u'AT', u'AUT', 40, u'ISO 3166-2:AT'),
(u'Azerbaijan', u'AZ', u'AZE', 31, u'ISO 3166-2:AZ'),
(u'Bahamas', u'BS', u'BHS', 44, u'ISO 3166-2:BS'),
(u'Bahrain', u'BH', u'BHR', 48, u'ISO 3166-2:BH'),
(u'Bangladesh', u'BD', u'BGD', 50, u'ISO 3166-2:BD'),
(u'Barbados', u'BB', u'BRB', 52, u'ISO 3166-2:BB'),
(u'Belarus', u'BY', u'BLR', 112, u'ISO 3166-2:BY'),
(u'Belgium', u'BE', u'BEL', 56, u'ISO 3166-2:BE'),
(u'Belize', u'BZ', u'BLZ', 84, u'ISO 3166-2:BZ'),
(u'Benin', u'BJ', u'BEN', 204, u'ISO 3166-2:BJ'),
(u'Bermuda', u'BM', u'BMU', 60, u'ISO 3166-2:BM'),
(u'Bhutan', u'BT', u'BTN', 64, u'ISO 3166-2:BT'),
(u'Bolivia', u'BO', u'BOL', 68, u'ISO 3166-2:BO'),
(u'Bosnia and Herzegovina', u'BA', u'BIH', 70, u'ISO 3166-2:BA'),
(u'Botswana', u'BW', u'BWA', 72, u'ISO 3166-2:BW'),
(u'Bouvet Island', u'BV', u'BVT', 74, u'ISO 3166-2:BV'),
(u'Brazil', u'BR', u'BRA', 76, u'ISO 3166-2:BR'),
(u'British Indian Ocean Territory', u'IO', u'IOT', 86, u'ISO 3166-2:IO'),
(u'Brunei Darussalam', u'BN', u'BRN', 96, u'ISO 3166-2:BN'),
(u'Bulgaria', u'BG', u'BGR', 100, u'ISO 3166-2:BG'),
(u'Burkina Faso', u'BF', u'BFA', 854, u'ISO 3166-2:BF'),
(u'Burundi', u'BI', u'BDI', 108, u'ISO 3166-2:BI'),
(u'Cambodia', u'KH', u'KHM', 116, u'ISO 3166-2:KH'),
(u'Cameroon', u'CM', u'CMR', 120, u'ISO 3166-2:CM'),
(u'Canada', u'CA', u'CAN', 124, u'ISO 3166-2:CA'),
(u'Cape Verde', u'CV', u'CPV', 132, u'ISO 3166-2:CV'),
(u'Cayman Islands', u'KY', u'CYM', 136, u'ISO 3166-2:KY'),
(u'Central African Republic', u'CF', u'CAF', 140, u'ISO 3166-2:CF'),
(u'Chad', u'TD', u'TCD', 148, u'ISO 3166-2:TD'),
(u'Chile', u'CL', u'CHL', 152, u'ISO 3166-2:CL'),
(u'China', u'CN', u'CHN', 156, u'ISO 3166-2:CN'),
(u'Christmas Island', u'CX', u'CXR', 162, u'ISO 3166-2:CX'),
(u'Cocos (Keeling) Islands', u'CC', u'CCK', 166, u'ISO 3166-2:CC'),
(u'Colombia', u'CO', u'COL', 170, u'ISO 3166-2:CO'),
(u'Comoros', u'KM', u'COM', 174, u'ISO 3166-2:KM'),
(u'Congo', u'CG', u'COG', 178, u'ISO 3166-2:CG'),
(u'Congo, Democratic Republic of the', u'CD', u'COD', 180, u'ISO 3166-2:CD'),
(u'Cook Islands', u'CK', u'COK', 184, u'ISO 3166-2:CK'),
(u'Costa Rica', u'CR', u'CRI', 188, u'ISO 3166-2:CR'),
(u"Cote d'Ivoire C\xf4te d'Ivoire", u'CI', u'CIV', 384, u'ISO 3166-2:CI'),
(u'Croatia', u'HR', u'HRV', 191, u'ISO 3166-2:HR'),
(u'Cuba', u'CU', u'CUB', 192, u'ISO 3166-2:CU'),
(u'Cyprus', u'CY', u'CYP', 196, u'ISO 3166-2:CY'),
(u'Czech Republic', u'CZ', u'CZE', 203, u'ISO 3166-2:CZ'),
(u'Denmark', u'DK', u'DNK', 208, u'ISO 3166-2:DK'),
(u'Djibouti', u'DJ', u'DJI', 262, u'ISO 3166-2:DJ'),
(u'Dominica', u'DM', u'DMA', 212, u'ISO 3166-2:DM'),
(u'Dominican Republic', u'DO', u'DOM', 214, u'ISO 3166-2:DO'),
(u'Ecuador', u'EC', u'ECU', 218, u'ISO 3166-2:EC'),
(u'Egypt', u'EG', u'EGY', 818, u'ISO 3166-2:EG'),
(u'El Salvador', u'SV', u'SLV', 222, u'ISO 3166-2:SV'),
(u'Equatorial Guinea', u'GQ', u'GNQ', 226, u'ISO 3166-2:GQ'),
(u'Eritrea', u'ER', u'ERI', 232, u'ISO 3166-2:ER'),
(u'Estonia', u'EE', u'EST', 233, u'ISO 3166-2:EE'),
(u'Ethiopia', u'ET', u'ETH', 231, u'ISO 3166-2:ET'),
(u'Falkland Islands (Malvinas)', u'FK', u'FLK', 238, u'ISO 3166-2:FK'),
(u'Faroe Islands', u'FO', u'FRO', 234, u'ISO 3166-2:FO'),
(u'Fiji', u'FJ', u'FJI', 242, u'ISO 3166-2:FJ'),
(u'Finland', u'FI', u'FIN', 246, u'ISO 3166-2:FI'),
(u'France', u'FR', u'FRA', 250, u'ISO 3166-2:FR'),
(u'French Guiana', u'GF', u'GUF', 254, u'ISO 3166-2:GF'),
(u'French Polynesia', u'PF', u'PYF', 258, u'ISO 3166-2:PF'),
(u'French Southern Territories', u'TF', u'ATF', 260, u'ISO 3166-2:TF'),
(u'Gabon', u'GA', u'GAB', 266, u'ISO 3166-2:GA'),
(u'Gambia', u'GM', u'GMB', 270, u'ISO 3166-2:GM'),
(u'Georgia', u'GE', u'GEO', 268, u'ISO 3166-2:GE'),
(u'Germany', u'DE', u'DEU', 276, u'ISO 3166-2:DE'),
(u'Ghana', u'GH', u'GHA', 288, u'ISO 3166-2:GH'),
(u'Gibraltar', u'GI', u'GIB', 292, u'ISO 3166-2:GI'),
(u'Greece', u'GR', u'GRC', 300, u'ISO 3166-2:GR'),
(u'Greenland', u'GL', u'GRL', 304, u'ISO 3166-2:GL'),
(u'Grenada', u'GD', u'GRD', 308, u'ISO 3166-2:GD'),
(u'Guadeloupe', u'GP', u'GLP', 312, u'ISO 3166-2:GP'),
(u'Guam', u'GU', u'GUM', 316, u'ISO 3166-2:GU'),
(u'Guatemala', u'GT', u'GTM', 320, u'ISO 3166-2:GT'),
(u'Guernsey', u'GG', u'GGY', 831, u'ISO 3166-2:GG'),
(u'Guinea', u'GN', u'GIN', 324, u'ISO 3166-2:GN'),
(u'Guinea-Bissau', u'GW', u'GNB', 624, u'ISO 3166-2:GW'),
(u'Guyana', u'GY', u'GUY', 328, u'ISO 3166-2:GY'),
(u'Haiti', u'HT', u'HTI', 332, u'ISO 3166-2:HT'),
(u'Heard Island and McDonald Islands', u'HM', u'HMD', 334, u'ISO 3166-2:HM'),
(u'Holy See (Vatican City State)', u'VA', u'VAT', 336, u'ISO 3166-2:VA'),
(u'Honduras', u'HN', u'HND', 340, u'ISO 3166-2:HN'),
(u'Hong Kong', u'HK', u'HKG', 344, u'ISO 3166-2:HK'),
(u'Hungary', u'HU', u'HUN', 348, u'ISO 3166-2:HU'),
(u'Iceland', u'IS', u'ISL', 352, u'ISO 3166-2:IS'),
(u'India', u'IN', u'IND', 356, u'ISO 3166-2:IN'),
(u'Indonesia', u'ID', u'IDN', 360, u'ISO 3166-2:ID'),
(u'Iran, Islamic Republic of', u'IR', u'IRN', 364, u'ISO 3166-2:IR'),
(u'Iraq', u'IQ', u'IRQ', 368, u'ISO 3166-2:IQ'),
(u'Ireland', u'IE', u'IRL', 372, u'ISO 3166-2:IE'),
(u'Isle of Man', u'IM', u'IMN', 833, u'ISO 3166-2:IM'),
(u'Israel', u'IL', u'ISR', 376, u'ISO 3166-2:IL'),
(u'Italy', u'IT', u'ITA', 380, u'ISO 3166-2:IT'),
(u'Jamaica', u'JM', u'JAM', 388, u'ISO 3166-2:JM'),
(u'Japan', u'JP', u'JPN', 392, u'ISO 3166-2:JP'),
(u'Jersey', u'JE', u'JEY', 832, u'ISO 3166-2:JE'),
(u'Jordan', u'JO', u'JOR', 400, u'ISO 3166-2:JO'),
(u'Kazakhstan', u'KZ', u'KAZ', 398, u'ISO 3166-2:KZ'),
(u'Kenya', u'KE', u'KEN', 404, u'ISO 3166-2:KE'),
(u'Kiribati', u'KI', u'KIR', 296, u'ISO 3166-2:KI'),
(u"Korea, Democratic People's Republic of",
u'KP',
u'PRK',
408,
u'ISO 3166-2:KP'),
(u'Korea, Republic of', u'KR', u'KOR', 410, u'ISO 3166-2:KR'),
(u'Kuwait', u'KW', u'KWT', 414, u'ISO 3166-2:KW'),
(u'Kyrgyzstan', u'KG', u'KGZ', 417, u'ISO 3166-2:KG'),
(u"Lao People's Democratic Republic", u'LA', u'LAO', 418, u'ISO 3166-2:LA'),
(u'Latvia', u'LV', u'LVA', 428, u'ISO 3166-2:LV'),
(u'Lebanon', u'LB', u'LBN', 422, u'ISO 3166-2:LB'),
(u'Lesotho', u'LS', u'LSO', 426, u'ISO 3166-2:LS'),
(u'Liberia', u'LR', u'LBR', 430, u'ISO 3166-2:LR'),
(u'Libyan Arab Jamahiriya', u'LY', u'LBY', 434, u'ISO 3166-2:LY'),
(u'Liechtenstein', u'LI', u'LIE', 438, u'ISO 3166-2:LI'),
(u'Lithuania', u'LT', u'LTU', 440, u'ISO 3166-2:LT'),
(u'Luxembourg', u'LU', u'LUX', 442, u'ISO 3166-2:LU'),
(u'Macao', u'MO', u'MAC', 446, u'ISO 3166-2:MO'),
(u'Macedonia, the former Yugoslav Republic of',
u'MK',
u'MKD',
807,
u'ISO 3166-2:MK'),
(u'Madagascar', u'MG', u'MDG', 450, u'ISO 3166-2:MG'),
(u'Malawi', u'MW', u'MWI', 454, u'ISO 3166-2:MW'),
(u'Malaysia', u'MY', u'MYS', 458, u'ISO 3166-2:MY'),
(u'Maldives', u'MV', u'MDV', 462, u'ISO 3166-2:MV'),
(u'Mali', u'ML', u'MLI', 466, u'ISO 3166-2:ML'),
(u'Malta', u'MT', u'MLT', 470, u'ISO 3166-2:MT'),
(u'Marshall Islands', u'MH', u'MHL', 584, u'ISO 3166-2:MH'),
(u'Martinique', u'MQ', u'MTQ', 474, u'ISO 3166-2:MQ'),
(u'Mauritania', u'MR', u'MRT', 478, u'ISO 3166-2:MR'),
(u'Mauritius', u'MU', u'MUS', 480, u'ISO 3166-2:MU'),
(u'Mayotte', u'YT', u'MYT', 175, u'ISO 3166-2:YT'),
(u'Mexico', u'MX', u'MEX', 484, u'ISO 3166-2:MX'),
(u'Micronesia, Federated States of', u'FM', u'FSM', 583, u'ISO 3166-2:FM'),
(u'Moldova, Republic of', u'MD', u'MDA', 498, u'ISO 3166-2:MD'),
(u'Monaco', u'MC', u'MCO', 492, u'ISO 3166-2:MC'),
(u'Mongolia', u'MN', u'MNG', 496, u'ISO 3166-2:MN'),
(u'Montenegro', u'ME', u'MNE', 499, u'ISO 3166-2:ME'),
(u'Montserrat', u'MS', u'MSR', 500, u'ISO 3166-2:MS'),
(u'Morocco', u'MA', u'MAR', 504, u'ISO 3166-2:MA'),
(u'Mozambique', u'MZ', u'MOZ', 508, u'ISO 3166-2:MZ'),
(u'Myanmar', u'MM', u'MMR', 104, u'ISO 3166-2:MM'),
(u'Namibia', u'NA', u'NAM', 516, u'ISO 3166-2:NA'),
(u'Nauru', u'NR', u'NRU', 520, u'ISO 3166-2:NR'),
(u'Nepal', u'NP', u'NPL', 524, u'ISO 3166-2:NP'),
(u'Netherlands', u'NL', u'NLD', 528, u'ISO 3166-2:NL'),
(u'Netherlands Antilles', u'AN', u'ANT', 530, u'ISO 3166-2:AN'),
(u'New Caledonia', u'NC', u'NCL', 540, u'ISO 3166-2:NC'),
(u'New Zealand', u'NZ', u'NZL', 554, u'ISO 3166-2:NZ'),
(u'Nicaragua', u'NI', u'NIC', 558, u'ISO 3166-2:NI'),
(u'Niger', u'NE', u'NER', 562, u'ISO 3166-2:NE'),
(u'Nigeria', u'NG', u'NGA', 566, u'ISO 3166-2:NG'),
(u'Niue', u'NU', u'NIU', 570, u'ISO 3166-2:NU'),
(u'Norfolk Island', u'NF', u'NFK', 574, u'ISO 3166-2:NF'),
(u'Northern Mariana Islands', u'MP', u'MNP', 580, u'ISO 3166-2:MP'),
(u'Norway', u'NO', u'NOR', 578, u'ISO 3166-2:NO'),
(u'Oman', u'OM', u'OMN', 512, u'ISO 3166-2:OM'),
(u'Pakistan', u'PK', u'PAK', 586, u'ISO 3166-2:PK'),
(u'Palau', u'PW', u'PLW', 585, u'ISO 3166-2:PW'),
(u'Palestinian Territory, Occupied', u'PS', u'PSE', 275, u'ISO 3166-2:PS'),
(u'Panama', u'PA', u'PAN', 591, u'ISO 3166-2:PA'),
(u'Papua New Guinea', u'PG', u'PNG', 598, u'ISO 3166-2:PG'),
(u'Paraguay', u'PY', u'PRY', 600, u'ISO 3166-2:PY'),
(u'Peru', u'PE', u'PER', 604, u'ISO 3166-2:PE'),
(u'Philippines', u'PH', u'PHL', 608, u'ISO 3166-2:PH'),
(u'Pitcairn', u'PN', u'PCN', 612, u'ISO 3166-2:PN'),
(u'Poland', u'PL', u'POL', 616, u'ISO 3166-2:PL'),
(u'Portugal', u'PT', u'PRT', 620, u'ISO 3166-2:PT'),
(u'Puerto Rico', u'PR', u'PRI', 630, u'ISO 3166-2:PR'),
(u'Qatar', u'QA', u'QAT', 634, u'ISO 3166-2:QA'),
(u'Reunion R\xe9union', u'RE', u'REU', 638, u'ISO 3166-2:RE'),
(u'Romania', u'RO', u'ROU', 642, u'ISO 3166-2:RO'),
(u'Russian Federation', u'RU', u'RUS', 643, u'ISO 3166-2:RU'),
(u'Rwanda', u'RW', u'RWA', 646, u'ISO 3166-2:RW'),
(u'Saint Barth\xe9lemy', u'BL', u'BLM', 652, u'ISO 3166-2:BL'),
(u'Saint Helena', u'SH', u'SHN', 654, u'ISO 3166-2:SH'),
(u'Saint Kitts and Nevis', u'KN', u'KNA', 659, u'ISO 3166-2:KN'),
(u'Saint Lucia', u'LC', u'LCA', 662, u'ISO 3166-2:LC'),
(u'Saint Martin (French part)', u'MF', u'MAF', 663, u'ISO 3166-2:MF'),
(u'Saint Pierre and Miquelon', u'PM', u'SPM', 666, u'ISO 3166-2:PM'),
(u'Saint Vincent and the Grenadines', u'VC', u'VCT', 670, u'ISO 3166-2:VC'),
(u'Samoa', u'WS', u'WSM', 882, u'ISO 3166-2:WS'),
(u'San Marino', u'SM', u'SMR', 674, u'ISO 3166-2:SM'),
(u'Sao Tome and Principe', u'ST', u'STP', 678, u'ISO 3166-2:ST'),
(u'Saudi Arabia', u'SA', u'SAU', 682, u'ISO 3166-2:SA'),
(u'Senegal', u'SN', u'SEN', 686, u'ISO 3166-2:SN'),
(u'Serbia', u'RS', u'SRB', 688, u'ISO 3166-2:RS'),
(u'Seychelles', u'SC', u'SYC', 690, u'ISO 3166-2:SC'),
(u'Sierra Leone', u'SL', u'SLE', 694, u'ISO 3166-2:SL'),
(u'Singapore', u'SG', u'SGP', 702, u'ISO 3166-2:SG'),
(u'Slovakia', u'SK', u'SVK', 703, u'ISO 3166-2:SK'),
(u'Slovenia', u'SI', u'SVN', 705, u'ISO 3166-2:SI'),
(u'Solomon Islands', u'SB', u'SLB', 90, u'ISO 3166-2:SB'),
(u'Somalia', u'SO', u'SOM', 706, u'ISO 3166-2:SO'),
(u'South Africa', u'ZA', u'ZAF', 710, u'ISO 3166-2:ZA'),
(u'South Georgia and the South Sandwich Islands',
u'GS',
u'SGS',
239,
u'ISO 3166-2:GS'),
(u'Spain', u'ES', u'ESP', 724, u'ISO 3166-2:ES'),
(u'Sri Lanka', u'LK', u'LKA', 144, u'ISO 3166-2:LK'),
(u'Sudan', u'SD', u'SDN', 736, u'ISO 3166-2:SD'),
(u'Suriname', u'SR', u'SUR', 740, u'ISO 3166-2:SR'),
(u'Svalbard and Jan Mayen', u'SJ', u'SJM', 744, u'ISO 3166-2:SJ'),
(u'Swaziland', u'SZ', u'SWZ', 748, u'ISO 3166-2:SZ'),
(u'Sweden', u'SE', u'SWE', 752, u'ISO 3166-2:SE'),
(u'Switzerland', u'CH', u'CHE', 756, u'ISO 3166-2:CH'),
(u'Syrian Arab Republic', u'SY', u'SYR', 760, u'ISO 3166-2:SY'),
(u'Taiwan, Province of China', u'TW', u'TWN', 158, u'ISO 3166-2:TW'),
(u'Tajikistan', u'TJ', u'TJK', 762, u'ISO 3166-2:TJ'),
(u'Tanzania, United Republic of', u'TZ', u'TZA', 834, u'ISO 3166-2:TZ'),
(u'Thailand', u'TH', u'THA', 764, u'ISO 3166-2:TH'),
(u'Timor-Leste', u'TL', u'TLS', 626, u'ISO 3166-2:TL'),
(u'Togo', u'TG', u'TGO', 768, u'ISO 3166-2:TG'),
(u'Tokelau', u'TK', u'TKL', 772, u'ISO 3166-2:TK'),
(u'Tonga', u'TO', u'TON', 776, u'ISO 3166-2:TO'),
(u'Trinidad and Tobago', u'TT', u'TTO', 780, u'ISO 3166-2:TT'),
(u'Tunisia', u'TN', u'TUN', 788, u'ISO 3166-2:TN'),
(u'Turkey', u'TR', u'TUR', 792, u'ISO 3166-2:TR'),
(u'Turkmenistan', u'TM', u'TKM', 795, u'ISO 3166-2:TM'),
(u'Turks and Caicos Islands', u'TC', u'TCA', 796, u'ISO 3166-2:TC'),
(u'Tuvalu', u'TV', u'TUV', 798, u'ISO 3166-2:TV'),
(u'Uganda', u'UG', u'UGA', 800, u'ISO 3166-2:UG'),
(u'Ukraine', u'UA', u'UKR', 804, u'ISO 3166-2:UA'),
(u'United Arab Emirates', u'AE', u'ARE', 784, u'ISO 3166-2:AE'),
(u'United Kingdom', u'GB', u'GBR', 826, u'ISO 3166-2:GB'),
(u'United States', u'US', u'USA', 840, u'ISO 3166-2:US'),
(u'United States Minor Outlying Islands',
u'UM',
u'UMI',
581,
u'ISO 3166-2:UM'),
(u'Uruguay', u'UY', u'URY', 858, u'ISO 3166-2:UY'),
(u'Uzbekistan', u'UZ', u'UZB', 860, u'ISO 3166-2:UZ'),
(u'Vanuatu', u'VU', u'VUT', 548, u'ISO 3166-2:VU'),
(u'Venezuela', u'VE', u'VEN', 862, u'ISO 3166-2:VE'),
(u'Viet Nam', u'VN', u'VNM', 704, u'ISO 3166-2:VN'),
(u'Virgin Islands, British', u'VG', u'VGB', 92, u'ISO 3166-2:VG'),
(u'Virgin Islands, U.S.', u'VI', u'VIR', 850, u'ISO 3166-2:VI'),
(u'Wallis and Futuna', u'WF', u'WLF', 876, u'ISO 3166-2:WF'),
(u'Western Sahara', u'EH', u'ESH', 732, u'ISO 3166-2:EH'),
(u'Yemen', u'YE', u'YEM', 887, u'ISO 3166-2:YE'),
(u'Zambia', u'ZM', u'ZMB', 894, u'ISO 3166-2:ZM'),
(u'Zimbabwe', u'ZW', u'ZWE', 716, u'ISO 3166-2:ZW')]
COUNTRIES_SELECTION_LIST = [(u'Afghanistan', u'AF', u'AFG', 4),
(u'\xc5land Islands', u'AX', u'ALA', 248),
(u'Albania', u'AL', u'ALB', 8),
(u'Algeria', u'DZ', u'DZA', 12),
(u'American Samoa', u'AS', u'ASM', 16),
(u'Andorra', u'AD', u'AND', 20),
(u'Angola', u'AO', u'AGO', 24),
(u'Anguilla', u'AI', u'AIA', 660),
(u'Antarctica', u'AQ', u'ATA', 10),
(u'Antigua and Barbuda', u'AG', u'ATG', 28),
(u'Argentina', u'AR', u'ARG', 32),
(u'Armenia', u'AM', u'ARM', 51),
(u'Aruba', u'AW', u'ABW', 533),
(u'Australia', u'AU', u'AUS', 36),
(u'Austria', u'AT', u'AUT', 40),
(u'Azerbaijan', u'AZ', u'AZE', 31),
(u'Bahamas', u'BS', u'BHS', 44),
(u'Bahrain', u'BH', u'BHR', 48),
(u'Bangladesh', u'BD', u'BGD', 50),
(u'Barbados', u'BB', u'BRB', 52),
(u'Belarus', u'BY', u'BLR', 112),
(u'Belgium', u'BE', u'BEL', 56),
(u'Belize', u'BZ', u'BLZ', 84),
(u'Benin', u'BJ', u'BEN', 204),
(u'Bermuda', u'BM', u'BMU', 60),
(u'Bhutan', u'BT', u'BTN', 64),
(u'Bolivia', u'BO', u'BOL', 68),
(u'Bosnia and Herzegovina', u'BA', u'BIH', 70),
(u'Botswana', u'BW', u'BWA', 72),
(u'Bouvet Island', u'BV', u'BVT', 74),
(u'Brazil', u'BR', u'BRA', 76),
(u'British Indian Ocean Territory', u'IO', u'IOT', 86),
(u'Brunei Darussalam', u'BN', u'BRN', 96),
(u'Bulgaria', u'BG', u'BGR', 100),
(u'Burkina Faso', u'BF', u'BFA', 854),
(u'Burundi', u'BI', u'BDI', 108),
(u'Cambodia', u'KH', u'KHM', 116),
(u'Cameroon', u'CM', u'CMR', 120),
(u'Canada', u'CA', u'CAN', 124),
(u'Cape Verde', u'CV', u'CPV', 132),
(u'Cayman Islands', u'KY', u'CYM', 136),
(u'Central African Republic', u'CF', u'CAF', 140),
(u'Chad', u'TD', u'TCD', 148),
(u'Chile', u'CL', u'CHL', 152),
(u'China', u'CN', u'CHN', 156),
(u'Christmas Island', u'CX', u'CXR', 162),
(u'Cocos (Keeling) Islands', u'CC', u'CCK', 166),
(u'Colombia', u'CO', u'COL', 170),
(u'Comoros', u'KM', u'COM', 174),
(u'Congo', u'CG', u'COG', 178),
(u'Congo, Democratic Republic of the', u'CD', u'COD', 180),
(u'Cook Islands', u'CK', u'COK', 184),
(u'Costa Rica', u'CR', u'CRI', 188),
(u"Cote d'Ivoire C\xf4te d'Ivoire", u'CI', u'CIV', 384),
(u'Croatia', u'HR', u'HRV', 191),
(u'Cuba', u'CU', u'CUB', 192),
(u'Cyprus', u'CY', u'CYP', 196),
(u'Czech Republic', u'CZ', u'CZE', 203),
(u'Denmark', u'DK', u'DNK', 208),
(u'Djibouti', u'DJ', u'DJI', 262),
(u'Dominica', u'DM', u'DMA', 212),
(u'Dominican Republic', u'DO', u'DOM', 214),
(u'Ecuador', u'EC', u'ECU', 218),
(u'Egypt', u'EG', u'EGY', 818),
(u'El Salvador', u'SV', u'SLV', 222),
(u'Equatorial Guinea', u'GQ', u'GNQ', 226),
(u'Eritrea', u'ER', u'ERI', 232),
(u'Estonia', u'EE', u'EST', 233),
(u'Ethiopia', u'ET', u'ETH', 231),
(u'Falkland Islands (Malvinas)', u'FK', u'FLK', 238),
(u'Faroe Islands', u'FO', u'FRO', 234),
(u'Fiji', u'FJ', u'FJI', 242),
(u'Finland', u'FI', u'FIN', 246),
(u'France', u'FR', u'FRA', 250),
(u'French Guiana', u'GF', u'GUF', 254),
(u'French Polynesia', u'PF', u'PYF', 258),
(u'French Southern Territories', u'TF', u'ATF', 260),
(u'Gabon', u'GA', u'GAB', 266),
(u'Gambia', u'GM', u'GMB', 270),
(u'Georgia', u'GE', u'GEO', 268),
(u'Germany', u'DE', u'DEU', 276),
(u'Ghana', u'GH', u'GHA', 288),
(u'Gibraltar', u'GI', u'GIB', 292),
(u'Greece', u'GR', u'GRC', 300),
(u'Greenland', u'GL', u'GRL', 304),
(u'Grenada', u'GD', u'GRD', 308),
(u'Guadeloupe', u'GP', u'GLP', 312),
(u'Guam', u'GU', u'GUM', 316),
(u'Guatemala', u'GT', u'GTM', 320),
(u'Guernsey', u'GG', u'GGY', 831),
(u'Guinea', u'GN', u'GIN', 324),
(u'Guinea-Bissau', u'GW', u'GNB', 624),
(u'Guyana', u'GY', u'GUY', 328),
(u'Haiti', u'HT', u'HTI', 332),
(u'Heard Island and McDonald Islands', u'HM', u'HMD', 334),
(u'Holy See (Vatican City State)', u'VA', u'VAT', 336),
(u'Honduras', u'HN', u'HND', 340),
(u'Hong Kong', u'HK', u'HKG', 344),
(u'Hungary', u'HU', u'HUN', 348),
(u'Iceland', u'IS', u'ISL', 352),
(u'India', u'IN', u'IND', 356),
(u'Indonesia', u'ID', u'IDN', 360),
(u'Iran, Islamic Republic of', u'IR', u'IRN', 364),
(u'Iraq', u'IQ', u'IRQ', 368),
(u'Ireland', u'IE', u'IRL', 372),
(u'Isle of Man', u'IM', u'IMN', 833),
(u'Israel', u'IL', u'ISR', 376),
(u'Italy', u'IT', u'ITA', 380),
(u'Jamaica', u'JM', u'JAM', 388),
(u'Japan', u'JP', u'JPN', 392),
(u'Jersey', u'JE', u'JEY', 832),
(u'Jordan', u'JO', u'JOR', 400),
(u'Kazakhstan', u'KZ', u'KAZ', 398),
(u'Kenya', u'KE', u'KEN', 404),
(u'Kiribati', u'KI', u'KIR', 296),
(u"Korea, Democratic People's Republic of", u'KP', u'PRK', 408),
(u'Korea, Republic of', u'KR', u'KOR', 410),
(u'Kuwait', u'KW', u'KWT', 414),
(u'Kyrgyzstan', u'KG', u'KGZ', 417),
(u"Lao People's Democratic Republic", u'LA', u'LAO', 418),
(u'Latvia', u'LV', u'LVA', 428),
(u'Lebanon', u'LB', u'LBN', 422),
(u'Lesotho', u'LS', u'LSO', 426),
(u'Liberia', u'LR', u'LBR', 430),
(u'Libyan Arab Jamahiriya', u'LY', u'LBY', 434),
(u'Liechtenstein', u'LI', u'LIE', 438),
(u'Lithuania', u'LT', u'LTU', 440),
(u'Luxembourg', u'LU', u'LUX', 442),
(u'Macao', u'MO', u'MAC', 446),
(u'Macedonia, the former Yugoslav Republic of', u'MK', u'MKD', 807),
(u'Madagascar', u'MG', u'MDG', 450),
(u'Malawi', u'MW', u'MWI', 454),
(u'Malaysia', u'MY', u'MYS', 458),
(u'Maldives', u'MV', u'MDV', 462),
(u'Mali', u'ML', u'MLI', 466),
(u'Malta', u'MT', u'MLT', 470),
(u'Marshall Islands', u'MH', u'MHL', 584),
(u'Martinique', u'MQ', u'MTQ', 474),
(u'Mauritania', u'MR', u'MRT', 478),
(u'Mauritius', u'MU', u'MUS', 480),
(u'Mayotte', u'YT', u'MYT', 175),
(u'Mexico', u'MX', u'MEX', 484),
(u'Micronesia, Federated States of', u'FM', u'FSM', 583),
(u'Moldova, Republic of', u'MD', u'MDA', 498),
(u'Monaco', u'MC', u'MCO', 492),
(u'Mongolia', u'MN', u'MNG', 496),
(u'Montenegro', u'ME', u'MNE', 499),
(u'Montserrat', u'MS', u'MSR', 500),
(u'Morocco', u'MA', u'MAR', 504),
(u'Mozambique', u'MZ', u'MOZ', 508),
(u'Myanmar', u'MM', u'MMR', 104),
(u'Namibia', u'NA', u'NAM', 516),
(u'Nauru', u'NR', u'NRU', 520),
(u'Nepal', u'NP', u'NPL', 524),
(u'Netherlands', u'NL', u'NLD', 528),
(u'Netherlands Antilles', u'AN', u'ANT', 530),
(u'New Caledonia', u'NC', u'NCL', 540),
(u'New Zealand', u'NZ', u'NZL', 554),
(u'Nicaragua', u'NI', u'NIC', 558),
(u'Niger', u'NE', u'NER', 562),
(u'Nigeria', u'NG', u'NGA', 566),
(u'Niue', u'NU', u'NIU', 570),
(u'Norfolk Island', u'NF', u'NFK', 574),
(u'Northern Mariana Islands', u'MP', u'MNP', 580),
(u'Norway', u'NO', u'NOR', 578),
(u'Oman', u'OM', u'OMN', 512),
(u'Pakistan', u'PK', u'PAK', 586),
(u'Palau', u'PW', u'PLW', 585),
(u'Palestinian Territory, Occupied', u'PS', u'PSE', 275),
(u'Panama', u'PA', u'PAN', 591),
(u'Papua New Guinea', u'PG', u'PNG', 598),
(u'Paraguay', u'PY', u'PRY', 600),
(u'Peru', u'PE', u'PER', 604),
(u'Philippines', u'PH', u'PHL', 608),
(u'Pitcairn', u'PN', u'PCN', 612),
(u'Poland', u'PL', u'POL', 616),
(u'Portugal', u'PT', u'PRT', 620),
(u'Puerto Rico', u'PR', u'PRI', 630),
(u'Qatar', u'QA', u'QAT', 634),
(u'Reunion R\xe9union', u'RE', u'REU', 638),
(u'Romania', u'RO', u'ROU', 642),
(u'Russian Federation', u'RU', u'RUS', 643),
(u'Rwanda', u'RW', u'RWA', 646),
(u'Saint Barth\xe9lemy', u'BL', u'BLM', 652),
(u'Saint Helena', u'SH', u'SHN', 654),
(u'Saint Kitts and Nevis', u'KN', u'KNA', 659),
(u'Saint Lucia', u'LC', u'LCA', 662),
(u'Saint Martin (French part)', u'MF', u'MAF', 663),
(u'Saint Pierre and Miquelon', u'PM', u'SPM', 666),
(u'Saint Vincent and the Grenadines', u'VC', u'VCT', 670),
(u'Samoa', u'WS', u'WSM', 882),
(u'San Marino', u'SM', u'SMR', 674),
(u'Sao Tome and Principe', u'ST', u'STP', 678),
(u'Saudi Arabia', u'SA', u'SAU', 682),
(u'Senegal', u'SN', u'SEN', 686),
(u'Serbia', u'RS', u'SRB', 688),
(u'Seychelles', u'SC', u'SYC', 690),
(u'Sierra Leone', u'SL', u'SLE', 694),
(u'Singapore', u'SG', u'SGP', 702),
(u'Slovakia', u'SK', u'SVK', 703),
(u'Slovenia', u'SI', u'SVN', 705),
(u'Solomon Islands', u'SB', u'SLB', 90),
(u'Somalia', u'SO', u'SOM', 706),
(u'South Africa', u'ZA', u'ZAF', 710),
(u'South Georgia and the South Sandwich Islands', u'GS', u'SGS', 239),
(u'Spain', u'ES', u'ESP', 724),
(u'Sri Lanka', u'LK', u'LKA', 144),
(u'Sudan', u'SD', u'SDN', 736),
(u'Suriname', u'SR', u'SUR', 740),
(u'Svalbard and Jan Mayen', u'SJ', u'SJM', 744),
(u'Swaziland', u'SZ', u'SWZ', 748),
(u'Sweden', u'SE', u'SWE', 752),
(u'Switzerland', u'CH', u'CHE', 756),
(u'Syrian Arab Republic', u'SY', u'SYR', 760),
(u'Taiwan, Province of China', u'TW', u'TWN', 158),
(u'Tajikistan', u'TJ', u'TJK', 762),
(u'Tanzania, United Republic of', u'TZ', u'TZA', 834),
(u'Thailand', u'TH', u'THA', 764),
(u'Timor-Leste', u'TL', u'TLS', 626),
(u'Togo', u'TG', u'TGO', 768),
(u'Tokelau', u'TK', u'TKL', 772),
(u'Tonga', u'TO', u'TON', 776),
(u'Trinidad and Tobago', u'TT', u'TTO', 780),
(u'Tunisia', u'TN', u'TUN', 788),
(u'Turkey', u'TR', u'TUR', 792),
(u'Turkmenistan', u'TM', u'TKM', 795),
(u'Turks and Caicos Islands', u'TC', u'TCA', 796),
(u'Tuvalu', u'TV', u'TUV', 798),
(u'Uganda', u'UG', u'UGA', 800),
(u'Ukraine', u'UA', u'UKR', 804),
(u'United Arab Emirates', u'AE', u'ARE', 784),
(u'United Kingdom', u'GB', u'GBR', 826),
(u'United States', u'US', u'USA', 840),
(u'United States Minor Outlying Islands', u'UM', u'UMI', 581),
(u'Uruguay', u'UY', u'URY', 858),
(u'Uzbekistan', u'UZ', u'UZB', 860),
(u'Vanuatu', u'VU', u'VUT', 548),
(u'Venezuela', u'VE', u'VEN', 862),
(u'Viet Nam', u'VN', u'VNM', 704),
(u'Virgin Islands, British', u'VG', u'VGB', 92),
(u'Virgin Islands, U.S.', u'VI', u'VIR', 850),
(u'Wallis and Futuna', u'WF', u'WLF', 876),
(u'Western Sahara', u'EH', u'ESH', 732),
(u'Yemen', u'YE', u'YEM', 887),
(u'Zambia', u'ZM', u'ZMB', 894),
(u'Zimbabwe', u'ZW', u'ZWE', 716)]
COUNTRY_NAMES = [u'Afghanistan',
u'\xc5land Islands',
u'Albania',
u'Algeria',
u'American Samoa',
u'Andorra',
u'Angola',
u'Anguilla',
u'Antarctica',
u'Antigua and Barbuda',
u'Argentina',
u'Armenia',
u'Aruba',
u'Australia',
u'Austria',
u'Azerbaijan',
u'Bahamas',
u'Bahrain',
u'Bangladesh',
u'Barbados',
u'Belarus',
u'Belgium',
u'Belize',
u'Benin',
u'Bermuda',
u'Bhutan',
u'Bolivia',
u'Bosnia and Herzegovina',
u'Botswana',
u'Bouvet Island',
u'Brazil',
u'British Indian Ocean Territory',
u'Brunei Darussalam',
u'Bulgaria',
u'Burkina Faso',
u'Burundi',
u'Cambodia',
u'Cameroon',
u'Canada',
u'Cape Verde',
u'Cayman Islands',
u'Central African Republic',
u'Chad',
u'Chile',
u'China',
u'Christmas Island',
u'Cocos (Keeling) Islands',
u'Colombia',
u'Comoros',
u'Congo',
u'Congo, Democratic Republic of the',
u'Cook Islands',
u'Costa Rica',
u"Cote d'Ivoire C\xf4te d'Ivoire",
u'Croatia',
u'Cuba',
u'Cyprus',
u'Czech Republic',
u'Denmark',
u'Djibouti',
u'Dominica',
u'Dominican Republic',
u'Ecuador',
u'Egypt',
u'El Salvador',
u'Equatorial Guinea',
u'Eritrea',
u'Estonia',
u'Ethiopia',
u'Falkland Islands (Malvinas)',
u'Faroe Islands',
u'Fiji',
u'Finland',
u'France',
u'French Guiana',
u'French Polynesia',
u'French Southern Territories',
u'Gabon',
u'Gambia',
u'Georgia',
u'Germany',
u'Ghana',
u'Gibraltar',
u'Greece',
u'Greenland',
u'Grenada',
u'Guadeloupe',
u'Guam',
u'Guatemala',
u'Guernsey',
u'Guinea',
u'Guinea-Bissau',
u'Guyana',
u'Haiti',
u'Heard Island and McDonald Islands',
u'Holy See (Vatican City State)',
u'Honduras',
u'Hong Kong',
u'Hungary',
u'Iceland',
u'India',
u'Indonesia',
u'Iran, Islamic Republic of',
u'Iraq',
u'Ireland',
u'Isle of Man',
u'Israel',
u'Italy',
u'Jamaica',
u'Japan',
u'Jersey',
u'Jordan',
u'Kazakhstan',
u'Kenya',
u'Kiribati',
u"Korea, Democratic People's Republic of",
u'Korea, Republic of',
u'Kuwait',
u'Kyrgyzstan',
u"Lao People's Democratic Republic",
u'Latvia',
u'Lebanon',
u'Lesotho',
u'Liberia',
u'Libyan Arab Jamahiriya',
u'Liechtenstein',
u'Lithuania',
u'Luxembourg',
u'Macao',
u'Macedonia, the former Yugoslav Republic of',
u'Madagascar',
u'Malawi',
u'Malaysia',
u'Maldives',
u'Mali',
u'Malta',
u'Marshall Islands',
u'Martinique',
u'Mauritania',
u'Mauritius',
u'Mayotte',
u'Mexico',
u'Micronesia, Federated States of',
u'Moldova, Republic of',
u'Monaco',
u'Mongolia',
u'Montenegro',
u'Montserrat',
u'Morocco',
u'Mozambique',
u'Myanmar',
u'Namibia',
u'Nauru',
u'Nepal',
u'Netherlands',
u'Netherlands Antilles',
u'New Caledonia',
u'New Zealand',
u'Nicaragua',
u'Niger',
u'Nigeria',
u'Niue',
u'Norfolk Island',
u'Northern Mariana Islands',
u'Norway',
u'Oman',
u'Pakistan',
u'Palau',
u'Palestinian Territory, Occupied',
u'Panama',
u'Papua New Guinea',
u'Paraguay',
u'Peru',
u'Philippines',
u'Pitcairn',
u'Poland',
u'Portugal',
u'Puerto Rico',
u'Qatar',
u'Reunion R\xe9union',
u'Romania',
u'Russian Federation',
u'Rwanda',
u'Saint Barth\xe9lemy',
u'Saint Helena',
u'Saint Kitts and Nevis',
u'Saint Lucia',
u'Saint Martin (French part)',
u'Saint Pierre and Miquelon',
u'Saint Vincent and the Grenadines',
u'Samoa',
u'San Marino',
u'Sao Tome and Principe',
u'Saudi Arabia',
u'Senegal',
u'Serbia',
u'Seychelles',
u'Sierra Leone',
u'Singapore',
u'Slovakia',
u'Slovenia',
u'Solomon Islands',
u'Somalia',
u'South Africa',
u'South Georgia and the South Sandwich Islands',
u'Spain',
u'Sri Lanka',
u'Sudan',
u'Suriname',
u'Svalbard and Jan Mayen',
u'Swaziland',
u'Sweden',
u'Switzerland',
u'Syrian Arab Republic',
u'Taiwan, Province of China',
u'Tajikistan',
u'Tanzania, United Republic of',
u'Thailand',
u'Timor-Leste',
u'Togo',
u'Tokelau',
u'Tonga',
u'Trinidad and Tobago',
u'Tunisia',
u'Turkey',
u'Turkmenistan',
u'Turks and Caicos Islands',
u'Tuvalu',
u'Uganda',
u'Ukraine',
u'United Arab Emirates',
u'United Kingdom',
u'United States',
u'United States Minor Outlying Islands',
u'Uruguay',
u'Uzbekistan',
u'Vanuatu',
u'Venezuela',
u'Viet Nam',
u'Virgin Islands, British',
u'Virgin Islands, U.S.',
u'Wallis and Futuna',
u'Western Sahara',
u'Yemen',
u'Zambia',
u'Zimbabwe']
ISO_ALPHA_2_CODES = [u'AF',
u'AX',
u'AL',
u'DZ',
u'AS',
u'AD',
u'AO',
u'AI',
u'AQ',
u'AG',
u'AR',
u'AM',
u'AW',
u'AU',
u'AT',
u'AZ',
u'BS',
u'BH',
u'BD',
u'BB',
u'BY',
u'BE',
u'BZ',
u'BJ',
u'BM',
u'BT',
u'BO',
u'BA',
u'BW',
u'BV',
u'BR',
u'IO',
u'BN',
u'BG',
u'BF',
u'BI',
u'KH',
u'CM',
u'CA',
u'CV',
u'KY',
u'CF',
u'TD',
u'CL',
u'CN',
u'CX',
u'CC',
u'CO',
u'KM',
u'CG',
u'CD',
u'CK',
u'CR',
u'CI',
u'HR',
u'CU',
u'CY',
u'CZ',
u'DK',
u'DJ',
u'DM',
u'DO',
u'EC',
u'EG',
u'SV',
u'GQ',
u'ER',
u'EE',
u'ET',
u'FK',
u'FO',
u'FJ',
u'FI',
u'FR',
u'GF',
u'PF',
u'TF',
u'GA',
u'GM',
u'GE',
u'DE',
u'GH',
u'GI',
u'GR',
u'GL',
u'GD',
u'GP',
u'GU',
u'GT',
u'GG',
u'GN',
u'GW',
u'GY',
u'HT',
u'HM',
u'VA',
u'HN',
u'HK',
u'HU',
u'IS',
u'IN',
u'ID',
u'IR',
u'IQ',
u'IE',
u'IM',
u'IL',
u'IT',
u'JM',
u'JP',
u'JE',
u'JO',
u'KZ',
u'KE',
u'KI',
u'KP',
u'KR',
u'KW',
u'KG',
u'LA',
u'LV',
u'LB',
u'LS',
u'LR',
u'LY',
u'LI',
u'LT',
u'LU',
u'MO',
u'MK',
u'MG',
u'MW',
u'MY',
u'MV',
u'ML',
u'MT',
u'MH',
u'MQ',
u'MR',
u'MU',
u'YT',
u'MX',
u'FM',
u'MD',
u'MC',
u'MN',
u'ME',
u'MS',
u'MA',
u'MZ',
u'MM',
u'NA',
u'NR',
u'NP',
u'NL',
u'AN',
u'NC',
u'NZ',
u'NI',
u'NE',
u'NG',
u'NU',
u'NF',
u'MP',
u'NO',
u'OM',
u'PK',
u'PW',
u'PS',
u'PA',
u'PG',
u'PY',
u'PE',
u'PH',
u'PN',
u'PL',
u'PT',
u'PR',
u'QA',
u'RE',
u'RO',
u'RU',
u'RW',
u'BL',
u'SH',
u'KN',
u'LC',
u'MF',
u'PM',
u'VC',
u'WS',
u'SM',
u'ST',
u'SA',
u'SN',
u'RS',
u'SC',
u'SL',
u'SG',
u'SK',
u'SI',
u'SB',
u'SO',
u'ZA',
u'GS',
u'ES',
u'LK',
u'SD',
u'SR',
u'SJ',
u'SZ',
u'SE',
u'CH',
u'SY',
u'TW',
u'TJ',
u'TZ',
u'TH',
u'TL',
u'TG',
u'TK',
u'TO',
u'TT',
u'TN',
u'TR',
u'TM',
u'TC',
u'TV',
u'UG',
u'UA',
u'AE',
u'GB',
u'US',
u'UM',
u'UY',
u'UZ',
u'VU',
u'VE',
u'VN',
u'VG',
u'VI',
u'WF',
u'EH',
u'YE',
u'ZM',
u'ZW']
ISO_ALPHA_3_CODES = [u'AFG',
u'ALA',
u'ALB',
u'DZA',
u'ASM',
u'AND',
u'AGO',
u'AIA',
u'ATA',
u'ATG',
u'ARG',
u'ARM',
u'ABW',
u'AUS',
u'AUT',
u'AZE',
u'BHS',
u'BHR',
u'BGD',
u'BRB',
u'BLR',
u'BEL',
u'BLZ',
u'BEN',
u'BMU',
u'BTN',
u'BOL',
u'BIH',
u'BWA',
u'BVT',
u'BRA',
u'IOT',
u'BRN',
u'BGR',
u'BFA',
u'BDI',
u'KHM',
u'CMR',
u'CAN',
u'CPV',
u'CYM',
u'CAF',
u'TCD',
u'CHL',
u'CHN',
u'CXR',
u'CCK',
u'COL',
u'COM',
u'COG',
u'COD',
u'COK',
u'CRI',
u'CIV',
u'HRV',
u'CUB',
u'CYP',
u'CZE',
u'DNK',
u'DJI',
u'DMA',
u'DOM',
u'ECU',
u'EGY',
u'SLV',
u'GNQ',
u'ERI',
u'EST',
u'ETH',
u'FLK',
u'FRO',
u'FJI',
u'FIN',
u'FRA',
u'GUF',
u'PYF',
u'ATF',
u'GAB',
u'GMB',
u'GEO',
u'DEU',
u'GHA',
u'GIB',
u'GRC',
u'GRL',
u'GRD',
u'GLP',
u'GUM',
u'GTM',
u'GGY',
u'GIN',
u'GNB',
u'GUY',
u'HTI',
u'HMD',
u'VAT',
u'HND',
u'HKG',
u'HUN',
u'ISL',
u'IND',
u'IDN',
u'IRN',
u'IRQ',
u'IRL',
u'IMN',
u'ISR',
u'ITA',
u'JAM',
u'JPN',
u'JEY',
u'JOR',
u'KAZ',
u'KEN',
u'KIR',
u'PRK',
u'KOR',
u'KWT',
u'KGZ',
u'LAO',
u'LVA',
u'LBN',
u'LSO',
u'LBR',
u'LBY',
u'LIE',
u'LTU',
u'LUX',
u'MAC',
u'MKD',
u'MDG',
u'MWI',
u'MYS',
u'MDV',
u'MLI',
u'MLT',
u'MHL',
u'MTQ',
u'MRT',
u'MUS',
u'MYT',
u'MEX',
u'FSM',
u'MDA',
u'MCO',
u'MNG',
u'MNE',
u'MSR',
u'MAR',
u'MOZ',
u'MMR',
u'NAM',
u'NRU',
u'NPL',
u'NLD',
u'ANT',
u'NCL',
u'NZL',
u'NIC',
u'NER',
u'NGA',
u'NIU',
u'NFK',
u'MNP',
u'NOR',
u'OMN',
u'PAK',
u'PLW',
u'PSE',
u'PAN',
u'PNG',
u'PRY',
u'PER',
u'PHL',
u'PCN',
u'POL',
u'PRT',
u'PRI',
u'QAT',
u'REU',
u'ROU',
u'RUS',
u'RWA',
u'BLM',
u'SHN',
u'KNA',
u'LCA',
u'MAF',
u'SPM',
u'VCT',
u'WSM',
u'SMR',
u'STP',
u'SAU',
u'SEN',
u'SRB',
u'SYC',
u'SLE',
u'SGP',
u'SVK',
u'SVN',
u'SLB',
u'SOM',
u'ZAF',
u'SGS',
u'ESP',
u'LKA',
u'SDN',
u'SUR',
u'SJM',
u'SWZ',
u'SWE',
u'CHE',
u'SYR',
u'TWN',
u'TJK',
u'TZA',
u'THA',
u'TLS',
u'TGO',
u'TKL',
u'TON',
u'TTO',
u'TUN',
u'TUR',
u'TKM',
u'TCA',
u'TUV',
u'UGA',
u'UKR',
u'ARE',
u'GBR',
u'USA',
u'UMI',
u'URY',
u'UZB',
u'VUT',
u'VEN',
u'VNM',
u'VGB',
u'VIR',
u'WLF',
u'ESH',
u'YEM',
u'ZMB',
u'ZWE']
ISO_3166_2_CODES = [u'ISO 3166-2:AF',
u'ISO 3166-2:AX',
u'ISO 3166-2:AL',
u'ISO 3166-2:DZ',
u'ISO 3166-2:AS',
u'ISO 3166-2:AD',
u'ISO 3166-2:AO',
u'ISO 3166-2:AI',
u'ISO 3166-2:AQ',
u'ISO 3166-2:AG',
u'ISO 3166-2:AR',
u'ISO 3166-2:AM',
u'ISO 3166-2:AW',
u'ISO 3166-2:AU',
u'ISO 3166-2:AT',
u'ISO 3166-2:AZ',
u'ISO 3166-2:BS',
u'ISO 3166-2:BH',
u'ISO 3166-2:BD',
u'ISO 3166-2:BB',
u'ISO 3166-2:BY',
u'ISO 3166-2:BE',
u'ISO 3166-2:BZ',
u'ISO 3166-2:BJ',
u'ISO 3166-2:BM',
u'ISO 3166-2:BT',
u'ISO 3166-2:BO',
u'ISO 3166-2:BA',
u'ISO 3166-2:BW',
u'ISO 3166-2:BV',
u'ISO 3166-2:BR',
u'ISO 3166-2:IO',
u'ISO 3166-2:BN',
u'ISO 3166-2:BG',
u'ISO 3166-2:BF',
u'ISO 3166-2:BI',
u'ISO 3166-2:KH',
u'ISO 3166-2:CM',
u'ISO 3166-2:CA',
u'ISO 3166-2:CV',
u'ISO 3166-2:KY',
u'ISO 3166-2:CF',
u'ISO 3166-2:TD',
u'ISO 3166-2:CL',
u'ISO 3166-2:CN',
u'ISO 3166-2:CX',
u'ISO 3166-2:CC',
u'ISO 3166-2:CO',
u'ISO 3166-2:KM',
u'ISO 3166-2:CG',
u'ISO 3166-2:CD',
u'ISO 3166-2:CK',
u'ISO 3166-2:CR',
u'ISO 3166-2:CI',
u'ISO 3166-2:HR',
u'ISO 3166-2:CU',
u'ISO 3166-2:CY',
u'ISO 3166-2:CZ',
u'ISO 3166-2:DK',
u'ISO 3166-2:DJ',
u'ISO 3166-2:DM',
u'ISO 3166-2:DO',
u'ISO 3166-2:EC',
u'ISO 3166-2:EG',
u'ISO 3166-2:SV',
u'ISO 3166-2:GQ',
u'ISO 3166-2:ER',
u'ISO 3166-2:EE',
u'ISO 3166-2:ET',
u'ISO 3166-2:FK',
u'ISO 3166-2:FO',
u'ISO 3166-2:FJ',
u'ISO 3166-2:FI',
u'ISO 3166-2:FR',
u'ISO 3166-2:GF',
u'ISO 3166-2:PF',
u'ISO 3166-2:TF',
u'ISO 3166-2:GA',
u'ISO 3166-2:GM',
u'ISO 3166-2:GE',
u'ISO 3166-2:DE',
u'ISO 3166-2:GH',
u'ISO 3166-2:GI',
u'ISO 3166-2:GR',
u'ISO 3166-2:GL',
u'ISO 3166-2:GD',
u'ISO 3166-2:GP',
u'ISO 3166-2:GU',
u'ISO 3166-2:GT',
u'ISO 3166-2:GG',
u'ISO 3166-2:GN',
u'ISO 3166-2:GW',
u'ISO 3166-2:GY',
u'ISO 3166-2:HT',
u'ISO 3166-2:HM',
u'ISO 3166-2:VA',
u'ISO 3166-2:HN',
u'ISO 3166-2:HK',
u'ISO 3166-2:HU',
u'ISO 3166-2:IS',
u'ISO 3166-2:IN',
u'ISO 3166-2:ID',
u'ISO 3166-2:IR',
u'ISO 3166-2:IQ',
u'ISO 3166-2:IE',
u'ISO 3166-2:IM',
u'ISO 3166-2:IL',
u'ISO 3166-2:IT',
u'ISO 3166-2:JM',
u'ISO 3166-2:JP',
u'ISO 3166-2:JE',
u'ISO 3166-2:JO',
u'ISO 3166-2:KZ',
u'ISO 3166-2:KE',
u'ISO 3166-2:KI',
u'ISO 3166-2:KP',
u'ISO 3166-2:KR',
u'ISO 3166-2:KW',
u'ISO 3166-2:KG',
u'ISO 3166-2:LA',
u'ISO 3166-2:LV',
u'ISO 3166-2:LB',
u'ISO 3166-2:LS',
u'ISO 3166-2:LR',
u'ISO 3166-2:LY',
u'ISO 3166-2:LI',
u'ISO 3166-2:LT',
u'ISO 3166-2:LU',
u'ISO 3166-2:MO',
u'ISO 3166-2:MK',
u'ISO 3166-2:MG',
u'ISO 3166-2:MW',
u'ISO 3166-2:MY',
u'ISO 3166-2:MV',
u'ISO 3166-2:ML',
u'ISO 3166-2:MT',
u'ISO 3166-2:MH',
u'ISO 3166-2:MQ',
u'ISO 3166-2:MR',
u'ISO 3166-2:MU',
u'ISO 3166-2:YT',
u'ISO 3166-2:MX',
u'ISO 3166-2:FM',
u'ISO 3166-2:MD',
u'ISO 3166-2:MC',
u'ISO 3166-2:MN',
u'ISO 3166-2:ME',
u'ISO 3166-2:MS',
u'ISO 3166-2:MA',
u'ISO 3166-2:MZ',
u'ISO 3166-2:MM',
u'ISO 3166-2:NA',
u'ISO 3166-2:NR',
u'ISO 3166-2:NP',
u'ISO 3166-2:NL',
u'ISO 3166-2:AN',
u'ISO 3166-2:NC',
u'ISO 3166-2:NZ',
u'ISO 3166-2:NI',
u'ISO 3166-2:NE',
u'ISO 3166-2:NG',
u'ISO 3166-2:NU',
u'ISO 3166-2:NF',
u'ISO 3166-2:MP',
u'ISO 3166-2:NO',
u'ISO 3166-2:OM',
u'ISO 3166-2:PK',
u'ISO 3166-2:PW',
u'ISO 3166-2:PS',
u'ISO 3166-2:PA',
u'ISO 3166-2:PG',
u'ISO 3166-2:PY',
u'ISO 3166-2:PE',
u'ISO 3166-2:PH',
u'ISO 3166-2:PN',
u'ISO 3166-2:PL',
u'ISO 3166-2:PT',
u'ISO 3166-2:PR',
u'ISO 3166-2:QA',
u'ISO 3166-2:RE',
u'ISO 3166-2:RO',
u'ISO 3166-2:RU',
u'ISO 3166-2:RW',
u'ISO 3166-2:BL',
u'ISO 3166-2:SH',
u'ISO 3166-2:KN',
u'ISO 3166-2:LC',
u'ISO 3166-2:MF',
u'ISO 3166-2:PM',
u'ISO 3166-2:VC',
u'ISO 3166-2:WS',
u'ISO 3166-2:SM',
u'ISO 3166-2:ST',
u'ISO 3166-2:SA',
u'ISO 3166-2:SN',
u'ISO 3166-2:RS',
u'ISO 3166-2:SC',
u'ISO 3166-2:SL',
u'ISO 3166-2:SG',
u'ISO 3166-2:SK',
u'ISO 3166-2:SI',
u'ISO 3166-2:SB',
u'ISO 3166-2:SO',
u'ISO 3166-2:ZA',
u'ISO 3166-2:GS',
u'ISO 3166-2:ES',
u'ISO 3166-2:LK',
u'ISO 3166-2:SD',
u'ISO 3166-2:SR',
u'ISO 3166-2:SJ',
u'ISO 3166-2:SZ',
u'ISO 3166-2:SE',
u'ISO 3166-2:CH',
u'ISO 3166-2:SY',
u'ISO 3166-2:TW',
u'ISO 3166-2:TJ',
u'ISO 3166-2:TZ',
u'ISO 3166-2:TH',
u'ISO 3166-2:TL',
u'ISO 3166-2:TG',
u'ISO 3166-2:TK',
u'ISO 3166-2:TO',
u'ISO 3166-2:TT',
u'ISO 3166-2:TN',
u'ISO 3166-2:TR',
u'ISO 3166-2:TM',
u'ISO 3166-2:TC',
u'ISO 3166-2:TV',
u'ISO 3166-2:UG',
u'ISO 3166-2:UA',
u'ISO 3166-2:AE',
u'ISO 3166-2:GB',
u'ISO 3166-2:US',
u'ISO 3166-2:UM',
u'ISO 3166-2:UY',
u'ISO 3166-2:UZ',
u'ISO 3166-2:VU',
u'ISO 3166-2:VE',
u'ISO 3166-2:VN',
u'ISO 3166-2:VG',
u'ISO 3166-2:VI',
u'ISO 3166-2:WF',
u'ISO 3166-2:EH',
u'ISO 3166-2:YE',
u'ISO 3166-2:ZM',
u'ISO 3166-2:ZW']
COUNTRY_NAME_ISO_ALPHA_2_TABLE = {u'AD': u'Andorra',
u'AE': u'United Arab Emirates',
u'AF': u'Afghanistan',
u'AG': u'Antigua and Barbuda',
u'AI': u'Anguilla',
u'AL': u'Albania',
u'AM': u'Armenia',
u'AN': u'Netherlands Antilles',
u'AO': u'Angola',
u'AQ': u'Antarctica',
u'AR': u'Argentina',
u'AS': u'American Samoa',
u'AT': u'Austria',
u'AU': u'Australia',
u'AW': u'Aruba',
u'AX': u'\xc5land Islands',
u'AZ': u'Azerbaijan',
u'Afghanistan': u'AF',
u'Albania': u'AL',
u'Algeria': u'DZ',
u'American Samoa': u'AS',
u'Andorra': u'AD',
u'Angola': u'AO',
u'Anguilla': u'AI',
u'Antarctica': u'AQ',
u'Antigua and Barbuda': u'AG',
u'Argentina': u'AR',
u'Armenia': u'AM',
u'Aruba': u'AW',
u'Australia': u'AU',
u'Austria': u'AT',
u'Azerbaijan': u'AZ',
u'BA': u'Bosnia and Herzegovina',
u'BB': u'Barbados',
u'BD': u'Bangladesh',
u'BE': u'Belgium',
u'BF': u'Burkina Faso',
u'BG': u'Bulgaria',
u'BH': u'Bahrain',
u'BI': u'Burundi',
u'BJ': u'Benin',
u'BL': u'Saint Barth\xe9lemy',
u'BM': u'Bermuda',
u'BN': u'Brunei Darussalam',
u'BO': u'Bolivia',
u'BR': u'Brazil',
u'BS': u'Bahamas',
u'BT': u'Bhutan',
u'BV': u'Bouvet Island',
u'BW': u'Botswana',
u'BY': u'Belarus',
u'BZ': u'Belize',
u'Bahamas': u'BS',
u'Bahrain': u'BH',
u'Bangladesh': u'BD',
u'Barbados': u'BB',
u'Belarus': u'BY',
u'Belgium': u'BE',
u'Belize': u'BZ',
u'Benin': u'BJ',
u'Bermuda': u'BM',
u'Bhutan': u'BT',
u'Bolivia': u'BO',
u'Bosnia and Herzegovina': u'BA',
u'Botswana': u'BW',
u'Bouvet Island': u'BV',
u'Brazil': u'BR',
u'British Indian Ocean Territory': u'IO',
u'Brunei Darussalam': u'BN',
u'Bulgaria': u'BG',
u'Burkina Faso': u'BF',
u'Burundi': u'BI',
u'CA': u'Canada',
u'CC': u'Cocos (Keeling) Islands',
u'CD': u'Congo, Democratic Republic of the',
u'CF': u'Central African Republic',
u'CG': u'Congo',
u'CH': u'Switzerland',
u'CI': u"Cote d'Ivoire C\xf4te d'Ivoire",
u'CK': u'Cook Islands',
u'CL': u'Chile',
u'CM': u'Cameroon',
u'CN': u'China',
u'CO': u'Colombia',
u'CR': u'Costa Rica',
u'CU': u'Cuba',
u'CV': u'Cape Verde',
u'CX': u'Christmas Island',
u'CY': u'Cyprus',
u'CZ': u'Czech Republic',
u'Cambodia': u'KH',
u'Cameroon': u'CM',
u'Canada': u'CA',
u'Cape Verde': u'CV',
u'Cayman Islands': u'KY',
u'Central African Republic': u'CF',
u'Chad': u'TD',
u'Chile': u'CL',
u'China': u'CN',
u'Christmas Island': u'CX',
u'Cocos (Keeling) Islands': u'CC',
u'Colombia': u'CO',
u'Comoros': u'KM',
u'Congo': u'CG',
u'Congo, Democratic Republic of the': u'CD',
u'Cook Islands': u'CK',
u'Costa Rica': u'CR',
u"Cote d'Ivoire C\xf4te d'Ivoire": u'CI',
u'Croatia': u'HR',
u'Cuba': u'CU',
u'Cyprus': u'CY',
u'Czech Republic': u'CZ',
u'DE': u'Germany',
u'DJ': u'Djibouti',
u'DK': u'Denmark',
u'DM': u'Dominica',
u'DO': u'Dominican Republic',
u'DZ': u'Algeria',
u'Denmark': u'DK',
u'Djibouti': u'DJ',
u'Dominica': u'DM',
u'Dominican Republic': u'DO',
u'EC': u'Ecuador',
u'EE': u'Estonia',
u'EG': u'Egypt',
u'EH': u'Western Sahara',
u'ER': u'Eritrea',
u'ES': u'Spain',
u'ET': u'Ethiopia',
u'Ecuador': u'EC',
u'Egypt': u'EG',
u'El Salvador': u'SV',
u'Equatorial Guinea': u'GQ',
u'Eritrea': u'ER',
u'Estonia': u'EE',
u'Ethiopia': u'ET',
u'FI': u'Finland',
u'FJ': u'Fiji',
u'FK': u'Falkland Islands (Malvinas)',
u'FM': u'Micronesia, Federated States of',
u'FO': u'Faroe Islands',
u'FR': u'France',
u'Falkland Islands (Malvinas)': u'FK',
u'Faroe Islands': u'FO',
u'Fiji': u'FJ',
u'Finland': u'FI',
u'France': u'FR',
u'French Guiana': u'GF',
u'French Polynesia': u'PF',
u'French Southern Territories': u'TF',
u'GA': u'Gabon',
u'GB': u'United Kingdom',
u'GD': u'Grenada',
u'GE': u'Georgia',
u'GF': u'French Guiana',
u'GG': u'Guernsey',
u'GH': u'Ghana',
u'GI': u'Gibraltar',
u'GL': u'Greenland',
u'GM': u'Gambia',
u'GN': u'Guinea',
u'GP': u'Guadeloupe',
u'GQ': u'Equatorial Guinea',
u'GR': u'Greece',
u'GS': u'South Georgia and the South Sandwich Islands',
u'GT': u'Guatemala',
u'GU': u'Guam',
u'GW': u'Guinea-Bissau',
u'GY': u'Guyana',
u'Gabon': u'GA',
u'Gambia': u'GM',
u'Georgia': u'GE',
u'Germany': u'DE',
u'Ghana': u'GH',
u'Gibraltar': u'GI',
u'Greece': u'GR',
u'Greenland': u'GL',
u'Grenada': u'GD',
u'Guadeloupe': u'GP',
u'Guam': u'GU',
u'Guatemala': u'GT',
u'Guernsey': u'GG',
u'Guinea': u'GN',
u'Guinea-Bissau': u'GW',
u'Guyana': u'GY',
u'HK': u'Hong Kong',
u'HM': u'Heard Island and McDonald Islands',
u'HN': u'Honduras',
u'HR': u'Croatia',
u'HT': u'Haiti',
u'HU': u'Hungary',
u'Haiti': u'HT',
u'Heard Island and McDonald Islands': u'HM',
u'Holy See (Vatican City State)': u'VA',
u'Honduras': u'HN',
u'Hong Kong': u'HK',
u'Hungary': u'HU',
u'ID': u'Indonesia',
u'IE': u'Ireland',
u'IL': u'Israel',
u'IM': u'Isle of Man',
u'IN': u'India',
u'IO': u'British Indian Ocean Territory',
u'IQ': u'Iraq',
u'IR': u'Iran, Islamic Republic of',
u'IS': u'Iceland',
u'IT': u'Italy',
u'Iceland': u'IS',
u'India': u'IN',
u'Indonesia': u'ID',
u'Iran, Islamic Republic of': u'IR',
u'Iraq': u'IQ',
u'Ireland': u'IE',
u'Isle of Man': u'IM',
u'Israel': u'IL',
u'Italy': u'IT',
u'JE': u'Jersey',
u'JM': u'Jamaica',
u'JO': u'Jordan',
u'JP': u'Japan',
u'Jamaica': u'JM',
u'Japan': u'JP',
u'Jersey': u'JE',
u'Jordan': u'JO',
u'KE': u'Kenya',
u'KG': u'Kyrgyzstan',
u'KH': u'Cambodia',
u'KI': u'Kiribati',
u'KM': u'Comoros',
u'KN': u'Saint Kitts and Nevis',
u'KP': u"Korea, Democratic People's Republic of",
u'KR': u'Korea, Republic of',
u'KW': u'Kuwait',
u'KY': u'Cayman Islands',
u'KZ': u'Kazakhstan',
u'Kazakhstan': u'KZ',
u'Kenya': u'KE',
u'Kiribati': u'KI',
u"Korea, Democratic People's Republic of": u'KP',
u'Korea, Republic of': u'KR',
u'Kuwait': u'KW',
u'Kyrgyzstan': u'KG',
u'LA': u"Lao People's Democratic Republic",
u'LB': u'Lebanon',
u'LC': u'Saint Lucia',
u'LI': u'Liechtenstein',
u'LK': u'Sri Lanka',
u'LR': u'Liberia',
u'LS': u'Lesotho',
u'LT': u'Lithuania',
u'LU': u'Luxembourg',
u'LV': u'Latvia',
u'LY': u'Libyan Arab Jamahiriya',
u"Lao People's Democratic Republic": u'LA',
u'Latvia': u'LV',
u'Lebanon': u'LB',
u'Lesotho': u'LS',
u'Liberia': u'LR',
u'Libyan Arab Jamahiriya': u'LY',
u'Liechtenstein': u'LI',
u'Lithuania': u'LT',
u'Luxembourg': u'LU',
u'MA': u'Morocco',
u'MC': u'Monaco',
u'MD': u'Moldova, Republic of',
u'ME': u'Montenegro',
u'MF': u'Saint Martin (French part)',
u'MG': u'Madagascar',
u'MH': u'Marshall Islands',
u'MK': u'Macedonia, the former Yugoslav Republic of',
u'ML': u'Mali',
u'MM': u'Myanmar',
u'MN': u'Mongolia',
u'MO': u'Macao',
u'MP': u'Northern Mariana Islands',
u'MQ': u'Martinique',
u'MR': u'Mauritania',
u'MS': u'Montserrat',
u'MT': u'Malta',
u'MU': u'Mauritius',
u'MV': u'Maldives',
u'MW': u'Malawi',
u'MX': u'Mexico',
u'MY': u'Malaysia',
u'MZ': u'Mozambique',
u'Macao': u'MO',
u'Macedonia, the former Yugoslav Republic of': u'MK',
u'Madagascar': u'MG',
u'Malawi': u'MW',
u'Malaysia': u'MY',
u'Maldives': u'MV',
u'Mali': u'ML',
u'Malta': u'MT',
u'Marshall Islands': u'MH',
u'Martinique': u'MQ',
u'Mauritania': u'MR',
u'Mauritius': u'MU',
u'Mayotte': u'YT',
u'Mexico': u'MX',
u'Micronesia, Federated States of': u'FM',
u'Moldova, Republic of': u'MD',
u'Monaco': u'MC',
u'Mongolia': u'MN',
u'Montenegro': u'ME',
u'Montserrat': u'MS',
u'Morocco': u'MA',
u'Mozambique': u'MZ',
u'Myanmar': u'MM',
u'NA': u'Namibia',
u'NC': u'New Caledonia',
u'NE': u'Niger',
u'NF': u'Norfolk Island',
u'NG': u'Nigeria',
u'NI': u'Nicaragua',
u'NL': u'Netherlands',
u'NO': u'Norway',
u'NP': u'Nepal',
u'NR': u'Nauru',
u'NU': u'Niue',
u'NZ': u'New Zealand',
u'Namibia': u'NA',
u'Nauru': u'NR',
u'Nepal': u'NP',
u'Netherlands': u'NL',
u'Netherlands Antilles': u'AN',
u'New Caledonia': u'NC',
u'New Zealand': u'NZ',
u'Nicaragua': u'NI',
u'Niger': u'NE',
u'Nigeria': u'NG',
u'Niue': u'NU',
u'Norfolk Island': u'NF',
u'Northern Mariana Islands': u'MP',
u'Norway': u'NO',
u'OM': u'Oman',
u'Oman': u'OM',
u'PA': u'Panama',
u'PE': u'Peru',
u'PF': u'French Polynesia',
u'PG': u'Papua New Guinea',
u'PH': u'Philippines',
u'PK': u'Pakistan',
u'PL': u'Poland',
u'PM': u'Saint Pierre and Miquelon',
u'PN': u'Pitcairn',
u'PR': u'Puerto Rico',
u'PS': u'Palestinian Territory, Occupied',
u'PT': u'Portugal',
u'PW': u'Palau',
u'PY': u'Paraguay',
u'Pakistan': u'PK',
u'Palau': u'PW',
u'Palestinian Territory, Occupied': u'PS',
u'Panama': u'PA',
u'Papua New Guinea': u'PG',
u'Paraguay': u'PY',
u'Peru': u'PE',
u'Philippines': u'PH',
u'Pitcairn': u'PN',
u'Poland': u'PL',
u'Portugal': u'PT',
u'Puerto Rico': u'PR',
u'QA': u'Qatar',
u'Qatar': u'QA',
u'RE': u'Reunion R\xe9union',
u'RO': u'Romania',
u'RS': u'Serbia',
u'RU': u'Russian Federation',
u'RW': u'Rwanda',
u'Reunion R\xe9union': u'RE',
u'Romania': u'RO',
u'Russian Federation': u'RU',
u'Rwanda': u'RW',
u'SA': u'Saudi Arabia',
u'SB': u'Solomon Islands',
u'SC': u'Seychelles',
u'SD': u'Sudan',
u'SE': u'Sweden',
u'SG': u'Singapore',
u'SH': u'Saint Helena',
u'SI': u'Slovenia',
u'SJ': u'Svalbard and Jan Mayen',
u'SK': u'Slovakia',
u'SL': u'Sierra Leone',
u'SM': u'San Marino',
u'SN': u'Senegal',
u'SO': u'Somalia',
u'SR': u'Suriname',
u'ST': u'Sao Tome and Principe',
u'SV': u'El Salvador',
u'SY': u'Syrian Arab Republic',
u'SZ': u'Swaziland',
u'Saint Barth\xe9lemy': u'BL',
u'Saint Helena': u'SH',
u'Saint Kitts and Nevis': u'KN',
u'Saint Lucia': u'LC',
u'Saint Martin (French part)': u'MF',
u'Saint Pierre and Miquelon': u'PM',
u'Saint Vincent and the Grenadines': u'VC',
u'Samoa': u'WS',
u'San Marino': u'SM',
u'Sao Tome and Principe': u'ST',
u'Saudi Arabia': u'SA',
u'Senegal': u'SN',
u'Serbia': u'RS',
u'Seychelles': u'SC',
u'Sierra Leone': u'SL',
u'Singapore': u'SG',
u'Slovakia': u'SK',
u'Slovenia': u'SI',
u'Solomon Islands': u'SB',
u'Somalia': u'SO',
u'South Africa': u'ZA',
u'South Georgia and the South Sandwich Islands': u'GS',
u'Spain': u'ES',
u'Sri Lanka': u'LK',
u'Sudan': u'SD',
u'Suriname': u'SR',
u'Svalbard and Jan Mayen': u'SJ',
u'Swaziland': u'SZ',
u'Sweden': u'SE',
u'Switzerland': u'CH',
u'Syrian Arab Republic': u'SY',
u'TC': u'Turks and Caicos Islands',
u'TD': u'Chad',
u'TF': u'French Southern Territories',
u'TG': u'Togo',
u'TH': u'Thailand',
u'TJ': u'Tajikistan',
u'TK': u'Tokelau',
u'TL': u'Timor-Leste',
u'TM': u'Turkmenistan',
u'TN': u'Tunisia',
u'TO': u'Tonga',
u'TR': u'Turkey',
u'TT': u'Trinidad and Tobago',
u'TV': u'Tuvalu',
u'TW': u'Taiwan, Province of China',
u'TZ': u'Tanzania, United Republic of',
u'Taiwan, Province of China': u'TW',
u'Tajikistan': u'TJ',
u'Tanzania, United Republic of': u'TZ',
u'Thailand': u'TH',
u'Timor-Leste': u'TL',
u'Togo': u'TG',
u'Tokelau': u'TK',
u'Tonga': u'TO',
u'Trinidad and Tobago': u'TT',
u'Tunisia': u'TN',
u'Turkey': u'TR',
u'Turkmenistan': u'TM',
u'Turks and Caicos Islands': u'TC',
u'Tuvalu': u'TV',
u'UA': u'Ukraine',
u'UG': u'Uganda',
u'UM': u'United States Minor Outlying Islands',
u'US': u'United States',
u'UY': u'Uruguay',
u'UZ': u'Uzbekistan',
u'Uganda': u'UG',
u'Ukraine': u'UA',
u'United Arab Emirates': u'AE',
u'United Kingdom': u'GB',
u'United States': u'US',
u'United States Minor Outlying Islands': u'UM',
u'Uruguay': u'UY',
u'Uzbekistan': u'UZ',
u'VA': u'Holy See (Vatican City State)',
u'VC': u'Saint Vincent and the Grenadines',
u'VE': u'Venezuela',
u'VG': u'Virgin Islands, British',
u'VI': u'Virgin Islands, U.S.',
u'VN': u'Viet Nam',
u'VU': u'Vanuatu',
u'Vanuatu': u'VU',
u'Venezuela': u'VE',
u'Viet Nam': u'VN',
u'Virgin Islands, British': u'VG',
u'Virgin Islands, U.S.': u'VI',
u'WF': u'Wallis and Futuna',
u'WS': u'Samoa',
u'Wallis and Futuna': u'WF',
u'Western Sahara': u'EH',
u'YE': u'Yemen',
u'YT': u'Mayotte',
u'Yemen': u'YE',
u'ZA': u'South Africa',
u'ZM': u'Zambia',
u'ZW': u'Zimbabwe',
u'Zambia': u'ZM',
u'Zimbabwe': u'ZW',
u'\xc5land Islands': u'AX'}
COUNTRY_NAME_ISO_ALPHA_3_TABLE = {u'ABW': u'Aruba',
u'AFG': u'Afghanistan',
u'AGO': u'Angola',
u'AIA': u'Anguilla',
u'ALA': u'\xc5land Islands',
u'ALB': u'Albania',
u'AND': u'Andorra',
u'ANT': u'Netherlands Antilles',
u'ARE': u'United Arab Emirates',
u'ARG': u'Argentina',
u'ARM': u'Armenia',
u'ASM': u'American Samoa',
u'ATA': u'Antarctica',
u'ATF': u'French Southern Territories',
u'ATG': u'Antigua and Barbuda',
u'AUS': u'Australia',
u'AUT': u'Austria',
u'AZE': u'Azerbaijan',
u'Afghanistan': u'AFG',
u'Albania': u'ALB',
u'Algeria': u'DZA',
u'American Samoa': u'ASM',
u'Andorra': u'AND',
u'Angola': u'AGO',
u'Anguilla': u'AIA',
u'Antarctica': u'ATA',
u'Antigua and Barbuda': u'ATG',
u'Argentina': u'ARG',
u'Armenia': u'ARM',
u'Aruba': u'ABW',
u'Australia': u'AUS',
u'Austria': u'AUT',
u'Azerbaijan': u'AZE',
u'BDI': u'Burundi',
u'BEL': u'Belgium',
u'BEN': u'Benin',
u'BFA': u'Burkina Faso',
u'BGD': u'Bangladesh',
u'BGR': u'Bulgaria',
u'BHR': u'Bahrain',
u'BHS': u'Bahamas',
u'BIH': u'Bosnia and Herzegovina',
u'BLM': u'Saint Barth\xe9lemy',
u'BLR': u'Belarus',
u'BLZ': u'Belize',
u'BMU': u'Bermuda',
u'BOL': u'Bolivia',
u'BRA': u'Brazil',
u'BRB': u'Barbados',
u'BRN': u'Brunei Darussalam',
u'BTN': u'Bhutan',
u'BVT': u'Bouvet Island',
u'BWA': u'Botswana',
u'Bahamas': u'BHS',
u'Bahrain': u'BHR',
u'Bangladesh': u'BGD',
u'Barbados': u'BRB',
u'Belarus': u'BLR',
u'Belgium': u'BEL',
u'Belize': u'BLZ',
u'Benin': u'BEN',
u'Bermuda': u'BMU',
u'Bhutan': u'BTN',
u'Bolivia': u'BOL',
u'Bosnia and Herzegovina': u'BIH',
u'Botswana': u'BWA',
u'Bouvet Island': u'BVT',
u'Brazil': u'BRA',
u'British Indian Ocean Territory': u'IOT',
u'Brunei Darussalam': u'BRN',
u'Bulgaria': u'BGR',
u'Burkina Faso': u'BFA',
u'Burundi': u'BDI',
u'CAF': u'Central African Republic',
u'CAN': u'Canada',
u'CCK': u'Cocos (Keeling) Islands',
u'CHE': u'Switzerland',
u'CHL': u'Chile',
u'CHN': u'China',
u'CIV': u"Cote d'Ivoire C\xf4te d'Ivoire",
u'CMR': u'Cameroon',
u'COD': u'Congo, Democratic Republic of the',
u'COG': u'Congo',
u'COK': u'Cook Islands',
u'COL': u'Colombia',
u'COM': u'Comoros',
u'CPV': u'Cape Verde',
u'CRI': u'Costa Rica',
u'CUB': u'Cuba',
u'CXR': u'Christmas Island',
u'CYM': u'Cayman Islands',
u'CYP': u'Cyprus',
u'CZE': u'Czech Republic',
u'Cambodia': u'KHM',
u'Cameroon': u'CMR',
u'Canada': u'CAN',
u'Cape Verde': u'CPV',
u'Cayman Islands': u'CYM',
u'Central African Republic': u'CAF',
u'Chad': u'TCD',
u'Chile': u'CHL',
u'China': u'CHN',
u'Christmas Island': u'CXR',
u'Cocos (Keeling) Islands': u'CCK',
u'Colombia': u'COL',
u'Comoros': u'COM',
u'Congo': u'COG',
u'Congo, Democratic Republic of the': u'COD',
u'Cook Islands': u'COK',
u'Costa Rica': u'CRI',
u"Cote d'Ivoire C\xf4te d'Ivoire": u'CIV',
u'Croatia': u'HRV',
u'Cuba': u'CUB',
u'Cyprus': u'CYP',
u'Czech Republic': u'CZE',
u'DEU': u'Germany',
u'DJI': u'Djibouti',
u'DMA': u'Dominica',
u'DNK': u'Denmark',
u'DOM': u'Dominican Republic',
u'DZA': u'Algeria',
u'Denmark': u'DNK',
u'Djibouti': u'DJI',
u'Dominica': u'DMA',
u'Dominican Republic': u'DOM',
u'ECU': u'Ecuador',
u'EGY': u'Egypt',
u'ERI': u'Eritrea',
u'ESH': u'Western Sahara',
u'ESP': u'Spain',
u'EST': u'Estonia',
u'ETH': u'Ethiopia',
u'Ecuador': u'ECU',
u'Egypt': u'EGY',
u'El Salvador': u'SLV',
u'Equatorial Guinea': u'GNQ',
u'Eritrea': u'ERI',
u'Estonia': u'EST',
u'Ethiopia': u'ETH',
u'FIN': u'Finland',
u'FJI': u'Fiji',
u'FLK': u'Falkland Islands (Malvinas)',
u'FRA': u'France',
u'FRO': u'Faroe Islands',
u'FSM': u'Micronesia, Federated States of',
u'Falkland Islands (Malvinas)': u'FLK',
u'Faroe Islands': u'FRO',
u'Fiji': u'FJI',
u'Finland': u'FIN',
u'France': u'FRA',
u'French Guiana': u'GUF',
u'French Polynesia': u'PYF',
u'French Southern Territories': u'ATF',
u'GAB': u'Gabon',
u'GBR': u'United Kingdom',
u'GEO': u'Georgia',
u'GGY': u'Guernsey',
u'GHA': u'Ghana',
u'GIB': u'Gibraltar',
u'GIN': u'Guinea',
u'GLP': u'Guadeloupe',
u'GMB': u'Gambia',
u'GNB': u'Guinea-Bissau',
u'GNQ': u'Equatorial Guinea',
u'GRC': u'Greece',
u'GRD': u'Grenada',
u'GRL': u'Greenland',
u'GTM': u'Guatemala',
u'GUF': u'French Guiana',
u'GUM': u'Guam',
u'GUY': u'Guyana',
u'Gabon': u'GAB',
u'Gambia': u'GMB',
u'Georgia': u'GEO',
u'Germany': u'DEU',
u'Ghana': u'GHA',
u'Gibraltar': u'GIB',
u'Greece': u'GRC',
u'Greenland': u'GRL',
u'Grenada': u'GRD',
u'Guadeloupe': u'GLP',
u'Guam': u'GUM',
u'Guatemala': u'GTM',
u'Guernsey': u'GGY',
u'Guinea': u'GIN',
u'Guinea-Bissau': u'GNB',
u'Guyana': u'GUY',
u'HKG': u'Hong Kong',
u'HMD': u'Heard Island and McDonald Islands',
u'HND': u'Honduras',
u'HRV': u'Croatia',
u'HTI': u'Haiti',
u'HUN': u'Hungary',
u'Haiti': u'HTI',
u'Heard Island and McDonald Islands': u'HMD',
u'Holy See (Vatican City State)': u'VAT',
u'Honduras': u'HND',
u'Hong Kong': u'HKG',
u'Hungary': u'HUN',
u'IDN': u'Indonesia',
u'IMN': u'Isle of Man',
u'IND': u'India',
u'IOT': u'British Indian Ocean Territory',
u'IRL': u'Ireland',
u'IRN': u'Iran, Islamic Republic of',
u'IRQ': u'Iraq',
u'ISL': u'Iceland',
u'ISR': u'Israel',
u'ITA': u'Italy',
u'Iceland': u'ISL',
u'India': u'IND',
u'Indonesia': u'IDN',
u'Iran, Islamic Republic of': u'IRN',
u'Iraq': u'IRQ',
u'Ireland': u'IRL',
u'Isle of Man': u'IMN',
u'Israel': u'ISR',
u'Italy': u'ITA',
u'JAM': u'Jamaica',
u'JEY': u'Jersey',
u'JOR': u'Jordan',
u'JPN': u'Japan',
u'Jamaica': u'JAM',
u'Japan': u'JPN',
u'Jersey': u'JEY',
u'Jordan': u'JOR',
u'KAZ': u'Kazakhstan',
u'KEN': u'Kenya',
u'KGZ': u'Kyrgyzstan',
u'KHM': u'Cambodia',
u'KIR': u'Kiribati',
u'KNA': u'Saint Kitts and Nevis',
u'KOR': u'Korea, Republic of',
u'KWT': u'Kuwait',
u'Kazakhstan': u'KAZ',
u'Kenya': u'KEN',
u'Kiribati': u'KIR',
u"Korea, Democratic People's Republic of": u'PRK',
u'Korea, Republic of': u'KOR',
u'Kuwait': u'KWT',
u'Kyrgyzstan': u'KGZ',
u'LAO': u"Lao People's Democratic Republic",
u'LBN': u'Lebanon',
u'LBR': u'Liberia',
u'LBY': u'Libyan Arab Jamahiriya',
u'LCA': u'Saint Lucia',
u'LIE': u'Liechtenstein',
u'LKA': u'Sri Lanka',
u'LSO': u'Lesotho',
u'LTU': u'Lithuania',
u'LUX': u'Luxembourg',
u'LVA': u'Latvia',
u"Lao People's Democratic Republic": u'LAO',
u'Latvia': u'LVA',
u'Lebanon': u'LBN',
u'Lesotho': u'LSO',
u'Liberia': u'LBR',
u'Libyan Arab Jamahiriya': u'LBY',
u'Liechtenstein': u'LIE',
u'Lithuania': u'LTU',
u'Luxembourg': u'LUX',
u'MAC': u'Macao',
u'MAF': u'Saint Martin (French part)',
u'MAR': u'Morocco',
u'MCO': u'Monaco',
u'MDA': u'Moldova, Republic of',
u'MDG': u'Madagascar',
u'MDV': u'Maldives',
u'MEX': u'Mexico',
u'MHL': u'Marshall Islands',
u'MKD': u'Macedonia, the former Yugoslav Republic of',
u'MLI': u'Mali',
u'MLT': u'Malta',
u'MMR': u'Myanmar',
u'MNE': u'Montenegro',
u'MNG': u'Mongolia',
u'MNP': u'Northern Mariana Islands',
u'MOZ': u'Mozambique',
u'MRT': u'Mauritania',
u'MSR': u'Montserrat',
u'MTQ': u'Martinique',
u'MUS': u'Mauritius',
u'MWI': u'Malawi',
u'MYS': u'Malaysia',
u'MYT': u'Mayotte',
u'Macao': u'MAC',
u'Macedonia, the former Yugoslav Republic of': u'MKD',
u'Madagascar': u'MDG',
u'Malawi': u'MWI',
u'Malaysia': u'MYS',
u'Maldives': u'MDV',
u'Mali': u'MLI',
u'Malta': u'MLT',
u'Marshall Islands': u'MHL',
u'Martinique': u'MTQ',
u'Mauritania': u'MRT',
u'Mauritius': u'MUS',
u'Mayotte': u'MYT',
u'Mexico': u'MEX',
u'Micronesia, Federated States of': u'FSM',
u'Moldova, Republic of': u'MDA',
u'Monaco': u'MCO',
u'Mongolia': u'MNG',
u'Montenegro': u'MNE',
u'Montserrat': u'MSR',
u'Morocco': u'MAR',
u'Mozambique': u'MOZ',
u'Myanmar': u'MMR',
u'NAM': u'Namibia',
u'NCL': u'New Caledonia',
u'NER': u'Niger',
u'NFK': u'Norfolk Island',
u'NGA': u'Nigeria',
u'NIC': u'Nicaragua',
u'NIU': u'Niue',
u'NLD': u'Netherlands',
u'NOR': u'Norway',
u'NPL': u'Nepal',
u'NRU': u'Nauru',
u'NZL': u'New Zealand',
u'Namibia': u'NAM',
u'Nauru': u'NRU',
u'Nepal': u'NPL',
u'Netherlands': u'NLD',
u'Netherlands Antilles': u'ANT',
u'New Caledonia': u'NCL',
u'New Zealand': u'NZL',
u'Nicaragua': u'NIC',
u'Niger': u'NER',
u'Nigeria': u'NGA',
u'Niue': u'NIU',
u'Norfolk Island': u'NFK',
u'Northern Mariana Islands': u'MNP',
u'Norway': u'NOR',
u'OMN': u'Oman',
u'Oman': u'OMN',
u'PAK': u'Pakistan',
u'PAN': u'Panama',
u'PCN': u'Pitcairn',
u'PER': u'Peru',
u'PHL': u'Philippines',
u'PLW': u'Palau',
u'PNG': u'Papua New Guinea',
u'POL': u'Poland',
u'PRI': u'Puerto Rico',
u'PRK': u"Korea, Democratic People's Republic of",
u'PRT': u'Portugal',
u'PRY': u'Paraguay',
u'PSE': u'Palestinian Territory, Occupied',
u'PYF': u'French Polynesia',
u'Pakistan': u'PAK',
u'Palau': u'PLW',
u'Palestinian Territory, Occupied': u'PSE',
u'Panama': u'PAN',
u'Papua New Guinea': u'PNG',
u'Paraguay': u'PRY',
u'Peru': u'PER',
u'Philippines': u'PHL',
u'Pitcairn': u'PCN',
u'Poland': u'POL',
u'Portugal': u'PRT',
u'Puerto Rico': u'PRI',
u'QAT': u'Qatar',
u'Qatar': u'QAT',
u'REU': u'Reunion R\xe9union',
u'ROU': u'Romania',
u'RUS': u'Russian Federation',
u'RWA': u'Rwanda',
u'Reunion R\xe9union': u'REU',
u'Romania': u'ROU',
u'Russian Federation': u'RUS',
u'Rwanda': u'RWA',
u'SAU': u'Saudi Arabia',
u'SDN': u'Sudan',
u'SEN': u'Senegal',
u'SGP': u'Singapore',
u'SGS': u'South Georgia and the South Sandwich Islands',
u'SHN': u'Saint Helena',
u'SJM': u'Svalbard and Jan Mayen',
u'SLB': u'Solomon Islands',
u'SLE': u'Sierra Leone',
u'SLV': u'El Salvador',
u'SMR': u'San Marino',
u'SOM': u'Somalia',
u'SPM': u'Saint Pierre and Miquelon',
u'SRB': u'Serbia',
u'STP': u'Sao Tome and Principe',
u'SUR': u'Suriname',
u'SVK': u'Slovakia',
u'SVN': u'Slovenia',
u'SWE': u'Sweden',
u'SWZ': u'Swaziland',
u'SYC': u'Seychelles',
u'SYR': u'Syrian Arab Republic',
u'Saint Barth\xe9lemy': u'BLM',
u'Saint Helena': u'SHN',
u'Saint Kitts and Nevis': u'KNA',
u'Saint Lucia': u'LCA',
u'Saint Martin (French part)': u'MAF',
u'Saint Pierre and Miquelon': u'SPM',
u'Saint Vincent and the Grenadines': u'VCT',
u'Samoa': u'WSM',
u'San Marino': u'SMR',
u'Sao Tome and Principe': u'STP',
u'Saudi Arabia': u'SAU',
u'Senegal': u'SEN',
u'Serbia': u'SRB',
u'Seychelles': u'SYC',
u'Sierra Leone': u'SLE',
u'Singapore': u'SGP',
u'Slovakia': u'SVK',
u'Slovenia': u'SVN',
u'Solomon Islands': u'SLB',
u'Somalia': u'SOM',
u'South Africa': u'ZAF',
u'South Georgia and the South Sandwich Islands': u'SGS',
u'Spain': u'ESP',
u'Sri Lanka': u'LKA',
u'Sudan': u'SDN',
u'Suriname': u'SUR',
u'Svalbard and Jan Mayen': u'SJM',
u'Swaziland': u'SWZ',
u'Sweden': u'SWE',
u'Switzerland': u'CHE',
u'Syrian Arab Republic': u'SYR',
u'TCA': u'Turks and Caicos Islands',
u'TCD': u'Chad',
u'TGO': u'Togo',
u'THA': u'Thailand',
u'TJK': u'Tajikistan',
u'TKL': u'Tokelau',
u'TKM': u'Turkmenistan',
u'TLS': u'Timor-Leste',
u'TON': u'Tonga',
u'TTO': u'Trinidad and Tobago',
u'TUN': u'Tunisia',
u'TUR': u'Turkey',
u'TUV': u'Tuvalu',
u'TWN': u'Taiwan, Province of China',
u'TZA': u'Tanzania, United Republic of',
u'Taiwan, Province of China': u'TWN',
u'Tajikistan': u'TJK',
u'Tanzania, United Republic of': u'TZA',
u'Thailand': u'THA',
u'Timor-Leste': u'TLS',
u'Togo': u'TGO',
u'Tokelau': u'TKL',
u'Tonga': u'TON',
u'Trinidad and Tobago': u'TTO',
u'Tunisia': u'TUN',
u'Turkey': u'TUR',
u'Turkmenistan': u'TKM',
u'Turks and Caicos Islands': u'TCA',
u'Tuvalu': u'TUV',
u'UGA': u'Uganda',
u'UKR': u'Ukraine',
u'UMI': u'United States Minor Outlying Islands',
u'URY': u'Uruguay',
u'USA': u'United States',
u'UZB': u'Uzbekistan',
u'Uganda': u'UGA',
u'Ukraine': u'UKR',
u'United Arab Emirates': u'ARE',
u'United Kingdom': u'GBR',
u'United States': u'USA',
u'United States Minor Outlying Islands': u'UMI',
u'Uruguay': u'URY',
u'Uzbekistan': u'UZB',
u'VAT': u'Holy See (Vatican City State)',
u'VCT': u'Saint Vincent and the Grenadines',
u'VEN': u'Venezuela',
u'VGB': u'Virgin Islands, British',
u'VIR': u'Virgin Islands, U.S.',
u'VNM': u'Viet Nam',
u'VUT': u'Vanuatu',
u'Vanuatu': u'VUT',
u'Venezuela': u'VEN',
u'Viet Nam': u'VNM',
u'Virgin Islands, British': u'VGB',
u'Virgin Islands, U.S.': u'VIR',
u'WLF': u'Wallis and Futuna',
u'WSM': u'Samoa',
u'Wallis and Futuna': u'WLF',
u'Western Sahara': u'ESH',
u'YEM': u'Yemen',
u'Yemen': u'YEM',
u'ZAF': u'South Africa',
u'ZMB': u'Zambia',
u'ZWE': u'Zimbabwe',
u'Zambia': u'ZMB',
u'Zimbabwe': u'ZWE',
u'\xc5land Islands': u'ALA'}
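
# ---------------------------------------------------------------------------
# Hedged usage sketch (an addition for illustration, not part of the original
# data module): the two ``*_TABLE`` dicts above deliberately store both
# directions of the mapping (country name to code, and code back to name) in
# a single dict, so one lookup serves either direction. The values checked
# below are taken verbatim from entries in the tables above.
if __name__ == "__main__":
    assert COUNTRY_NAME_ISO_ALPHA_2_TABLE[u'DE'] == u'Germany'
    assert COUNTRY_NAME_ISO_ALPHA_2_TABLE[u'Germany'] == u'DE'
    assert COUNTRY_NAME_ISO_ALPHA_3_TABLE[u'DEU'] == u'Germany'
    assert COUNTRY_NAME_ISO_ALPHA_3_TABLE[u'Germany'] == u'DEU'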
COUNTRY_ISO_ALPHA_TABLE = {u'ABW': u'AW',
u'AD': u'AND',
u'AE': u'ARE',
u'AF': u'AFG',
u'AFG': u'AF',
u'AG': u'ATG',
u'AGO': u'AO',
u'AI': u'AIA',
u'AIA': u'AI',
u'AL': u'ALB',
u'ALA': u'AX',
u'ALB': u'AL',
u'AM': u'ARM',
u'AN': u'ANT',
u'AND': u'AD',
u'ANT': u'AN',
u'AO': u'AGO',
u'AQ': u'ATA',
u'AR': u'ARG',
u'ARE': u'AE',
u'ARG': u'AR',
u'ARM': u'AM',
u'AS': u'ASM',
u'ASM': u'AS',
u'AT': u'AUT',
u'ATA': u'AQ',
u'ATF': u'TF',
u'ATG': u'AG',
u'AU': u'AUS',
u'AUS': u'AU',
u'AUT': u'AT',
u'AW': u'ABW',
u'AX': u'ALA',
u'AZ': u'AZE',
u'AZE': u'AZ',
u'BA': u'BIH',
u'BB': u'BRB',
u'BD': u'BGD',
u'BDI': u'BI',
u'BE': u'BEL',
u'BEL': u'BE',
u'BEN': u'BJ',
u'BF': u'BFA',
u'BFA': u'BF',
u'BG': u'BGR',
u'BGD': u'BD',
u'BGR': u'BG',
u'BH': u'BHR',
u'BHR': u'BH',
u'BHS': u'BS',
u'BI': u'BDI',
u'BIH': u'BA',
u'BJ': u'BEN',
u'BL': u'BLM',
u'BLM': u'BL',
u'BLR': u'BY',
u'BLZ': u'BZ',
u'BM': u'BMU',
u'BMU': u'BM',
u'BN': u'BRN',
u'BO': u'BOL',
u'BOL': u'BO',
u'BR': u'BRA',
u'BRA': u'BR',
u'BRB': u'BB',
u'BRN': u'BN',
u'BS': u'BHS',
u'BT': u'BTN',
u'BTN': u'BT',
u'BV': u'BVT',
u'BVT': u'BV',
u'BW': u'BWA',
u'BWA': u'BW',
u'BY': u'BLR',
u'BZ': u'BLZ',
u'CA': u'CAN',
u'CAF': u'CF',
u'CAN': u'CA',
u'CC': u'CCK',
u'CCK': u'CC',
u'CD': u'COD',
u'CF': u'CAF',
u'CG': u'COG',
u'CH': u'CHE',
u'CHE': u'CH',
u'CHL': u'CL',
u'CHN': u'CN',
u'CI': u'CIV',
u'CIV': u'CI',
u'CK': u'COK',
u'CL': u'CHL',
u'CM': u'CMR',
u'CMR': u'CM',
u'CN': u'CHN',
u'CO': u'COL',
u'COD': u'CD',
u'COG': u'CG',
u'COK': u'CK',
u'COL': u'CO',
u'COM': u'KM',
u'CPV': u'CV',
u'CR': u'CRI',
u'CRI': u'CR',
u'CU': u'CUB',
u'CUB': u'CU',
u'CV': u'CPV',
u'CX': u'CXR',
u'CXR': u'CX',
u'CY': u'CYP',
u'CYM': u'KY',
u'CYP': u'CY',
u'CZ': u'CZE',
u'CZE': u'CZ',
u'DE': u'DEU',
u'DEU': u'DE',
u'DJ': u'DJI',
u'DJI': u'DJ',
u'DK': u'DNK',
u'DM': u'DMA',
u'DMA': u'DM',
u'DNK': u'DK',
u'DO': u'DOM',
u'DOM': u'DO',
u'DZ': u'DZA',
u'DZA': u'DZ',
u'EC': u'ECU',
u'ECU': u'EC',
u'EE': u'EST',
u'EG': u'EGY',
u'EGY': u'EG',
u'EH': u'ESH',
u'ER': u'ERI',
u'ERI': u'ER',
u'ES': u'ESP',
u'ESH': u'EH',
u'ESP': u'ES',
u'EST': u'EE',
u'ET': u'ETH',
u'ETH': u'ET',
u'FI': u'FIN',
u'FIN': u'FI',
u'FJ': u'FJI',
u'FJI': u'FJ',
u'FK': u'FLK',
u'FLK': u'FK',
u'FM': u'FSM',
u'FO': u'FRO',
u'FR': u'FRA',
u'FRA': u'FR',
u'FRO': u'FO',
u'FSM': u'FM',
u'GA': u'GAB',
u'GAB': u'GA',
u'GB': u'GBR',
u'GBR': u'GB',
u'GD': u'GRD',
u'GE': u'GEO',
u'GEO': u'GE',
u'GF': u'GUF',
u'GG': u'GGY',
u'GGY': u'GG',
u'GH': u'GHA',
u'GHA': u'GH',
u'GI': u'GIB',
u'GIB': u'GI',
u'GIN': u'GN',
u'GL': u'GRL',
u'GLP': u'GP',
u'GM': u'GMB',
u'GMB': u'GM',
u'GN': u'GIN',
u'GNB': u'GW',
u'GNQ': u'GQ',
u'GP': u'GLP',
u'GQ': u'GNQ',
u'GR': u'GRC',
u'GRC': u'GR',
u'GRD': u'GD',
u'GRL': u'GL',
u'GS': u'SGS',
u'GT': u'GTM',
u'GTM': u'GT',
u'GU': u'GUM',
u'GUF': u'GF',
u'GUM': u'GU',
u'GUY': u'GY',
u'GW': u'GNB',
u'GY': u'GUY',
u'HK': u'HKG',
u'HKG': u'HK',
u'HM': u'HMD',
u'HMD': u'HM',
u'HN': u'HND',
u'HND': u'HN',
u'HR': u'HRV',
u'HRV': u'HR',
u'HT': u'HTI',
u'HTI': u'HT',
u'HU': u'HUN',
u'HUN': u'HU',
u'ID': u'IDN',
u'IDN': u'ID',
u'IE': u'IRL',
u'IL': u'ISR',
u'IM': u'IMN',
u'IMN': u'IM',
u'IN': u'IND',
u'IND': u'IN',
u'IO': u'IOT',
u'IOT': u'IO',
u'IQ': u'IRQ',
u'IR': u'IRN',
u'IRL': u'IE',
u'IRN': u'IR',
u'IRQ': u'IQ',
u'IS': u'ISL',
u'ISL': u'IS',
u'ISR': u'IL',
u'IT': u'ITA',
u'ITA': u'IT',
u'JAM': u'JM',
u'JE': u'JEY',
u'JEY': u'JE',
u'JM': u'JAM',
u'JO': u'JOR',
u'JOR': u'JO',
u'JP': u'JPN',
u'JPN': u'JP',
u'KAZ': u'KZ',
u'KE': u'KEN',
u'KEN': u'KE',
u'KG': u'KGZ',
u'KGZ': u'KG',
u'KH': u'KHM',
u'KHM': u'KH',
u'KI': u'KIR',
u'KIR': u'KI',
u'KM': u'COM',
u'KN': u'KNA',
u'KNA': u'KN',
u'KOR': u'KR',
u'KP': u'PRK',
u'KR': u'KOR',
u'KW': u'KWT',
u'KWT': u'KW',
u'KY': u'CYM',
u'KZ': u'KAZ',
u'LA': u'LAO',
u'LAO': u'LA',
u'LB': u'LBN',
u'LBN': u'LB',
u'LBR': u'LR',
u'LBY': u'LY',
u'LC': u'LCA',
u'LCA': u'LC',
u'LI': u'LIE',
u'LIE': u'LI',
u'LK': u'LKA',
u'LKA': u'LK',
u'LR': u'LBR',
u'LS': u'LSO',
u'LSO': u'LS',
u'LT': u'LTU',
u'LTU': u'LT',
u'LU': u'LUX',
u'LUX': u'LU',
u'LV': u'LVA',
u'LVA': u'LV',
u'LY': u'LBY',
u'MA': u'MAR',
u'MAC': u'MO',
u'MAF': u'MF',
u'MAR': u'MA',
u'MC': u'MCO',
u'MCO': u'MC',
u'MD': u'MDA',
u'MDA': u'MD',
u'MDG': u'MG',
u'MDV': u'MV',
u'ME': u'MNE',
u'MEX': u'MX',
u'MF': u'MAF',
u'MG': u'MDG',
u'MH': u'MHL',
u'MHL': u'MH',
u'MK': u'MKD',
u'MKD': u'MK',
u'ML': u'MLI',
u'MLI': u'ML',
u'MLT': u'MT',
u'MM': u'MMR',
u'MMR': u'MM',
u'MN': u'MNG',
u'MNE': u'ME',
u'MNG': u'MN',
u'MNP': u'MP',
u'MO': u'MAC',
u'MOZ': u'MZ',
u'MP': u'MNP',
u'MQ': u'MTQ',
u'MR': u'MRT',
u'MRT': u'MR',
u'MS': u'MSR',
u'MSR': u'MS',
u'MT': u'MLT',
u'MTQ': u'MQ',
u'MU': u'MUS',
u'MUS': u'MU',
u'MV': u'MDV',
u'MW': u'MWI',
u'MWI': u'MW',
u'MX': u'MEX',
u'MY': u'MYS',
u'MYS': u'MY',
u'MYT': u'YT',
u'MZ': u'MOZ',
u'NA': u'NAM',
u'NAM': u'NA',
u'NC': u'NCL',
u'NCL': u'NC',
u'NE': u'NER',
u'NER': u'NE',
u'NF': u'NFK',
u'NFK': u'NF',
u'NG': u'NGA',
u'NGA': u'NG',
u'NI': u'NIC',
u'NIC': u'NI',
u'NIU': u'NU',
u'NL': u'NLD',
u'NLD': u'NL',
u'NO': u'NOR',
u'NOR': u'NO',
u'NP': u'NPL',
u'NPL': u'NP',
u'NR': u'NRU',
u'NRU': u'NR',
u'NU': u'NIU',
u'NZ': u'NZL',
u'NZL': u'NZ',
u'OM': u'OMN',
u'OMN': u'OM',
u'PA': u'PAN',
u'PAK': u'PK',
u'PAN': u'PA',
u'PCN': u'PN',
u'PE': u'PER',
u'PER': u'PE',
u'PF': u'PYF',
u'PG': u'PNG',
u'PH': u'PHL',
u'PHL': u'PH',
u'PK': u'PAK',
u'PL': u'POL',
u'PLW': u'PW',
u'PM': u'SPM',
u'PN': u'PCN',
u'PNG': u'PG',
u'POL': u'PL',
u'PR': u'PRI',
u'PRI': u'PR',
u'PRK': u'KP',
u'PRT': u'PT',
u'PRY': u'PY',
u'PS': u'PSE',
u'PSE': u'PS',
u'PT': u'PRT',
u'PW': u'PLW',
u'PY': u'PRY',
u'PYF': u'PF',
u'QA': u'QAT',
u'QAT': u'QA',
u'RE': u'REU',
u'REU': u'RE',
u'RO': u'ROU',
u'ROU': u'RO',
u'RS': u'SRB',
u'RU': u'RUS',
u'RUS': u'RU',
u'RW': u'RWA',
u'RWA': u'RW',
u'SA': u'SAU',
u'SAU': u'SA',
u'SB': u'SLB',
u'SC': u'SYC',
u'SD': u'SDN',
u'SDN': u'SD',
u'SE': u'SWE',
u'SEN': u'SN',
u'SG': u'SGP',
u'SGP': u'SG',
u'SGS': u'GS',
u'SH': u'SHN',
u'SHN': u'SH',
u'SI': u'SVN',
u'SJ': u'SJM',
u'SJM': u'SJ',
u'SK': u'SVK',
u'SL': u'SLE',
u'SLB': u'SB',
u'SLE': u'SL',
u'SLV': u'SV',
u'SM': u'SMR',
u'SMR': u'SM',
u'SN': u'SEN',
u'SO': u'SOM',
u'SOM': u'SO',
u'SPM': u'PM',
u'SR': u'SUR',
u'SRB': u'RS',
u'ST': u'STP',
u'STP': u'ST',
u'SUR': u'SR',
u'SV': u'SLV',
u'SVK': u'SK',
u'SVN': u'SI',
u'SWE': u'SE',
u'SWZ': u'SZ',
u'SY': u'SYR',
u'SYC': u'SC',
u'SYR': u'SY',
u'SZ': u'SWZ',
u'TC': u'TCA',
u'TCA': u'TC',
u'TCD': u'TD',
u'TD': u'TCD',
u'TF': u'ATF',
u'TG': u'TGO',
u'TGO': u'TG',
u'TH': u'THA',
u'THA': u'TH',
u'TJ': u'TJK',
u'TJK': u'TJ',
u'TK': u'TKL',
u'TKL': u'TK',
u'TKM': u'TM',
u'TL': u'TLS',
u'TLS': u'TL',
u'TM': u'TKM',
u'TN': u'TUN',
u'TO': u'TON',
u'TON': u'TO',
u'TR': u'TUR',
u'TT': u'TTO',
u'TTO': u'TT',
u'TUN': u'TN',
u'TUR': u'TR',
u'TUV': u'TV',
u'TV': u'TUV',
u'TW': u'TWN',
u'TWN': u'TW',
u'TZ': u'TZA',
u'TZA': u'TZ',
u'UA': u'UKR',
u'UG': u'UGA',
u'UGA': u'UG',
u'UKR': u'UA',
u'UM': u'UMI',
u'UMI': u'UM',
u'URY': u'UY',
u'US': u'USA',
u'USA': u'US',
u'UY': u'URY',
u'UZ': u'UZB',
u'UZB': u'UZ',
u'VA': u'VAT',
u'VAT': u'VA',
u'VC': u'VCT',
u'VCT': u'VC',
u'VE': u'VEN',
u'VEN': u'VE',
u'VG': u'VGB',
u'VGB': u'VG',
u'VI': u'VIR',
u'VIR': u'VI',
u'VN': u'VNM',
u'VNM': u'VN',
u'VU': u'VUT',
u'VUT': u'VU',
u'WF': u'WLF',
u'WLF': u'WF',
u'WS': u'WSM',
u'WSM': u'WS',
u'YE': u'YEM',
u'YEM': u'YE',
u'YT': u'MYT',
u'ZA': u'ZAF',
u'ZAF': u'ZA',
u'ZM': u'ZMB',
u'ZMB': u'ZM',
u'ZW': u'ZWE',
u'ZWE': u'ZW'}
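# ---------------------------------------------------------------------------
# Usage sketch (editorial addition, not part of the original module): both
# lookup tables above store each pairing in both directions, so a single dict
# access converts either way. The helper name below is hypothetical and only
# illustrates the lookup pattern with COUNTRY_ISO_ALPHA_TABLE, which maps
# ISO 3166-1 alpha-3 codes to alpha-2 codes and back.

def convert_alpha_code(code):
    """Convert an alpha-2 code to alpha-3, or an alpha-3 code to alpha-2."""
    return COUNTRY_ISO_ALPHA_TABLE[code]

# Examples:
#   convert_alpha_code(u'US')  -> u'USA'
#   convert_alpha_code(u'USA') -> u'US'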
| yesudeep/mils-secure | app/data/countries.py | Python | mit | 69,183 | ["BWA"] | d9975534a4c3b15f8570446fb558d447f30de4f032893cfe3a2e3015716ca348 |
# -*- coding: utf-8 -*-
{
"A location that specifies the geographic area for this region. This can be a location from the location hierarchy, or a 'group location', or a location that has a boundary for the area.": "Uma localização que especifica a área geográfica para esta região. Pode ser uma localização da hierarquia de localização, ou uma 'localização de grupo', ou uma localização que tem uma delimitação para a área.",
"Can't import tweepy": 'Não pode importar tweepy',
"If selected, then this Asset's Location will be updated whenever the Person's Location is updated.": 'Se seleccionada, então a localização deste Bem/produto será actualizada sempre que a Localização da Pessoa é actualizada.',
"If this configuration represents a region for the Regions menu, give it a name to use in the menu. The name for a personal map configuration will be set to the user's name.": 'Se esta configuração representa uma região para o menu de Regiões, dar-lhe um nome para utilizar no menu. O nome para uma configuração pessoal de um mapa será definida para o nome do utilizador.',
"If this is ticked, then this will become the user's Base Location & hence where the user is shown on the Map": 'Se esta é assinalada, então esta será a localização base do utilizador e, portanto, onde o utilizador é mostrado no Mapa',
"If you don't see the Hospital in the list, you can add a new one by clicking link 'Create Hospital'.": "Se você não vê o Hospital na lista, pode adicionar um novo carregando na ligação 'Criar Hospital'.",
"If you don't see the Organization in the list, you can add a new one by clicking link 'Create Organization'.": "Se você não vê a Organização na lista, pode adicionar uma nova carregando na ligação 'Criar Organização'.",
"Optional. The name of the geometry column. In PostGIS this defaults to 'the_geom'.": 'Opcional. O nome da coluna geométrica. Em PostGIS este padrão é "the_geom".',
"Parent level should be higher than this record's level. Parent level is": 'Nível "Pai" deve ser mais alto que este nível de registo. Nível "pai" é',
"Password fields don't match": 'Os campos de Password não coincidem.',
"Phone number to donate to this organization's relief efforts.": 'Número de Telefone para doar para o esforço de socorro desta organização.',
"Please come back after sometime if that doesn't help.": 'Por favor volte após algum tempo se isso não ajudar.',
"Quantity in %s's Inventory": 'Quantidade me inventário de %s',
"Select a person in charge for status 'assigned'": "Seleccionar uma pessoa responsável pelo estado 'atribuído'",
"Select this if all specific locations need a parent at the deepest level of the location hierarchy. For example, if 'district' is the smallest division in the hierarchy, then all specific locations would be required to have a district as a parent.": 'Seleccionar isto se todas as localizações específicas precisam de um antecessor / pai no nível mais profundo da hierarquia da localização. Por exemplo, se "distrito" é a divisão mais pequena na hierarquia, então todas as localizações específicas necessitariam de ter um distrito como antecessor / pai. Por exemplo: Distrito -> Conselho -> Freguesia ->',
"Select this if all specific locations need a parent location in the location hierarchy. This can assist in setting up a 'region' representing an affected area.": "Seleccionar isto se todas as localizações específicas necessitam de uma localização mãe na hierarquia de localização. Isto pode auxiliar em configurar uma 'região' representando uma área afectada.",
"Sorry, things didn't get done on time.": 'Desculpe, as coisas não foram feitas a tempo.',
"Sorry, we couldn't find that page.": 'Desculpe, não foi possível encontrar esta página.',
"System's Twitter account updated": 'Conta do sistema do Twitter actualizado',
"The Donor(s) for this project. Multiple values can be selected by holding down the 'Control' key.": "O(s) Doador(s) para este projecto. Múltiplos valores podem ser seleccionados mantendo carregada a tecla de 'Control'",
"The URL of the image file. If you don't upload an image file, then you must specify its location here.": 'O URL do ficheiro de imagem. Se não carregar um ficheiro de imagem, então deve especificar a sua localização aqui.',
"To search for a hospital, enter any of the names or IDs of the hospital, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all hospitals.": 'Para procurar um hospital, insira qualquer um dos nomes ou Identificações do hospital, separado por espaços. Você pode utilizar % como caracter de pesquisa. Pressione "Pesquisar" sem nenhum texto para listar todos os hospitais.',
"To search for a person, enter any of the first, middle or last names and/or an ID number of a person, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all persons.": "Para procurar uma pessoa, entrar em qualquer dos primeiros nomes, no meio ou na última e / ou um número de identificação de uma pessoa, separadas por espaços. Você pode usar% como caraceter geral. 'Pesquisa' Prima sem entrada para listar todas as pessoas.",
"To search for a person, enter any of the first, middle or last names, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all persons.": 'Para procurar uma pessoa, insira qualquer nome seja o primeiro, o do meio ou o último, separado por espaços. Pode utilizar % como ajuda. Carregue "Procurar" sem inserir para listar todas as pessoas.',
"To search for an assessment, enter any portion the ticket number of the assessment. You may use % as wildcard. Press 'Search' without input to list all assessments.": 'Para procurar uma avaliação, insira qualquer porção do número do ticket da avaliação. Você pode utilizar % como caracter de pesquisa. Prima "Pesquisar" sem inserir dados para listar todas as avaliações.',
"Type the first few characters of one of the Person's names.": 'Escreva as primeiras letras de um dos nomes da pessoa',
"Upload an image file here. If you don't upload an image file, then you must specify its location in the URL field.": 'Carregar um ficheiro de imagem aqui. Se não carregar um ficheiro de imagem, então deve especificar a sua localização no campo URL.',
"You haven't made any calculations": 'Você não fez nenhuns cálculos',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" é uma expressão opcional como "field1=\'newvalue\'". Você não pode actualizar ou apagar os resultados de uma JOIN',
'# of International Staff': 'nº de pessoal internacional',
'# of National Staff': '# de Staff Nacional',
'# of Vehicles': '# de Veículos',
'%(system_name)s - Verify Email': '%(system_name)s - Verifique e-mail',
'%s rows deleted': '%s linhas apagadas',
'%s rows updated': '%s linhas actualizadas',
'& then click on the map below to adjust the Lat/Lon fields': 'e depois carregue no mapa abaixo para ajustar os campos de Lat/Long',
'* Required Fields': '* Campos Requeridos/Necessários',
'0-15 minutes': '0-15 minutos',
'1 Assessment': '1 Avaliação',
'1-3 days': '1-3 dias',
'15-30 minutes': '15-30 minutos',
'2 different options are provided here currently:': '2 opções diferentes são providenciadas aqui actualmente:',
'2x4 Car': 'Carro 2x4',
'30-60 minutes': '30-60 minutos',
'8-14 days': '8 a 14 dias',
'A Marker assigned to an individual Location is set if there is a need to override the Marker assigned to the Feature Class.': 'Um marcador atribuído a uma Localização individual é definido se existir uma necessidade de substituir o marcador atribuído à classe de recurso.',
'A Reference Document such as a file, URL or contact person to verify this data. You can type the 1st few characters of the document name to link to an existing document.': 'Documento referente a ficheiro, URL ou pessoa de contato, para verificar estes dados. Pode introduzir primeiro caracteres do Nome do Documento, para criar uma ligação para um Documento existente.',
'A brief description of the group (optional)': 'Uma breve descrição do grupo (opcional)',
'A file downloaded from a GPS containing a series of geographic points in XML format.': 'Um ficheiro descarregado de um GPS contendo uma série de pontos geográficos em formato XML.',
'A file in GPX format taken from a GPS whose timestamps can be correlated with the timestamps on the photos to locate them on the map.': 'Um ficheiro em formato GPX tirado de um GPS do qual os timestamps podem estar relacionados com os timestamps nas fotos para as localizar no mapa.',
'A library of digital resources, such as photos, documents and reports': 'Uma biblioteca de recursos digitais, como fotografias, documentos e relatórios',
'A location group is a set of locations (often, a set of administrative regions representing a combined area). Member locations are added to a location group here. Location groups may be used to filter what is shown on the map and in search results to only entities covered by locations in the group. A location group can be used to define the extent of an affected area, if it does not fall within one administrative region. Location groups can be used in the Regions menu.': 'Um grupo de localização é um conjunto de localizações (muitas vezes um conjunto de regiões administrativas representando uma área combinada). Localizações de membro podem ser utilizadas para filtrar o que é exibido no mapa e em resultados de procura para apenas entidades abrangidas por grupos de localização. Um grupo de localização pode ser utilizado para definir a extensão de uma área afectada, se não recai dentro de uma região administrativa. Grupos de localização podem ser utilizados no menu de Regiões.',
'A location group must have at least one member.': 'Um grupo de localização deve ter pelo menos um membro',
'A survey series with id %s does not exist. Please go back and create one.': 'Uma série de inquérito com id %s não existe. Por favor retroceda e crie uma.',
'ABOUT THIS MODULE': 'Acerca deste Modulo',
'ACCESS DATA': 'ACEDER AOS DADOS',
'API is documented here': 'API está documentado aqui',
'ATC-20 Rapid Evaluation modified for New Zealand': 'ATC-20 Avaliação Rápida modificado para Nova Zelândia',
'Ability to Fill Out Surveys': 'Habilidade para preencher inquéritos',
'Ability to customize the list of details tracked at a Shelter': 'Capacidade de personalizar a lista de detalhes controladas num abrigo',
'Ability to customize the list of human resource tracked at a Shelter': 'Possibilidade de adaptar a listagem de recursos humanos geridos/registados no Abrigo',
'Ability to customize the list of important facilities needed at a Shelter': 'capacidade de personalizar a lista de instalações importantes necessárias num abrigo',
'Ability to view Results of Completed and/or partially filled out Surveys': 'Capacidade de ver Resultados de Inquéritos Completos e/ou preenchidos parcialmente.',
'About': 'Sobre',
'Access denied': 'Acesso negado',
'Access to Shelter': 'Acesso a abrigo',
'Access to education services': 'Acesso a serviços educativos',
'Accessibility of Affected Location': 'Acessibilidade da Localização Afectada',
'Account Registered - Please Check Your Email': 'Conta Registada - Por favor verifique o seu E-mail',
'Account registered, however registration is still pending approval - please wait until confirmation received.': 'Conta registada, contudo o registo ainda necessita de aprovação - por favor espere até ter recebido confirmação.',
'Acronym': 'Acrónimo',
'Actionable by all targeted recipients': 'Accionável por todos os destinatários alvo',
'Actionable only by designated exercise participants; exercise identifier SHOULD appear in <note>': 'activável apenas por participantes designados no exercício; identificador do exercício DEVE aparecer em <note>',
'Actioned?': 'Activado? / Accionado?',
'Actions taken as a result of this request.': 'Acções tomadas como resultado deste pedido.',
'Actions': 'Acções',
'Active Problems': 'Problemas Activos',
'Activities matching Assessments:': 'Actividades correspondentes a avaliações',
'Activities of children': 'Actividades de crianças',
'Activities': 'Actividades',
'Activities:': 'Actividades:',
'Activity Added': 'Actividade Adicionada',
'Activity Deleted': 'Actividade Apagada',
'Activity Details': 'Detalhes de Actividade',
'Activity Reports': 'Relatórios de Actividade',
'Activity Type': 'Tipo de Actividade',
'Activity Updated': 'Actividade actualizada',
'Activity': 'Actividade',
'Add Address': 'Adicionar Endereço',
'Add Alternative Item': 'Adicionar Item Alternativo',
'Add Assessment Summary': 'Adicionar Resumo/Sumário de Avaliação',
'Add Assessment': 'Adicionar Avaliação',
'Add Availability': 'adicionar disponibilidade',
'Add Baseline Type': 'Adicionar Tipo de Linha de Base',
'Add Baseline': 'adicionar linha de base',
'Add Bundle': 'Adicionar Pacote',
'Add Contact Information': 'Adicionar Informação de Contacto',
'Add Credential': 'Adicionar Credencial/ autorização',
'Add Credentials': 'Adicionar Credenciais',
'Add Disaster Victims': 'Adicionar Vítimas de Desastre/Catástrofe',
'Add Distribution.': 'Adicionar Distribuição',
'Add Donor': 'Adicionar Doador/Dador',
'Add Flood Report': 'Adicionar Relatório de Inundação',
'Add Group Member': 'Adicionar Membro de Grupo',
'Add Identity': 'Adicionar Identidade',
'Add Image': 'Adicionar Imagem',
'Add Impact Type': 'Adicionar Tipo de Impacto',
'Add Impact': 'Adicionar Impacto',
'Add Item to Request': 'Adicionar Item ao Pedido',
'Add Item to Shipment': 'Adicionar Item a Carregamento',
'Add Item': 'Adicionar Item',
'Add Key': 'Adicionar Chave',
'Add Kit': 'Adicionar Kit',
'Add Level 2 Assessment': 'Adicionar Avaliação Nível 2',
'Add Log Entry': 'Adicionar Entrada de Diário/Log',
'Add Member': 'Adicionar Membro/sócio',
'Add Membership': 'Adicionar Sociedade',
'Add Message': 'Adicionar Mensagem',
'Add Need Type': 'Adicionar Tipo de Necessidade',
'Add Need': 'Adicionar Necessidade',
'Add New Assessment Summary': 'adicionar novo resumo de avaliação',
'Add New Assessment': 'Adicionar Nova Avaliação',
'Add New Baseline Type': 'Adicionar nova linha de base / Patamar',
'Add New Baseline': 'Adicionar Nova Linha de Base/ Plataforma',
'Add New Budget': 'Adicionar Novo Orçamento',
'Add New Bundle': 'Adicionar Novo Conjunto',
'Add New Cluster Subsector': 'Adicionar Novo Subsector de Cluster',
'Add New Commitment Item': 'Adicionar Novo Item de Consignação',
'Add New Document': 'Adicionar Novo Documento',
'Add New Donor': 'Adicionar novos doadores',
'Add New Entry': 'Adicionar Nova Entrada',
'Add New Flood Report': 'Adicionar Novo Relatório de Inundação',
'Add New Image': 'Adicionar Nova Imagem',
'Add New Impact': 'Adicionar Novo Impacto',
'Add New Inventory Item': 'Adicionar Novo Item de Inventário',
'Add New Item to Kit': 'Adicionar Novo Item ao Kit',
'Add New Key': 'Adicionar Nova Chave',
'Add New Level 1 Assessment': 'Adicionar Nova Avaliação Nível 1',
'Add New Level 2 Assessment': 'Adicionar Nova Avaliação Nível 2',
'Add New Member': 'Adicionar Novo Membro/sócio',
'Add New Membership': 'Adicionar Novo associado',
'Add New Need Type': 'Adicionar um novo tipo de necessidade',
'Add New Need': 'Adicionar nova necessidade',
'Add New Population Statistic': 'Adicionar Nova Estatística de População',
'Add New Problem': 'Adicionar Novo Problema',
'Add New Rapid Assessment': 'Adicionar Nova Avaliação Rápida',
'Add New Received Item': 'Adicionar novo item recepcionado',
'Add New Record': 'Adicionar Novo Registo',
'Add New Report': 'Adicionar Novo Relatório',
'Add New Request Item': 'Adicionar Novo Item de Pedido',
'Add New Request': 'Adicionar Novo Pedido',
'Add New River': 'Adicionar Novo Rio/ Curso de água',
'Add New Role to User': 'Adicionar Nova Função a Utilizador',
'Add New Sent Item': 'Adicionar Novo Item Enviado',
'Add New Setting': 'Adicionar Nova Definição',
'Add New Solution': 'Adicionar Nova Solução',
'Add New Staff Type': 'Adicionar Novo Tipo de Funcionário/Staff',
'Add New Staff': 'Adicionar Novo Staff/funcionário',
'Add New Survey Answer': 'Adicionar Nova Resposta de Inquérito',
'Add New Survey Question': 'Adicionar nova questão de inquérito',
'Add New Survey Section': 'Adicionar nova secção de inquérito',
'Add New Survey Template': 'Adicionar Novo Modelo de Inquérito',
'Add New Team': 'Adicionar Nova Equipa',
'Add New Ticket': 'Adicionar Novo Bilhete',
'Add New Track': 'Adicionar Novo Trajecto',
'Add New User to Role': 'Adicionar Novo Utilizador a Função',
'Add New': 'Adicionar Novo',
'Add Note': 'Adicionar Nota',
'Add Peer': 'Adicionar Peer',
'Add Person': 'adicionar pessoa',
'Add Photo': 'Adicionar Foto',
'Add Population Statistic': 'Adicionar Estatística Populacional',
'Add Problem': 'Adicionar Problema',
'Add Question': 'Adicionar Questão',
'Add Rapid Assessment': 'Adicionar Avaliação Rápida',
'Add Record': 'Adicionar Registo',
'Add Report': 'Adicionar Relatório',
'Add Request': 'Adicionar Pedido',
'Add River': 'Adicionar Rio/curso de água',
'Add Section': 'Adicionar Secção',
'Add Solution': 'Adicionar Solução',
'Add Staff Type': 'Adicionar Tipo de Staff',
'Add Staff': 'Adicionar Staff/ Funcionário',
'Add Subscription': 'Adicionar Subscrição',
'Add Survey Answer': 'Adicionar pergunta de inquérito',
'Add Survey Question': 'Adicione pergunta do exame / Questionário',
'Add Survey Section': 'Adicionar Secção de Inquérito',
'Add Survey Series': 'Adicionar Séries de Inquérito',
'Add Survey Template': 'Adicionar Modelo de Inquérito',
'Add Team Member': 'Adicionar Membro/sócio',
'Add Team': 'Adicionar Equipa',
'Add Ticket': 'Adicionar Bilhete',
'Add Unit': 'Adicionar Unidade',
'Add Volunteer Availability': 'Adicionar disponibilidade de voluntários',
'Add a Reference Document such as a file, URL or contact person to verify this data. If you do not enter a Reference Document, your email will be displayed instead.': 'Adicione um Documento de Referência tal como um ficheiro, URL ou pessoa de contacto para verificar estes dados. Se não inserir um Documento de Referência, o seu email será exibido em vez disso.',
'Add a Volunteer': 'Adicionar um Voluntário',
'Add new Group': 'Adicionar novo Grupo',
'Add new Individual': 'Adicionar novo Indivíduo',
'Add new project.': 'Adicionar novo projecto',
'Add new staff role.': 'Adicionar novo papel para o Staff',
'Add to Bundle': 'Adicionar ao Pacote',
'Add to budget': 'Adicionar a orçamento',
'Add': 'Adicionar',
'Add/Edit/Remove Layers': 'Adicionar/Editar/Remover Camadas',
'Added to Group': 'Membro Adicionado',
'Added to Team': 'Membro Adicionado',
'Additional Beds / 24hrs': 'Camas Adicionais /24hrs',
'Address Details': 'Detalhes de Endereço',
'Address Type': 'tipo de endereço',
'Address added': 'Endereço adicionado',
'Address deleted': 'Endereço apagado',
'Address updated': 'Endereço actualizado',
'Address': 'Endereço',
'Addresses': 'Endereços',
'Adequate': 'Adequado',
'Admin Email': 'E-mail de Administrador',
'Admin Name': 'Nome de Administrador',
'Admin Tel': 'Tel Admin',
'Administration': 'Administração',
'Admissions/24hrs': 'Admissões/24horas',
'Adolescent (12-20)': 'Adolescente (12-20)',
'Adult (21-50)': 'Adulto (21-50)',
'Adult ICU': 'UCI Adultos',
'Adult Psychiatric': 'Psiquiatria Adultos',
'Adult female': ' Feminino adulto ',
'Adult male': 'Masculino Adulto',
'Advanced:': 'Avançado:',
'Advisory': 'Consultivo',
'After clicking on the button, a set of paired items will be shown one by one. Please select the one solution from each pair that you prefer over the other.': 'Depois de carregar no botão, um conjunto de items emparelhados será mostrado um a um. Por favor seleccione a solução única de cada par a qual prefere acima da outra.',
'Age Group': 'Grupo de Idade',
'Age group does not match actual age.': 'A faixa etária não corresponde à idade real.',
'Aggravating factors': 'Factores Agravantes',
'Agriculture': 'Agricultura',
'Air Transport Service': 'Serviço de Transporte Aéreo',
'Aircraft Crash': 'Queda de Aeronave',
'Aircraft Hijacking': 'Sequestro de Avião',
'Airport Closure': 'Encerramento de Aeroporto',
'Alcohol': 'Álcool',
'Alert': 'Alerta',
'All Inbound & Outbound Messages are stored here': 'Todas as Mensagens de Entrada e de Saída são armazenadas aqui',
'All Resources': 'todos os recursos',
'All data provided by the Sahana Software Foundation from this site is licenced under a Creative Commons Attribution licence. However, not all data originates here. Please consult the source field of each entry.': 'Todos os dados providenciados pela Fundação de Software Sahana deste sítio são licenciados sob uma licença Creative Commons Attribution. Contudo, nem todos os dados são originados aqui. Por favor consulte o campo fonte de cada entrada.',
'All': 'Todos',
'Allowed to push': 'Permitido para empurrar',
'Allows a Budget to be drawn up': 'Permite delinear um orçamento',
'Allows authorized users to control which layers are available to the situation map.': 'Permite que os utilizadores autorizados controlem quais camadas estão disponíveis para o mapa de situação.',
'Alternative Item Details': 'Detalhes de Item Alternativos',
'Alternative Item added': 'Item Alternativo adicionado',
'Alternative Item deleted': 'Item Alternativo apagado',
'Alternative Item': 'Item Alternativo',
'Alternative Items': 'Items Alternativos',
'Alternative places for studying': 'Locais alternativos para estudar',
'Ambulance Service': 'Serviço de Ambulância',
'An asset must be assigned to a person, site OR location.': 'Um bem deve ser consignado a uma pessoa, local OU localização.',
'An intake system, a warehouse management system, commodity tracking, supply chain management, procurement and other asset and resource management capabilities.': 'Um sistema de admissão, um sistema de gestão de armazém, acompanhamento das mercadorias, gestão da cadeia de abastecimento, capacidades de gestão de aquisições e de outros recursos e bens.',
'An item which can be used in place of another item': 'Um item que pode ser utilizado em lugar de outro item',
'Analysis of Completed Surveys': 'Análise de Inquéritos Completos',
'Animal Die Off': 'Animais Morrem',
'Animal Feed': 'Alimentos para animais',
'Answer Choices (One Per Line)': 'Responda às questões (um por linha)',
'Anthropology': 'Antropologia',
'Antibiotics available': 'Antibióticos disponíveis',
'Apparent Age': 'Idade Aparente',
'Apparent Gender': 'Género Aparente',
'Applications': 'Candidaturas',
'Approve': 'Aprovar',
'Approved': 'Aprovado',
'Approver': 'Aprovador',
'Area': 'Área',
'Areas inspected': 'Áreas inspeccionadas',
'Assessment Details': 'Detalhes da Avaliação',
'Assessment Reported': 'Avaliação Comunicada',
'Assessment Summaries': 'Sumários de Avaliação',
'Assessment Summary Details': 'Detalhes de Sumário/ resumo de Avaliação',
'Assessment Summary added': 'Sumário/Resumo de Avaliação adicionado',
'Assessment Summary updated': 'Resumo/sumário de Avaliação actualizado',
'Assessment added': 'Avaliação adicionada',
'Assessment admin level': 'Avaliação Nível de Administrador',
'Assessment deleted': 'Avaliação apagada',
'Assessment timeline': 'Cronograma de Avaliação',
'Assessment updated': 'Avaliação actualizada',
'Assessment': 'Avaliação',
'Assessments Needs vs. Activities': 'Avaliações Necessidades vs. Actividades',
'Assessments': 'Avaliações',
'Assessments:': 'Avaliações:',
'Asset Assigned': 'Bem Atribuído',
'Asset Assignment Details': 'Detalhes de Atribuição de Bem',
'Asset Assignment deleted': 'Consignação/atribuição de Bem/posse apagada',
'Asset Assignment updated': 'Atribuição de bens actualizada',
'Asset Assignments': 'Atribuição de Bens',
'Asset Details': 'Detalhes do Bem',
'Asset Management': 'Gestão de Bens',
'Asset Number': 'Numero do Bem / Produto',
'Asset added': 'Bem adicionado',
'Asset deleted': 'Bem apagado',
'Asset updated': 'Bem actualizado',
'Asset': 'Bem / Posse',
'Assets': 'Bens',
'Assign Staff': 'designar funcionários / staff',
'Assign to Org.': 'atribuir a Org.',
'Assign': 'Atribuir',
'Assigned By': 'Atribuído Por',
'Assigned To': 'Atribuído A',
'Assigned to': 'Atribuído a',
'Assigned': 'Atribuído',
'Assignments': 'Atribuições',
'At/Visited Location (not virtual)': 'Em / Localização Visitada (não virtual)',
'Attend to information sources as described in <instruction>': 'Atenção às fontes de informação, conforme descrito nas <instruction>',
'Attribution': 'Atribuição',
'Author': 'Autor',
'Availability': 'Disponibilidade',
'Available Alternative Inventory Items': 'Items de Inventário Alternativos Disponíveis',
'Available Inventory Items': 'Items de Inventário Disponíveis',
'Available Messages': 'Mensagens Disponíveis',
'Available Records': 'Registos Disponíveis',
'Available databases and tables': 'Bases de Dados e Tabelas disponíveis',
'Available for Location': 'Disponível para Localização',
'Available from': 'Disponível de',
'Available in Viewer?': 'Disponível no Visualizador?',
'Available until': 'Disponível até',
'Avoid the subject event as per the <instruction>': 'Evitar o evento sujeito conforme a <instruction>',
'Background Color for Text blocks': 'Cor de Fundo para blocos de Texto',
'Background Color': 'Cor de Fundo',
'Baldness': 'Calvície',
'Bank/micro finance': 'Banco / Micro Sistema Financeiro',
'Barricades are needed': 'São necessários Barricadas',
'Base Layer?': 'Camada de Base?',
'Base Location': 'Localização da Base',
'Baseline Number of Beds': 'Número de Linha de Base/Plataforma de Camas',
'Baseline Type Details': 'Detalhes de Tipo de Linha de base',
'Baseline Type added': 'Tipo de Linha de base/patamar adicionado',
'Baseline Type updated': 'Tipo de Linha de Base actualizado',
'Baseline Type': 'Tipos de Linha de Base',
'Baseline Types': 'Tipos de Linha de Base /patamar',
'Baseline added': 'Adicionada Linha de Base',
'Baseline deleted': 'Linha de Base/Plataforma apagada',
'Baseline number of beds of that type in this unit.': 'Número base de camas desse tipo nesta unidade.',
'Baseline updated': 'Linha de base actualizada',
'Baselines Details': 'Detalhes de Linha de Base /plataforma',
'Basic Assessment Reported': 'Avaliação Básica Comunicada',
'Basic Assessment': 'Avaliação Básica',
'Basic Details': 'Detalhes Básicos',
'Basic reports on the Shelter and drill-down by region': 'Relatórios básicos sobre o abrigo e análise detalhada por região',
'Baud rate to use for your modem - The default is safe for most cases': 'Taxa de transmissão a utilizar para o seu modem - o padrão é seguro para a maioria dos casos',
'Baud': 'Baud / velocidade',
'Beam': 'Viga',
'Bed Capacity per Unit': 'Capacidade de Camas por Unidade',
'Bed Capacity': 'Capacidade de camas',
'Bed Type': 'Tipo de Cama',
'Bed type already registered': 'Tipo de cama já registado',
'Below ground level': 'Abaixo do nível do chão',
'Beneficiary Type': 'Tipo de Beneficiário',
'Biological Hazard': 'Risco Biológico',
'Biscuits': 'Biscoitos',
'Blizzard': 'Nevasca',
'Blood Type (AB0)': 'Grupo Sanguíneo (AB0)',
'Blowing Snow': 'Nevão',
'Boat': 'Barco',
'Bodies found': 'Corpos encontrados',
'Bodies recovered': 'Corpos recuperados',
'Body Recovery Request': 'Pedido de Recuperação de Corpo',
'Body Recovery Requests': 'Pedidos de Recuperação de Corpo',
'Body': 'corpo',
'Bomb Explosion': 'Explosão de Bomba',
'Bomb Threat': 'Ameaça de Bomba',
'Bomb': 'Bomba',
'Border Color for Text blocks': 'Cor de limite para os blocos de texto',
'Bounding Box Insets': 'Margens da Caixa Delimitadora',
'Bounding Box Size': 'Tamanho da Caixa Delimitadora',
'Brand added': 'Marca adicionada',
'Brand deleted': 'Marca apagada',
'Brand updated': 'Marca actualizada',
'Brand': 'Marca',
'Brands': 'Marcas',
'Bricks': 'Tijolos',
'Bridge Closed': 'Ponte Encerrada',
'Bucket': 'Balde',
'Buddhist': 'Budista',
'Budget Details': 'Detalhes de Orçamento',
'Budget Updated': 'Orçamento Actualizado',
'Budget added': 'Orçamento adicionado',
'Budget deleted': 'Orçamento apagado',
'Budget updated': 'Orçamento actualizado',
'Budget': 'Orçamento',
'Budgeting Module': 'Módulo de Orçamentação',
'Budgets': 'Orçamentos',
'Building Collapsed': 'Edifício colapsado',
'Building Name': 'Nome de Edifício',
'Building Safety Assessments': 'Avaliações de Segurança de Edifício',
'Building Short Name/Business Name': 'Nome Abreviado de Edifício/Nome de Negócio',
'Building or storey leaning': 'Inclinação de Edifício ou Piso',
'Built using the Template agreed by a group of NGOs working together as the': 'Construído utilizando o Modelo acordado por um grupo de ONGs trabalhando juntas como ',
'Bulk Uploader': 'Upload em Massa/Carregamento em massa',
'Bundle Contents': 'Conteúdos de Embalagem',
'Bundle Details': 'Detalhes de Pacote',
'Bundle Updated': 'Pacote Actualizado',
'Bundle deleted': 'Embalagem/bloco apagado',
'Bundle': 'Pacote',
'Bundles': 'Pacotes /Conjuntos',
'Burn ICU': 'UCI Queimados',
'Burn': 'Queimadura',
'Burned/charred': 'Queimado/carbonizado',
'By Inventory': 'Por Inventário',
'By Person': 'Por Pessoa',
'By Site': 'Por Sítio',
'CBA Women': 'Mulheres CBA',
'CSS file %s not writable - unable to apply theme!': 'ficheiro CSS %s não gravávél - não é possível aplicar o tema!',
'Calculate': 'Calcular',
'Camp Coordination/Management': 'Coordenação/Gestão de Campo',
'Camp': 'Campo',
'Can only disable 1 record at a time!': 'Só pode desactivar 1 registo de cada vez.',
'Cancel Shipment': 'Cancelar Carregamento',
'Cancel': 'Cancelar',
'Canceled': 'Cancelado',
'Canned Fish': 'Peixe Enlatado',
'Cannot be empty': 'Não pode estar vazio',
'Cannot disable your own account!': 'Não pode desactivar a sua própria conta!',
'Capacity (Max Persons)': 'Capacidade (Máximo de Pessoas)',
'Capture Information on Disaster Victim groups (Tourists, Passengers, Families, etc.)': 'Captar/Obter informação nos Grupos de Vítimas de Catástrofe/Desastre (Turistas, Passageiros, Famílias, etc.)',
'Capture Information on each disaster victim': 'Capturar Informação em cada vítima de desastre',
'Capturing organizational information of a relief organization and all the projects they have in the region': 'Capturar informação organizacional de uma organização de socorro e todos os projectos que as mesmas têm na região',
'Capturing the projects each organization is providing and where': 'Captar/recolher os projectos que cada organização está a providenciar e onde',
'Cardiology': 'Cardiologia',
'Cassava': 'Mandioca',
'Casual Labor': 'Trabalho Casual',
'Casualties': 'Vítimas mortais',
'Catalog Item added': 'Item de Catálogo adicionado',
'Catalog Item deleted': 'Item do Catalogo excluído',
'Catalog Item updated': 'Item de Catálogo actualizado',
'Catalog Item': 'Item de Catálogo',
'Catalog Items': 'Items de Catálogo',
'Category': 'Categoria',
'Ceilings, light fixtures': 'Tectos, Luminárias /Iluminação',
'Central point to record details on People': 'Ponto central para gravar detalhes sobre Pessoas',
'Certificate Status': 'Estado/ Ponto de situação de Certificados/ Certidões',
'Certification': 'Certificação',
'Change Password': 'Alterar Palavra Passe',
'Check for errors in the URL, maybe the address was mistyped.': 'Verifique erros no URL, talvez o endereço tenha sido mal escrito.',
'Check if the URL is pointing to a directory instead of a webpage.': 'verificar se o URL está direccionado a um directório em vez de uma página web',
'Check outbox for the message status': 'verifique caixa de saída para o estado da mensagem',
'Check': 'Confira',
'Check-in': 'Check-in/ verificação de entrada',
'Checklist created': 'Checklist /Lista de Verificação criada',
'Checklist deleted': 'Checklist apagada',
'Checklist of Operations': 'Checklist/lista de verificação das Operações',
'Checklist updated': 'Checklist actualizada',
'Chemical Hazard': 'Risco Químico',
'Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack': 'Ameaça ou ataque Químico, Biológico, Radiológico, Nuclear ou Explosivos de Alto Rendimento',
'Chicken': 'Galinha / Frango',
'Child (2-11)': 'Criança (2-11)',
'Child (< 18 yrs)': 'Criança (<18 anos)',
'Child Abduction Emergency': 'Emergência Rapto de Criança',
'Child headed households (<18 yrs)': 'Famílias chefiadas por crianças (<18 anos)',
'Child': 'Criança',
'Children (2-5 years)': 'Crianças (2-5 anos)',
'Children (5-15 years)': 'Crianças (5-15 anos)',
'Children (< 2 years)': 'Crianças (< 2 anos)',
'Children not enrolled in new school': 'Crianças não matriculadas na nova escola',
'Chinese (Taiwan)': 'Chinês (Taiwan)',
'Cholera Treatment Capability': 'Capacidade de Tratamento de Cólera',
'Cholera Treatment Center': 'Centro de Tratamento de Cólera',
'Cholera Treatment': 'Tratamento para Cólera',
'Cholera-Treatment-Center': 'Centro de Tratamento de Cólera',
'Choose a new posting based on the new evaluation and team judgement. Severe conditions affecting the whole building are grounds for an UNSAFE posting. Localised Severe and overall Moderate conditions may require a RESTRICTED USE. Place INSPECTED placard at main entrance. Post all other placards at every significant entrance.': 'Escolha uma nova postagem baseada na nova avaliação e juízo de equipa. Condições severas/graves que afectem todo o edifício são fundamentos para uma postagem de INSEGURO/PERIGOSO. Condições Severas ou globalmente moderadas podem requerer um USO RESTRICTO. Coloque o placard INSPECCIONADO na entrada principal. Coloque todos os outros placards em todas as entradas significantes.',
'Christian': 'Cristão',
'Church': 'Igreja',
'Civil Emergency': 'Emergência Civil',
'Cladding, glazing': 'Revestimento, vidros',
'Click on the link %(url)s to reset your password': 'Carregar na hiperligação %(url)s para redefinir a sua password',
'Click on the link %(url)s to verify your email': 'Carregar na hiperligação %(url)s para verificar o seu email',
'Clinical Laboratory': 'Laboratório Clínico',
'Clinical Operations': 'Operações Clínicas',
'Clinical Status': 'Estado Clínico',
'Closed': 'Fechado',
'Clothing': 'Roupas',
'Cluster Distance': 'Distância de Cluster',
'Cluster Subsector Details': 'Detalhes de Subsector de Cluster',
'Cluster Subsector added': 'Subsector de Cluster adicionado',
'Cluster Subsector deleted': 'Subsector de grupo apagado',
'Cluster Subsector updated': 'Subsector de Cluster actualizado',
'Cluster Subsector': 'Subsector de Cluster',
'Cluster Subsectors': 'Subsectores de Cluster/Grupo',
'Code': 'Código',
'Cold Wave': 'Vaga de Frio',
'Collapse, partial collapse, off foundation': 'Desmoronamento, desmoronamento parcial, fora do alicerce',
'Collective center': 'Centro Colectivo',
'Color of Buttons when hovering': 'Cor dos Botões quando suspensos',
'Color of bottom of Buttons when not pressed': 'Cor de fundo dos Botões quando não pressionados',
'Color of bottom of Buttons when pressed': 'Cor de fundo dos Botões quando pressionados',
'Color of selected Input fields': 'cor dos campos de inserção seleccionados',
'Color of selected menu items': 'Côr dos items de menu seleccionados',
'Column Choices (One Per Line': 'Escolhas colunas (um por linha)',
'Columns, pilasters, corbels': 'Colunas, Pilastras, Mísulas',
'Combined Method': 'Método Combinado',
'Come back later. Everyone visiting this site is probably experiencing the same problem as you.': 'Por favor tente mais tarde. Todos os utilizadores deste site provavelmente estão com o mesmo problema que você.',
'Come back later.': 'Volte mais tarde.',
'Comments': 'Comentários',
'Commercial/Offices': 'Comercial / Escritórios',
'Commit Date': 'Data de Entrega',
'Commit': 'Cometer/ Consignar/ entregar',
'Commiting a changed spreadsheet to the database': 'Enviar uma folha de cálculo modificada para a base de dados',
'Commitment Added': 'Entrega/consignação Adicionada',
'Commitment Canceled': 'Consignação Cancelada',
'Commitment Details': 'Detalhes de Consignação /Entrega',
'Commitment Item Details': 'Detalhes de Item de Consignação',
'Commitment Item added': 'Item de atribuição adicionado',
'Commitment Item deleted': 'Item de Consignação/Entrega apagado',
'Commitment Item updated': 'Item de entrega actualizado',
'Commitment Item': 'Item de entrega',
'Commitment Updated': 'Compromisso Actualizado',
'Commitment': 'Consignação/Compromisso',
'Commitments': 'Compromissos',
'Committed By': 'Consignado por',
'Committed': 'Consignado/ atribuído',
'Committing Inventory': 'Submeter Inventário',
'Communication problems': 'Problemas de Comunicação',
'Community Centre': 'Centro Comunitário',
'Community Health Center': 'centro de saúde comunitário',
'Community Member': 'Membro da Comunidade',
'Complete': 'Completo',
'Completed': 'Completo',
'Complexion': 'Cor da pele',
'Compose': 'Compor /constituir',
'Compromised': 'Comprometido',
'Concrete frame': 'Estrutura de Betão',
'Concrete shear wall': 'Parede resistente de betão',
'Configurations': 'Configurações',
'Configure Run-time Settings': 'Configurar Definições de Run-time',
'Confirm Shipment Received': 'Confirmar Carregamento Recebido',
'Confirmed': 'Confirmado',
'Conflict Details': 'Detalhes de Conflito',
'Conflict Resolution': 'Resolução de Conflito',
'Consignment Note': 'Nota de Consignação',
'Constraints Only': 'Apenas Restrições',
'Consumable': 'consumível',
'Contact Data': 'Dados de Contacto',
'Contact Details': 'Detalhes de Contacto',
'Contact Information Added': 'Informação de Contacto Adicionada',
'Contact Information Deleted': 'Informação de Contacto Apagada',
'Contact Information Updated': 'Informação de Contacto Actualizada',
'Contact Information': 'Informação de Contacto',
'Contact Method': 'Método de Contacto',
'Contact Name': 'Nome de Contacto',
'Contact Person': 'Contactar Pessoa',
'Contact Phone': 'Telefone para contato',
'Contact details': 'Detalhes de Contacto',
'Contact information deleted': 'Informação de Contacto apagada',
'Contact information updated': 'Informação de contacto actualizada',
'Contact person(s) in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.': 'Contactar pessoas em caso de notícias ou mais questões (se diferente da pessoa que relata). Incluir número de telefone, endereço e email conforme disponível.',
'Contact us': 'Contacte-nos',
'Contact': 'Contacto',
'Contacts': 'Contactos',
'Contents': 'Conteúdos',
'Contributor': 'Contribuinte',
'Conversion Tool': 'Ferramenta de Conversão',
'Cooking Oil': 'Óleo de Cozinhar',
'Copy': 'Copiar',
'Corn': 'Milho',
'Cost Type': 'Tipo de Custo',
'Cost per Megabyte': 'Custo por Megabyte',
'Country of Residence': 'País de Residência',
'Country': 'País',
'Create & manage Distribution groups to receive Alerts': 'Criar e gerir grupos de distribuição para receberem alertas',
'Create Activity Report': 'Criar Relatório de Actividade',
'Create Activity Type': 'Criar Tipo de Atividade',
'Create Activity': 'Criar Actividade',
'Create Asset': 'Criar Bem/Activo',
'Create Bed Type': 'Criar Tipo de Cama',
'Create Brand': 'Criar Marca',
'Create Budget': 'Criar Orçamento',
'Create Catalog Item': 'Criar Item de Catálogo',
'Create Cholera Treatment Capability Information': 'Criar Informação de Capacidade de Tratamento de Cólera',
'Create Cluster Subsector': 'Criar Subsector de Bloco',
'Create Contact': 'Criar Contacto',
'Create Dead Body Report': 'Criar Relatório de Cadáver',
'Create Feature Layer': 'Criar Camada de Característica',
'Create Group Entry': 'Criar Entrada de Grupo',
'Create Group': 'Criar Grupo',
'Create Hospital': 'Criar Hospital',
'Create Identification Report': 'Criar Relatório de Identificação',
'Create Incident Report': 'Criar Relatório de Incidente',
'Create Incident': 'Criar Ocorrência',
'Create Item Category': 'Criar Categoria de Item',
'Create Item Pack': 'Criar Pacote de Item',
'Create Item': 'Criar Item',
'Create Kit': 'Criar Kit',
'Create Layer': 'Criar Camada',
'Create Location': 'Criar Localização',
'Create Map Configuration': 'Criar Configuração de Mapa',
'Create Marker': 'Criar Marcador',
'Create Office': 'Criar Escritório/Serviço/Agência',
'Create Organization': 'Criar Organização',
'Create Personal Effects': 'Criar Objectos de Uso Pessoal',
'Create Project': 'Criar Projecto',
'Create Projection': 'Criar Projecção',
'Create Rapid Assessment': 'Criar Avaliação Rápida',
'Create Reference Document': 'Criar Documento de Referência',
'Create Request': 'Criar Solicitação',
'Create Resource': 'Criar Recurso',
'Create Role': 'Criar Papel',
'Create Sector': 'Criar Sector ',
'Create Service Profile': 'Criar Perfil de Serviço',
'Create Shelter Service': 'Criar Serviço de Abrigo',
'Create Shelter Type': 'Criar Tipo de Abrigo',
'Create Shelter': 'Criar Abrigo',
'Create Skill': 'Criar Capacidade/Habilidade',
'Create Staff Member': 'Criar Novo Membro de Staff/funcionário',
'Create Status': 'Criar Estado',
'Create Task': 'Criar Tarefa',
'Create Theme': 'Criar Tema',
'Create User': 'Criar Utilizador',
'Create Volunteer': 'Criar Voluntário',
'Create Warehouse': 'Criar Armazém ',
'Create a Person': 'Criar uma Pessoa',
'Create a group entry in the registry.': 'Criar uma entrada de grupo no registo.',
'Create, enter, and manage surveys.': 'Criar, inserir, e gerir inquéritos.',
'Creation of Surveys': 'Criação de Inquéritos',
'Credential Details': 'Detalhes da credencial',
'Credential added': 'Credencial adicionada',
'Credential deleted': 'Credencial apagada',
'Credential updated': 'Credencial actualizada',
'Credentials': 'Credenciais',
'Criteria': 'Critério',
'Currency': 'Moeda corrente',
'Current Group Members': 'Membros Actuais do Grupo',
'Current Identities': 'Identidades actuais',
'Current Log Entries': 'Entradas de Log Actuais',
'Current Memberships': 'Membros Actuais',
'Current Notes': 'Notas Actuais',
'Current Registrations': 'Registos Actuais',
'Current Team Members': 'Actuais Membros da Equipa ',
'Current Twitter account': 'Conta Atual no Twitter',
'Current community priorities': 'Prioridades actuais de comunidade',
'Current general needs': 'Necessidades Gerais actuais',
'Current health problems': 'Problemas de Saúde actuais',
'Current number of patients': 'Número actual de pacientes/doentes',
'Current problems, categories': 'Problemas actuais, categorias',
'Current problems, details': 'Problemas actuais, detalhes',
'Current request': 'Pedido actual',
'Current response': 'Resposta actual',
'Current session': 'Sessão actual',
'Currently no entries in the catalog': 'Actualmente não há entradas no catálogo',
'Custom Database Resource (e.g., anything defined as a resource in Sahana)': 'Recurso de Base de Dados Personalizado (ex.: qualquer coisa definida como recurso no Sahana)',
'DC': 'DC (Controlador de Domínio)',
'DNA Profile': 'Perfil de ADN',
'DNA Profiling': 'Perfil de ADN',
'DVI Navigator': 'Navegador DVI',
'Dam Overflow': 'Transbordamento de Barragem',
'Damage': 'Dano',
'Dangerous Person': 'Pessoa Perigosa',
'Data uploaded': 'Dados carregados',
'Database': 'Base de Dados',
'Date Available': 'Data Disponível',
'Date Received': 'Data recebida',
'Date Required': 'Requerida Data',
'Date Sent': 'Data Enviada',
'Date Until': 'Data Até',
'Date and Time': 'Data e Hora',
'Date and time this report relates to.': 'Data e hora com que este relatório se relaciona.',
'Date of Birth': 'Data de Nascimento',
'Date of Latest Information on Beneficiaries Reached': 'Data da Última Informação em Beneficiários Alcançados',
'Date of Report': 'Data de Relatório',
'Date': 'Data',
'Date/Time of Find': 'Data/Hora do Achado',
'Date/Time of disappearance': 'Data/Hora do desaparecimento',
'Date/Time': 'Data/Hora',
'De-duplicator': 'Des-duplicador',
'Dead Body Details': 'detalhes de cadáveres',
'Dead Body Reports': 'relatórios de cadáver',
'Dead Body': 'Cadáver',
'Dead body report deleted': 'Relatório de Cadáver apagado',
'Dead body report updated': 'Relatório de cadáver actualizado',
'Deaths in the past 24h': 'Mortes nas últimas 24h',
'Deaths/24hrs': 'mortes/24hrs',
'Decimal Degrees': 'Graus Decimais',
'Decision': 'Decisão',
'Decomposed': 'Decomposto/ Em decomposição',
'Default Height of the map window.': 'Altura Padrão da janela do mapa',
'Default Marker': 'Marcador Padrão',
'Default Width of the map window.': 'Largura Padrão da janela de mapa.',
'Default synchronization policy': 'Política Padrão de Sincronização',
'Defecation area for animals': 'Área de defecação para animais',
'Defines the icon used for display of features on handheld GPS.': 'Define o icon utilizado para a exibição de características no GPS manual.',
'Defines the icon used for display of features on interactive map & KML exports.': 'Define o ícone utilizado para a exibição de características um mapa interactivo e exportações KML.',
'Defines the marker used for display & the attributes visible in the popup.': 'Define o marcador usado para exibição e os atributos visíveis na popup.',
'Degrees must be a number between -180 and 180': 'Graus devem ser um número entre -180 e 180',
'Delete Alternative Item': 'Apagar Item Alternativo',
'Delete Assessment Summary': 'Apagar Sumário/Resumo de Avaliação',
'Delete Assessment': 'Apagar Avaliação',
'Delete Asset Assignment': 'Apagar Atribuição de Bem',
'Delete Asset': 'Apagar Bem',
'Delete Baseline Type': 'Apagar Tipo de Linha de Base/Plataforma',
'Delete Baseline': 'Apagar Linha de Base',
'Delete Brand': 'Apagar Marca',
'Delete Budget': 'Apagar Orçamento',
'Delete Bundle': 'Apagar Pacote',
'Delete Catalog Item': 'Apagar Item de Catálogo',
'Delete Cluster Subsector': 'Apagar Subsector de Cluster',
'Delete Commitment Item': 'Apagar Item de Compromisso',
'Delete Commitment': 'Apagar Compromisso',
'Delete Contact Information': 'Apagar Informação de Contacto',
'Delete Credential': 'Apagar Credencial',
'Delete Document': 'Apagar Documento',
'Delete Donor': 'Apagar Doador/Dador',
'Delete Entry': 'Apagar Entrada',
'Delete Feature Layer': 'Apagar Camada de característica',
'Delete Group': 'Apagar Grupo',
'Delete Hospital': 'Apagar Hospital',
'Delete Image': 'Apagar imagem',
'Delete Impact Type': 'Apagar Tipo de Impacto',
'Delete Incident Report': 'Apagar Relatório de Incidente',
'Delete Inventory Item': 'Apagar Item de Inventário',
'Delete Item Category': 'Apagar Categoria de Item',
'Delete Item Pack': 'Apagar Pacote de Item',
'Delete Item': 'Apagar Item',
'Delete Key': 'Apagar Chave',
'Delete Kit': 'Apagar Conjunto/Kit',
'Delete Layer': 'Apagar Camada',
'Delete Level 2 Assessment': 'Excluir Nível 2 Avaliação',
'Delete Location': 'Apagar Localização',
'Delete Map Configuration': 'Apagar Configuração do Mapa',
'Delete Marker': 'Apagar Marcador',
'Delete Membership': 'Apagar Associação',
'Delete Message': 'Apagar Mensagem',
'Delete Need Type': 'apagar tipo de necessidade',
'Delete Need': 'Apagar Necessidade',
'Delete Office': 'Apagar agência/Serviço',
'Delete Organization': 'Apagar Organização',
'Delete Peer': 'Apagar Ponto',
'Delete Person': 'Apagar Pessoa',
'Delete Photo': 'Apagar Foto / Fotografia',
'Delete Population Statistic': 'Apagar Estatística de População',
'Delete Project': 'Apagar Projecto',
'Delete Projection': 'Apagar Projecção',
'Delete Rapid Assessment': 'Apagar Avaliação Rápida',
'Delete Received Item': 'Apagar Item Recepcionado',
'Delete Received Shipment': 'Excluir Carregamento Recebido',
'Delete Report': 'Apagar Relatório',
'Delete Request Item': 'Apagar Pedido de Item',
'Delete Request': 'Apagar Pedido',
'Delete Section': 'Apagar Secção',
'Delete Sent Item': 'Apagar Item Enviado',
'Delete Sent Shipment': 'Apagar Carregamento Enviado',
'Delete Service Profile': 'Apagar Perfil de Serviço',
'Delete Setting': 'Apagar definição',
'Delete Skill': 'Apagar Capacidade',
'Delete Staff Type': 'Apagar Tipo de Staff',
'Delete Subscription': 'Apagar Subscrição',
'Delete Survey Answer': 'Apagar Resposta de Inquérito',
'Delete Survey Question': 'Apagar Questão do Inquérito/questionário',
'Delete Survey Section': 'Apagar Secção de Inquérito',
'Delete Survey Series': 'Apagar Séries de Inquérito',
'Delete Survey Template': 'Apagar Modelo de Inquérito/Questionário',
'Delete Unit': 'Apagar Unidade',
'Delete User': 'Apagar Utilizador',
'Delete Volunteer': 'Apagar Voluntário',
'Delete Warehouse': 'Apagar Armazém',
'Delete from Server?': 'Apagar do Servidor?',
'Delphi Decision Maker': 'Marcador de Decisão Delphi',
'Demographic': 'Demográfico',
'Demonstrations': 'Demonstrações',
'Dental Examination': 'Exame Dentário',
'Dental Profile': 'Perfil Dentário',
'Describe the condition of the roads to your hospital.': 'Descreva a condição das estradas para o seu hospital.',
'Describe the procedure which this record relates to (e.g. "medical examination")': 'Descrever o procedimento que este registo relata a (ex: "exame médico")',
'Description of Contacts': 'Descrição de Contactos',
'Description of defecation area': 'Descrição da área de defecação',
'Description of drinking water source': 'Descrição de fonte de água potável',
'Description of sanitary water source': 'Descrição de fonte de água sanitária',
'Description of water source before the disaster': 'Descrição de fonte de água antes do desastre',
'Description': 'Descrição',
'Descriptive Text (e.g., Prose, etc)': 'Texto descritivo (ex. prosa, etc.)',
'Desire to remain with family': 'Deseja permanecer com a família',
'Destination': 'Destino',
'Destroyed': 'Destruído',
'Details': 'Detalhes',
'Dialysis': 'Diálise',
'Diaphragms, horizontal bracing': 'Diafragmas, suporte horizontal',
'Dignitary Visit': 'Visita de dignitário',
'Direction': 'Direcção',
'Disable': 'Desactivar',
'Disabled': 'Desactivado',
'Disabled?': 'Desactivado?',
'Disaster Victim Identification': 'Identificação de Vítima de Desastre',
'Disaster Victim Registry': 'Registo de Vítima de Desastre',
'Disaster clean-up/repairs': 'Limpeza/reparação de desastre',
'Discharges/24hrs': 'Altas/24hrs',
'Discussion Forum on item': 'Forum de Discussão em item',
'Discussion Forum': 'Fórum de Discussão',
'Disease vectors': 'Vectores de Doenças',
'Dispensary': 'Dispensário',
'Displaced Populations': 'Populações Deslocadas',
'Displaced': 'Deslocado',
'Display Polygons?': 'Exibir Polígonos?',
'Display Tracks?': 'Exibir Trajectos?',
'Distance(Kms)': 'Distância(Kms)',
'Distribution groups': 'Grupos de distribuição',
'Distribution': 'Distribuição',
'Do you want to cancel this received shipment? The items will be removed from the Inventory. This action CANNOT be undone!': 'Deseja cancelar este carregamento recebido? Os items serão removidos do inventário. Esta acção NÃO pode ser desfeita!',
'Do you want to cancel this sent shipment? The items will be returned to the Inventory. This action CANNOT be undone!': 'Deseja cancelar este carregamento? Os items serão devolvidos ao inventário. Esta acção NÃO pode ser anulada!',
'Do you want to receive this shipment?': 'Quer receber este carregamento?',
'Do you want to send this shipment?': 'Deseja enviar este Carregamento?',
'Document Details': 'Detalhes do documento',
'Document Scan': 'Pesquisa de Documento',
'Document added': 'Documento adicionado',
'Document deleted': 'Documento apagado',
'Document updated': 'Documento actualizado',
'Document': 'Documento',
'Documents and Photos': 'Documentos e Fotos',
'Documents': 'Documentos',
'Doing nothing (no structured activity)': 'Sem fazer nada (sem actividade estruturada)',
'Dollars': 'Dólares',
'Domain': 'Domínio',
'Domestic chores': 'Tarefas Domésticas',
'Donation Certificate': 'Certificado de Doação',
'Donation Phone #': 'Número de Telefone para Donativos',
'Donor added': 'Doador /dador adicionado',
'Donor deleted': 'Doador/dador apagado',
'Donor updated': 'Doador/dador actualizado',
'Donors Report': 'Relatório de Doadores',
'Donors': 'Doadores/dadores',
'Draft': 'Rascunho/Projecto',
'Drainage': 'Drenagem',
'Drawing up a Budget for Staff & Equipment across various Locations.': 'Elaborar um Orçamento para Pessoal e Equipamento para várias Localizações.',
'Drill Down by Group': 'Detalhar por Grupo',
'Drill Down by Shelter': 'Detalhar por Abrigo',
'Driving License': 'Carta de Condução',
'Drought': 'Seca',
'Drugs': 'Drogas',
'Dug Well': 'Poço Escavado',
'Duplicate?': 'Duplicado?',
'Duration': 'Duração',
'Dwelling': 'Habitação',
'Dwellings': 'Habitações',
'EMS Reason': 'Razão EMS',
'EMS Status': 'Ponto de situação do Serviço de Emergência Médica',
'ER Status Reason': 'Motivo de Estado de Urgências',
'ER Status': 'Estado da Emergência',
'Early Recovery': 'Início da Recuperação',
'Earthquake': 'Terramoto',
'Edit Address': 'Editar Endereço',
'Edit Alternative Item': 'Editar Item Alternativo',
'Edit Application': 'Editar Aplicação',
'Edit Assessment Summary': 'Editar Resumo/sumário de Avaliação',
'Edit Assessment': 'Editar Avaliação',
'Edit Asset Assignment': 'Editar consignação de bem',
'Edit Asset': 'Editar Bem',
'Edit Baseline': 'Editar Linha de Base',
'Edit Brand': 'Editar Marca',
'Edit Budget': 'Editar Orçamento',
'Edit Bundle': 'Editar Pacote',
'Edit Catalog Item': 'Editar Item de Catálogo',
'Edit Cluster Subsector': 'Editar Subsector de Cluster',
'Edit Commitment Item': 'Editar Item de Entrega',
'Edit Commitment': 'Editar Compromisso',
'Edit Contact Information': 'Editar Informação de Contacto',
'Edit Contact': 'Editar Contacto',
'Edit Contents': 'Editar Conteúdos',
'Edit Credential': 'Editar Credencial',
'Edit Dead Body Details': 'Editar Detalhes de Cadáver',
'Edit Description': 'Editar Descrição',
'Edit Details': 'Editar Detalhes',
'Edit Disaster Victims': 'Editar Vítimas de Desastre',
'Edit Document': 'Editar Documento',
'Edit Donor': 'Editar Doador/dador',
'Edit Email Settings': 'Editar Definições de Email',
'Edit Feature Layer': 'Editar Camada de Característica',
'Edit Flood Report': 'Editar Relatório de Inundação',
'Edit Gateway Settings': 'Editar Definições de Gateway',
'Edit Group': 'Editar Grupo',
'Edit Hospital': 'Editar Hospital',
'Edit Identification Report': 'Editar Relatório de Identificação',
'Edit Identity': 'Editar Identidade',
'Edit Image Details': 'Editar Detalhes de Imagem',
'Edit Image': 'Editar Imagem',
'Edit Impact Type': 'Editar Tipo de Impacto',
'Edit Impact': 'Editar Impacto',
'Edit Incident Report': 'Editar Relatório de Ocorrência',
'Edit Inventory Item': 'Editar Item de Inventário',
'Edit Item Category': 'Editar Categoria de Item',
'Edit Item Pack': 'Editar pacote de Item',
'Edit Item': 'Editar Item',
'Edit Key': 'Editar Chave',
'Edit Layer': 'Editar Camada',
'Edit Level 1 Assessment': 'Editar Avaliação Nível 1',
'Edit Level 2 Assessment': 'Editar Avaliação Nível 2',
'Edit Location': 'Editar Localização',
'Edit Log Entry': 'Editar entrada de Log/diário',
'Edit Map Configuration': 'Editar Configuração de Mapa',
'Edit Map Services': 'Editar Serviços de Mapa',
'Edit Marker': 'Editar Marcador',
'Edit Membership': 'Editar Membro/Assosicação',
'Edit Message': 'Editar Mensagem',
'Edit Messaging Settings': 'Editar Definições de Mensagem',
'Edit Modem Settings': 'Editar Definições do Modem',
'Edit Need Type': 'Editar Tipo de Necessidade',
'Edit Note': 'Editar nota',
'Edit Office': 'Editar Agência/Posto',
'Edit Options': 'Editar Opções',
'Edit Organization': 'Editar Organização',
'Edit Parameters': 'Editar Parâmetros',
'Edit Peer Details': 'Editar Detalhes do Ponto/Par',
'Edit Person Details': 'Editar Detalhes de Pessoa',
'Edit Personal Effects Details': 'Editar Detalhes de Bens Pessoais',
'Edit Photo': 'Editar Foto/ Fotografia',
'Edit Population Statistic': 'Editar Estatística Populacional',
'Edit Problem': 'Editar Problema',
'Edit Project': 'Editar Projecto',
'Edit Projection': 'Editar Projecção',
'Edit Rapid Assessment': 'Editar Avaliação Rápida',
'Edit Received Item': 'Editar Item Recebido',
'Edit Received Shipment': 'Editar Recepção de Carregamento',
'Edit Record': 'Editar Registo',
'Edit Registration Details': 'Editar Detalhes de Registo',
'Edit Registration': 'Editar Registo',
'Edit Report': 'Editar Relatório',
'Edit Request Item': 'Editar Pedido de Item',
'Edit Request': 'Editar pedido',
'Edit Resource': 'Editar Recurso',
'Edit River': 'Editar Rio',
'Edit Role': 'Editar Função',
'Edit Sector': 'Editar Sector',
'Edit Sent Item': 'Editar Item Enviado',
'Edit Setting': 'Editar Definição',
'Edit Settings': 'Editar Definições',
'Edit Shelter Service': 'Editar Serviço de Abrigo',
'Edit Shelter Type': 'Editar Tipo de Abrigo',
'Edit Shelter': 'Editar Abrigo',
'Edit Skill': 'Editar Capacidade/Habilidade',
'Edit Solution': 'Editar Solução',
'Edit Staff Member Details': 'Editar Detalhes de Membro do Staff',
'Edit Staff Type': 'Editar Tipo de Staff',
'Edit Staff': 'Editar Pessoal/Staff/Funcionários',
'Edit Subscription': 'Editar Subscrição',
'Edit Survey Answer': 'Editar Resposta de Inquérito/Questionário',
'Edit Survey Section': 'Editar Secção de Inquérito',
'Edit Survey Series': 'Editar Séries de Inquérito',
'Edit Survey Template': 'Editar Modelo de Inquérito',
'Edit Team': 'Editar Equipa',
'Edit Theme': 'Editar Tema',
'Edit Themes': 'Editar Temas',
'Edit Ticket': 'Editar Bilhete',
'Edit Track': 'Editar Trajecto',
'Edit Tropo Settings': 'Editar Definições Tropo',
'Edit User': 'Editar Utilizador',
'Edit Volunteer Availability': 'Editar Disponibilidade de Voluntário',
'Edit Volunteer Details': 'Editar Detalhes de Voluntário',
'Edit Warehouse': 'Editar Armazém',
'Edit current record': 'Editar Registo Actual',
'Edit message': 'Editar mensagem',
'Edit the Application': 'Editar a Aplicação',
'Edit': 'Editar',
'Editable?': 'Editável?',
'Education materials received': 'Materiais de Educação recebidos',
'Education materials, source': 'Materiais de Educação, fonte',
'Education': 'Educação',
'Effects Inventory': 'Inventário de Efeitos',
'Eggs': 'Ovos',
'Either a shelter or a location must be specified': 'Deve ser especificado quer um abrigo ou uma localização',
'Either file upload or image URL required.': 'É necessário carregamento do ficheiro ou imagem URL.',
'Electrical': 'Eléctrico',
'Electrical, gas, sewerage, water, hazmats': 'Eléctrico, gás,rede de esgotos, água, materiais tóxicos ',
'Elevated': 'Elevado',
'Elevators': 'Elevadores',
'Email Address': 'Endereço de E-mail',
'Email Settings': 'Definições de E-mail',
'Email settings updated': 'Definições de email actualizadas',
'Email': 'E-mail',
'Embalming': 'embalsamento',
'Embassy': 'Embaixada',
'Emergency Capacity Building project': 'Projecto de Edifício com Capacidade de Emergência',
'Emergency Department': 'Departamento de Emergência',
'Emergency Shelter': 'Abrigo de Emergência',
'Emergency Support Facility': 'Instalações de Apoio à Emergência',
'Emergency Telecommunications': 'Telecomunicações de Emergência',
'Enable/Disable Layers': 'Activar/Desactivar Camadas',
'Enabled': 'Activado',
'End date': 'Data de término',
'End of Period': 'Fim de Período/ciclo',
'English': 'Inglês',
'Enter Coordinates:': 'Inserir coordenadas:',
'Enter a GPS Coord': 'Inserir uma coordenada GPS',
'Enter a name for the spreadsheet you are uploading (mandatory).': 'Insira um nome para a folha de cálculo que está a carregar. (obrigatório)',
'Enter a new support request.': 'Inserir um novo pedido de apoio.',
'Enter a unique label!': 'Inserir uma etiqueta única!',
'Enter a valid date before': 'Inserir uma data válida antes de',
'Enter a valid email': 'Inserir um e-mail válido',
'Enter some characters to bring up a list of possible matches': 'Introduza alguns caracteres para listar algumas possibilidades',
'Enter tags separated by commas.': 'Inserir tags/etiquetas separadas por vírgulas.',
'Enter the same password as above': 'Inserir a mesma palavra-passe como acima',
'Entered': 'Inserido',
'Entering a phone number is optional, but doing so allows you to subscribe to receive SMS messages.': 'Introduzir um número de telefone é opcional, mas isso permite que se inscreva para receber mensagens SMS.',
'Entry deleted': 'Entrada apagada',
'Environment': 'Ambiente',
'Equipment': 'Equipamento',
'Error encountered while applying the theme.': 'Erro encontrado ao aplicar o tema.',
'Error in message': 'Erro na mensagem',
'Error logs for "%(app)s"': 'Logs de Erro para "%(app)s"',
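# NOTE: entries whose source string contains printf-style placeholders
# ("%s", "%d", "%(app)s", ...) must keep those placeholders intact in the
# translation, or string interpolation will fail or silently drop data at
# runtime. A minimal sketch of how such an entry is consumed, assuming
# web2py's T() translator as used by Sahana Eden (variable names are
# illustrative only):
#
#     # look up the translation in this dictionary, then substitute:
#     msg = T('Error logs for "%(app)s"') % dict(app=request.application)
#
# A translation that renames "%(app)s" raises KeyError; one that mangles
# "%s" into a bare "%" raises ValueError; one that drops the placeholder
# loses the substituted value without any error.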
'Error: no such record': 'Erro: não existe esse registo',
'Errors': 'Erros',
'Estimated # of households who are affected by the emergency': 'Número estimado de famílias que são afectados pela emergência',
'Estimated # of people who are affected by the emergency': 'Estimado # de pessoas que são afectadas pela emergência',
'Evacuating': 'Evacuação',
'Evaluate the information in this message. (This value SHOULD NOT be used in public warning applications.)': 'Avaliar a informação nesta mensagem. (Este valor NÃO DEVE ser usado em aplicações de advertência pública.)',
'Example': 'Exemplo',
'Exceeded': 'Excedido',
'Exclude contents': 'Excluir conteúdos',
'Excreta disposal': 'Eliminação de fezes/excreções',
'Execute a pre-planned activity identified in <instruction>': 'Executar uma actividade pré-planeada identificada em <instruction>',
'Existing Placard Type': 'Tipo de Placard Existente',
'Existing food stocks': 'Stocks de comida existentes',
'Exits': 'Saídas',
'Experience': 'Experiência',
'Expiry Date': 'Data de expiração',
'Explosive Hazard': 'Risco Explosivo',
'Export Data': 'Exportar Dados',
'Export Database as CSV': 'Exportar Base de Dados como CSV',
'Export in GPX format': 'Exportar em formato GPX',
'Export in KML format': 'Exportar em formato KML',
'Export in OSM format': 'Exportar em formato OSM',
'Export in RSS format': 'Exportar no formato RSS',
'Export in XLS format': 'Exportar no formato XLS',
'Export': 'Exportar',
'Exterior Only': 'Só Exterior',
'Exterior and Interior': 'Exterior e Interior',
'Eye Color': 'Cor dos Olhos',
'Facial hair, color': 'Cabelo facial, cor',
'Facility Operations': 'Operações de Instalações',
'Facility Status': 'Estado de Instalações',
'Failed!': 'Falhou!',
'Families/HH': 'Famílias/HH',
'Family tarpaulins received': 'Lonas familiares recebidas',
'Family tarpaulins, source': 'Lonas familiares, origem',
'Family': 'Família',
'Family/friends': 'Família/amigos',
'Farmland/fishing material assistance, Rank': 'Assistência de material de pesca/agricultura, Posto',
'Fatalities': 'Mortes / Fatalidades',
'Feature Layer Details': 'Detalhes de Camada de Características',
'Feature Layer added': 'Camada de Característica adicionada',
'Feature Layer deleted': 'Camada de Característica apagada',
'Feature Layer updated': 'Camada de Aspecto /característica actualizada',
'Feature Request': 'Pedido de Recurso',
'Feature Type': 'Tipo de Característica',
'Features Include': 'Características Inclui',
'Female headed households': 'Agregados familiares chefiados por mulheres',
'Female': 'Feminino',
'Few': 'Poucos',
'Field Hospital': 'Hospital de Campanha',
'Fill in Latitude': 'Preencher Latitude',
'Fill in Longitude': 'Preencher Longitude',
'Filter Field': 'Campo de Filtro',
'Filter Value': 'Valor do filtro',
'Find All Matches': 'Encontrar todas as correspondências',
'Find Dead Body Report': 'Encontrar Relatório de Cadáver',
'Find Hospital': 'Encontrar Hospital',
'Find Person Record': 'Encontrar Registo de Pessoa',
'Find Volunteers': 'Encontrar Voluntários',
'Find a Person Record': 'Encontrar um Registo de Pessoa',
'Finder': 'Descobridor/ Visor',
'Fingerprint': 'Impressão Digital',
'Fingerprinting': 'Sistema de Impressões Digitais',
'Fingerprints': 'Impressões Digitais',
'Finish': 'Terminar',
'Fire suppression and rescue': 'Combate a incêndios e salvamento',
'Fire': 'Incêndio / Fogo',
'First Name': 'Primeiro Nome',
'First name': 'Primeiro Nome',
'Flash Flood': 'Enchente',
'Flexible Impact Assessments': 'Avaliações Flexíveis de Impacto',
'Flood Alerts show water levels in various parts of the country': 'Alertas de Inundação mostram níveis de água em várias partes do país',
'Flood Alerts': 'Alertas de Inundação',
'Flood Report Details': 'Detalhes do Relatório de inundação',
'Flood Report added': 'Relatório de Cheia/Inundação adicionado',
'Flood Report deleted': 'Relatório de Inundação apagado',
'Flood Report updated': 'Relatório de Inundação actualizado',
'Flood Report': 'Relatório de Cheia/inundação',
'Flood Reports': 'Relatórios de Inundações/Cheias',
'Flood': 'Inundação',
'Flow Status': 'Estado de fluxo',
'Focal Point': 'Ponto Focal',
'Fog': 'Nevoeiro',
'Food Supply': 'Fornecimento de Alimentos',
'Food assistance': 'Assistência Alimentar',
'Food': 'Comida',
'Footer': 'Rodapé',
'For each sync partner, there is a default sync job that runs after a specified interval of time. You can also set up more sync jobs which could be customized on your needs. Click the link on the right to get started.': 'Para cada parceiro de sincronização, há um trabalho de sincronização padrão que será executado após um intervalo de tempo especificado. Você também pode criar mais postos de trabalho de sincronização que pode ser personalizado com suas necessidades. Clique no link à direita para começar.',
'For enhanced security, you are recommended to enter a username and password, and notify administrators of other machines in your organization to add this username and password against your UUID in Synchronization -> Sync Partners': 'Para segurança reforçada, é recomendado inserir um nome de utilizador e uma password, e notificar administradores de outras máquinas na sua organização para adicionar este nome de utilizador e password ao seu UUID em Sincronização -> Sincronizar Parceiros',
'For live help from the Sahana community on using this application, go to': 'Para ajuda ao vivo por parte da comunidade Sahana utilizando esta aplicação, ir para',
'For messages that support alert network internal functions': 'Para mensagens que suportam funções internas de rede de alerta',
'For more information, see ': 'Para mais informação, ver ',
'For other types, the next screen will allow you to enter the relevant details...': 'Para outros tipos, o próximo ecrã vai permitir-lhe inserir os detalhes relevantes...',
'For': 'Para',
'Forest Fire': 'Incêndio Florestal',
'Formal camp': 'Campo formal',
'Format': 'Formato',
'Forms': 'Formulários',
'Found': 'Encontrado',
'Foundations': 'Fundações /alicerces',
'Freezing Drizzle': 'Geada',
'Freezing Rain': 'Chuva Gelada ',
'Freezing Spray': 'Spray Frio',
'Friday': 'Sexta-feira',
'From Inventory': 'De Inventário',
'From Location': 'De Localização',
'From Organization': 'Da Organização',
'From Person': 'De Pessoa',
'From': 'De',
'Fulfil. Status': 'Completar Estado',
'Fulfillment Status': 'Estado de Desempenho',
'Full beard': 'Barba Completa',
'Fullscreen Map': 'Mapa Ecrã Inteiro',
'Functions available': 'Funções disponíveis',
'Funding Organization': 'Organização Financiadora',
'Further Action Recommended': 'Acção Adicional Recomendada',
'GIS Reports of Shelter': 'Relatórios GIS de Abrigo',
'GIS integration to view location details of the Shelter': 'Integração GIS para visualizar detalhes de localização do Abrigo',
'GPS Marker': 'Marcador GPS',
'GPS Track File': 'Ficheiro de Trajecto GPS',
'GPS Track': 'Trajecto GPS',
'GRN Status': 'Estado GRN',
'Gale Wind': 'Vento /vendaval',
'Gap Analysis Report': 'Relatório de Análise de Falha',
'Gap Analysis': 'Análise de Falhas',
'Gap Map': 'Mapa de Falhas',
'Gap Report': 'Relatório de Falhas',
'Gateway Settings': 'Configurações do Gateway',
'Gateway settings updated': 'Definições de Gateway actualizadas',
'Gender': 'Género',
'General Medical/Surgical': 'Medicina/Cirurgia Geral',
'General emergency and public safety': 'Segurança pública e emergência geral',
'Generator': 'Gerador',
'Geocode': 'Geocódigo',
'Geocoder Selection': 'Selecção de Geocodificador',
'Geometry Name': 'Nome de Geometria',
'Geophysical (inc. landslide)': 'Geofísico (desmoronamento, deslizamento de terras)',
'Geotechnical Hazards': 'Riscos Geotécnicos',
'Geotechnical': 'Geotécnico',
'Geraldo not installed': 'Geraldo não está instalado',
'Give a brief description of the image, e.g. what can be seen where on the picture (optional).': 'Dar uma breve descrição da imagem, ex.: o que pode ser visto e onde na imagem (opcional).',
'Give information about where and when you have seen them': 'Dar informação sobre onde e quando você os viu.',
'Global Messaging Settings': 'Definições Globais de Mensagens',
'Go to Request': 'Ir para Pedido',
'Goatee': 'Pera no queixo',
'Good Condition': 'Boa Condição /em bom estado',
'Goods Received Note': 'Nota de mercadorias/produtos Recebidos',
'Government UID': 'UID Governo',
'Government': 'Governo',
'Grade': 'Grau',
'Greek': 'Grego',
'Green': 'Verde',
'Ground movement, fissures': 'Movimento do Solo, fissuras',
'Ground movement, settlement, slips': 'Movimentação do solo, assentamentos, deslizamentos',
'Group Description': 'Descrição de grupo',
'Group Details': 'Detalhes de Grupo',
'Group Member added': 'Membro de Grupo adicionado',
'Group Members': 'Membros do Grupo',
'Group Memberships': 'Associações de Grupo',
'Group Name': 'Nome de Grupo',
'Group Type': 'Tipo de Grupo',
'Group added': 'Grupo adicionado',
'Group deleted': 'Grupo apagado',
'Group description': 'Descrição de Grupo',
'Group updated': 'Grupo actualizado',
'Group': 'Grupo',
'Groups removed': 'Grupos removidos',
'Groups': 'Grupos',
'Guest': 'Convidado',
'HR Data': 'Dados RH',
'HR Manager': 'Gestor de Recursos Humanos',
'Hail': 'Granizo',
'Hair Color': 'Cor do Cabelo',
'Hair Length': 'Comprimento de Cabelo',
'Hair Style': 'Estilo de Cabelo / Penteado',
'Has additional rights to modify records relating to this Organization or Site.': 'Tem direitos adicionais para modificar registos relacionados com esta Organização ou Sítio.',
'Has data from this Reference Document been entered into Sahana?': 'Dados deste Documento de Referência foram inseridos no Sahana?',
'Has only read-only access to records relating to this Organization or Site.': 'Apenas tem acesso de leitura aos registos relacionados com esta Organização ou Site',
'Hazardous Material': 'Material Perigoso',
'Hazardous Road Conditions': 'Condições de Estrada Perigosa',
'Header Background': 'Fundo do Cabeçalho',
'Header background file %s missing!': 'Ficheiro de fundo de cabeçalho %s em falta!',
'Headquarters': 'Sede',
'Health care assistance, Rank': 'Assistência de Cuidados de Saúde, Posição',
'Health center with beds': 'Centro de Saúde com camas',
'Health center without beds': 'Centro de Saúde sem camas',
'Health center': 'Centro de Saúde',
'Health': 'Saúde',
'Healthcare Worker': 'Trabalhador de Saúde',
'Heat Wave': 'Vaga/Onda de Calor',
'Heat and Humidity': 'Calor e Humidade',
'Height (cm)': 'Altura (cm)',
'Help': 'Ajuda',
'Helps to monitor status of hospitals': 'Ajuda a monitorizar estado dos hospitais',
'Helps to report and search for Missing Persons': 'Ajuda a comunicar e procurar Pessoas Desaparecidas',
'Here are the solution items related to the problem.': 'Aqui estão os items de solução relacionados com o problema.',
'Heritage Listed': 'Classificado como Património',
'Hierarchy Level %d Name': 'Nome de Nível de Hierarquia %d',
'Hierarchy Level 0 Name (e.g. Country)': 'Nome de Hierarquia Nível 0 (ex. País)',
'Hierarchy Level 1 Name (e.g. Province)': 'Nome de Hierarquia Nível 1 (ex. Concelho/Província)',
'High Water': 'Água Alta',
'High': 'Alto',
'Hindu': 'Hindu',
'History': 'Histórico',
'Hit the back button on your browser to try again.': 'Carregue no botão retroceder no seu browser para tentar novamente.',
'Holiday Address': 'Endereço de Férias',
'Home Address': 'Endereço de Casa',
'Home Crime': 'Crime Doméstico',
'Home': 'Início',
'Hospital Details': 'Detalhes hospitalares',
'Hospital Status Report': 'Relatório de Estado de Hospital',
'Hospital information added': ' Informação do Hospital adicionada',
'Hospital information deleted': 'Informação de Hospital apagada',
'Hospital information updated': 'Informação de Hospital actualizada',
'Hospital status assessment.': 'Avaliação de estado de Hospital.',
'Hot Spot': 'Ponto Quente/ Local Perigoso',
'Hours': 'Horas',
'Household kits received': 'Kits familiares recebidos',
'How does it work?': 'Como funciona?',
'How is this person affected by the disaster? (Select all that apply)': 'Como é esta pessoa afectada pelo desastre? (Seleccionar todas as que se aplicam)',
'How long will the food last?': 'Quanto tempo irá a comida durar?',
'How many Boys (0-17 yrs) are Dead due to the crisis': 'Quantos rapazes (0-17 anos) morreram devido à crise',
'How many Boys (0-17 yrs) are Injured due to the crisis': 'Quantos rapazes (0-17 anos) estão feridos devido à crise',
'How many Boys (0-17 yrs) are Missing due to the crisis': 'Quantos rapazes (0-17 anos) estão desaparecidos devido à crise',
'How many Girls (0-17 yrs) are Injured due to the crisis': 'Quantas meninas (0-17 anos) estão feridas devido à crise',
'How many Girls (0-17 yrs) are Missing due to the crisis': 'Quantas meninas (0-17 anos) estão Desaparecidas devido à crise',
'How many Men (18 yrs+) are Dead due to the crisis': 'Quantos Homens (18 anos+) estão mortos devido à crise',
'How many Men (18 yrs+) are Injured due to the crisis': 'Quantos Homens (18 anos+) estão feridos devido à crise',
'How many Men (18 yrs+) are Missing due to the crisis': 'Quantos Homens (18 anos+) estão desaparecidos devido à crise',
'How many Women (18 yrs+) are Dead due to the crisis': 'Quantas mulheres (18 anos+) estão mortas devido à crise',
'How many Women (18 yrs+) are Injured due to the crisis': 'Quantas Mulheres (18 anos+) estão feridas devido à crise',
'How many Women (18 yrs+) are Missing due to the crisis': 'Quantas mulheres (18 anos+) estão Desaparecidas devido à crise',
'How many days will the supplies last?': 'Quantos dias durarão os mantimentos?',
'How many new cases have been admitted to this facility in the past 24h?': 'Quantos novos casos foram admitidos nestas instalações nas últimas 24h?',
'How many of the patients with the disease died in the past 24h at this facility?': 'Quantos pacientes com a doença morreram nas últimas 24 horas nesta unidade?',
'How many patients with the disease are currently hospitalized at this facility?': 'Quantos pacientes com a doença estão actualmente hospitalizados nestas instalações?',
'How much detail is seen. A high Zoom level means lot of detail, but not a wide area. A low Zoom level means seeing a wide area, but not a high level of detail.': 'Quanto detalhe é visto. Um nível de Zoom alto significa muito detalhe, mas não uma área alargada. Um nível baixo de Zoom significa ver uma grande área, mas com pouco nível de detalhe.',
'Human Resource Management': 'Gestão de Recursos Humanos',
'Human Resource': 'Recurso Humano',
'Human Resources Management': 'Gestão de Recursos Humanos',
'Human Resources': 'Recursos Humanos',
'Humanitarian NGO': 'ONG Humanitária',
'Hurricane': 'Furacão',
'Hygiene NFIs': 'Higiene NFIs',
'Hygiene kits received': 'Kits de higiene recebidos',
'Hygiene kits, source': 'Origem de kits de higiene',
'Hygiene practice': 'Práticas de higiene',
'Hygiene problems': 'Problemas de Higiene',
'Hygiene': 'Higiene',
'I am available in the following area(s)': 'Estou disponível na(s) seguinte(s) área(s)',
'ID Tag Number': 'Número de Etiqueta de Identificação',
'ID Tag': 'ID Etiqueta',
'ID type': 'Tipo de ID',
'Ice Pressure': 'Pressão do Gelo',
'Iceberg': 'Icebergue',
'Identification Report': 'Relatório de Identificação',
'Identification Reports': 'Relatórios de Identificação',
'Identification Status': 'Estado de Identificação',
'Identification': 'Identificação',
'Identified by': 'Identificado por',
'Identity Details': 'Detalhes de Identidade',
'Identity added': 'Identidade adicionada',
'Identity deleted': 'Identidade apagada',
'Identity updated': 'Identidade actualizada',
'Identity': 'Identidade',
'If Staff have login accounts then they are given access to edit the details of the': 'Se o Staff tem contas login então é-lhes dado acesso para editar os detalhes de',
'If a user verifies that they own an Email Address with this domain, the Approver field is used to determine whether & by whom further approval is required.': 'Se um utilizador verifica que possui um Endereço de E-mail com este domínio, o campo de Aprovação é utilizado para determinar se e da parte de quem é necessária aprovação adicional.',
'If neither are defined, then the Default Marker is used.': 'Se nenhum está definido, então o Marcador Padrão é utilizado.',
'If no marker defined then the system default marker is used': 'Se não for definido marcador então o marcador de sistema padrão é utilizado',
'If no, specify why': 'Se não, especifique porquê',
'If none are selected, then all are searched.': 'Se nenhum é seleccionado, então todos são procurados.',
'If the location is a geographic area, then state at what level here.': 'Se a localização é uma área geográfica, então declare a que nível aqui.',
'If this field is populated then a user with the Domain specified will automatically be assigned as a Staff of this Organization': 'Se este campo estiver preenchido então um utilizador com o Domínio especificado será designado automaticamente como Staff/Funcionário desta Organização',
'If this is set to True then mails will be deleted from the server after downloading.': 'Se isto está definido para Verdadeiro então os mails serão apagados do servidor após serem descarregados.',
'If this record should be restricted then select which role is required to access the record here.': 'Se este registo deve ser restrito então seleccione qual função é requerida para aceder ao registo aqui.',
'If yes, specify what and by whom': 'Se Sim, especificar o quê e por quem',
'If yes, which and how': 'Se Sim, qual e como',
'If you do not enter a Reference Document, your email will be displayed to allow this data to be verified.': 'Se você não introduzir um Documento de Referência, o seu email será exibido para permitir que estes dados sejam verificados.',
'If you know what the Geonames ID of this location is then you can enter it here.': 'Se sabe qual é o ID Geonames desta localização então pode inseri-lo aqui.',
'If you know what the OSM ID of this location is then you can enter it here.': 'Se sabe qual é a ID OSM desta localização então pode inseri-la aqui.',
'If you need to add a new document then you can click here to attach one.': 'Se precisa de adicionar um novo documento então pode carregar aqui para anexar um.',
'If you want several values, then separate with': 'Se você pretende vários valores, então separe com',
'If you would like to help, then please': 'Se gostaria de ajudar, então por favor',
'Illegal Immigrant': 'Imigrante Ilegal',
'Image Details': 'Detalhes de imagem',
'Image Tags': 'Etiquetas de Imagem',
'Image Type': 'Tipo de Imagem',
'Image Upload': 'Carregamento de imagem',
'Image added': 'Imagem adicionada',
'Image deleted': 'Imagem apagada',
'Image updated': 'Imagem actualizada',
'Image': 'Imagem',
'Imagery': 'Imagens',
'Images': 'Imagens',
'Impact Assessments': 'Avaliações de Impacto',
'Impact Details': 'Detalhes de Impacto',
'Impact Type Details': 'Detalhes de Tipo de Impacto',
'Impact Type added': 'Tipo de Impacto adicionado',
'Impact Type deleted': 'Tipo de impacto apagado',
'Impact Type': 'Tipo de Impacto',
'Impact Types': 'Tipos de Impacto',
'Impact added': 'Impacto adicionado',
'Impact deleted': 'Impacto apagado',
'Impact updated': 'Impacto actualizado',
'Impacts': 'Impactos',
'Import & Export Data': 'Importar e Exportar Dados',
'Import Data': 'Importar Dados',
'Import Jobs': 'Importar Trabalhos/tarefas',
'Import and Export': 'Importar e Exportar',
'Import from Ushahidi Instance': 'Importar da Instância Ushahidi',
'Import if Master': 'Importar se principal',
'Import multiple tables as CSV': 'Importar múltiplas tabelas como CSV',
'Import': 'Importar',
'Import/Export': 'Importar/Exportar',
'Important': 'Importante',
'Importantly where there are no aid services being provided': 'Importante quando não existem serviços de apoio a serem providenciados',
'Importing data from spreadsheets': 'Importar dados de folhas de cálculo',
'Improper decontamination': 'Descontaminação imprópria',
'Improper handling of dead bodies': 'Tratamento impróprio de cadáveres',
'In Inventories': 'Em Inventários',
'In Process': 'Em Processo',
'In Progress': 'Em Curso',
'In Window layout the map maximises to fill the window, so no need to set a large value here.': 'Na disposição de Janela o mapa maximiza para preencher a janela, assim não é necessário definir um valor grande aqui.',
'Inbound Mail Settings': 'Definições de Correio de Entrada',
'Incident Categories': 'Categorias de Ocorrência',
'Incident Report Details': 'Detalhes do Relatório de Incidente',
'Incident Report added': 'Relatório de Incidente adicionado',
'Incident Report deleted': 'Relatório de Incidente apagado',
'Incident Report updated': 'Relatório de Ocorrência actualizado',
'Incident Report': 'Relatório de Ocorrência/Incidente',
'Incident Reporting System': 'Sistema de Relatório de Incidente',
'Incident Reporting': 'Relatório de Incidente',
'Incident Reports': 'Relatórios de Ocorrência',
'Incident': 'Incidente/Ocorrência',
'Incidents': 'Ocorrências',
'Incoming Shipment canceled': 'Carregamento em Entrada cancelado',
'Incoming Shipment updated': 'Carregamento em Entrada actualizado',
'Incoming': 'Entrada',
'Incomplete': 'Incompleto',
'Individuals': 'Indivíduos',
'Industrial Crime': 'Crime Industrial',
'Industrial': 'Industrial',
'Industry Fire': 'Incêndio Industrial',
'Infant (0-1)': 'Criança (0-1)',
'Infectious Disease': 'Doença Infecciosa',
'Infectious Diseases': 'Doenças infecciosas',
'Infestation': 'Infestação',
'Informal Leader': 'Líder informal',
'Informal camp': 'Campo Informal',
'Information gaps': 'Lacunas/Falhas de Informação',
'Infusion catheters available': 'Cateteres de infusão disponíveis',
'Infusion catheters need per 24h': 'Cateteres de Perfusão necessários por 24h',
'Infusion catheters needed per 24h': 'Cateteres de Perfusão necessários a cada 24h',
'Infusions available': 'Perfusões disponíveis',
'Infusions needed per 24h': 'Infusões necessárias por 24h',
'Inspected': 'Inspeccionado',
'Inspection date and time': 'Data e Hora de Inspecção',
'Inspection time': 'Tempo de Inspecção',
'Inspector ID': 'ID de Inspector',
'Instant Porridge': 'Papa Instantânea',
'Institution': 'Instituição',
'Insufficient': 'Insuficiente',
'Intake Items': 'Items de Consumo/entrada',
'Intergovernmental Organization': 'Organização Intergovernamental',
'Interior walls, partitions': 'Paredes interiores, divisórias',
'Internal State': 'Estado Interno',
'International NGO': 'ONG Internacional',
'International Organization': 'Organização Internacional',
'Interview taking place at': 'Entrevista tendo lugar em',
'Invalid Query': 'Query Inválida',
'Invalid request!': 'Pedido Inválido!',
'Invalid ticket': 'Bilhete Inválido',
'Invalid': 'Inválido',
'Inventories': 'Inventários',
'Inventory Item Details': 'Detalhes dos Items do Inventário',
'Inventory Item added': 'Item de inventário adicionado',
'Inventory Item deleted': 'Item de Inventário apagado',
'Inventory Item updated': 'Item de inventário actualizado',
'Inventory Item': 'Item de Inventário',
'Inventory Items Available for Request Item': 'Items do Inventário Disponíveis por Pedido de Item',
'Inventory Items': 'Items de Inventário',
'Inventory Management': 'Gestão de Inventário',
'Inventory functionality is available for:': 'Funcionalidade de Inventário está disponível para:',
'Inventory of Effects': 'Inventário de Objectos de Uso pessoal',
'Inventory': 'Inventário',
'Is it safe to collect water?': 'É seguro recolher água?',
'Is this a strict hierarchy?': 'Isto é uma hierarquia estrita?',
'Issuing Authority': 'Autoridade Emissora',
'Item Catalog Details': 'Detalhes de Item de Catálogo',
'Item Categories': 'Categorias de Item',
'Item Category Details': 'Detalhes de Categoria de Item',
'Item Category added': 'Categoria de Item adicionada',
'Item Category deleted': 'Categoria de Item apagada',
'Item Category updated': 'Categoria de Item actualizada',
'Item Category': 'Categoria de Item',
'Item Details': 'Detalhes de Item',
'Item Pack Details': 'Detalhes de Pacote de Item',
'Item Pack added': 'Pacote de Item adicionado',
'Item Pack deleted': 'Pacote de Item apagado',
'Item Pack updated': 'Pacote de Item actualizado',
'Item Packs': 'Pacotes de Item',
'Item added to shipment': 'Item adicionado ao carregamento',
'Item added': 'Item adicionado',
'Item already in Bundle!': 'Item já no pacote!',
'Item already in Kit!': 'Item já está no Kit!',
'Item already in budget!': 'Item já no orçamento!',
'Item deleted': 'Item apagado',
'Item updated': 'Item actualizado',
'Japanese': 'Japonês',
'Jerry can': 'Bidão',
'Jew': 'Judeu',
'Job Market': 'Mercado de Trabalho',
'Job Title': 'Título de Trabalho/Função',
'Jobs': 'Trabalhos',
'Key Details': 'Detalhes de Chave',
'Key added': 'Chave adicionada',
'Key deleted': 'Chave apagada',
'Key updated': 'Chave actualizada',
'Key': 'Chave',
'Keys': 'Chaves',
'Kit Contents': 'Conteúdos do Kit',
'Kit Details': 'Detalhes de Kit',
'Kit added': 'Kit/conjunto adicionado',
'Kit deleted': 'Kit apagado',
'Kit updated': 'Kit actualizado',
'Known Identities': 'Identidades Conhecidas',
'LICENSE': 'LICENÇA',
'Lack of material': 'Falta de Material',
'Lack of school uniform': 'Falta de uniforme escolar',
'Lack of supplies at school': 'Falta de abastecimentos na escola',
'Lack of transport to school': 'Falta de transporte para a escola',
'Lactating women': 'Mulheres lactantes',
'Language': 'Língua',
'Last Name': 'Último Nome',
'Last known location': 'Última Localização conhecida',
'Last synchronization time': 'Hora da última sincronização',
'Last updated ': 'Última actualização ',
'Last updated by': 'Última actualização feita por',
'Last updated on': 'Última actualização em',
'Latitude & Longitude': 'Latitude e Longitude',
'Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere.': 'Latitude é zero no equador e positiva no hemisfério Norte e negativa no hemisfério Sul',
'Latitude should be between': 'Latitude deve ser entre',
'Latrines': 'Latrinas',
'Law enforcement, military, homeland and local/private security': 'Forças da Lei, militares, segurança interna local/privada',
'Layer Details': 'Detalhes de Camada',
'Layer added': 'Camada adicionada',
'Layer deleted': 'Camada apagada',
'Layer updated': 'Camada actualizada',
'Layer': 'Camada',
'Layers updated': 'Camadas actualizadas',
'Layers': 'Camadas',
'Layout': 'Disposição',
'Leader': 'Líder',
'Legend Format': 'Formato de Legenda',
'Level 1 Assessment Details': 'Detalhes de Avaliação Nível 1',
'Level 1 Assessment added': 'Avaliação Nível 1 adicionada',
'Level 1 Assessment deleted': 'Avaliação de Nível 1 apagada',
'Level 1 Assessment updated': 'Avaliação Nível 1 actualizada',
'Level 1 Assessments': 'Avaliações Nível 1',
'Level 1': 'Nível 1',
'Level 2 Assessment Details': 'Detalhes de Avaliação Nível 2',
'Level 2 Assessment added': 'Avaliação de Nível 2 adicionada',
'Level 2 Assessment deleted': 'Avaliação de Nível 2 apagada',
'Level 2 Assessment updated': 'Avaliação de Nível 2 Actualizada',
'Level 2 Assessments': 'Avaliações Nível 2',
'Level 2 or detailed engineering evaluation recommended': 'É recomendado Nível 2 ou avaliação detalhada de engenharia',
'Level 2': 'Nível 2',
'Level': 'Nível',
'Library support not available for OpenID': 'Apoio de Biblioteca não disponível para OpenID',
'List / Add Baseline Types': 'Listar / Adicionar Tipos de Linha de Base',
'List / Add Impact Types': 'Listar/ Adicionar Tipos de Impacto',
'List / Add Services': 'Listar / Adicionar Serviços',
'List / Add Types': 'Listar / Adicionar tipos',
'List Activities': 'Listar Actividades',
'List All Entries': 'Listar Todas as Entradas',
'List All Memberships': 'Listar todos os Membros',
'List All Reports': 'Listar Todos os Relatórios',
'List All': 'Listar tudo',
'List Alternative Items': 'Listar Items Alternativos',
'List Assessment Summaries': 'Listar Resumos/sumários de Avaliação',
'List Assessments': 'Listar Avaliações',
'List Asset Assignments': 'Listar Atribuição de Bens',
'List Assets': 'Listar Bens',
'List Availability': 'Listar Disponibilidade',
'List Baseline Types': 'Listar Tipos de Linha de Base',
'List Baselines': 'Listar Linhas de Base/Plataformas',
'List Brands': 'Listar Marcas',
'List Budgets': 'Listar Orçamentos',
'List Bundles': 'Listar Pacotes',
'List Catalog Items': 'Listar Items de Catálogo',
'List Checklists': 'Listar Checklists',
'List Cluster Subsectors': 'Listar Sub-serctores de Cluster',
'List Commitment Items': 'Listar Items de Entrega',
'List Commitments': 'Listar Consignações/entregas',
'List Conflicts': 'Listar Conflitos',
'List Contacts': 'Listar Contactos',
'List Credentials': 'Listar Credenciais',
'List Current': 'Listar Actual',
'List Donors': 'Listar Doadores',
'List Flood Reports': 'Listar Relatórios de Cheia/inundação',
'List Groups': 'Listar Grupos',
'List Hospitals': 'Listar Hospitais',
'List Identities': 'Listar Identidades',
'List Impact Types': 'Listar Tipos de Impacto',
'List Impacts': 'Listar Impactos',
'List Incident Reports': 'Listar Relatórios de Ocorrência',
'List Inventory Items': 'Listar Items de Inventário',
'List Item Categories': 'Listar Categorias de Item',
'List Item Packs': 'Listar Pacotes de Items',
'List Items': 'Listar Items',
'List Keys': 'Listar Chaves',
'List Kits': 'Listar Kits',
'List Layers': 'Listar Camadas',
'List Level 2 Assessments': 'Listar Avaliações de Nível 2',
'List Level 2 assessments': 'Listar avaliações de Nível 2',
'List Locations': 'Listar Localizações',
'List Log Entries': 'Listar Entradas de Registo',
'List Map Configurations': 'Listar Configurações de Mapa',
'List Members': 'Listar Membros/sócios',
'List Memberships': 'Listar Associados/Membros',
'List Messages': 'Listar Mensagens',
'List Missing Persons': 'Listar Pessoas Desaparecidas',
'List Needs': 'Listar Necessidades',
'List Notes': 'Listar Notas',
'List Organizations': 'Listar Organizações',
'List Peers': 'Listar Pontos',
'List Persons': 'Listar Pessoas',
'List Photos': 'Listar Fotos',
'List Population Statistics': 'Listar Estatísticas Populacionais',
'List Problems': 'Listar Problemas',
'List Projections': 'Listar Projecções',
'List Projects': 'Listar Projectos',
'List Rapid Assessments': 'Listar Avaliações Rápidas',
'List Received Items': 'Listar Items Recebidos',
'List Received Shipments': 'Listar Carregamentos Recebidos',
'List Records': 'Listar Registos',
'List Registrations': 'Listar Registos',
'List Reports': 'Listar Relatórios',
'List Request Items': 'Listar Items de Pedido',
'List Requests': 'Listar Pedidos',
'List Resources': 'Listar Recursos',
'List Rivers': 'Listar Rios',
'List Roles': 'Listar Papéis/funções',
'List Sections': 'Listar Secções',
'List Sectors': 'Listar Sectores',
'List Sent Items': 'Listar Items Enviados',
'List Sent Shipments': 'Listar Carga Expedida',
'List Service Profiles': 'Listar Perfis de Serviço',
'List Settings': 'Listar Definições',
'List Shelter Services': 'Listar Serviços de Abrigo',
'List Shelter Types': 'Listar Tipos de Abrigo',
'List Shelters': 'Listar Abrigos',
'List Skills': 'Listar Capacidades/habilidades',
'List Solutions': 'Listar Soluções',
'List Staff Members': 'Listar Membros do Staff',
'List Staff Types': 'Listar Tipos de Staff',
'List Staff': 'Listar Pessoal/Staff',
'List Status': 'Listar Estado',
'List Subscriptions': 'Listar Subscrições',
'List Support Requests': 'Listar Pedidos de Apoio',
'List Survey Questions': 'Listar Questões de Inquérito',
'List Survey Sections': 'Listar Secções de Questionário',
'List Survey Series': 'Listar Séries de Inquérito',
'List Survey Templates': 'Listar Modelos de Inquérito',
'List Tasks': 'Listar tarefas',
'List Teams': 'Listar Equipas',
'List Themes': 'Listar Temas',
'List Tickets': 'Listar Bilhetes',
'List Tracks': 'Listar Trajectos',
'List Units': 'Listar Unidades',
'List Users': 'Listar Utilizadores',
'List Volunteers': 'Listar Voluntários',
'List Warehouses': 'Listar Armazéns',
'List all': 'Listar tudo',
'List of Items': 'Lista de Items',
'List of Peers': 'Lista de Pontos /Peers',
'List of Reports': 'Lista de Relatórios',
'List of Requests': 'Lista de Pedidos',
'List of Spreadsheets': 'Lista de Folhas de Cálculo',
'List of Volunteers for this skill set': 'Lista de Voluntários para este conjunto de capacidades',
'List of addresses': 'Lista de Endereços',
'List unidentified': 'Listar não identificados',
'List': 'Listar',
'List/Add': 'Listar/Adicionar',
'Lists "who is doing what & where". Allows relief agencies to coordinate their activities': 'Listas de "quem faz o quê e onde." Permite que as entidades de socorro coordenem as suas actividades',
'Live Help': 'Ajuda ao Vivo',
'Livelihood': 'Sustento',
'Load Cleaned Data into Database': 'Carregar Dados Limpos para a Base de Dados',
'Loading': 'Carregando',
'Local Name': 'Nome Local',
'Local Names': 'Nomes Locais',
'Location Details': 'Detalhes de Localização',
'Location Hierarchy Level 0 Name': 'Nome de Localização Hierarquia Nível 0',
'Location Hierarchy Level 1 Name': 'Nome de Localização de Hierarquia Nível 1',
'Location Hierarchy Level 2 Name': 'Nome de Localização de Hierarquia Nível 2',
'Location Hierarchy Level 3 Name': 'nome de localização de hierarquia nível 3',
'Location Hierarchy Level 4 Name': 'Nome de Localização de Hierarquia Nível 4',
'Location Hierarchy Level 5 Name': 'Nome de Localização de Hierarquia Nível 5',
'Location added': 'Localização adicionada',
'Location cannot be converted into a group.': 'A localização não pode ser convertida num grupo.',
'Location deleted': 'Localização apagada',
'Location details': 'Detalhes de localização',
'Location group cannot be a parent.': 'Grupo de localização não pode ser uma "Área Pai"',
'Location group cannot have a parent.': "Grupo de Localização não pode ter um 'grupo-pai'",
'Location updated': 'Localização actualizada',
'Location': 'Localização',
'Location: ': 'Localização:',
'Locations': 'Localizações',
'Lockdown': 'Trancar/Bloquear',
'Log Entry Details': 'Detalhes de Entrada de Log',
'Log entry added': 'Entrada de Log adicionada',
'Log entry deleted': 'Entrada de Diário apagada',
'Log entry updated': 'Entrada no Log actualizada',
'Logistics Management System': 'Sistema de Gestão de Logística',
'Logistics': 'Logística',
'Logo file %s missing!': 'Ficheiro de logo %s em falta!',
'Logo': 'Logotipo',
'Logout': 'Terminar Sessão',
'Long Text': 'Texto Longo',
'Longitude is West - East (sideways).': 'Longitude é Oeste - Este (lateralmente).',
'Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas.': 'Longitude é zero no meridiano principal (Greenwich Mean Time) e é positiva a Este, através da Europa e Ásia. Longitude é negativa para Oeste, através do Atlântico e das Américas.',
'Longitude should be between': 'Longitude deve ser entre',
'Looting': 'Saques/pilhagens',
'Lost Password': 'Perdeu a Password',
'Low': 'Baixo',
'Magnetic Storm': 'Tempestade Magnética',
'Major expenses': 'Despesas maiores',
'Major outward damage': 'Danos exteriores graves',
'Make Commitment': 'Efectuar Submissão',
'Make Request': 'Fazer Pedido',
'Make preparations per the <instruction>': 'Faça as preparações de acordo com <instruction>',
'Manage Relief Item Catalogue': 'Gerir Catálogo de Item de Socorro',
'Manage Users & Roles': 'Gerir Utilizadores e Funções',
'Manage Warehouses/Sites': 'Gerir Armazéns/Locais',
'Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested.': 'Gerir pedidos de mantimentos, bens, staff ou outros recursos. Comparar com inventários onde os mantimentos são requisitados.',
'Manage requests of hospitals for assistance.': 'Gerir pedidos de hospitais por assistência.',
'Manage volunteers by capturing their skills, availability and allocation': 'Gerir voluntários registando as suas competências, disponibilidade e alocação',
'Manage': 'Gerir',
'Manager': 'Gestor',
'Managing Office': 'Escritório/Serviço de gestão',
'Mandatory. The URL to access the service.': 'Obrigatório. O URL para aceder ao serviço.',
'Manual Synchronization': 'Sincronização Manual',
'Many': 'Muitos',
'Map Configuration added': 'Configuração de Mapa adicionada',
'Map Configuration': 'Configuração de Mapa',
'Map Configurations': 'Configurações de Mapa',
'Map Height': 'Altura de Mapa',
'Map Service Catalog': 'Catálogo de serviço de mapa',
'Map Settings': 'Definições de Mapa',
'Map Viewing Client': 'Cliente de Visualização Cartográfica',
'Map Width': 'Largura do Mapa',
'Map of Hospitals': 'Mapa de Hospitais',
'Map': 'Mapa',
'Marine Security': 'Segurança Marítima',
'Marital Status': 'Estado Civil',
'Marker Details': 'Detalhes do Marcador',
'Marker added': 'Marcador adicionado',
'Marker deleted': 'Marcador apagado',
'Marker updated': 'Marcador actualizado',
'Marker': 'Marcador',
'Markers': 'Marcadores',
'Master Message Log to process incoming reports & requests': 'Diário de Mensagem Principal para processar relatórios e pedidos em entrada',
'Master Message Log': 'Diário de Mensagem Principal',
'Match Percentage': 'Percentagem de Correspondência',
'Match percentage indicates the % match between these two records': 'Percentagem de correspondência indica a % de correspondência entre estes dois registos',
'Matching Catalog Items': 'Items de Catálogo Correspondentes/Equivalentes',
'Matching Records': 'Registos Correspondentes',
'Matrix of Choices (Multiple Answers)': 'Matriz de Escolhas (Respostas Múltiplas)',
'Matrix of Choices (Only one answer)': 'Matriz de Escolhas ( Apenas uma resposta)',
'Matrix of Text Fields': 'Matriz de Campos de Texto',
'Max Persons per Dwelling': 'Máximo de Pessoas por Habitação',
'Medical and public health': 'Saúde Médica e pública',
'Medium': 'Médio',
'Megabytes per Month': 'Megabytes por Mês',
'Member removed from Group': 'Membro removido do Grupo',
'Members': 'Membros',
'Membership Details': 'Detalhes de Membro/Sociedade',
'Membership updated': 'Associação de Membro actualizada',
'Membership': 'Membro/Sócio',
'Memberships': 'Membros/Sócios',
'Message Details': 'Detalhes de Mensagem',
'Message Variable': 'Variável de Mensagem',
'Message added': 'Mensagem adicionada',
'Message deleted': 'Mensagem apagada',
'Message field is required!': 'É necessário campo de mensagem!',
'Message updated': 'Mensagem actualizada',
'Message variable': 'Variável de Mensagem',
'Message': 'Mensagem',
'Messages': 'Mensagens',
'Messaging settings updated': 'Definições de mensagens actualizadas',
'Messaging': 'Mensagens',
'Meteorite': 'Meteorito',
'Meteorological (inc. flood)': 'Meteorológico (inc. Inundação)',
'Method used': 'Método utilizado',
'Middle Name': 'Nome do Meio',
'Migrants or ethnic minorities': 'Migrantes ou Minorias étnicas',
'Military': 'Exército',
'Minimum Bounding Box': 'Caixa Delimitadora Mínima',
'Minimum shift time is 6 hours': 'Hora mínima de turno é de 6 horas',
'Minor/None': 'Menor/Nenhum',
'Minute': 'Minuto',
'Minutes must be a number between 0 and 60': 'Minutos devem ser um número entre 0 e 60',
'Minutes per Month': 'Minutos por Mês',
'Minutes should be a number greater than 0 and less than 60': 'Minutos devem ser um número maior que 0 e menor que 60',
'Miscellaneous': 'Miscelânea',
'Missing Person Details': 'Detalhes de Pessoa Desaparecida',
'Missing Person Reports': 'Relatórios de Pessoas Desaparecidas',
'Missing Person': 'Pessoa Desaparecida',
'Missing Persons Registry': 'Registo de Pessoas Desaparecidas',
'Missing Persons Report': 'Relatório de Pessoas Desaparecidas',
'Missing Report': 'Relatório em falta',
'Missing Senior Citizen': 'Cidadão Sénior Desaparecido',
'Missing Vulnerable Person': 'Pessoa Vulnerável Desaparecida',
'Missing': 'Desaparecido',
'Mobile Basic Assessment': 'Avaliação Básica Móvel',
'Mobile Phone': 'Telemóvel',
'Mobile': 'Móvel',
'Mode': 'Modo',
'Modem Settings': 'Definições de Modem',
'Modem settings updated': 'Definições de Modem actualizadas',
'Moderate': 'Moderado',
'Moderator': 'Moderador',
'Modify Information on groups and individuals': 'Modificar Informação em grupos e Indivíduos',
'Modifying data in spreadsheet before importing it to the database': 'Modificando dados na folha de cálculo antes de importá-los para a base de dados',
'Module disabled!': 'Módulo desactivado!',
'Module provides access to information on current Flood Levels.': 'Módulo providencia acesso a informação sobre os níveis de Inundação actuais.',
'Monday': 'Segunda-feira',
'Monthly Cost': 'Custo Mensal',
'Monthly Salary': 'Salário Mensal',
'Months': 'Meses',
'Morgue Status': 'Ponto de Situação da Morgue',
'Morgue Units Available': 'Unidades de Morgue Disponíveis',
'Mosque': 'Mesquita',
'Motorcycle': 'Motociclo',
'Moustache': 'Bigode',
'MultiPolygon': 'MultiPolígono',
'Multiple Choice (Multiple Answers)': 'Escolha Multipla (resposta múltipla)',
'Multiple Choice (Only One Answer)': 'Escolha Múltipla (Apenas Uma Resposta)',
'Multiple Text Fields': 'Múltiplos Campos de Texto',
'Multiple': 'Múltiplo',
'Muslim': 'Muçulmano',
'Must a location have a parent location?': 'Um local deve ter um local pai?',
'My Current function': 'Minha função actual',
'N/A': 'Não Disponível',
'Name and/or ID': 'Nome e/ou Identificação/BI',
'Name of the file (& optional sub-path) located in static which should be used for the background of the header.': 'Nome do ficheiro (e sub-path opcional) localizado em estático o qual deve ser utilizado para o fundo do cabeçalho.',
'Name of the file (& optional sub-path) located in static which should be used for the top-left image.': 'Nome do ficheiro (e opcional sub-path), localizado na estática, que deve ser usado para a imagem do canto superior esquerdo.',
'Name of the file (& optional sub-path) located in views which should be used for footer.': 'Nome do ficheiro (e sub-caminho opcional) localizado em visualizações que deveriam ser utilizadas para rodapé.',
'Name or Job Title': 'Nome ou Título de Trabalho',
'Name': 'Nome',
'Name, Org and/or ID': 'Nome, Org. e/ou Identificação',
'Name/Model/Type': 'Nome/Modelo/Tipo',
'Names can be added in multiple languages': 'Nomes podem ser adicionados em múltiplas línguas',
'National ID Card': 'Cartão de Identidade Nacional',
'National NGO': 'ONG Nacional',
'Nationality of the person.': 'Nacionalidade da pessoa.',
'Nationality': 'Nacionalidade',
'Nautical Accident': 'Acidente Náutico',
'Nautical Hijacking': 'Sequestro Náutico',
'Need Type Details': 'Detalhes de Tipo de Necessidade',
'Need Type added': 'Tipo de Necessidade adicionado',
'Need Type deleted': 'Tipo de necessidade apagado',
'Need Type updated': 'Tipo de Necessidade actualizado',
'Need Type': 'Tipo de Necessidade',
'Need Types': 'Tipos de Necessidade',
'Need added': 'Necessidade adicionada',
'Need to be logged-in to be able to submit assessments': 'Necessita estar ligado para poder enviar avaliações',
'Need to configure Twitter Authentication': 'Necessita configurar a Autenticação Twitter',
'Need to specify a Budget!': 'Necessita especificar um Orçamento!',
'Need to specify a Resource!': 'Necessita de especificar um Recurso!',
'Need to specify a bundle!': 'É necessário especificar um pacote!',
'Need to specify a group!': 'Necessita de especificar um grupo!',
'Need to specify a location to search for.': 'É necessário especificar uma localização pela qual procurar.',
'Need to specify a role!': 'Necessita especificar uma função!',
'Need to specify a table!': 'Deve especificar uma tabela!',
'Need to specify a user!': 'é necessário especificar um utilizador!',
'Need updated': 'Necessidade actualizada',
'Needs Maintenance': 'Necessita de Manutenção',
'Needs': 'Necessidades',
'Neighbouring building hazard': 'Perigo de construções vizinhas',
'Neonatal ICU': 'Unidade de Cuidados Intensivos Neonatal',
'Neonatology': 'Neonatologia',
'Network': 'Rede',
'Neurology': 'Neurologia',
'New Assessment reported from': 'Nova Avaliação comunicada de',
'New Checklist': 'Nova Checklist (Lista de Verificação)',
'New Peer': 'Novo Ponto',
'New Record': 'Novo Registo',
'New Request': 'Novo Pedido',
'New Solution Choice': 'Nova Escolha de Solução',
'New Support Request': 'Nova Solicitação de Suporte',
'New': 'Novo',
'News': 'Notícias',
'Next': 'Seguinte',
'No Activities Found': 'Não foram encontradas Actividades',
'No Alternative Items currently registered': 'Não há Items Alternativos registados actualmente',
'No Assessment Summaries currently registered': 'Não há sumários de avaliação registados actualmente',
'No Assessments currently registered': 'Não há avaliações registadas actualmente',
'No Asset Assignments currently registered': 'Não existem atribuições de Bens registados actualmente.',
'No Assets currently registered': 'Não há bens registados actualmente',
'No Baseline Types currently registered': 'Não há Tipos de Linha de Base/plataforma registados actualmente',
'No Baselines currently registered': 'Não há Linhas de Base registadas actualmente',
'No Brands currently registered': 'Não há Marcas registadas actualmente',
'No Budgets currently registered': 'Não há Orçamentos registados actualmente',
'No Bundles currently registered': 'Não há Pacotes registados actualmente',
'No Checklist available': 'Não há Checklist/lista de confirmação disponível',
'No Cluster Subsectors currently registered': 'Não existem subsectores de grupos registados actualmente',
'No Commitment Items currently registered': 'Não há Items de Consignação registados actualmente',
'No Commitments': 'Sem consignações/ compromissos',
'No Credentials currently set': 'Não há Credenciais definidas actualmente',
'No Details currently registered': 'Não há Detalhes registados actualmente',
'No Documents found': 'Não foram encontrados Documentos',
'No Donors currently registered': 'Não há Doadores registados actualmente',
'No Feature Layers currently defined': 'Não há camadas de Característica definidas actualmente',
'No Flood Reports currently registered': 'Não existem reports de inundação registados de momento',
'No Groups currently defined': 'Não há grupos definidos actualmente',
'No Hospitals currently registered': 'Não há Hospitais registados actualmente',
'No Identification Report Available': 'Não há Relatório de Identificação disponível',
'No Identities currently registered': 'Não há identidades registadas actualmente',
'No Image': 'Sem Imagem',
'No Images currently registered': 'Não há Imagens registadas actualmente',
'No Impact Types currently registered': 'Não existem tipos de Impacto registados actualmente',
'No Impacts currently registered': 'Não há impactos registados actualmente',
'No Incident Reports currently registered': 'Não há Relatórios de Ocorrência registados actualmente',
'No Incoming Shipments': 'Não há Carregamentos em Entrada',
'No Item Categories currently registered': 'Não há Categorias de Item registadas actualmente',
'No Item Packs currently registered': 'Não há Pacotes de Items registados actualmente',
'No Items currently registered': 'Não há items registados actualmente',
'No Kits currently registered': 'Não há Kits registados actualmente',
'No Level 1 Assessments currently registered': 'Não há avaliações de nível 1 registadas actualmente',
'No Level 2 Assessments currently registered': 'Não há Avaliações Nível 2 registadas actualmente',
'No Locations currently available': 'Não há localizações disponíveis actualmente',
'No Locations currently registered': 'Não há localizações registadas actualmente',
'No Map Configurations currently defined': 'Não há Configurações de Mapa definidas actualmente',
'No Markers currently available': 'Não há marcadores disponíveis actualmente',
'No Match': 'Não há correspondência',
'No Matching Catalog Items': 'Não há Items de Catálogo corespondentes',
'No Matching Records': 'Não há Registos Correspondentes',
'No Members currently registered': 'Não há membros/sócios registados actualmente',
'No Memberships currently defined': 'Não existem registos de membros definidos',
'No Need Types currently registered': 'Não existem tipos de necessidades registados',
'No Needs currently registered': 'Não há Necessidades registadas actualmente',
'No Offices found!': 'Não foram encontrados Serviços!',
'No Organizations currently registered': 'Não há Organizações registadas actualmente',
'No People currently registered in this shelter': 'Não há pessoas registadas neste abrigo actualmente',
'No Persons currently reported missing': 'Não há pessoas desaparecidas comunicadas actualmente',
'No Persons found': 'Não foram encontradas Pessoas',
'No Photos found': 'Não foram encontradas Fotos',
'No Picture': 'Sem Imagem',
'No Population Statistics currently registered': 'Não existem estatísticas Populacionais registadas actualmente',
'No Presence Log Entries currently registered': 'Não há Entradas de Log de Presença registadas actualmente',
'No Projections currently defined': 'Não há projecções definidas actualmente',
'No Projects currently registered': 'Não há Projectos registados actualmente',
'No Rapid Assessments currently registered': 'Não há Avaliações Rápidas registadas actualmente',
'No Received Items currently registered': 'Não há Items Recebidos registados actualmente',
'No Records currently available': 'Não há Registos disponíveis actualmente',
'No Request Items currently registered': 'Não há Items de Pedido registados actualmente',
'No Requests': 'Não há Pedidos',
'No Rivers currently registered': 'Não há rios registados actualmente',
'No Roles currently defined': 'Não há Funções definidas actualmente',
'No Sections currently registered': 'Não há Secções registadas actualmente',
'No Sectors currently registered': 'Não há Sectores registados actualmente',
'No Sent Items currently registered': 'Não há Items Enviados registados actualmente',
'No Settings currently defined': 'Não há Definições definidas actualmente',
'No Shelter Services currently registered': 'Não há Serviços de Abrigo registados actualmente',
'No Shelter Types currently registered': 'Não há Tipos de Abrigos registados actualmente',
'No Shelters currently registered': 'Não há Abrigos registados actualmente',
'No Solutions currently defined': 'Não há Soluções definidas actualmente',
'No Staff Types currently registered': 'Não há Tipos de Funcionário/Staff registados actualmente',
'No Staff currently registered': 'Não há Staff/funcionários registados actualmente',
'No Subscription available': 'Não há Subscrição disponível',
'No Support Requests currently registered': 'Não há Pedidos de suporte/apoio registados actualmente',
'No Survey Answers currently registered': 'Não há Respostas de Inquérito actualmente registadas',
'No Survey Questions currently registered': 'Não há questões de Inquérito registadas actualmente',
'No Survey Sections currently registered': 'Não há Secções de Inquérito registadas actualmente',
'No Survey Template currently registered': 'Não há Modelo de Inquérito registado actualmente',
'No Themes currently defined': 'Não há Temas definidos actualmente',
'No Tickets currently registered': 'Não há Tickets registados actualmente',
'No Tracks currently available': 'Não há trajectos disponíveis actualmente',
'No Users currently registered': 'Não há Utilizadores registados actualmente',
'No Volunteers currently registered': 'Não há voluntários registados actualmente',
'No Warehouses currently registered': 'Não existem Armazéns registrados atualmente',
'No access at all': 'Sem nenhum acesso',
'No access to this record!': 'Não há acesso a este registo!',
'No action recommended': 'Nenhuma acção recomendada',
'No conflicts logged': 'Não há conflitos registados',
'No contact information available': 'Não há informação de contacto disponível',
'No contacts currently registered': 'Não há contactos registados actualmente',
'No data in this table - cannot create PDF!': 'Não existem dados nesta tabela - não é possível criar PDF!',
'No databases in this application': 'Não há bases de dados nesta aplicação',
'No dead body reports available': 'Não há relatórios de cadáveres disponíveis',
'No entries found': 'Não foram encontradas entradas',
'No entries matching the query': 'Não há entradas correspondentes à query ',
'No location known for this person': 'Não há localização conhecida para esta pessoa',
'No locations found for members of this team': 'Não há localizações encontradas para membros desta equipa',
'No log entries matching the query': 'Não há entradas de log coincidentes com a query',
'No messages in the system': 'Não há mensagens no sistema',
'No notes available': 'Não há notas disponíveis',
'No peers currently registered': 'Não há pontos registados actualmente',
'No pending registrations found': 'Não foram encontrados registos/ inscrições pendentes',
'No pending registrations matching the query': 'Não há registos pendentes correspondentes com a query',
'No person record found for current user.': 'Não foi encontrado nenhum registo de pessoa para o actual utilizador.',
'No problem group defined yet': 'Não há grupo de problema ainda definido',
'No records matching the query': 'Não há registos equivalentes à query',
'No report available.': 'Não há relatório disponível.',
'No reports available.': 'Não há relatórios disponíveis.',
'No reports currently available': 'Não há relatórios disponíveis actualmente',
'No requests found': 'Não foram encontrados pedidos',
'No resources currently reported': 'Não foram comunicados recursos actualmente',
'No service profile available': 'Não há perfil de serviço disponível',
'No skills currently set': 'Não há capacidades/habilidades definidas actualmente',
'No staff members currently registered': 'Nenhum membro do staff registado actualmente',
'No status information available': 'Não há informação de estado disponível',
'No synchronization': 'Sem sincronização',
'No tasks currently registered': 'Não há tarefas registadas actualmente',
'No template found!': 'Não foi encontrado nenhum modelo',
'No units currently registered': 'Não há unidades registadas actualmente',
'No volunteer availability registered': 'Não há disponibilidade de voluntários registada',
'No volunteers currently registered': 'Não há voluntários registados actualmente',
'No': 'Não',
'Non-structural Hazards': 'Riscos não-estruturais',
'None (no such record)': 'Nenhum ( não existe tal relatório)',
'Not Applicable': 'Não Aplicável',
'Not Authorised!': 'Não é autorizado!',
'Not Possible': 'Não é Possível',
'Not Set': 'Não definido',
'Not installed or incorrectly configured.': 'Não instalado ou configurado incorrectamente.',
'Not yet a Member of any Group': 'Ainda não é Membro de nenhum Grupo',
'Note Details': 'Detalhes de Nota',
'Note Type': 'Tipo de Nota',
'Note added': 'Nota adicionada',
'Note deleted': 'Nota apagada',
'Note that this list only shows active volunteers. To see all people registered in the system, search from this screen instead': 'Note que esta lista mostra apenas voluntários activos. Para ver todas as pessoas registadas no sistema, procure antes a partir deste ecrã',
'Note updated': 'Nota actualizada',
'Note': 'Nota',
'Notice to Airmen': 'Advertência para aviadores',
'Number of Patients': 'Número de Pacientes',
'Number of Rows': 'Número de Linhas',
'Number of additional beds of that type expected to become available in this unit within the next 24 hours.': 'Número de camas adicionais desse tipo que se prevê estarem disponíveis nesta unidade dentro das próximas 24 horas.',
'Number of alternative places for studying': 'Numero de locais alternativos para estudo',
'Number of available/vacant beds of that type in this unit at the time of reporting.': 'Número de camas vagas/disponíveis desse tipo nesta unidade à hora do relatório.',
'Number of deaths during the past 24 hours.': 'Número de mortes durante as últimas 24 horas.',
'Number of discharged patients during the past 24 hours.': 'Número de pacientes que tiveram alta durante as últimas 24 horas.',
'Number of doctors': 'Número de médicos',
'Number of in-patients at the time of reporting.': 'Número de pacientes internados na hora da comunicação.',
'Number of newly admitted patients during the past 24 hours.': 'Número de pacientes admitidos recentemente durante as últimas 24 horas.',
'Number of non-medical staff': 'Número de staff não-médico',
'Number of nurses': 'Número de enfermeiras',
'Number of private schools': 'Número de escolas particulares / privadas',
'Number of public schools': 'Número de escolas públicas',
'Number of religious schools': 'Número de escolas religiosas',
'Number of residential units not habitable': 'Número de unidades residenciais não habitáveis',
'Number of residential units': 'Número de unidades residenciais',
'Number of vacant/available units to which victims can be transported immediately.': 'Número de unidades vazias/disponíveis para as quais as vítimas podem ser transportadas imediatamente.',
'Number or code used to mark the place of find, e.g. flag code, grid coordinates, site reference number or similar (if available)': 'Número ou código utilizado para marcar o local de achado, por exemplo código de bandeira, coordenadas de grelha, número de referência do local ou semelhante (se disponível)',
'Number': 'Número',
'Number/Percentage of affected population that is Female & Aged 0-5': 'Número/Percentagem da população afectada que é Feminina e de Idades 0-5',
'Number/Percentage of affected population that is Female & Aged 13-17': 'Número/Percentagem de população afectada que é Feminino e com Idade entre 13-17',
'Number/Percentage of affected population that is Female & Aged 18-25': 'Número/Percentagem de população afectada que é Feminino e de Idade 18-25',
'Number/Percentage of affected population that is Female & Aged 26-60': 'Números/ Percentagem de população feminina afectada com idades entre os 26-60 anos',
'Number/Percentage of affected population that is Female & Aged 6-12': 'Número/Percentagem de população afectada que é Feminino e Idade 6-12',
'Number/Percentage of affected population that is Female & Aged 61+': 'Número/Percentagem de população afectada que é Feminino e com Idade 61+',
'Number/Percentage of affected population that is Male & Aged 0-5': 'Número / Porcentagem da população afetada, que é do sexo Masculino e com idades entre 0-5',
'Number/Percentage of affected population that is Male & Aged 18-25': 'Número/Percentagem de população afectada que é Masculino e de Idades 18-25',
'Number/Percentage of affected population that is Male & Aged 26-60': 'Número / Percentagem da população afetada, que é do sexo masculino e envelhecido 26-60',
'Number/Percentage of affected population that is Male & Aged 6-12': 'Número/Percentagem de população afectada que é Masculino e com idade 6-12',
'Nursery Beds': 'Camas de Berçário/ Berços',
'Nutrition problems': 'Problemas Nutricionais',
'Nutrition': 'Nutrição',
'OK': 'Ok',
'OR Reason': 'Razão Bloco Operatório',
'OR Status Reason': 'Razão de Estado de Bloco Operatório',
'OR Status': 'Estado de Bloco Operatório',
'OR a site OR a location': 'OU um sítio OU uma localização',
'Observer': 'Observador',
'Obsolete': 'Obsoletos',
'Obstetrics/Gynecology': 'Obstetrícia / Ginecologia',
'Office Address': 'Endereço de Agência',
'Office Details': 'Detalhes de Serviço/Agência',
'Office added': 'Escritório adicionado',
'Office deleted': 'Escritório apagado',
'Office updated': 'Agência actualizada',
'Office': 'Escritório / Gabinete',
'Offices': 'Escritórios/Agências',
'Offline Sync (from USB/File Backup)': 'Sincronização Offline (de USB/Backup de Ficheiro)',
'Offline Sync': 'Sincronização Offline',
'Older person (>60 yrs)': 'Pessoa idosa (>60 anos)',
'On by default?': 'Ligado por padrão?',
'One Time Cost': 'Custo Único',
'One time cost': 'Custo único',
'One-time costs': 'Custos de uma só vez',
'One-time': 'De uma só vez',
'Oops! Something went wrong...': 'Ups! Alguma coisa correu mal...',
'Oops! something went wrong on our side.': 'Ups! Alguma coisa correu mal do nosso lado.',
'Opacity (1 for opaque, 0 for fully-transparent)': 'Opacidade (1 para opaco, 0 para completamente transparente)',
'Open area': 'Área aberta',
'Open recent': 'Abrir recente',
'Open': 'Aberto',
'Operating Rooms': 'Salas de Operação',
'Optional link to an Incident which this Assessment was triggered by.': 'Ligação opcional a um Incidente/uma ocorrêcia pela qual esta avaliação foi desencadeada.',
'Optional': 'Opcional',
'Options': 'Opções',
'Organization Details': 'Detalhes da Organização',
'Organization Registry': 'Registo de Organizações',
'Organization added': 'Organização adicionada',
'Organization deleted': 'Organização apagada',
'Organization updated': 'Organização actualizada',
'Organization': 'Organização',
'Organizations': 'Organizações',
'Origin': 'Origem',
'Other (specify)': 'Outro (especificar)',
'Other Evidence': 'Outras provas /evidências',
'Other Faucet/Piped Water': 'Outra Água de torneira/Canalizada',
'Other Isolation': 'Outro Isolamento',
'Other activities of boys 13-17yrs before disaster': 'Outras actividades de rapazes 13-17 anos antes do desastre/catástrofe',
'Other activities of boys 13-17yrs': 'Outras actividades de rapazes entre os 13 e 17 anos de idade',
'Other activities of boys <12yrs before disaster': 'Outras actividades para rapazes <12anos antes do desastre',
'Other activities of boys <12yrs': 'Outras actividades para rapazes <12 anos',
'Other activities of girls 13-17yrs before disaster': 'Outras actividades de meninas 13-17 anos antes do desastre',
'Other activities of girls 13-17yrs': 'Outras actividades de raparigas 13-17 anos',
'Other activities of girls<12yrs before disaster': 'Outras actividades de raparigas <12 anos antes do desastre',
'Other activities of girls<12yrs': 'Outras actividades de menias <12 anos',
'Other alternative infant nutrition in use': 'Outra nutrição infantil alternativa em uso',
'Other alternative places for study': 'Outros locais alternativos para estudo',
'Other assistance needed': 'Outra assistência necessária',
'Other assistance, Rank': 'Outra assistência, Classificação',
'Other current health problems, adults': 'Outros problemas de saúde actuais, adultos',
'Other current health problems, children': 'Outros problemas de saúde actuais, crianças',
'Other events': 'Outros eventos',
'Other factors affecting school attendance': 'Outros factores que afectam a frequência escolar',
'Other non-food items': 'Outros itens não-alimentares',
'Other recommendations': 'Outras recomendações',
'Other residential': 'Outra residencia',
'Other school assistance received': 'Outra assistência escolar recebida',
'Other school assistance, details': 'Outra assistência escolar, detalhes',
'Other school assistance, source': 'Outra assistência escolar, origem',
'Other side dishes in stock': 'Outros pratos de acompanhamento em Stock ',
'Other types of water storage containers': 'Outros tipos de contentores de armazenamento de água',
'Other ways to obtain food': 'Outras formas de obter comida',
'Other': 'Outro',
'Outbound Mail settings are configured in models/000_config.py.': 'Definições de Correio em Saída são configuradas em models/000_config.py.',
'Outbox': 'Caixa de Saída',
'Outgoing SMS Handler': 'Controlador de SMS em Saída',
'Outgoing SMS handler': 'Controlador de SMS em Saída',
'Overall Hazards': 'Perigos Gerais',
'Overland Flow Flood': 'Inundação de Escoamento Superficial',
'Owned Resources': 'Recursos Possuídos',
'PIN number ': 'Código PIN',
'PL Women': 'PL Mulheres',
'Pack': 'Pacote',
'Parameters': 'Parâmetros',
'Parapets, ornamentation': 'Parapeitos, ornamentação',
'Parent Office': 'Agência/Serviço "Pai"',
'Parent needs to be of the correct level': '"Pai" precisa ser do nível correto',
'Parent': 'Pai / antecedente',
'Participant': 'Participante',
'Password': 'Palavra-Passe',
'Path': 'Caminho',
'Pathology': 'Patologia',
'Patients': 'Pacientes',
'Pediatric ICU': 'Unidade de Cuidados Intensivos Pediátricos',
'Pediatric Psychiatric': 'Psiquiatria Pediátrica',
'Pediatrics': 'Pediatria',
'Peer Details': 'Detalhes do Ponto',
'Peer Registration Details': 'Detalhes de Registo de Ponto',
'Peer Registration Request': 'Pedido de registo do ponto',
'Peer Registration': 'Registo de Ponto',
'Peer Type': 'Tipo de Ponto',
'Peer UID': 'UID do Ponto',
'Peer added': 'Ponto adicionado',
'Peer deleted': 'Ponto apagado',
'Peer not allowed to push': 'O Ponto não tem permissão para enviar (push)',
'Peer registration request added': 'Pedido de registo de ponto adicionado',
'Peer registration request updated': 'Pedido de registo de ponto actualizado',
'Peer': 'Ponto',
'Peers': 'Pontos/peers',
'Pending': 'Pendente',
'People Needing Food': 'Pessoas em Necessidade de Comida',
'People Needing Shelter': 'Pessoas a Precisar de Abrigo',
'People Needing Water': 'Pessoas com Necessidade de Água',
'Person 1': 'Pessoa 1',
'Person 1, Person 2 are the potentially duplicate records': 'Pessoa 1, Pessoa 2 são potenciais registos duplicados',
'Person 2': 'Pessoa 2',
'Person Details': 'Detalhes da Pessoa',
'Person Finder': 'Localizador de Pessoas',
'Person Registry': 'Registo/ Inscrição de Pessoa',
'Person added to Group': 'Pessoa adicionada ao Grupo',
'Person added to Team': 'Pessoa adicionada à Equipa',
'Person added': 'Pessoa adicionada',
'Person deleted': 'Pessoa apagada',
'Person details updated': 'Detalhes de pessoa actualizados',
'Person interviewed': 'Pessoa entrevistada',
'Person missing': 'Pessoa em falta/ Desaparecida',
'Person reporting': 'Pessoa a comunicar',
'Person who has actually seen the person/group.': 'Pessoa que realmente viu a pessoa/grupo',
'Person': 'Pessoa',
'Person/Group': 'Pessoa/Grupo',
'Personal Effects Details': 'Detalhes de Objectos de Uso Pessoal',
'Personal Effects': 'Objectos de Uso Pessoal',
'Personal impact of disaster': 'Impacto pessoal do desastre',
'Personal': 'Pessoal',
'Persons in institutions': 'Pessoas em Instituições',
'Persons with disability (mental)': 'Pessoas com deficiência (mental)',
'Persons with disability (physical)': 'Pessoas com deficiência (física)',
'Persons': 'Pessoas',
'Phone 1': 'Telefone 1',
'Phone 2': 'Telefone 2',
'Phone': 'Telefone',
'Phone/Business': 'Telefone/Emprego',
'Phone/Emergency': 'Telefone/Emergência',
'Phone/Exchange': 'Telefone/Central',
'Photo Details': 'Detalhes de Foto/ Fotográficos',
'Photo Taken?': 'Foto Tirada?',
'Photo added': 'Foto adicionada',
'Photo deleted': 'Foto excluída',
'Photo updated': 'Foto actualizada',
'Photo': 'Foto',
'Photograph': 'Fotografia',
'Photos': 'Fotos/ Fotografias',
'Physical Description': 'Descrição Física',
'Physical Safety': 'Segurança Física',
'Picture upload and finger print upload facility': 'Instalação de upload de imagem e impressão digital',
'Picture': 'Imagem',
'Place of Recovery': 'Local de Recuperação',
'Places for defecation': 'Lugares para defecação',
'Planner': 'Planeador',
'Playing': 'A Brincar/Jogar',
'Please enter a First Name': 'Por favor insira um Primeiro Nome',
'Please enter a person': 'Por favor insira uma pessoa',
'Please enter the recipient': 'Por favor insira o destinatário',
'Please fill this!': 'Por favor, preencha isto!',
'Please report here where you are:': 'Por favor comunique aqui onde está:',
'Please select another level': 'Por favor seleccione outro nível',
'Please select': 'Por favor Seleccionar',
'Please sign-up with your Cell Phone as this allows us to send you Text messages. Please include full Area code.': 'Por favor registe-se com o seu telemóvel/celular pois isto permite-nos enviar-lhe mensagens de texto. Por favor inclua código postal completo.',
'Please specify any problems and obstacles with the proper handling of the disease, in detail (in numbers, where appropriate). You may also add suggestions the situation could be improved.': 'Por favor especifique quaisquer problemas e obstáculos com o tratamento adequado da doença, em detalhe (em números, quando apropriado). Pode também adicionar sugestões sobre como a situação poderia ser melhorada.',
'Please use this field to record any additional information, including a history of the record if it is updated.': 'Por favor utilize este campo para registar qualquer informação adicional, incluindo um histórico do registo se este for actualizado.',
'Please use this field to record any additional information, including any Special Needs.': 'Por favor utilize este campo para registar qualquer informação adicional, incluindo quaisquer Necessidades Especiais.',
'Please use this field to record any additional information, such as Ushahidi instance IDs. Include a history of the record if it is updated.': 'Por favor utilize este campo para registar qualquer informação adicional, tal como IDs de instâncias Ushahidi. Inclua um histórico do registo se ele é actualizado',
'Pledge Support': 'Garantir Suporte/apoio',
'Point': 'Ponto',
'Poisoning': 'Envenenamento',
'Poisonous Gas': 'Gás Venenoso',
'Police': 'Polícia',
'Pollution and other environmental': 'Poluição e outros factores ambientais',
'Polygon reference of the rating unit': 'Polígono de referência da unidade de classificação',
'Polygon': 'Polígono',
'Population Statistic Details': 'Detalhes de Estatística Populacional',
'Population Statistic added': 'Estatística de População adicionada',
'Population Statistic deleted': 'Estatística de População apagada',
'Population Statistic updated': 'Estatística Populacional Actualizada',
'Population Statistics': 'Estatística de População/ Populacional',
'Population': 'População',
'Porridge': 'Papas',
'Port Closure': 'Encerramento de Porto',
'Port': 'Porto/porta',
'Portuguese (Brazil)': 'Português (Brasil)',
'Portuguese': 'Português',
'Postcode': 'Código Postal',
'Poultry restocking, Rank': 'Reposição de aves de capoeira, Classificação',
'Poultry': 'Aves domésticas',
'Pounds': 'Libras',
'Power Failure': 'Falha de Energia',
'Pre-cast connections': 'Ligações pré-fabricadas',
'Preferred Name': 'Nome Preferido',
'Pregnant women': 'Mulheres grávidas',
'Preliminary': 'Preliminar',
'Presence Condition': 'Condição de Persença',
'Presence': 'Presença',
'Previous': 'Anterior',
'Primary Occupancy': 'Ocupação Principal',
'Priority': 'prioridade',
'Private': 'Privado',
'Problem Administration': 'Administração de Problema',
'Problem Details': 'Detalhes de Problema',
'Problem Group': 'Grupo de Problema',
'Problem Title': 'Título do Problema',
'Problem connecting to twitter.com - please refresh': 'Problema na ligação a twitter.com - Por favor recarregar',
'Problem deleted': 'Problema apagado',
'Problem updated': 'Problema Actualizado',
'Problem': 'Problema',
'Problems': 'Problemas',
'Procedure': 'Procedimento',
'Profile': 'Perfil',
'Project Details': 'Detalhes de Projecto',
'Project Status': 'Estado do Projecto',
'Project Tracking': 'Monitorização do Projecto',
'Project added': 'Projecto adicionado',
'Project deleted': 'Projecto apagado',
'Project has no Lat/Lon': 'O Projecto não tem Lat/Long',
'Project updated': 'Projecto actualizado',
'Project': 'Projecto',
'Projection Details': 'Detalhes de Projecção',
'Projection added': 'Projecção adicionada',
'Projection deleted': 'Projecção apagada',
'Projection updated': 'Projecção Actualizada',
'Projection': 'Projecção',
'Projections': 'Projecções',
'Projects': 'Projectos',
'Property reference in the council system': 'Referencia de propriedade do sistema de concelho',
'Protected resource': 'Recurso Protegido',
'Protection': 'Protecção',
'Provide Metadata for your media files': 'Providencia Metadados para os seus ficheiros media',
'Provide an optional sketch of the entire building or damage points. Indicate damage points.': 'Providenciar um esboço opcional para o edifício inteiro ou pontos de dano. Indicar pontos de dano.',
'Proxy-server': 'Servidor proxy',
'Psychiatrics/Adult': 'Psiquiatria/Adulto',
'Psychiatrics/Pediatric': 'Psiquiatria/Pediatria',
'Public Event': 'Evento Público',
'Public and private transportation': 'Transportes públicos e privados',
'Public assembly': 'Assembleia pública',
'Public': 'Público',
'Punjabi': 'Punjabi (habitante de Punjab)',
'Push tickets to external system': 'Enviar (push) tickets para sistema externo',
'Pyroclastic Flow': 'Fluxo piroclástico',
'Pyroclastic Surge': 'Onda Piroclástica',
'Python Serial module not available within the running Python - this needs installing to activate the Modem': 'Módulo Serial do Python não disponível no Python em execução - necessita de ser instalado para activar o Modem',
'Python needs the ReportLab module installed for PDF export': 'O Python necessita do módulo ReportLab instalado para exportação de PDF',
'Quantity Committed': 'Quantidade Consignada / entregue',
'Quantity Fulfilled': 'Quantidade Atingida/ (necessária preenchida)',
'Quantity in Transit': 'Quantidade em Trânsito',
'Quarantine': 'Quarentena',
'Queryable?': 'Consultável?',
'RC frame with masonry infill': 'Estrutura de betão armado com enchimento de alvenaria',
'RECORD A': 'REGISTO A',
'RECORD B': 'REGISTO B',
'Race': 'Raça',
'Radiological Hazard': 'Risco Radiológico',
'Radiology': 'Radiologia',
'Railway Accident': 'Acidente Ferroviário',
'Rain Fall': 'Chuva / Aguaceiro',
'Rapid Assessment Details': 'Detalhes de Avaliação Rápida',
'Rapid Assessment added': 'Avaliação Rápida adicionada',
'Rapid Assessment deleted': 'Avaliação Rápida apagada',
'Rapid Assessment updated': 'Avaliação Rápida actualizada',
'Rapid Assessment': 'Avaliação Rápida',
'Rapid Assessments & Flexible Impact Assessments': 'Avaliações Rápidas e Avaliações de Impacto Flexível',
'Rapid Assessments': 'Avaliações rápidas',
'Rating Scale': 'Escala de Pontuação',
'Raw Database access': 'Acesso à base de dados em modo Raw',
'Read-Only': 'Somente de Leitura',
'Read-only': 'Só de Leitura',
'Receive Items': 'Receber Items',
'Receive Shipment': 'Receber Carregamento',
'Receive this shipment?': 'Receber este carregamento?',
'Received By': 'Recebido Por',
'Received Item Details': 'Detalhes de Item Recebido',
'Received Item deleted': 'Item Recebido apagado',
'Received Item updated': 'Item Recebido actualizado',
'Received Shipment Details': 'Detalhes de Carregamento Recebido',
'Received Shipment canceled and items removed from Inventory': 'Carregamento Recebido cancelado e items removidos do inventário',
'Received Shipment canceled': 'Carregamento Recebido cancelado',
'Received Shipment updated': 'Carregamento Recebido actualizado',
'Received Shipments': 'Carregamentos Recebidos',
'Received': 'Recebido',
'Receiving and Sending Items': 'Receber e enviar items',
'Recipients': 'Destinatários',
'Recommendations for Repair and Reconstruction or Demolition': 'Recomendações para Reparação e Reconstrução ou Demolição',
'Record Details': 'Detalhes de Registo',
'Record Saved': 'Registo Gravado',
'Record added': 'Registo adicionado',
'Record any restriction on use or entry': 'Registe qualquer restrição na utilização ou acesso',
'Record deleted': 'Registo apagado',
'Record last updated': 'Ultimo registo actualizado',
'Record not found!': 'Registo não encontrado!',
'Record not found': 'Registo não encontrado',
'Record updated': 'Registo actualizado',
'Record': 'Registo',
'Recording and Assigning Assets': 'Registo e distribuição de Bens',
'Records': 'Registos',
'Recovery Request added': 'Pedido de Recuperação adicionado',
'Recovery Request updated': 'Pedido de Recuperação actualizado',
'Recovery Requests': 'Pedidos de Recuperação',
'Recovery': 'Recuperação',
'Recurring Cost': 'Custo Recorrente',
'Recurring costs': 'Custos recorrentes',
'Recurring': 'Recorrente',
'Red': 'Vermelho',
'Reference Document': 'Documento de Referência',
'Region Location': 'Localização da Região',
'Register Person into this Shelter': 'Registar Pessoa neste Abrigo',
'Register Person': 'Registar Pessoa',
'Register them as a volunteer': 'Registar como voluntário',
'Register': 'Registar',
'Registered People': 'Pessoas Registadas',
'Registered users can': 'Utilizadores registados podem',
'Registration Details': 'Detalhes de Inscrição',
'Registration added': 'Registo adicionado',
'Registration entry deleted': 'Entrada de Registo apagada',
'Registration is still pending approval from Approver (%s) - please wait until confirmation received.': 'O Registo ainda tem pendente a aprovação do Aprovador (%s) - por favor aguarde até ter recebido confirmação.',
'Registration updated': 'Inscrição actualizada',
'Registration': 'Registo',
'Rehabilitation/Long Term Care': 'Reabilitação / Cuidados de Longa Duração',
'Reinforced masonry': 'Alvenaria reforçada',
'Rejected': 'Rejeitado',
'Relief Team': 'Equipa de Socorro',
'Relief': 'Ajuda/Socorro',
'Religion': 'Religião',
'Religious': 'Religioso',
'Relocate as instructed in the <instruction>': 'Mudar de lugar como instruído na <instruction>',
'Remove Person from Group': 'Remover Pessoa do Grupo',
'Remove Person from Team': 'Remover Pessoa da Equipa',
'Remove': 'Remover',
'Removed from Group': 'Removido do Grupo',
'Removed from Team': 'Removido da Equipa',
'Repeat your password': 'Repita a sua palavra-passe',
'Replace if Master': 'Substituir se Master/Principal',
'Replace if Newer': 'Substituir se mais recente',
'Replace': 'Substituir',
'Report Another Assessment...': 'Comunicar Outra Avaliação...',
'Report Details': 'Detalhes de Relatório',
'Report Resource': 'Relatar Recurso',
'Report Types Include': 'Os Tipos de Relatório Incluem',
'Report added': 'Relatório adicionado',
'Report deleted': 'Relatório apagado',
'Report my location': 'Comunicar a minha localização',
'Report the contributing factors for the current EMS status.': 'Comunicar os factores que contribuem para o estado actual dos Serviços de Emergência Médica (EMS).',
'Report the contributing factors for the current OR status.': 'Comunicar os factores que contribuem para o estado actual do Bloco de Operações.',
'Report them as found': 'Reportar como encontrados',
'Report them missing': 'Dar como desaparecidos',
'Report updated': 'Relatório actualizado',
'Report': 'Relatório/Comunicar',
'Reporter Name': 'Nome do Reporter/ Comunicador',
'Reporter': 'Reporter/Jornalista',
'Reports': 'Relatórios',
'Request Canceled': 'Pedido Cancelado',
'Request Details': 'Detalhes de Pedido',
'Request Item Details': 'Pedir Detalhes de Item',
'Request Item added': 'Item de Pedido adicionado',
'Request Item deleted': 'Item de Pedido apagado',
'Request Item updated': 'Item de Pedido actualizado',
'Request Items': 'Itens de Pedido',
'Request Status': 'Estado de Pedido',
'Request Type': 'Tipo de Pedido',
'Request Updated': 'Pedido Actualizado',
'Request added': 'Pedido adicionado',
'Request deleted': 'Pedido apagado',
'Request for Role Upgrade': 'Pedido de Actualização de Função',
'Request updated': 'Pedido actualizado',
'Request': 'Pedido',
'Request, Response & Session': 'Pedido, Resposta e Sessão',
'Requested By': 'Pedido Por',
'Requested Items': 'Items pedidos/ requisitados',
'Requested on': 'Pedido em',
'Requested': 'Pedido/ Requisitado',
'Requester': 'Requerente',
'Requests': 'Pedidos',
'Rescue and recovery': 'Resgate e recuperação',
'Reset Password': 'Redefinir Palavra-Passe',
'Reset': 'Limpar',
'Resolve Conflict': 'Resolver Conflito',
'Resolve': 'Resolver',
'Resource Details': 'Detalhes de Recurso',
'Resource added': 'Recurso adicionado',
'Resource deleted': 'Recurso apagado',
'Resource updated': 'Recurso actualizado',
'Resource': 'Recurso',
'Resources': 'Recursos',
'Respiratory Infections': 'Infecções Respiratórias',
'Response': 'Resposta',
'Restricted Access': 'Acesso Restrito',
'Restricted Use': 'Uso Restrito',
'Results': 'Resultados',
'Retail Crime': 'Crime no Comércio a Retalho',
'Retrieve Password': 'Recuperar Palavra-passe',
'Returned Status': 'Estado Devolvido',
'Returned': 'Devolvido',
'Rice': 'Arroz',
'Riot': 'Motim',
'River Details': 'Detalhes do Rio / Curso de água',
'River added': 'Rio adicionado',
'River deleted': 'Rio apagado',
'River updated': 'Rio/ curso de água actualizado',
'River': 'Rio',
'Rivers': 'Rios',
'Road Accident': 'Acidente Rodoviário',
'Road Closed': 'Estrada Fechada',
'Road Conditions': 'Condições da Estrada',
'Road Delay': 'Atraso na Estrada',
'Road Hijacking': 'Sequestro de Estrada',
'Road Usage Condition': 'Condições de Uso da Estrada',
'Role Details': 'Detalhes de função',
'Role Required': 'Função Requerida',
'Role Updated': 'Função Actualizada',
'Role added': 'Função adicionada',
'Role deleted': 'Função apagada',
'Role updated': 'Função actualizada',
'Role': 'Função',
'Role-based': 'Baseado na função / papel a desempenhar',
'Roles Permitted': 'Funções Permitidas',
'Roles': 'Funções',
'Roof tile': 'Telha de telhado',
'Roofs, floors (vertical load)': 'Telhados, pisos (carga vertical)',
'Roster': 'Lista',
'Row Choices (One Per Line)': 'Escolhas de Linha (uma por linha)',
'Rows in table': 'Linhas na tabela',
'Rows selected': 'Linhas seleccionadas',
'Run Functional Tests': 'Executar testes de funções',
'Run Interval': 'Executar Intervalo',
'Running Cost': 'Custo de Execução',
'Safety Assessment Form': 'Formulário de Avaliação de Segurança',
'Sahana Administrator': 'Administrador Sahana',
'Sahana Community Chat': 'Chat da Comunidade Sahana',
'Sahana Eden <=> Other': 'Sahana Eden <=> Outro',
'Sahana Eden Website': 'Site de Sahana Eden',
'Sahana Green': 'Verde Sahana',
'Sahana access granted': 'Acesso autorizado ao Sahana',
'Salted Fish': 'Peixe Salgado',
'Sanitation problems': 'Problemas de Saneamento',
'Satellite Office': 'Serviço de Satélite',
'Satellite': 'Satelite',
'Saturday': 'Sábado',
'Save': 'Gravar',
'Saved.': 'Gravado.',
'Saving...': 'A guardar...',
'Scale of Results': 'Escala de resultados',
'Schedule': 'Horário',
'School activities': 'Actividades escolares',
'School assistance': 'Assistência escolar',
'School attendance': 'Frequência escolar',
'School destroyed': 'Escola destruída',
'School heavily damaged': 'Escola muito danificada',
'School tents, source': 'Tendas Escola, origem',
'School': 'Escola',
'School/studying': 'Escola/a Estudar',
'Schools': 'Escolas',
'Search Activity Report': 'Procurar Relatório de Actividade',
'Search Addresses': 'Procurar Endereços',
'Search Alternative Items': 'Procurar Items Alternantivos',
'Search Assessment Summaries': 'Procurar Sumários de Avaliações',
'Search Asset Assignments': 'Procurar Atribuição de Bens',
'Search Assets': 'Procurar Bens',
'Search Baseline Type': 'Procurar Tipo de Linha de Base',
'Search Budgets': 'Procurar Orçamentos',
'Search Bundles': 'Procurar Conjuntos',
'Search Catalog Items': 'Procurar Items do Catálogo',
'Search Checklists': 'Procurar Checklists',
'Search Commitment Items': 'Procurar Items de Consignação',
'Search Commitments': 'Procurar Compromissos',
'Search Contact Information': 'Procurar Informação de Contacto',
'Search Contacts': 'Procurar Contactos',
'Search Credentials': 'Procurar Credenciais',
'Search Documents': 'Procurar Documentos',
'Search Feature Layers': 'Procurar Camadas de Característica',
'Search Flood Reports': 'Procurar Relatórios de Cheia/Inundação',
'Search Groups': 'Procurar Grupos',
'Search Identity': 'Procurar identidade',
'Search Impact Type': 'Procurar por tipo de impacto',
'Search Impacts': 'Procurar Impactos',
'Search Incident Reports': 'Pesquisar Reports (Registos) de Incidentes',
'Search Inventory Items': 'Procurar Items de Inventário',
'Search Item Categories': 'Procurar Categorias de Item',
'Search Items': 'Procurar Items',
'Search Kits': 'Procurar Conjuntos/ Kits',
'Search Layers': 'Procurar Camadas',
'Search Level 1 Assessments': 'Procurar Avaliações Nível 1',
'Search Level 2 Assessments': 'Procurar Avaliações Nível 2',
'Search Locations': 'Procurar Localizações',
'Search Log Entry': 'Procurar entrada de Log/Diário',
'Search Map Configurations': 'Procurar Configurações de Mapa',
'Search Markers': 'Procurar Marcadores',
'Search Members': 'Procurar Sócio/Membro',
'Search Membership': 'Procurar membro/sócio',
'Search Memberships': 'Procurar Sócios/Membros',
'Search Need Type': 'Procurar Tipo de Necessidade',
'Search Needs': 'Procurar Necessidades',
'Search Notes': 'Procurar Notas',
'Search Offices': 'Procurar Agências',
'Search Organizations': 'Procurar organizações',
'Search Peer': 'Procurar Ponto',
'Search Personal Effects': 'Procurar Efeitos Pessoais',
'Search Persons': 'Procurar Pessoas',
'Search Photos': 'Procurar Fotos',
'Search Population Statistics': 'Procurar Estatísticas da População',
'Search Projects': 'Procurar Projectos',
'Search Rapid Assessments': 'Procurar Avaliações Rápidas',
'Search Received Items': 'Procurar Items Recebidos',
'Search Received Shipments': 'Procurar Carregamentos Recebidos',
'Search Records': 'Procurar Registos',
'Search Registations': 'Procurar Registos',
'Search Registration Request': 'Procurar Pedido de Registo',
'Search Report': 'Procurar Relatório',
'Search Reports': 'Procurar Relatórios',
'Search Request Items': 'Procurar Items de Pedido',
'Search Request': 'Procurar Pedido',
'Search Requested Items': 'Procurar Items Pedidos',
'Search Requests': 'Procurar Pedidos',
'Search Resources': 'Procurar Recursos',
'Search Rivers': 'Procurar Rios/ Cursos de água',
'Search Roles': 'Procurar Funções',
'Search Sections': 'Procurar Secções',
'Search Sectors': 'Procurar Sectores',
'Search Sent Shipments': 'Procurar Carregamentos Enviados',
'Search Service Profiles': 'Procurar Perfis de Serviço',
'Search Settings': 'Definições de Pesquisa',
'Search Shelter Services': 'Procurar Serviços de Abrigo',
'Search Shelter Types': 'Procurar Tipos de Abrigo',
'Search Shelters': 'Procurar Abrigos',
'Search Skills': 'Procurar Capacidades/Habilidades',
'Search Solutions': 'Procurar Soluções',
'Search Staff Types': 'Procurar tipos de funcionários / staff',
'Search Staff': 'Procurar Staff/ funcionário',
'Search Status': 'Procurar Estado',
'Search Subscriptions': 'Procurar Subscrições',
'Search Support Requests': 'Procurar Pedidos de Apoio/Suporte',
'Search Tasks': 'Procurar Tarefas',
'Search Teams': 'Procurar Equipas',
'Search Themes': 'Procurar Temas',
'Search Tickets': 'Procurar Tickets',
'Search Tracks': 'Procurar Trajectos',
'Search Twitter Tags': 'Procurar Tags de Twitter',
'Search Units': 'Procurar Unidades',
'Search Users': 'Procurar utilizadores',
'Search Volunteer Availability': 'Procurar Disponibilidade de Voluntários',
'Search Volunteers': 'Procurar Voluntários',
'Search Warehouses': 'Procurar Armazéns',
'Search and Edit Group': 'Procurar e Editar Grupo',
'Search and Edit Individual': 'Procurar e Editar Indivíduo',
'Search for a Location by name, including local names.': 'Procurar uma Localização por nome, incluindo nomes locais.',
'Search for a Person': 'Procurar uma pessoa',
'Search for a Project': 'Procurar um Projecto',
'Search for a shipment by looking for text in any field.': 'Procurar um carregamento pesquisando texto em qualquer campo.',
'Search for a shipment received between these dates': 'Procurar um carregamento recebido entre estas datas',
'Search for an Organization by name or acronym': 'Procurar uma Organização por nome ou acrónimo/sigla',
'Search for an Organization by name or acronym.': 'Procurar uma Organização por nome ou acrónimo.',
'Search for an item by category.': 'Procurar um item por categoria.',
'Search for an item by text.': 'Procurar um item por texto.',
'Search here for a person record in order to:': 'Procurar aqui pelo registo de uma pessoa de modo a:',
'Search': 'Procurar',
'Searching for different groups and individuals': 'Procurar diferentes grupos e indivíduos',
'Secondary Server (Optional)': 'Servidor Secundário (Opcional)',
'Seconds must be a number between 0 and 60': 'Segundos devem ser um número entre 0 e 60',
'Section Details': 'Detalhes da Secção ',
'Section deleted': 'Secção apagada',
'Section updated': 'Secção actualizada',
'Sections': 'Secções',
'Sector Details': 'Detalhes de Sector',
'Sector added': 'Sector adicionado',
'Sector deleted': 'Sector apagado',
'Sector updated': 'Sector actualizado',
'Sectors': 'Sectores',
'Security Status': 'Estado de Segurança',
'Security problems': 'Problemas de Segurança',
'See unassigned recovery requests': 'Ver pedidos de recuperação não atribuídos/consignados',
'Seen': 'Visto',
'Select Items from the Request': 'Seleccionar Items do Pedido',
'Select Items from this Inventory': 'Seleccionar Items para este Inventário',
'Select Organization': 'Seleccionar Organização',
'Select a question from the list': 'Seleccione uma questão da lista',
'Select a range for the number of total beds': 'Seleccionar um intervalo para o número total de camas',
'Select all that apply': 'Seleccionar todos os que se aplicam',
'Select an Organization to see a list of offices': 'Seleccione uma Organização para ver uma lista de gabinetes',
'Select the overlays for Assessments and Activities relating to each Need to identify the gap.': 'Seleccione as sobreposições para Avaliações e Actividades relacionadas com cada Necessidade para identificar a lacuna.',
'Select the person assigned to this role for this project.': 'Seleccionar a pessoa designada para esta função para este projecto.',
'Select to show this configuration in the Regions menu.': 'Seleccione para mostrar esta configuração no menu de Regiões.',
'Select': 'Seleccionar',
'Selects whether to use a Modem, Tropo or other Gateway for sending out SMS': 'Selecciona se deve ser utilizado um Modem, Tropo ou outra Gateway para enviar SMS',
'Send Alerts using Email &/or SMS': 'Enviar Alertas utilizando E-mail e/ou SMS',
'Send Commitment as Shipment': 'Enviar Consignação como Carregamento',
'Send Notification': 'Enviar Notificação',
'Send Shipment': 'Enviar Carregamento',
'Send message': 'Enviar mensagem',
'Send new message': 'Enviar nova mensagem',
'Send': 'Enviar',
'Sends & Receives Alerts via Email & SMS': 'Envia e Recebe Alertas via Email e SMS',
'Senior (50+)': 'Sénior (50+)',
'Sent Item Details': 'Detalhes de Item Enviado',
'Sent Item deleted': 'Item enviado apagado',
'Sent Item updated': 'Item enviado actualizado',
'Sent Shipment Details': 'Detalhes de Carregamento Enviado',
'Sent Shipment canceled and items returned to Inventory': 'Carregamento Enviado cancelado e items devolvidos ao Inventário',
'Sent Shipment updated': 'Carregamento Enviado actualizado',
'Sent Shipments': 'Carregamentos Enviados',
'Sent': 'Enviado',
'Separated children, caregiving arrangements': 'Crianças separadas, arranjos de cuidados',
'Serial Number': 'Número de Série',
'Series': 'Séries',
'Server': 'Servidor',
'Service Catalog': 'Catálogo de Serviço',
'Service or Facility': 'Serviço ou Instalação',
'Service profile added': 'Perfil de Serviço adicionado',
'Service profile deleted': 'Perfil de serviço apagado',
'Service profile updated': 'Perfil de serviço actualizado',
'Service': 'Serviço',
'Services Available': 'Serviços disponíveis',
'Services': 'Serviços',
'Setting added': 'Definição adicionada',
'Setting deleted': 'Definição apagada',
'Setting updated': 'Definição actualizada',
'Settings updated': 'Definições actualizadas',
'Settings were reset because authenticating with Twitter failed': 'As Definições foram reiniciadas porque a autenticação com o Twitter falhou',
'Settings': 'Definições',
'Severe': 'Grave',
'Severity': 'Severidade',
'Share a common Marker (unless over-ridden at the Feature level)': 'Compartilhar um marcador comum (a não ser sobre-posto no nível do recurso)',
'Shelter & Essential NFIs': 'Abrigo e NFIs Essenciais',
'Shelter Details': 'Detalhes do Abrigo',
'Shelter Name': 'Nome de Abrigo',
'Shelter Registry': 'Registo de Abrigo ',
'Shelter Service Details': 'Detalhe de Serviço de Abrigo',
'Shelter Service added': 'Serviço de Abrigo adicionado',
'Shelter Service updated': 'Serviço de Abrigo actualizado',
'Shelter Service': 'Serviço de Abrigo',
'Shelter Services': 'Serviços de Abrigo',
'Shelter Type Details': 'Detalhes de Tipo de Abrigo',
'Shelter Type added': 'Tipo de Abrigo adicionado',
'Shelter Type deleted': 'Tipo de Abrigo apagado',
'Shelter Type updated': 'Tipo de Abrigo actualizado',
'Shelter Type': 'Tipo de Abrigo',
'Shelter Types and Services': 'Serviços e Tipos de Abrigos',
'Shelter Types': 'Tipos de Abrigo',
'Shelter added': 'Abrigo adicionado',
'Shelter deleted': 'Abrigo apagado',
'Shelter updated': 'Abrigo actualizado',
'Shelter': 'Abrigo',
'Shelters': 'Abrigos',
'Shipment Created': 'Carregamento Criado',
'Shipment Items received by Inventory': 'Items de Carregamento recebidos pelo Inventário',
'Shipment Items sent from Inventory': 'Items de Carregamento enviados do Inventário',
'Shipment Items': 'Items de Carregamento',
'Shipments To': 'Carregamentos Para',
'Shooting': 'Tiroteio',
'Short Assessment': 'Avaliação Curta',
'Short Description': 'Descrição Curta',
'Show Checklist': 'Mostrar Lista de Verificação/Checklist',
'Show Details': 'Mostrar detalhes',
'Show Map': 'Mostrar Mapa',
'Show Region in Menu?': 'Mostrar Região no Menu?',
'Show on map': 'Mostrar no Mapa',
'Sign-up as a volunteer': 'Inscrever-se como voluntário',
'Sign-up for Account': 'Inscrição para conta',
'Sign-up succesful - you should hear from us soon!': 'Inscrição efectuada com sucesso - entraremos em contacto em breve!',
'Sindhi': 'Sindi',
'Site Administration': 'Administração de Site/Local',
'Site or Location': 'Sítio ou localização',
'Site': 'Site',
'Sites': 'Sites/ Locais/ Sitios',
'Situation Awareness & Geospatial Analysis': 'Consciência Situacional e Análise Geo-espacial',
'Situation': 'Situação',
'Sketch': 'Esboço',
'Skill Catalog': 'Catálogo de Capacidade/Habilidade',
'Skill Details': 'Detalhes de Capacidade/Habilidade',
'Skill Status': 'Estado da Capacidade/Habilidade',
'Skill added': 'Habilidade/capacidade adicionada',
'Skill deleted': 'Habilidade apagada',
'Skill updated': 'Capacidade/Habilidade actualizada',
'Skill': 'Capacidade/Habilidade',
'Skill/Training': 'Habilidade/Treino',
'Skills': 'Capacidades/habilidades',
'Slightly Damaged': 'Ligeiramente Danificado',
'Slope failure, debris': 'Talude Instável, detritos/ destroços',
'Small Trade': 'Pequena Transacção/Troca',
'Smoke': 'Fumo',
'Snapshot Report': 'Relatório Instantâneo',
'Snapshot': 'Instantâneo / Snapshot',
'Snow Fall': 'Queda de Neve',
'Snow Squall': 'Rajada de Neve',
'Soil bulging, liquefaction': 'Abaulamento do solo, liquefação',
'Solid waste': 'Resíduos Sólidos',
'Solution Details': 'Detalhes de Solução',
'Solution Item': 'item solução',
'Solution added': 'Solução adicionada',
'Solution deleted': 'Solução apagada',
'Solution updated': 'Solução actualizada',
'Solution': 'Solução',
'Solutions': 'Soluções',
'Some': 'Algum /um pouco',
'Sorry that location appears to be outside the area of the Parent.': 'Desculpe, essa localização parece estar fora da área "pai" (Fora dos limites administrativos).',
'Sorry that location appears to be outside the area supported by this deployment.': 'Desculpe, essa localização parece ser fora da área suportada por este destacamento.',
'Sorry, I could not understand your request': 'Desculpe, não percebi o seu pedido.',
'Sorry, only users with the MapAdmin role are allowed to create location groups.': 'Desculpe, apenas utilizadores com a função de MapAdmin estão autorizados a criar grupos de localização.',
'Sorry, only users with the MapAdmin role are allowed to edit these locations': 'Desculpe, somente utilizadores com a função de MapAdmin estão autorizados a editar estas localizações',
'Sorry, that page is forbidden for some reason.': 'Desculpe, esta página é proibida por alguma razão.',
'Sorry, that service is temporary unavailable.': 'Desculpe, esse serviço está temporariamente indisponível.',
'Sorry, there are no addresses to display': 'Desculpe, não há endereços para exibir/mostrar.',
'Source ID': 'ID da Fonte',
'Source Time': 'Hora de Origem',
'Source': 'Fonte',
'Sources of income': 'Fontes de rendimento',
'Spanish': 'Espanhol',
'Special Ice': 'Gelo Especial',
'Specialized Hospital': 'Hospital Especializado',
'Specific Area (e.g. Building/Room) within the Location that this Person/Group is seen.': 'Área Específica (ex. Edifício/Sala) dentro da localização em que esta Pessoa/Grupo é visto.',
'Specific locations need to have a parent of level': 'Localizações específicas necessitam de ter um precedente de nível',
'Specify a descriptive title for the image.': 'Especificar um título descritivo para a imagem.',
'Specify the bed type of this unit.': 'Especificar o tipo de cama desta unidade.',
'Specify the number of available sets': 'Especificar o número de conjuntos disponíveis',
'Specify the number of sets needed per 24h': 'Especificar o número de conjuntos necessários por 24h',
'Specify the number of units (adult doses) needed per 24h': 'especificar o número de unidades (doses de adulto) necessárias por 24 horas',
'Specify the number of units (litres) of Ringer-Lactate or equivalent solutions needed per 24h': 'Especificar o número de unidades (litros) de Lactato de Ringer ou soluções equivalentes necessárias a cada 24 horas',
'Spherical Mercator?': 'Esférico de Mercator?',
'Spreadsheet uploaded': 'Folha de Cálculo carregada',
'Spring': 'Primavera/ Fonte/ Manancial',
'Squall': 'Rajada',
'Staff Details': 'Detalhes de Staff / funcionário',
'Staff Members': 'Membros do Staff/ Funcionários',
'Staff Type Details': 'Detalhes de Tipo de Funcionário/Staff',
'Staff Type added': 'Tipo de Funcionário/Staff adicionado',
'Staff Type deleted': 'Tipo de Funcionário/Staff apagado',
'Staff Type updated': 'Tipo de Staff actualizado',
'Staff added': 'Staff adicionado',
'Staff deleted': 'Staff Apagado',
'Staff member added': 'Adicionado membro ao Staff',
'Staff member updated': 'Membro de Staff actualizado',
'Staff updated': 'Staff/Funcionário actualizado',
'Staff': 'Pessoal',
'Staffing': 'Pessoal/Funcionários',
'Stairs': 'Escadas',
'Start date': 'Data de Início',
'Start of Period': 'Início do Período',
'State': 'Estado',
'Stationery': 'Artigos de Papelaria',
'Status Report': 'Relatório de Estado/Ponto de Situação',
'Status added': 'Estado adicionado',
'Status deleted': 'Estado apagado',
'Status of clinical operation of the facility.': 'Estado da operação clínica da Instalação.',
'Status of morgue capacity.': 'Estado da capacidade da morgue',
'Status of operations of the emergency department of this hospital.': 'Estado de operações do departamento de emergência deste hospital.',
'Status of security procedures/access restrictions in the hospital.': 'Estado de procedimentos de segurança/ restrições de acesso no hospital.',
'Status of the operating rooms of this hospital.': 'Estado dos blocos operatórios deste hospital.',
'Status updated': 'Estado actualizado',
'Status': 'Estado',
'Steel frame': 'Estrutura de Aço',
'Store spreadsheets in the Eden database': 'Armazenar folhas de cálculo na base de dados Eden',
'Storeys at and above ground level': 'Pisos em e acima do nível do chão',
'Storm Force Wind': 'Vento com Força de Tempestade',
'Storm Surge': 'Maré de Tempestade',
'Stowaway': 'Passageiro Clandestino',
'Street Address': 'Endereço de Rua',
'Structural Hazards': 'Perigos Estruturais',
'Structural': 'Estrutural',
'Sub-type': 'Sub-tipo',
'Subject': 'Assunto',
'Submission successful - please wait': 'Submissão bem sucedida - por favor aguarde',
'Submission successful - please wait...': 'Submissão efectuada com sucesso - por favor aguarde...',
'Submit New (full form)': 'Submeter Novo (formulário completo)',
'Submit New (triage)': 'Submeter Novo (triagem)',
'Submit New': 'Enviar Novo',
'Submit a request for recovery': 'Submeter um pedido para recuperação',
'Submit new Level 1 assessment (full form)': 'Submeter nova avaliação Nível 1 (formulário completo)',
'Submit new Level 1 assessment (triage)': 'Submeter nova avaliação Nível 1 (triagem)',
'Submit new Level 2 assessment': 'Submeter nova avaliação Nível 2',
'Subscription Details': 'Detalhes de Subscrição',
'Subscription added': 'Subscrição adicionada',
'Subscription deleted': 'Subscrição apagada',
'Subscription updated': 'Subscrição actualizada',
'Subscriptions': 'Subscrições/ assinaturas',
'Subsistence Cost': 'Custo de Subsistência',
'Suburb': 'Subúrbio',
'Suggest not changing this field unless you know what you are doing.': 'Sugere-se não alterar este campo a não ser que saiba o que está a fazer.',
'Summary by Administration Level': 'Sumário por Nível de Administração',
'Summary': 'Sumário',
'Support Request': 'Pedido de Suporte/apoio',
'Support Requests': 'Pedidos de Apoio',
'Supports the decision making of large groups of Crisis Management Experts by helping the groups create ranked list.': 'Suporta a tomada de decisão de grandes grupos de Peritos de Gestão de Crise ajudando os grupos a criar listas ordenadas.',
'Sure you want to delete this object?': 'Tem a certeza que pretende apagar este objecto?',
'Surgery': 'Cirurgia',
'Survey Answer Details': 'Detalhes de Resposta a Inquérito',
'Survey Answer added': 'Resposta de Inquérito adicionada',
'Survey Answer deleted': 'Resposta de Inquérito apagada',
'Survey Answer': 'Resposta de Inquérito',
'Survey Module': 'Módulo de Inquérito',
'Survey Name': 'Nome do Inquérito',
'Survey Question Details': 'Detalhes de Questão de Inquérito',
'Survey Question Display Name': 'Nome de Exibição de Questão de Inquérito',
'Survey Question added': 'Questão de Inquérito adicionada',
'Survey Question deleted': 'Questão de Inquérito apagada',
'Survey Question updated': 'Questão de Inquérito actualizada',
'Survey Question': 'Questão e Inquérito',
'Survey Section Display Name': 'Nome de Exibição de Secção de Inquérito',
'Survey Section added': 'Secção de Inquérito adicionada',
'Survey Section deleted': 'Secção de Inquérito apagada',
'Survey Section updated': 'Secção de Inquérito actualizada',
'Survey Section': 'Secção de Inquérito',
'Survey Series Name': 'Nome de Séries de Inquérito',
'Survey Series added': 'Séries de Inquérito adicionadas',
'Survey Series deleted': 'Séries de Inquérito apagadas',
'Survey Series updated': 'Séries de Inquérito actualizadas',
'Survey Series': 'Séries de Inquérito',
'Survey Template Details': 'Detalhes de Modelo de Inquérito',
'Survey Template added': 'Inquérito Modelo adicionado',
'Survey Template deleted': 'Modelo de Inquérito apagado',
'Survey Template updated': 'Modelo de Inquérito actualizado',
'Survey Template': 'Modelo de Inquérito',
'Survey Templates': 'Modelos de Inquérito',
'Symbology': 'Simbologia',
'Sync Conflicts': 'Conflitos de Sincronização',
'Sync History': 'Histórico de Sincronização',
'Sync Now': 'Sincronizar agora',
'Sync Partners are instances or peers (SahanaEden, SahanaAgasti, Ushahidi, etc.) that you want to sync information with. Click on the link on the right to go the page where you can add sync partners, search for sync partners and modify them.': 'Parceiros de Sincronização são instâncias ou pontos/pares (SahanaEden, SahanaAgasti, Ushahidi, etc.) com quem você quer sincronizar informação. Carregue na ligação à direita para ir para a página onde pode adicionar parceiros de sincronização, procurar por parceiros de sincronização e modificá-los.',
'Sync Partners': 'Parceiros de Sincronização',
'Sync Pools': 'Pools de Sincronização',
'Sync Schedule': 'Cronograma de Sincronização',
'Sync Settings': 'Definições de Sincronização',
'Sync process already started on ': 'Processo de sincronização já iniciado em ',
'Synchronisation': 'Sincronização',
'Synchronization Conflicts': 'Conflitos de Sincronização',
'Synchronization Details': 'Detalhes de Sincronização',
'Synchronization History': 'Histórico de Sincronização',
'Synchronization Settings': 'Definições de Sincronização',
'Synchronization allows you to share data that you have with others and update your own database with latest data from other peers. This page provides you with information about how to use the synchronization features of Sahana Eden': 'Sincronização permite-lhe partilhar dados que possui com outros e actualizar a sua própria base de dados com os dados mais recentes de outros pontos/pares. Esta página fornece-lhe informação sobre como utilizar os aspectos de sincronização do Sahana Eden',
'Synchronization not configured.': 'Sincronização não configurada.',
'Synchronization settings updated': 'Definições de Sincronização actualizadas',
'Synchronization': 'Sincronização',
'Syncronisation History': 'Histórico de Sincronização',
'Tags': 'Etiquetas',
'Take shelter in place or per <instruction>': 'Abrigar-se no local ou conforme <instruction>',
'Task List': 'Lista de Tarefas',
'Task Status': 'Estado de Tarefa',
'Task added': 'tarefa adicionada',
'Task deleted': 'Tarefa apagada',
'Task updated': 'Tarefa Actualizada',
'Tasks': 'Tarefas',
'Team Description': 'Descrição de Equipa',
'Team Details': 'Detalhes de Equipa',
'Team Id': 'Id Equipa',
'Team Leader': 'Líder de Equipa',
'Team Member added': 'Membro de Equipa adicionado',
'Team Members': 'Membros de Equipa',
'Team Name': 'Nome de Equipa',
'Team Type': 'Tipo de equipa',
'Team added': 'Equipa adicionada',
'Team deleted': 'Equipa apagada',
'Team updated': 'Equipa actualizada',
'Team': 'Equipa',
'Teams': 'Equipas',
'Technical testing only, all recipients disregard': 'Somente Teste Técnico, todos os destinatários devem ignorar',
'Telecommunications': 'Telecomunicações',
'Telephone': 'Telefone',
'Telephony': 'Telefonia',
'Template file %s not readable - unable to apply theme!': 'Ficheiro modelo %s não legível - não é possível aplicar o tema!',
'Templates': 'Modelos',
'Term for the fifth-level within-country administrative division (e.g. a voting or postcode subdivision). This level is not often used.': 'Termo para o quinto nível dentro da divisão administrativa do país (ex. uma subdivisão de código postal ou votação). Este nível não é muito utilizado.',
'Term for the fourth-level within-country administrative division (e.g. Village, Neighborhood or Precinct).': 'Termo para o quarto nível dentro da divisão administrativa do país (ex. Vila, Freguesia ou Bairro).',
'Term for the primary within-country administrative division (e.g. State or Province).': 'Termo para a divisão administrativa primária dentro do país (ex. Estado ou Província).',
'Term for the secondary within-country administrative division (e.g. District).': 'Termo para a divisão administrativa secundária dentro do país (ex. Distrito).',
'Territorial Authority': 'Autoridade Territorial',
'Terrorism': 'Terrorismo',
'Tertiary Server (Optional)': 'Servidor Terciário (Opcional)',
'Text Color for Text blocks': 'Cor de Texto para blocos de Texto',
'Text before each Text Field (One per line)': 'Texto antes de cada Campo de Texto (Um por linha)',
'Text': 'Texto',
'Thanks for your assistance': 'Obrigado pela sua assistência.',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1 == db.table2.field2" results in a SQL JOIN.': 'A "query" é uma condição como "db.table1.field1==\'value\'". Algo como "db.table1.field1 == db.table2.field2" resulta num SQL JOIN.',
'The Area which this Site is located within.': 'A Área dentro da qual este Sítio está localizado.',
'The Assessments module allows field workers to send in assessments.': 'O módulo Avaliações permite aos trabalhadores do terreno enviar avaliações.',
'The Author of this Document (optional)': 'O Autor deste Documento (opcional)',
'The Building Asssesments module allows building safety to be assessed, e.g. after an Earthquake.': 'O módulo de Avaliações de Edifício permite que a segurança dos edifícios seja avaliada, por exemplo, após um Terramoto',
'The Current Location of the Person/Group, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'A Localização Actual para Pessoa/Grupo, que pode ser geral (para Relatórios) ou precisa (para exibir no Mapa). Inserir alguns caracteres para procurar entre as localizações disponíveis.',
'The Email Address to which approval requests are sent (normally this would be a Group mail rather than an individual). If the field is blank then requests are approved automatically if the domain matches.': 'O Endereço de Email para o qual são enviados os pedidos de aprovação (normalmente isto seria um email de Grupo preferencialmente a um email individual). Se o campo está em branco então os pedidos são aprovados automaticamente se o domínio corresponde.',
'The Incident Reporting System allows the General Public to Report Incidents & have these Tracked.': 'O Sistema de Comunicação de Ocorrência/Incidente permite ao Público em geral comunicar/relatar incidentes e ter estes acompanhados.',
'The Location the Person has come from, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'A Localização da qual a Pessoa veio, que pode ser geral (para Relatórios) ou precisa (para exibir no Mapa). Insira alguns caracteres para procurar por localizações disponíveis.',
'The Location the Person is going to, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'A Localização para onde a Pessoa se dirige, que pode ser geral (para Relatórios) ou precisa (para exibir no Mapa). Insira alguns caracteres para procurar por localizações disponíveis.',
'The Media Library provides a catalog of digital media.': 'A Biblioteca de Média disponibiliza um catálogo de média digital',
'The Messaging Module is the main communications hub of the Sahana system. It is used to send alerts and/or messages using SMS & Email to various groups and individuals before, during and after a disaster.': 'O Módulo de Mensagem é o principal hub de comunicações do sistema Sahana. É utilizado para enviar alertas e/ou mensagens utilizando SMS e Email para vários grupos e indivíduos antes, durante e depois de um desastre.',
'The Requests Management System is a central online repository where all relief organizations, relief workers, government agents and camp sites for displaced personnel can coordinate the supply of aid with their demand. It allows users to allocate the available resources to fulfill the demands effectively and efficiently.': 'O Sistema de Gestão de Pedidos é um repositório/armazém online onde todas as organizações de socorro, trabalhadores de socorro, agentes governamentais e parques de campismo para desalojados podem coordenar o fornecimento de ajuda com a sua procura. Permite aos utilizadores alocar/distribuir/partilhar os recursos disponíveis para satisfazer as exigências efectiva e eficientemente.',
'The Role this person plays within this hospital.': 'A função que esta pessoa desempenha neste hospital.',
'The Role to which this Role reports.': 'A Função à qual esta Função reporta.',
'The Shelter Registry tracks all shelters and stores basic details regarding them. It collaborates with other modules to track people associated with a shelter, the services available etc.': 'O Registo de Abrigos acompanha todos os abrigos e armazena os detalhes básicos sobre os mesmos. Colabora com outros módulos para acompanhar pessoas associadas a um abrigo, os serviços disponíveis, etc.',
'The Shelter this Request is from (optional).': 'O abrigo a que este pedido pertence (opcional).',
'The Shelter this person is checking into.': 'O Abrigo em que esta pessoa está a dar entrada.',
'The URL for the GetCapabilities of a WMS Service whose layers you want accessible via the Map.': 'A URL para a GetCapabilities de um Serviço WMS cujas camadas que deseja acessível através do Mapa.',
'The URL of your web gateway without the post parameters': 'O URL do seu gateway web sem os parametros de post',
'The URL to access the service.': 'O URL para aceder ao serviço',
'The Unique Identifier (UUID) as assigned to this facility by the government.': 'O Identificador Único (UUID) como designado a estas instalações pelo governo.',
'The attribute within the KML which is used for the title of popups.': 'O atributo dentro do KML o qual é utilizado para os popups de título.',
'The attribute(s) within the KML which are used for the body of popups. (Use a space between attributes)': 'O atributo dentro do KML que é utilizado para o corpo dos popups. (utilizar um espaço entre atributos)',
'The body height (crown to heel) in cm.': 'A altura do corpo (cabeça aos pés) em cm.',
'The contact person for this organization.': 'A pessoa de contacto para esta organização.',
'The duplicate record will be deleted': 'O registo duplicado será apagado',
'The first or only name of the person (mandatory).': 'O primeiro ou único nome da pessoa (obrigatório).',
'The language you wish the site to be displayed in.': 'O idioma em que deseja que o site seja exibido.',
'The last known location of the missing person before disappearance.': 'A última localização conhecida da pessoa desaparecida antes do seu desaparecimento.',
'The list of Brands are maintained by the Administrators.': 'A lista de Marcas é mantida pelos Administradores.',
'The list of Item categories are maintained by the Administrators.': 'A lista de categorias de items é mantida pelos Administradores.',
'The name to be used when calling for or directly addressing the person (optional).': 'O nome a ser utilizado para chamar ou dirigir-se directamente à pessoa (opcional).',
'The next screen will allow you to detail the number of people here & their needs.': 'O próximo ecrâ permitir-lhe-á detalhar o número de pessoas aqui e as suas necessidades.',
'The number of Units of Measure of the Alternative Items which is equal to One Unit of Measure of the Item': 'O número de Unidades de Medida dos Items Alternativos que é igual a Uma Unidade de Medida do item.',
'The number of tiles around the visible map to download. Zero means that the 1st page loads faster, higher numbers mean subsequent panning is faster.': 'O número de mosaicos em redor do mapa visível para descarregar. Zero significa que a 1ª página carrega rapidamente, números maiores significam que a transição para páginas seguintes é mais rápida.',
'The person at the location who is reporting this incident (optional)': 'A pessoa na localização que está a comunicar este incidente (opcional)',
'The person reporting the missing person.': 'A pessoa que comunica a pessoa desaparecida.',
'The post variable containing the phone number': 'A variável do método post contendo o número de telefone',
'The post variable on the URL used for sending messages': 'A variável post do URL usado para enviar mensagens',
'The post variables other than the ones containing the message and the phone number': 'As variáveis de postagem que não sejam aquelas contendo a mensagem e o número de telefone',
'The server did not receive a timely response from another server that it was accessing to fill the request by the browser.': 'O servidor não recebeu uma resposta atempada do outro servidor ao qual estava a aceder para preencher o pedido pelo browser.',
'The server received an incorrect response from another server that it was accessing to fill the request by the browser.': 'O servidor recebeu uma resposta incorrecta de outro servidor ao qual estava a aceder para preencher o pedido pelo browser.',
'The subject event no longer poses a threat or concern and any follow on action is described in <instruction>': 'O evento em questão já não constitui ameaça ou preocupação e qualquer acção de seguimento é descrita em <instruction>',
'The token associated with this application on': 'O token associado com esta aplicação em',
'The weight in kg.': 'O peso em kg.',
'Theme added': 'Tema adicionado',
'Theme deleted': 'Tema apagado',
'Theme updated': 'Tema actualizado',
'Theme': 'Tema',
'There are errors': 'Existem erros',
'There are multiple records at this location': 'Existem múltiplos registos nesta localização',
'There are not sufficient items in the Inventory to send this shipment': 'Não existem items suficientes no inventário para enviar este carregamento',
'There is no address for this person yet. Add new address.': 'Ainda não existe endereço para esta pessoa. Adicionar novo endereço.',
'These are settings for Inbound Mail.': 'Estas são definições para correio de entrada',
'These are the Incident Categories visible to normal End-Users': 'Estas são as Categorias de Ocorrência visíveis para Utilizadores- Finais normais.',
'These need to be added in Decimal Degrees.': 'Estes devem ser adicionados em Graus Decimais.',
'They': 'Eles',
'This Group has no Members yet': 'Este Grupo ainda não tem Membros',
'This Team has no Members yet': 'Esta Equipa ainda não tem Membros',
'This appears to be a duplicate of ': 'Isto parece ser um duplicado de ',
'This file already exists on the server as': 'O ficheiro já existe no servidor como',
'This is the way to transfer data between machines as it maintains referential integrity.': 'Esta é a forma de transferir dados entre máquinas ao mesmo tempo que mantém integridade referencial.',
'This is the way to transfer data between machines as it maintains referential integrity...duplicate data should be removed manually 1st!': 'Esta é a forma de transferir dados entre máquinas enquanto mantém o integridade referencial... dados duplicados devem ser removidos manualmente primeiro!',
'This might be due to a temporary overloading or maintenance of the server.': 'Isto pode ser devido a uma sobrecarga temporária ou manutenção do servidor.',
'This page shows you logs of past syncs. Click on the link below to go to this page.': 'Esta página mostra logs de sincronizações passadas. Clique na ligação abaixo para ir para esta página.',
'This screen allows you to upload a collection of photos to the server.': 'Este ecrã permite-lhe carregar uma colecção de fotos/fotografias para o servidor.',
'This shipment has already been received.': 'Este carregamento já foi recebido.',
'This shipment has already been sent.': 'Este carregamento já foi enviado.',
'This shipment has not been received - it has NOT been canceled because can still be edited.': 'Este carregamento não foi recebido - NÃO foi cancelado porque ainda pode ser editado.',
'This shipment has not been sent - it has NOT been canceled because can still be edited.': 'Este carregamento não foi enviado - NÃO foi cancelado porque ainda pode ser editado.',
'This shipment will be confirmed as received.': 'Este carregamento será confirmado quando recebido.',
'This value adds a small mount of distance outside the points. Without this, the outermost points would be on the bounding box, and might not be visible.': 'Este valor acrescenta uma pequena distância em redor dos pontos. Sem isto, os pontos extremos estariam na caixa delimitadora, e poderiam não ser visíveis.',
'This value gives a minimum width and height in degrees for the region shown. Without this, a map showing a single point would not show any extent around that point. After the map is displayed, it can be zoomed as desired.': 'Este valor dá uma largura e altura mínima em graus para a região mostrada. Sem isto, um mapa mostrando um único ponto não mostraria nenhuma extensão em redor desse ponto. Após o mapa ser exibido, pode ser ampliado como desejado.',
'Thunderstorm': 'Trovoada',
'Thursday': 'Quinta-feira',
'Ticket Details': 'Detalhes do Bilhete',
'Ticket ID': 'ID de Bilhete',
'Ticket added': 'Bilhete adicionado',
'Ticket deleted': 'Bilhete apagado',
'Ticket updated': 'Bilhete actualizado',
'Ticket': 'Bilhete',
'Ticketing Module': 'Módulo de Bilhetes',
'Tickets': 'Bilhetes',
'Tilt-up concrete': 'Betão tilt-up',
'Timber frame': 'Estrutura de Madeira',
'Timeline Report': 'Relatório de Linha de Tempo',
'Timeline': 'Linha de Tempo/Cronograma',
'Title': 'Título',
'To Location': 'Para Localização',
'To Person': 'Para Pessoa',
'To begin the sync process, click the button on the right => ': 'Para iniciar o processo de sincronização, clique no botão do lado direito => ',
'To begin the sync process, click this button => ': 'Para iniciar o processo de sincronização, clique neste botão => ',
'To create a personal map configuration, click ': 'Para criar uma configuração pessoal do mapa, clique ',
'To edit OpenStreetMap, you need to edit the OpenStreetMap settings in models/000_config.py': 'Para editar OpenStreetMap, é preciso editar as definições de OpenStreetMap em models/000_config.py',
'To variable': 'Para a variável',
'To': 'Para',
'Tools': 'Ferramentas',
'Tornado': 'Tornado/ Ciclone',
'Total # of Target Beneficiaries': '# Total de Beneficiários-Alvo',
'Total # of households of site visited': '# total de lares do sítio visitado',
'Total Beds': 'Total de Camas',
'Total Beneficiaries': 'Total de beneficiários',
'Total Cost per Megabyte': 'Custo Total por Megabyte',
'Total Cost per Minute': 'Custo Total por Minuto',
'Total Monthly Cost': 'Custo Mensal Total',
'Total Monthly Cost: ': 'Custo Mensal Total: ',
'Total Monthly': 'Total Mensal',
'Total Persons': 'Total de Pessoas',
'Total Recurring Costs': 'Total de Custos Recorrentes',
'Total Unit Cost': 'Custo Unitário Total',
'Total Unit Cost: ': 'Custo Unitário Total: ',
'Total Units': 'Total de Unidades',
'Total gross floor area (square meters)': 'Área bruta de construção (metros quadrados)',
'Total number of beds in this hospital. Automatically updated from daily reports.': 'Número total de camas neste hospital. Actualizado automaticamente a partir dos relatórios diários.',
'Total number of houses in the area': 'Número total de casas na área',
'Total number of schools in affected area': 'Número total de escolas na área afectada',
'Total population of site visited': 'População total do local visitado',
'Totals for Bundle:': 'Totais por Pacote/Embalagem:',
'Tourist Group': 'Grupo de Turistas',
'Traces internally displaced people (IDPs) and their needs': 'Localiza pessoas deslocadas internamente (IDPs) e as suas necessidades',
'Tracing': 'Rastreio',
'Track Details': 'Detalhes do Trajecto',
'Track deleted': 'Trajecto apagado',
'Track updated': 'Trajecto actualizado',
'Track uploaded': 'Trajecto carregado',
'Track with this Person?': 'Localizar com esta pessoa?',
'Track': 'Trajecto',
'Tracking of Projects, Activities and Tasks': 'Acompanhamento de Projectos, Actividades e Tarefas',
'Tracking of basic information on the location, facilities and size of the Shelters': 'Acompanhamento de Informação básica sobre a localização, instalações e tamanho dos Abrigos',
'Tracks the location, distibution, capacity and breakdown of victims in Shelters': 'Acompanha a localização, distribuição, capacidade e repartição de vítimas em Abrigos',
'Tracks': 'Trajectos',
'Traffic Report': 'Relatório de Trânsito',
'Training': 'Formação/treinamento',
'Transit Status': 'Estado do Trânsito',
'Transit': 'Trânsito',
'Transition Effect': 'Efeito de Transição',
'Transparent?': 'Transparente?',
'Transportation assistance, Rank': 'Assistência de transporte, Posto',
'Trauma Center': 'Centro de Trauma',
'Travel Cost': 'Custo da Viagem',
'Tropical Storm': 'Tempestade Tropical',
'Tropo Messaging Token': 'Token do sistema de Mensagens Tropo',
'Tropo Settings': 'Definições Tropo',
'Tropo settings updated': 'Definições Tropo actualizadas',
'Truck': 'Camião',
'Try checking the URL for errors, maybe it was mistyped.': 'Tente verificar o URL por erros, talvez tenha sido mal escrito.',
'Try hitting refresh/reload button or trying the URL from the address bar again.': 'Tente carregar no botão refrescar/recarregar ou tente novamente o endereço URL da barra de endereço.',
'Try refreshing the page or hitting the back button on your browser.': 'Tente refrescar a página ou carregar no botão de retroceder no seu browser.',
'Tuesday': 'Terça-feira',
'Twitter ID or #hashtag': 'ID Twitter ou #hashtag',
'Twitter Settings': 'Definições de Twitter',
'Type of Construction': 'Tipo de Construção',
'Type': 'Tipo',
'Unable to parse CSV file!': 'Não é possível processar o arquivo CSV!',
'Understaffed': 'Com falta de pessoal',
'Unidentified': 'Não Identificado',
'Unit Cost': 'Custo/ Preço Unitário',
'Unit added': 'Unidade adicionada',
'Unit deleted': 'Unidade apagada',
'Unit of Measure': 'Unidade de Medida',
'Unit updated': 'Unidade actualizada',
'Units': 'Unidades',
'Unknown Peer': 'Ponto Desconhecido',
'Unknown': 'Desconhecido',
'Unreinforced masonry': 'Alvenaria não reforçada',
'Unresolved Conflicts': 'Conflitos por resolver',
'Unsafe': 'inseguro / pouco seguro',
'Unselect to disable the modem': 'Desseleccionar para desactivar o modem',
'Unsent': 'Não enviado',
'Unsupported data format!': 'Formato de dados não suportado!',
'Unsupported method!': 'Método não suportado!',
'Update Activity Report': 'Actualizar Relatório de Actividade',
'Update Cholera Treatment Capability Information': 'Actualizar Informação de Capacidade de Tratamento de Cólera',
'Update Service Profile': 'Actualizar Perfil de Serviço',
'Update Task Status': 'Actualizar Estado de Tarefa',
'Update Unit': 'Actualizar Unidade',
'Update if Master': 'Actualizar se Mestre',
'Update if Newer': 'Actualizar se Novo',
'Update': 'Actualizar',
'Upload Photos': 'Carregar Fotos',
'Upload Track': 'Carregar Trajecto',
'Upload a Spreadsheet': 'Carregar uma Folha de Cálculo',
'Upload an image file here.': 'Carregar um ficheiro de imagem aqui.',
'Upload an image, such as a photo': 'Carregar uma imagem, como uma fotografia',
'Urban Fire': 'Incêndio Urbano',
'Urban area': 'Área Urbana',
'Urgent': 'Urgente',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Utilize (...)&(...) para AND, (...)|(...) para OR, e ~(...) para NOT para construir queries mais complexas.',
'Use default': 'Utilizar Padrão',
'Use these links to download data that is currently in the database.': 'Utilize estas ligações para descarregar dados que estão actualmente na base de dados.',
'Used to import data from spreadsheets into the database': 'Utilizado para importar dados da folha de cálculo para a base de dados',
'User Account has been Disabled': 'Conta do utilizador foi desactivada',
'User Management': 'Gestão de Utilizador',
'User Profile': 'Perfil de Utilizador',
'User Requests': 'Pedidos de Utilizador',
'User Updated': 'Utilizador Actualizado',
'User added': 'Utilizador adicionado',
'User already has this role': 'Utilizador já tem esta função',
'User deleted': 'Utilizador apagado',
'User updated': 'Utilizador actualizado',
'User': 'Utilizador',
'Username': 'Nome de Utilizador',
'Users removed': 'Utilizadores removidos',
'Utilities': 'Utilidades',
'Utility, telecommunication, other non-transport infrastructure': 'Utilidade, Telecomunicação, outra infraestrutura que não de transportes',
'Vacancies': 'Vagas',
'Various Reporting functionalities': 'Várias funcionalidades de Comunicação/relatório',
'Vehicle Crime': 'Criminalidade automóvel',
'Vehicle Types': 'Tipos de Veículos',
'Vehicle': 'Veículo',
'Verification Status': 'Estado de Verificação',
'Verified?': 'Verificado?',
'Verify password': 'Verificar Palavra-Passe',
'Version': 'Versão',
'Very High': 'Muito Alto',
'View Alerts received using either Email or SMS': 'Ver Alertas recebidos utilizando quer E-mail ou SMS',
'View Fullscreen Map': 'Ver Mapa em Ecrã Inteiro',
'View Image': 'Ver Imagem',
'View Outbox': 'Ver Caixa de Saída',
'View Settings': 'Ver configurações',
'View Tickets': 'Ver Bilhetes',
'View and/or update their details': 'Visualizar e/ou actualizar os seus detalhes',
'View or update the status of a hospital.': 'Ver ou actualizar o estado de um hospital.',
'View pending requests and pledge support.': 'Visualizar pedidos pendentes e garantir apoio.',
'View the hospitals on a map.': 'Ver os hospitais num mapa.',
'Village Leader': 'Líder da Aldeia/ Povoação',
'Village': 'Vila/Aldeia',
'Visual Recognition': 'Reconhecimento Visual',
'Volcanic Ash Cloud': 'Nuvem de cinza vulcânica',
'Volcanic Event': 'Evento Vulcânico',
'Volunteer Availability': 'Disponibilidade de Voluntário',
'Volunteer Management': 'Gestão de Voluntários',
'Volunteer Project': 'Projecto Voluntário',
'Volunteer Request': 'Pedido de Voluntário',
'Volunteer added': 'Voluntário adicionado',
'Volunteer availability added': 'Disponibilidade Voluntária adicionada',
'Volunteer availability deleted': 'Disponibilidade de Voluntário apagada',
'Volunteer availability updated': 'Disponibilidade Voluntária actualizada',
'Volunteer deleted': 'Voluntário apagado',
'Volunteer details updated': 'Detalhes de Voluntário actualizados',
'Volunteer updated': 'Voluntário actualizado',
'Volunteers List': 'Lista de Voluntários',
'Volunteers were notified!': 'Voluntários foram notificados!',
'Volunteers': 'Voluntários',
'Vote': 'Voto/Votar',
'Votes': 'Votos / Votação',
'WASH': 'WASH (Água, Saneamento e Higiene)',
'WMS Browser Name': 'Nome de Browser WMS',
'WMS Browser URL': 'URL de Browser WMS',
'Walking Only': 'Somente para Caminhar/ Só para Peões',
'Wall or other structural damage': 'Danos de parede ou outros estruturais',
'Warehouse Details': 'Detalhes de Armazém',
'Warehouse deleted': 'Armazém apagado',
'Warehouse updated': 'Armazém actualizado',
'Warehouses': 'Armazéns',
'Water Sanitation Hygiene': 'Água, Saneamento e Higiene',
'Water collection': 'Recolha de Água',
'Water gallon': 'Galão de água',
'Water storage containers in households': 'Contentores de armazenamento de água em lares',
'Waterspout': "Tromba d'água",
'Website': 'Sitio Web',
'Wednesday': 'Quarta-feira',
'Weight (kg)': 'Peso (kg)',
'Weight': 'Peso',
'Welcome to the Sahana Portal at': 'Bem-Vindo ao Portal Sahana em',
'Well-Known Text': 'Well-Known Text (WKT)',
'Wheat': 'Trigo',
'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points.': 'Quando é exibido um mapa focado numa colecção de pontos, o mapa é ampliado para mostrar apenas a região que delimita os pontos.',
'When reports were entered': 'Quando os relatórios foram inseridos/ deram entrada',
'Whiskers': 'Bigodes',
'Who is doing what and where': 'Quem está a fazer o quê e onde',
'Who usually collects water for the family?': 'Quem normalmente recolhe água para a família?',
'Width': 'Largura',
'Wild Fire': 'Fogo Selvagem',
'Window frame': 'Moldura de Janela',
'Winter Storm': 'Tempestade de Inverno',
'Women of Child Bearing Age': 'Mulheres em idade fértil',
'Women who are Pregnant or in Labour': 'Mulheres que estão Grávidas ou em Trabalho de Parto',
'Womens Focus Groups': 'Grupos de Foco de Mulheres',
'Wooden plank': 'Prancha de Madeira',
'Wooden poles': 'Postes de Madeira',
'Working hours end': 'Fim de horas de trabalho',
'Working hours start': 'Início de horas de trabalho',
'Working or other to provide money/food': 'A trabalhar ou outro meio para providenciar dinheiro/comida',
'X-Ray': 'Raio-X',
'Year built': 'Ano de Construção',
'Year of Manufacture': 'Ano de Fabrico',
'Yellow': 'Amarelo',
'Yes': 'Sim',
'You are a recovery team?': 'Você é a equipa de recuperação?',
'You are attempting to delete your own account - are you sure you want to proceed?': 'Você está a tentar apagar a sua própria conta - tem a certeza que quer continuar?',
'You are currently reported missing!': 'Você está actualmente dado como desaparecido!',
'You can click on the map below to select the Lat/Lon fields': 'Você pode carregar no mapa abaixo para seleccionar os campos de Lat/Long',
'You can set the modem settings for SMS here.': 'Você pode programar as definições do modem para SMS aqui.',
'You can use the Conversion Tool to convert from either GPS coordinates or Degrees/Minutes/Seconds.': 'Você pode utilizar a Ferramenta de Conversão para converter quer a partir de coordenadas GPS ou Graus/Minutos/Segundos.',
'You do no have permission to cancel this received shipment.': 'Você não tem permissão para cancelar este carregamento recebido',
'You do no have permission to cancel this sent shipment.': 'Você não tem permissão para cancelar este carregamento enviado.',
'You do no have permission to receive this shipment.': 'Você não tem permissão para receber este carregamento.',
'You do no have permission to send this shipment.': 'Você não tem permissão para enviar este carregamento.',
'You do not have permission for any site to add an inventory item.': 'Você não tem permissão para nenhum site para adicionar um item do inventário.',
'You do not have permission for any site to make a commitment.': 'Você não tem permissão para nenhum site para fazer uma consignação.',
'You do not have permission for any site to make a request.': 'Você não tem permissão para nenhum local/Site para fazer um pedido.',
'You do not have permission for any site to perform this action.': 'Não tem permissão em nenhum sítio para executar esta acção.',
'You do not have permission for any site to receive a shipment.': 'Você não tem permissão em nenhum sítio para receber um carregamento.',
'You do not have permission for any site to send a shipment.': 'Você não tem permissão em nenhum sítio para enviar um carregamento.',
'You do not have permission to send a shipment from this site.': 'Você não tem permissão para enviar um carregamento a partir deste site/local.',
'You have a personal map configuration. To change your personal configuration, click ': 'Você tem uma configuração de mapa pessoal. Para modificar a sua configuração pessoal, pressione',
'You have found a dead body?': 'Você encontrou algum cadáver?',
'You must be logged in to report persons missing or found.': 'Você tem que ter sessão iniciada para comunicar pessoas desaparecidas ou encontradas.',
'You must provide a series id to proceed.': 'Você tem de providenciar uma identificação de série para prosseguir.',
'You should edit Twitter settings in models/000_config.py': 'Você deve editar as definições do Twitter em models/000_config.py',
'Your current ordered list of solution items is shown below. You can change it by voting again.': 'A sua lista ordenada actual de items de solução é mostrada abaixo. Pode alterá-la votando novamente.',
'Your post was added successfully.': 'A sua publicação foi adicionada com sucesso.',
'Your system has been assigned a unique identification (UUID), which other computers around you can use to identify you. To view your UUID, you may go to Synchronization -> Sync Settings. You can also see other settings on that page.': 'Foi atribuída ao seu sistema uma identificação única (UUID), a qual outros computadores ao seu redor podem utilizar para o identificar. Para visualizar a sua UUID, pode ir a Sincronização -> Definições de Sincronização. Pode também ver outras definições nessa página.',
'Zinc roof': 'Telhado de Zinco',
'Zoom Levels': 'Níveis de Zoom',
'active': 'activo',
'an individual/team to do in 1-2 days': 'um indivíduo/equipa para fazer em 1-2 dias',
'assigned': 'atribuído',
'average': 'Média',
'black': 'preto',
'blond': 'loiro(a)',
'blue': 'azul',
'brown': 'castanho',
'by': 'por',
'can be used to extract data from spreadsheets and put them into database tables.': 'pode ser utilizado para extrair dados de folhas de cálculo e colocá-los em tabelas de base de dados.',
'cancelled': 'Cancelado',
'caucasoid': 'caucasóide',
'check all': 'marcar tudo',
'click for more details': 'Carregue para mais detalhes',
'completed': 'completo',
'confirmed': 'confirmado',
'consider': 'considerar',
'curly': 'encaracolado',
'currently registered': 'registado actualmente',
'daily': 'diariamente',
'dark': 'escuro',
'data uploaded': 'dados carregados',
'database %s select': 'base de dados %s seleccionar',
'database': 'Base de dados',
'delete all checked': 'apagar todos os marcados',
'deleted': 'apagado',
'design': 'design/ desenho',
'diseased': 'falecido',
'displaced': 'deslocado',
'divorced': 'divorciado',
'done!': 'efectuado!',
'duplicate': 'duplicado',
'edit': 'editar',
'eg. gas, electricity, water': 'ex. gás, electricidade, água',
'embedded': 'incorporado',
'enclosed area': 'Área fechada',
'export as csv file': 'Exportar como ficheiro csv',
'fat': 'gordo/ gordura',
'feedback': 'feedback/ comentários/ parecer',
'female': 'feminino',
'flush latrine with septic tank': 'latrina com tanque séptico',
'food_sources': 'Fontes de Alimentos',
'forehead': 'testa',
'from Twitter': 'de Twitter',
'green': 'verde',
'here': 'aqui',
'high': 'alto',
'hourly': 'hora a hora/por hora',
'households': 'Lares',
'identified': 'identificado',
'ignore': 'ignorar',
'in Deg Min Sec format': 'em formato Grau Min Seg',
'in GPS format': 'no formato GPS',
'inactive': 'Inactivo',
'injured': 'ferido',
'insert new %s': 'inserir novo %s',
'insert new': 'inserir novo',
'invalid request': 'pedido inválido',
'invalid': 'inválido',
'is a central online repository where information on all the disaster victims and families, especially identified casualties, evacuees and displaced people can be stored. Information like name, age, contact number, identity card number, displaced location, and other details are captured. Picture and finger print details of the people can be uploaded to the system. People can also be captured by group for efficiency and convenience.': 'é um repositório online central onde a informação sobre todas as famílias e vítimas do desastre, especialmente baixas identificadas, evacuados e desalojados pode ser armazenada. Informação como nome, idade, número de contacto, número de cartão de identidade, localização de desalojados, e outros detalhes são captados. Detalhes pessoais como fotografia e impressão digital podem ser carregados para o sistema. As pessoas também podem ser captadas por grupo para maior eficiência e conveniência.',
'is envisioned to be composed of several sub-modules that work together to provide complex functionality for the management of relief and project items by an organization. This includes an intake system, a warehouse management system, commodity tracking, supply chain management, fleet management, procurement, financial tracking and other asset and resource management capabilities': 'está previsto ser composto por vários sub-módulos que trabalham juntos para providenciar funcionalidade complexa para a gestão dos items de projecto e socorro por uma organização. Isto inclui um sistema de recepção, um sistema de gestão de armazém, rastreio de mercadorias, gestão de cadeia de abastecimento, gestão de frota, aquisição, acompanhamento financeiro e outras capacidades de gestão de bens e recursos.',
'keeps track of all incoming tickets allowing them to be categorised & routed to the appropriate place for actioning.': 'acompanha todos os bilhetes que entram permitindo que sejam categorizados e direccionados para o lugar apropriado para accionamento.',
'latrines': 'WC /Latrinas',
'leave empty to detach account': 'deixar vazio para desanexar conta',
'legend URL': 'legenda URL',
'light': 'claro',
'login': 'Iniciar sessão',
'long': 'comprido',
'long>12cm': 'comprido >12cm',
'low': 'baixo',
'male': 'masculino',
'married': 'casado',
'maxResolution': 'Resolução Máxima',
'medium': 'médio',
'medium<12cm': 'médio<12cm',
'meters': 'metros',
'module allows the site administrator to configure various options.': 'módulo permite ao administrador do site configurar várias opções.',
'module helps monitoring the status of hospitals.': 'módulo ajuda a monitorizar o estado dos hospitais.',
'mongoloid': 'mongolóide',
'more': 'Mais',
'n/a': 'não disponível',
'never': 'nunca',
'new': 'novo',
'next 100 rows': 'próximas 100 linhas',
'no': 'não',
'none': 'nenhum',
'not accessible - no cached version available!': 'Não acessível - não há versão em cache disponível!',
'not accessible - using cached version from': 'não acessível - utilizando versão em cache de',
'num Zoom Levels': 'Número de Níveis de Zoom',
'on': 'em / ligado',
'once': 'uma vez',
'open defecation': 'defecação a céu aberto',
'or import from csv file': 'ou importar do ficheiro csv',
'other': 'outro',
'over one hour': 'mais de uma hora',
'people': 'pessoas',
'pit latrine': 'Latrina de fosso',
'pit': 'cova/fosso',
'postponed': 'adiado',
'preliminary template or draft, not actionable in its current form': 'modelo preliminar ou rascunho, não accionável na sua forma actual',
'previous 100 rows': '100 linhas anteriores',
'record does not exist': 'Registo não existe',
'record id': 'Id de registo',
'red': 'vermelho',
'reported': 'comunicado / relatado',
'reports successfully imported.': 'Relatórios importados com sucesso.',
'representation of the Polygon/Line.': 'representação do Polígono/Linha.',
'retired': 'reformado',
'river': 'rio',
'see comment': 'ver comentário',
'selected': 'seleccionado',
'separated from family': 'separado da família',
'separated': 'separado',
'shaved': 'barbeado',
'short<6cm': 'curto <6cm',
'sides': 'lados',
'sign-up now': 'Registar agora',
'single': 'solteiro(a)',
'slim': 'magro',
'specify': 'especificar',
'staff': 'staff/ funcionários',
'state': 'estado',
'straight': 'liso',
'suffered financial losses': 'Sofreu perdas financeiras',
'table': 'Tabela',
'tall': 'alto',
'this': 'isto',
'to access the system': 'para aceder ao sistema',
'tonsure': 'tonsura',
'unable to parse csv file': 'incapaz de analisar o arquivo csv',
'uncheck all': 'desmarcar tudo',
'unidentified': 'não identificado',
'unspecified': 'não especificado',
'updated': 'actualizado',
'updates only': 'somente actualizações',
'verified': 'verificado',
'volunteer': 'voluntário',
'wavy': 'ondulado',
'weekly': 'semanal',
'white': 'branco',
'wider area, longer term, usually contain multiple Activities': 'área maior, termo mais longo, normalmente contém múltiplas Actividades',
'widowed': 'viúvo(a)',
'window': 'janela',
'within human habitat': 'dentro do habitat/habitação humano',
'xlwt module not available within the running Python - this needs installing for XLS output!': 'módulo xlwt não disponível dentro do Python em execução - necessita instalação para output XLS!',
'yes': 'sim',
}
|
code-for-india/sahana_shelter_worldbank
|
languages/pt.py
|
Python
|
mit
| 207,773
|
[
"VisIt"
] |
9025f9c3b8100e81038cbf472bbcc3cf1997160784998200f35004e5538f8f6b
|
#!/usr/bin/python2
#~main.py~
import actions
import player
from time import sleep
version = 1.2
#enables the debug menu option in the main menu
DEBUG_MODE = "enabled"
#DEBUG_MODE = "disabled"
if DEBUG_MODE == "enabled":
import debug
cache = None #place to remember last function
def menu(Player):
global cache
actions.clearscreen()
startScreen = ("Current Health: %d\n"
"\nWhat would you like to do?\n"
"***************************\n"
"** Enter: Prev Action **\n"
"** R: Roll Dice **\n"
"** V: Visit Shop **\n"
"** L: List Inventory **\n"
"** C: Change Weapon **\n"
"** U: Use Potion **\n"
"** S: Save Game **\n"
"** Q: Quit **\n" %(Player.health))
if DEBUG_MODE == "enabled":
print "%s** D: Debug Menu **" %(startScreen)
print "***************************"
choice = raw_input("\nChoice: ").lower()
#using this method helps clean up all those logic gates
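# Dictionary dispatch: each menu key maps to a callable. Some handlers
# take no arguments while others expect the Player object; the
# try/except below sorts out both cases. The '' entry replays the
# cached previous action when the user just presses Enter.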
choices = {
'r': actions.roll_dice,
'l': Player.list_inventory,
'c': Player.set_current_weapon,
'v': actions.visit_shop,
'u': Player.use_potion,
's': actions.save_game,
'q': actions.quit_game,
'd': debug.menu,
'': cache, #for convenience
}
if not choice and not cache:
print ("\nThere is no previous action.\n"
"Please choose again.")
sleep(2)
else:
try:
if choices[choice] != cache:
cache = choices[choice]
x = choices[choice]()
if x == 0:
return 0
except TypeError:
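# The zero-argument call above raised TypeError, so this
# handler expects the Player object instead.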
if choices[choice] != cache:
cache = choices[choice]
choices[choice](Player)
except KeyError:
print ("\nYou didn't select a valid choice.\n"
"Please choose again.")
sleep(2)
#Starts the game
actions.clearscreen()
print "Dungeon Quest v%.2f" % version
#name = raw_input("\nWho dares to enter the dungeon? ")
name = "Bran"
newPlayer = player.CreatePlayer(name)
while newPlayer.health > 0:
if menu(newPlayer) == 0:
break
if newPlayer.basilisk_attack is True:
print "\nCongratulations! You made it through the dungeon alive!\n"
break
elif newPlayer.run_away > 5:
actions.clearscreen()
print ("\nYou're too much of a wimp to make it though the dungeon alive!\n"
"Don't show your face here again until you toughen yourself up!\n")
break
if newPlayer.health <= 0:
print "\nYou were slain! Maybe you should carry more health potions with you next time!\n"
|
brando56894/Dungeon-Quest
|
main.py
|
Python
|
gpl-3.0
| 2,837
|
[
"VisIt"
] |
b9192d4c97c2e68b0a34928d4defea1389e922a270aa7179c2c71e251653bb91
|
"""
Extended docstrings for functions.py
"""
pi = r"""
`\pi`, roughly equal to 3.141592654, represents the area of the unit
circle, the half-period of trigonometric functions, and many other
things in mathematics.
Mpmath can evaluate `\pi` to arbitrary precision::
>>> from mpmath import *
>>> mp.dps = 50; mp.pretty = True
>>> +pi
3.1415926535897932384626433832795028841971693993751
This shows digits 99991-100000 of `\pi`::
>>> mp.dps = 100000
>>> str(pi)[-10:]
'5549362464'
**Possible issues**
:data:`pi` always rounds to the nearest floating-point
number when used. This means that exact mathematical identities
involving `\pi` will generally not be preserved in floating-point
arithmetic. In particular, multiples of :data:`pi` (except for
the trivial case ``0*pi``) are *not* the exact roots of
:func:`~mpmath.sin`, but differ roughly by the current epsilon::
>>> mp.dps = 15
>>> sin(pi)
1.22464679914735e-16
One solution is to use the :func:`~mpmath.sinpi` function instead::
>>> sinpi(1)
0.0
See the documentation of trigonometric functions for additional
details.
"""
degree = r"""
Represents one degree of angle, `1^{\circ} = \pi/180`, or
about 0.01745329. This constant may be evaluated to arbitrary
precision::
>>> from mpmath import *
>>> mp.dps = 50; mp.pretty = True
>>> +degree
0.017453292519943295769236907684886127134428718885417
The :data:`degree` object is convenient for conversion
to radians::
>>> sin(30 * degree)
0.5
>>> asin(0.5) / degree
30.0
"""
e = r"""
The transcendental number `e` = 2.718281828... is the base of the
natural logarithm (:func:`~mpmath.ln`) and of the exponential function
(:func:`~mpmath.exp`).
Mpmath can evaluate `e` to arbitrary precision::
>>> from mpmath import *
>>> mp.dps = 50; mp.pretty = True
>>> +e
2.7182818284590452353602874713526624977572470937
This shows digits 99991-100000 of `e`::
>>> mp.dps = 100000
>>> str(e)[-10:]
'2100427165'
**Possible issues**
:data:`e` always rounds to the nearest floating-point number
when used, and mathematical identities involving `e` may not
hold in floating-point arithmetic. For example, ``ln(e)``
might not evaluate exactly to 1.
In particular, don't use ``e**x`` to compute the exponential
function. Use ``exp(x)`` instead; this is both faster and more
accurate.
"""
phi = r"""
Represents the golden ratio `\phi = (1+\sqrt 5)/2`,
approximately equal to 1.6180339887. To high precision,
its value is::
>>> from mpmath import *
>>> mp.dps = 50; mp.pretty = True
>>> +phi
1.6180339887498948482045868343656381177203091798058
Formulas for the golden ratio include the following::
>>> (1+sqrt(5))/2
1.6180339887498948482045868343656381177203091798058
>>> findroot(lambda x: x**2-x-1, 1)
1.6180339887498948482045868343656381177203091798058
>>> limit(lambda n: fib(n+1)/fib(n), inf)
1.6180339887498948482045868343656381177203091798058
"""
euler = r"""
Euler's constant or the Euler-Mascheroni constant `\gamma`
= 0.57721566... is a number of central importance to
number theory and special functions. It is defined as the limit
.. math ::
\gamma = \lim_{n\to\infty} H_n - \log n
where `H_n = 1 + \frac{1}{2} + \ldots + \frac{1}{n}` is a harmonic
number (see :func:`~mpmath.harmonic`).
Evaluation of `\gamma` is supported at arbitrary precision::
>>> from mpmath import *
>>> mp.dps = 50; mp.pretty = True
>>> +euler
0.57721566490153286060651209008240243104215933593992
We can also compute `\gamma` directly from the definition,
although this is less efficient::
>>> limit(lambda n: harmonic(n)-log(n), inf)
0.57721566490153286060651209008240243104215933593992
This shows digits 9991-10000 of `\gamma`::
>>> mp.dps = 10000
>>> str(euler)[-10:]
'4679858165'
Integrals, series, and representations for `\gamma` in terms of
special functions include the following (there are many others)::
>>> mp.dps = 25
>>> -quad(lambda x: exp(-x)*log(x), [0,inf])
0.5772156649015328606065121
>>> quad(lambda x,y: (x-1)/(1-x*y)/log(x*y), [0,1], [0,1])
0.5772156649015328606065121
>>> nsum(lambda k: 1/k-log(1+1/k), [1,inf])
0.5772156649015328606065121
>>> nsum(lambda k: (-1)**k*zeta(k)/k, [2,inf])
0.5772156649015328606065121
>>> -diff(gamma, 1)
0.5772156649015328606065121
>>> limit(lambda x: 1/x-gamma(x), 0)
0.5772156649015328606065121
>>> limit(lambda x: zeta(x)-1/(x-1), 1)
0.5772156649015328606065121
>>> (log(2*pi*nprod(lambda n:
... exp(-2+2/n)*(1+2/n)**n, [1,inf]))-3)/2
0.5772156649015328606065121
For generalizations of the identities `\gamma = -\Gamma'(1)`
and `\gamma = \lim_{x\to1} \zeta(x)-1/(x-1)`, see
:func:`~mpmath.psi` and :func:`~mpmath.stieltjes` respectively.
"""
catalan = r"""
Catalan's constant `K` = 0.91596559... is given by the infinite
series
.. math ::
K = \sum_{k=0}^{\infty} \frac{(-1)^k}{(2k+1)^2}.
Mpmath can evaluate it to arbitrary precision::
>>> from mpmath import *
>>> mp.dps = 50; mp.pretty = True
>>> +catalan
0.91596559417721901505460351493238411077414937428167
One can also compute `K` directly from the definition, although
this is significantly less efficient::
>>> nsum(lambda k: (-1)**k/(2*k+1)**2, [0, inf])
0.91596559417721901505460351493238411077414937428167
This shows digits 9991-10000 of `K`::
>>> mp.dps = 10000
>>> str(catalan)[-10:]
'9537871503'
Catalan's constant has numerous integral representations::
>>> mp.dps = 50
>>> quad(lambda x: -log(x)/(1+x**2), [0, 1])
0.91596559417721901505460351493238411077414937428167
>>> quad(lambda x: atan(x)/x, [0, 1])
0.91596559417721901505460351493238411077414937428167
>>> quad(lambda x: ellipk(x**2)/2, [0, 1])
0.91596559417721901505460351493238411077414937428167
>>> quad(lambda x,y: 1/(1+(x*y)**2), [0, 1], [0, 1])
0.91596559417721901505460351493238411077414937428167
As well as series representations::
>>> pi*log(sqrt(3)+2)/8 + 3*nsum(lambda n:
... (fac(n)/(2*n+1))**2/fac(2*n), [0, inf])/8
0.91596559417721901505460351493238411077414937428167
>>> 1-nsum(lambda n: n*zeta(2*n+1)/16**n, [1,inf])
0.91596559417721901505460351493238411077414937428167
"""
khinchin = r"""
Khinchin's constant `K` = 2.68542... is a number that
appears in the theory of continued fractions. Mpmath can evaluate
it to arbitrary precision::
>>> from mpmath import *
>>> mp.dps = 50; mp.pretty = True
>>> +khinchin
2.6854520010653064453097148354817956938203822939945
An integral representation is::
>>> I = quad(lambda x: log((1-x**2)/sincpi(x))/x/(1+x), [0, 1])
>>> 2*exp(1/log(2)*I)
2.6854520010653064453097148354817956938203822939945
The computation of ``khinchin`` is based on an efficient
implementation of the following series::
>>> f = lambda n: (zeta(2*n)-1)/n*sum((-1)**(k+1)/mpf(k)
... for k in range(1,2*int(n)))
>>> exp(nsum(f, [1,inf])/log(2))
2.6854520010653064453097148354817956938203822939945
"""
glaisher = r"""
Glaisher's constant `A`, also known as the Glaisher-Kinkelin
constant, is a number approximately equal to 1.282427129 that
sometimes appears in formulas related to gamma and zeta functions.
It is also related to the Barnes G-function (see :func:`~mpmath.barnesg`).
The constant is defined as `A = \exp(1/12-\zeta'(-1))` where
`\zeta'(s)` denotes the derivative of the Riemann zeta function
(see :func:`~mpmath.zeta`).
Mpmath can evaluate Glaisher's constant to arbitrary precision:
>>> from mpmath import *
>>> mp.dps = 50; mp.pretty = True
>>> +glaisher
1.282427129100622636875342568869791727767688927325
We can verify that the value computed by :data:`glaisher` is
correct using mpmath's facilities for numerical
differentiation and arbitrary evaluation of the zeta function:
>>> exp(mpf(1)/12 - diff(zeta, -1))
1.282427129100622636875342568869791727767688927325
Here is an example of an integral that can be evaluated in
terms of Glaisher's constant:
>>> mp.dps = 15
>>> quad(lambda x: log(gamma(x)), [1, 1.5])
-0.0428537406502909
>>> -0.5 - 7*log(2)/24 + log(pi)/4 + 3*log(glaisher)/2
-0.042853740650291
Mpmath computes Glaisher's constant by applying Euler-Maclaurin
summation to a slowly convergent series. The implementation is
reasonably efficient up to about 10,000 digits. See the source
code for additional details.
References:
http://mathworld.wolfram.com/Glaisher-KinkelinConstant.html
"""
apery = r"""
Represents Apery's constant, which is the irrational number
approximately equal to 1.2020569 given by
.. math ::
\zeta(3) = \sum_{k=1}^\infty\frac{1}{k^3}.
The calculation is based on an efficient hypergeometric
series. To 50 decimal places, the value is given by::
>>> from mpmath import *
>>> mp.dps = 50; mp.pretty = True
>>> +apery
1.2020569031595942853997381615114499907649862923405
Other ways to evaluate Apery's constant using mpmath
include::
>>> zeta(3)
1.2020569031595942853997381615114499907649862923405
>>> -psi(2,1)/2
1.2020569031595942853997381615114499907649862923405
>>> 8*nsum(lambda k: 1/(2*k+1)**3, [0,inf])/7
1.2020569031595942853997381615114499907649862923405
>>> f = lambda k: 2/k**3/(exp(2*pi*k)-1)
>>> 7*pi**3/180 - nsum(f, [1,inf])
1.2020569031595942853997381615114499907649862923405
This shows digits 9991-10000 of Apery's constant::
>>> mp.dps = 10000
>>> str(apery)[-10:]
'3189504235'
"""
mertens = r"""
Represents the Mertens or Meissel-Mertens constant, which is the
prime number analog of Euler's constant:
.. math ::
B_1 = \lim_{N\to\infty}
\left(\sum_{p_k \le N} \frac{1}{p_k} - \log \log N \right)
Here `p_k` denotes the `k`-th prime number. Other names for this
constant include the Hadamard-de la Vallee-Poussin constant or
the prime reciprocal constant.
The following gives the Mertens constant to 50 digits::
>>> from mpmath import *
>>> mp.dps = 50; mp.pretty = True
>>> +mertens
0.2614972128476427837554268386086958590515666482612
References:
http://mathworld.wolfram.com/MertensConstant.html
"""
twinprime = r"""
Represents the twin prime constant, which is the factor `C_2`
featuring in the Hardy-Littlewood conjecture for the growth of the
twin prime counting function,
.. math ::
\pi_2(n) \sim 2 C_2 \frac{n}{\log^2 n}.
It is given by the product over primes
.. math ::
C_2 = \prod_{p\ge3} \frac{p(p-2)}{(p-1)^2} \approx 0.66016
Computing `C_2` to 50 digits::
>>> from mpmath import *
>>> mp.dps = 50; mp.pretty = True
>>> +twinprime
0.66016181584686957392781211001455577843262336028473
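A rough independent check (a sketch only, not part of mpmath, and
assuming sympy is available for prime generation) is to truncate the
defining product at primes below `10^6`, which already agrees with the
value above to several digits::

    from sympy import primerange

    c2 = 1.0
    for p in primerange(3, 10**6):
        c2 *= p * (p - 2) / float((p - 1) ** 2)
    print(c2)  # approaches 0.66016... as the truncation bound grows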
References:
http://mathworld.wolfram.com/TwinPrimesConstant.html
"""
ln = r"""
Computes the natural logarithm of `x`, `\ln x`.
See :func:`~mpmath.log` for additional documentation."""
sqrt = r"""
``sqrt(x)`` gives the principal square root of `x`, `\sqrt x`.
For positive real numbers, the principal root is simply the
positive square root. For arbitrary complex numbers, the principal
square root is defined to satisfy `\sqrt x = \exp(\log(x)/2)`.
The function thus has a branch cut along the negative half real axis.
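As a worked instance of the principal branch, take `x = -4`: since
`\log(-4) = \log 4 + \pi i`, the definition gives
.. math ::
    \sqrt{-4} = \exp\left(\tfrac{1}{2}(\log 4 + \pi i)\right) = 2\,e^{i\pi/2} = 2i.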
For all mpmath numbers ``x``, calling ``sqrt(x)`` is equivalent to
performing ``x**0.5``.
**Examples**
Basic examples and limits::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> sqrt(10)
3.16227766016838
>>> sqrt(100)
10.0
>>> sqrt(-4)
(0.0 + 2.0j)
>>> sqrt(1+1j)
(1.09868411346781 + 0.455089860562227j)
>>> sqrt(inf)
+inf
Square root evaluation is fast at huge precision::
>>> mp.dps = 50000
>>> a = sqrt(3)
>>> str(a)[-10:]
'9329332814'
:func:`mpmath.iv.sqrt` supports interval arguments::
>>> iv.dps = 15; iv.pretty = True
>>> iv.sqrt([16,100])
[4.0, 10.0]
>>> iv.sqrt(2)
[1.4142135623730949234, 1.4142135623730951455]
>>> iv.sqrt(2) ** 2
[1.9999999999999995559, 2.0000000000000004441]
"""
cbrt = r"""
``cbrt(x)`` computes the cube root of `x`, `x^{1/3}`. This
function is faster and more accurate than raising to a floating-point
fraction::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = False
>>> 125**(mpf(1)/3)
mpf('4.9999999999999991')
>>> cbrt(125)
mpf('5.0')
Every nonzero complex number has three cube roots. This function
returns the cube root defined by `\exp(\log(x)/3)` where the
principal branch of the natural logarithm is used. Note that this
does not give a real cube root for negative real numbers::
>>> mp.pretty = True
>>> cbrt(-1)
(0.5 + 0.866025403784439j)
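This output is the principal branch worked out by hand: since
`\log(-1) = \pi i`,
.. math ::
    (-1)^{1/3} = \exp(i\pi/3) = \cos\tfrac{\pi}{3} + i\sin\tfrac{\pi}{3}
               = \tfrac{1}{2} + \tfrac{\sqrt{3}}{2}\,i.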
"""
exp = r"""
Computes the exponential function,
.. math ::
\exp(x) = e^x = \sum_{k=0}^{\infty} \frac{x^k}{k!}.
For complex numbers, the exponential function also satisfies
.. math ::
\exp(x+yi) = e^x (\cos y + i \sin y).
**Basic examples**
Some values of the exponential function::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> exp(0)
1.0
>>> exp(1)
2.718281828459045235360287
>>> exp(-1)
0.3678794411714423215955238
>>> exp(inf)
+inf
>>> exp(-inf)
0.0
Arguments can be arbitrarily large::
>>> exp(10000)
8.806818225662921587261496e+4342
>>> exp(-10000)
1.135483865314736098540939e-4343
Evaluation is supported for interval arguments via
:func:`mpmath.iv.exp`::
>>> iv.dps = 25; iv.pretty = True
>>> iv.exp([-inf,0])
[0.0, 1.0]
>>> iv.exp([0,1])
[1.0, 2.71828182845904523536028749558]
The exponential function can be evaluated efficiently to arbitrary
precision::
>>> mp.dps = 10000
>>> exp(pi) #doctest: +ELLIPSIS
23.140692632779269005729...8984304016040616
**Functional properties**
Numerical verification of Euler's identity for the complex
exponential function::
>>> mp.dps = 15
>>> exp(j*pi)+1
(0.0 + 1.22464679914735e-16j)
>>> chop(exp(j*pi)+1)
0.0
This recovers the coefficients (reciprocal factorials) in the
Maclaurin series expansion of exp::
>>> nprint(taylor(exp, 0, 5))
[1.0, 1.0, 0.5, 0.166667, 0.0416667, 0.00833333]
The exponential function is its own derivative and antiderivative::
>>> exp(pi)
23.1406926327793
>>> diff(exp, pi)
23.1406926327793
>>> quad(exp, [-inf, pi])
23.1406926327793
The exponential function can be evaluated using various methods,
including direct summation of the series, limits, and solving
the defining differential equation::
>>> nsum(lambda k: pi**k/fac(k), [0,inf])
23.1406926327793
>>> limit(lambda k: (1+pi/k)**k, inf)
23.1406926327793
>>> odefun(lambda t, x: x, 0, 1)(pi)
23.1406926327793
"""
cosh = r"""
Computes the hyperbolic cosine of `x`,
`\cosh(x) = (e^x + e^{-x})/2`. Values and limits include::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> cosh(0)
1.0
>>> cosh(1)
1.543080634815243778477906
>>> cosh(-inf), cosh(+inf)
(+inf, +inf)
The hyperbolic cosine is an even, convex function with
a global minimum at `x = 0`, having a Maclaurin series
that starts::
>>> nprint(chop(taylor(cosh, 0, 5)))
[1.0, 0.0, 0.5, 0.0, 0.0416667, 0.0]
Generalized to complex numbers, the hyperbolic cosine is
equivalent to a cosine with the argument rotated
in the imaginary direction, or `\cosh x = \cos ix`::
>>> cosh(2+3j)
(-3.724545504915322565473971 + 0.5118225699873846088344638j)
>>> cos(3-2j)
(-3.724545504915322565473971 + 0.5118225699873846088344638j)
"""
sinh = r"""
Computes the hyperbolic sine of `x`,
`\sinh(x) = (e^x - e^{-x})/2`. Values and limits include::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> sinh(0)
0.0
>>> sinh(1)
1.175201193643801456882382
>>> sinh(-inf), sinh(+inf)
(-inf, +inf)
The hyperbolic sine is an odd function, with a Maclaurin
series that starts::
>>> nprint(chop(taylor(sinh, 0, 5)))
[0.0, 1.0, 0.0, 0.166667, 0.0, 0.00833333]
Generalized to complex numbers, the hyperbolic sine is
essentially a sine with a rotation `i` applied to
the argument; more precisely, `\sinh x = -i \sin ix`::
>>> sinh(2+3j)
(-3.590564589985779952012565 + 0.5309210862485198052670401j)
>>> j*sin(3-2j)
(-3.590564589985779952012565 + 0.5309210862485198052670401j)
"""
tanh = r"""
Computes the hyperbolic tangent of `x`,
`\tanh(x) = \sinh(x)/\cosh(x)`. Values and limits include::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> tanh(0)
0.0
>>> tanh(1)
0.7615941559557648881194583
>>> tanh(-inf), tanh(inf)
(-1.0, 1.0)
The hyperbolic tangent is an odd, sigmoidal function, similar
to the inverse tangent and error function. Its Maclaurin
series is::
>>> nprint(chop(taylor(tanh, 0, 5)))
[0.0, 1.0, 0.0, -0.333333, 0.0, 0.133333]
Generalized to complex numbers, the hyperbolic tangent is
essentially a tangent with a rotation `i` applied to
the argument; more precisely, `\tanh x = -i \tan ix`::
>>> tanh(2+3j)
(0.9653858790221331242784803 - 0.009884375038322493720314034j)
>>> j*tan(3-2j)
(0.9653858790221331242784803 - 0.009884375038322493720314034j)
"""
cos = r"""
Computes the cosine of `x`, `\cos(x)`.
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> cos(pi/3)
0.5
>>> cos(100000001)
-0.9802850113244713353133243
>>> cos(2+3j)
(-4.189625690968807230132555 - 9.109227893755336597979197j)
>>> cos(inf)
nan
>>> nprint(chop(taylor(cos, 0, 6)))
[1.0, 0.0, -0.5, 0.0, 0.0416667, 0.0, -0.00138889]
Intervals are supported via :func:`mpmath.iv.cos`::
>>> iv.dps = 25; iv.pretty = True
>>> iv.cos([0,1])
[0.540302305868139717400936602301, 1.0]
>>> iv.cos([0,2])
[-0.41614683654714238699756823214, 1.0]
"""
sin = r"""
Computes the sine of `x`, `\sin(x)`.
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> sin(pi/3)
0.8660254037844386467637232
>>> sin(100000001)
0.1975887055794968911438743
>>> sin(2+3j)
(9.1544991469114295734673 - 4.168906959966564350754813j)
>>> sin(inf)
nan
>>> nprint(chop(taylor(sin, 0, 6)))
[0.0, 1.0, 0.0, -0.166667, 0.0, 0.00833333, 0.0]
Intervals are supported via :func:`mpmath.iv.sin`::
>>> iv.dps = 25; iv.pretty = True
>>> iv.sin([0,1])
[0.0, 0.841470984807896506652502331201]
>>> iv.sin([0,2])
[0.0, 1.0]
"""
tan = r"""
Computes the tangent of `x`, `\tan(x) = \frac{\sin(x)}{\cos(x)}`.
The tangent function is singular at `x = (n+1/2)\pi`, but
``tan(x)`` always returns a finite result since `(n+1/2)\pi`
cannot be represented exactly using floating-point arithmetic.
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> tan(pi/3)
1.732050807568877293527446
>>> tan(100000001)
-0.2015625081449864533091058
>>> tan(2+3j)
(-0.003764025641504248292751221 + 1.003238627353609801446359j)
>>> tan(inf)
nan
>>> nprint(chop(taylor(tan, 0, 6)))
[0.0, 1.0, 0.0, 0.333333, 0.0, 0.133333, 0.0]
Intervals are supported via :func:`mpmath.iv.tan`::
>>> iv.dps = 25; iv.pretty = True
>>> iv.tan([0,1])
[0.0, 1.55740772465490223050697482944]
>>> iv.tan([0,2]) # Interval includes a singularity
[-inf, +inf]
"""
sec = r"""
Computes the secant of `x`, `\mathrm{sec}(x) = \frac{1}{\cos(x)}`.
The secant function is singular at `x = (n+1/2)\pi`, but
``sec(x)`` always returns a finite result since `(n+1/2)\pi`
cannot be represented exactly using floating-point arithmetic.
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> sec(pi/3)
2.0
>>> sec(10000001)
-1.184723164360392819100265
>>> sec(2+3j)
(-0.04167496441114427004834991 + 0.0906111371962375965296612j)
>>> sec(inf)
nan
>>> nprint(chop(taylor(sec, 0, 6)))
[1.0, 0.0, 0.5, 0.0, 0.208333, 0.0, 0.0847222]
Intervals are supported via :func:`mpmath.iv.sec`::
>>> iv.dps = 25; iv.pretty = True
>>> iv.sec([0,1])
[1.0, 1.85081571768092561791175326276]
>>> iv.sec([0,2]) # Interval includes a singularity
[-inf, +inf]
"""
csc = r"""
Computes the cosecant of `x`, `\mathrm{csc}(x) = \frac{1}{\sin(x)}`.
The cosecant function is singular at `x = n \pi`, but with the
exception of the point `x = 0`, ``csc(x)`` returns a finite result
since `n \pi` cannot be represented exactly using floating-point
arithmetic.
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> csc(pi/3)
1.154700538379251529018298
>>> csc(10000001)
-1.864910497503629858938891
>>> csc(2+3j)
(0.09047320975320743980579048 + 0.04120098628857412646300981j)
>>> csc(inf)
nan
Intervals are supported via :func:`mpmath.iv.csc`::
>>> iv.dps = 25; iv.pretty = True
>>> iv.csc([0,1]) # Interval includes a singularity
[1.18839510577812121626159943988, +inf]
>>> iv.csc([0,2])
[1.0, +inf]
"""
cot = r"""
Computes the cotangent of `x`,
`\mathrm{cot}(x) = \frac{1}{\tan(x)} = \frac{\cos(x)}{\sin(x)}`.
The cotangent function is singular at `x = n \pi`, but with the
exception of the point `x = 0`, ``cot(x)`` returns a finite result
since `n \pi` cannot be represented exactly using floating-point
arithmetic.
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> cot(pi/3)
0.5773502691896257645091488
>>> cot(10000001)
1.574131876209625656003562
>>> cot(2+3j)
(-0.003739710376336956660117409 - 0.9967577965693583104609688j)
>>> cot(inf)
nan
Intervals are supported via :func:`mpmath.iv.cot`::
>>> iv.dps = 25; iv.pretty = True
>>> iv.cot([0,1]) # Interval includes a singularity
[0.642092615934330703006419974862, +inf]
>>> iv.cot([1,2])
[-inf, +inf]
"""
acos = r"""
Computes the inverse cosine or arccosine of `x`, `\cos^{-1}(x)`.
Since `-1 \le \cos(x) \le 1` for real `x`, the inverse
cosine is real-valued only for `-1 \le x \le 1`. On this interval,
:func:`~mpmath.acos` is defined to be a monotonically decreasing
function assuming values between `+\pi` and `0`.
Basic values are::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> acos(-1)
3.141592653589793238462643
>>> acos(0)
1.570796326794896619231322
>>> acos(1)
0.0
>>> nprint(chop(taylor(acos, 0, 6)))
[1.5708, -1.0, 0.0, -0.166667, 0.0, -0.075, 0.0]
:func:`~mpmath.acos` is defined so as to be a proper inverse function of
`\cos(\theta)` for `0 \le \theta < \pi`.
We have `\cos(\cos^{-1}(x)) = x` for all `x`, but
`\cos^{-1}(\cos(x)) = x` only for `0 \le \Re[x] < \pi`::
>>> for x in [1, 10, -1, 2+3j, 10+3j]:
... print("%s %s" % (cos(acos(x)), acos(cos(x))))
...
1.0 1.0
(10.0 + 0.0j) 2.566370614359172953850574
-1.0 1.0
(2.0 + 3.0j) (2.0 + 3.0j)
(10.0 + 3.0j) (2.566370614359172953850574 - 3.0j)
The inverse cosine has two branch points: `x = \pm 1`. :func:`~mpmath.acos`
places the branch cuts along the line segments `(-\infty, -1)` and
`(+1, +\infty)`. In general,
.. math ::
\cos^{-1}(x) = \frac{\pi}{2} + i \log\left(ix + \sqrt{1-x^2} \right)
where the principal-branch log and square root are implied.
"""
asin = r"""
Computes the inverse sine or arcsine of `x`, `\sin^{-1}(x)`.
Since `-1 \le \sin(x) \le 1` for real `x`, the inverse
sine is real-valued only for `-1 \le x \le 1`.
On this interval, it is defined to be a monotonically increasing
function assuming values between `-\pi/2` and `\pi/2`.
Basic values are::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> asin(-1)
-1.570796326794896619231322
>>> asin(0)
0.0
>>> asin(1)
1.570796326794896619231322
>>> nprint(chop(taylor(asin, 0, 6)))
[0.0, 1.0, 0.0, 0.166667, 0.0, 0.075, 0.0]
:func:`~mpmath.asin` is defined so as to be a proper inverse function of
`\sin(\theta)` for `-\pi/2 < \theta < \pi/2`.
We have `\sin(\sin^{-1}(x)) = x` for all `x`, but
`\sin^{-1}(\sin(x)) = x` only for `-\pi/2 < \Re[x] < \pi/2`::
>>> for x in [1, 10, -1, 1+3j, -2+3j]:
... print("%s %s" % (chop(sin(asin(x))), asin(sin(x))))
...
1.0 1.0
10.0 -0.5752220392306202846120698
-1.0 -1.0
(1.0 + 3.0j) (1.0 + 3.0j)
(-2.0 + 3.0j) (-1.141592653589793238462643 - 3.0j)
The inverse sine has two branch points: `x = \pm 1`. :func:`~mpmath.asin`
places the branch cuts along the line segments `(-\infty, -1)` and
`(+1, +\infty)`. In general,
.. math ::
\sin^{-1}(x) = -i \log\left(ix + \sqrt{1-x^2} \right)
where the principal-branch log and square root are implied.
"""
atan = r"""
Computes the inverse tangent or arctangent of `x`, `\tan^{-1}(x)`.
This is a real-valued function for all real `x`, with range
`(-\pi/2, \pi/2)`.
Basic values are::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> atan(-inf)
-1.570796326794896619231322
>>> atan(-1)
-0.7853981633974483096156609
>>> atan(0)
0.0
>>> atan(1)
0.7853981633974483096156609
>>> atan(inf)
1.570796326794896619231322
>>> nprint(chop(taylor(atan, 0, 6)))
[0.0, 1.0, 0.0, -0.333333, 0.0, 0.2, 0.0]
The inverse tangent is often used to compute angles. However,
the atan2 function is often better for this as it preserves sign
(see :func:`~mpmath.atan2`).
:func:`~mpmath.atan` is defined so as to be a proper inverse function of
`\tan(\theta)` for `-\pi/2 < \theta < \pi/2`.
We have `\tan(\tan^{-1}(x)) = x` for all `x`, but
`\tan^{-1}(\tan(x)) = x` only for `-\pi/2 < \Re[x] < \pi/2`::
>>> mp.dps = 25
>>> for x in [1, 10, -1, 1+3j, -2+3j]:
... print("%s %s" % (tan(atan(x)), atan(tan(x))))
...
1.0 1.0
10.0 0.5752220392306202846120698
-1.0 -1.0
(1.0 + 3.0j) (1.000000000000000000000001 + 3.0j)
(-2.0 + 3.0j) (1.141592653589793238462644 + 3.0j)
The inverse tangent has two branch points: `x = \pm i`. :func:`~mpmath.atan`
places the branch cuts along the line segments `(-i \infty, -i)` and
`(+i, +i \infty)`. In general,
.. math ::
\tan^{-1}(x) = \frac{i}{2}\left(\log(1-ix)-\log(1+ix)\right)
where the principal-branch log is implied.
"""
acot = r"""Computes the inverse cotangent of `x`,
`\mathrm{cot}^{-1}(x) = \tan^{-1}(1/x)`."""
asec = r"""Computes the inverse secant of `x`,
`\mathrm{sec}^{-1}(x) = \cos^{-1}(1/x)`."""
acsc = r"""Computes the inverse cosecant of `x`,
`\mathrm{csc}^{-1}(x) = \sin^{-1}(1/x)`."""
coth = r"""Computes the hyperbolic cotangent of `x`,
`\mathrm{coth}(x) = \frac{\cosh(x)}{\sinh(x)}`.
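For illustration, a few sample values (assuming the standard
mpmath setup used throughout)::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> coth(1)
1.31303528549933
>>> coth(inf), coth(-inf)
(1.0, -1.0)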
"""
sech = r"""Computes the hyperbolic secant of `x`,
`\mathrm{sech}(x) = \frac{1}{\cosh(x)}`.
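For illustration (assuming the standard mpmath setup)::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> sech(0)
1.0
>>> sech(1)
0.648054273663885
>>> sech(inf)
0.0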
"""
csch = r"""Computes the hyperbolic cosecant of `x`,
`\mathrm{csch}(x) = \frac{1}{\sinh(x)}`.
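For illustration (assuming the standard mpmath setup)::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> csch(1)
0.850918128239322
>>> csch(inf)
0.0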
"""
acosh = r"""Computes the inverse hyperbolic cosine of `x`,
`\mathrm{cosh}^{-1}(x) = \log(x+\sqrt{x+1}\sqrt{x-1})`.
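For illustration (assuming the standard mpmath setup)::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> acosh(1)
0.0
>>> acosh(2)
1.31695789692482
>>> acosh(0)
(0.0 + 1.5707963267949j)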
"""
asinh = r"""Computes the inverse hyperbolic sine of `x`,
`\mathrm{sinh}^{-1}(x) = \log(x+\sqrt{1+x^2})`.
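For illustration (assuming the standard mpmath setup)::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> asinh(0)
0.0
>>> asinh(1)
0.881373587019543
>>> asinh(-1)
-0.881373587019543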
"""
atanh = r"""Computes the inverse hyperbolic tangent of `x`,
`\mathrm{tanh}^{-1}(x) = \frac{1}{2}\left(\log(1+x)-\log(1-x)\right)`.
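For illustration (assuming the standard mpmath setup)::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> atanh(0)
0.0
>>> atanh(0.5)
0.549306144334055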
"""
acoth = r"""Computes the inverse hyperbolic cotangent of `x`,
`\mathrm{coth}^{-1}(x) = \tanh^{-1}(1/x)`."""
asech = r"""Computes the inverse hyperbolic secant of `x`,
`\mathrm{sech}^{-1}(x) = \cosh^{-1}(1/x)`."""
acsch = r"""Computes the inverse hyperbolic cosecant of `x`,
`\mathrm{csch}^{-1}(x) = \sinh^{-1}(1/x)`."""
sinpi = r"""
Computes `\sin(\pi x)`, more accurately than the expression
``sin(pi*x)``::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> sinpi(10**10), sin(pi*(10**10))
(0.0, -2.23936276195592e-6)
>>> sinpi(10**10+0.5), sin(pi*(10**10+0.5))
(1.0, 0.999999999998721)
"""
cospi = r"""
Computes `\cos(\pi x)`, more accurately than the expression
``cos(pi*x)``::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> cospi(10**10), cos(pi*(10**10))
(1.0, 0.999999999997493)
>>> cospi(10**10+0.5), cos(pi*(10**10+0.5))
(0.0, 1.59960492420134e-6)
"""
sinc = r"""
``sinc(x)`` computes the unnormalized sinc function, defined as
.. math ::
\mathrm{sinc}(x) = \begin{cases}
\sin(x)/x, & \mbox{if } x \ne 0 \\
1, & \mbox{if } x = 0.
\end{cases}
See :func:`~mpmath.sincpi` for the normalized sinc function.
Simple values and limits include::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> sinc(0)
1.0
>>> sinc(1)
0.841470984807897
>>> sinc(inf)
0.0
The integral of the sinc function is the sine integral Si::
>>> quad(sinc, [0, 1])
0.946083070367183
>>> si(1)
0.946083070367183
"""
sincpi = r"""
``sincpi(x)`` computes the normalized sinc function, defined as
.. math ::
\mathrm{sinc}_{\pi}(x) = \begin{cases}
\sin(\pi x)/(\pi x), & \mbox{if } x \ne 0 \\
1, & \mbox{if } x = 0.
\end{cases}
Equivalently, we have
`\mathrm{sinc}_{\pi}(x) = \mathrm{sinc}(\pi x)`.
The normalization entails that the function integrates
to unity over the entire real line::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> quadosc(sincpi, [-inf, inf], period=2.0)
1.0
Like :func:`~mpmath.sinpi`, :func:`~mpmath.sincpi` is evaluated accurately
at its roots::
>>> sincpi(10)
0.0
"""
expj = r"""
Convenience function for computing `e^{ix}`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> expj(0)
(1.0 + 0.0j)
>>> expj(-1)
(0.5403023058681397174009366 - 0.8414709848078965066525023j)
>>> expj(j)
(0.3678794411714423215955238 + 0.0j)
>>> expj(1+j)
(0.1987661103464129406288032 + 0.3095598756531121984439128j)
"""
expjpi = r"""
Convenience function for computing `e^{i \pi x}`.
Evaluation is accurate near zeros (see also :func:`~mpmath.cospi`,
:func:`~mpmath.sinpi`)::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> expjpi(0)
(1.0 + 0.0j)
>>> expjpi(1)
(-1.0 + 0.0j)
>>> expjpi(0.5)
(0.0 + 1.0j)
>>> expjpi(-1)
(-1.0 + 0.0j)
>>> expjpi(j)
(0.04321391826377224977441774 + 0.0j)
>>> expjpi(1+j)
(-0.04321391826377224977441774 + 0.0j)
"""
floor = r"""
Computes the floor of `x`, `\lfloor x \rfloor`, defined as
the largest integer less than or equal to `x`::
>>> from mpmath import *
>>> mp.pretty = False
>>> floor(3.5)
mpf('3.0')
.. note ::
:func:`~mpmath.floor`, :func:`~mpmath.ceil` and :func:`~mpmath.nint` return a
floating-point number, not a Python ``int``. If `\lfloor x \rfloor` is
too large to be represented exactly at the present working precision,
the result will be rounded, not necessarily in the direction
implied by the mathematical definition of the function.
To avoid rounding, use *prec=0*::
>>> mp.dps = 15
>>> print(int(floor(10**30+1)))
1000000000000000019884624838656
>>> print(int(floor(10**30+1, prec=0)))
1000000000000000000000000000001
The floor function is defined for complex numbers and
acts on the real and imaginary parts separately::
>>> floor(3.25+4.75j)
mpc(real='3.0', imag='4.0')
"""
ceil = r"""
Computes the ceiling of `x`, `\lceil x \rceil`, defined as
the smallest integer greater than or equal to `x`::
>>> from mpmath import *
>>> mp.pretty = False
>>> ceil(3.5)
mpf('4.0')
The ceiling function is defined for complex numbers and
acts on the real and imaginary parts separately::
>>> ceil(3.25+4.75j)
mpc(real='4.0', imag='5.0')
See notes about rounding for :func:`~mpmath.floor`.
"""
nint = r"""
Evaluates the nearest integer function, `\mathrm{nint}(x)`.
This gives the nearest integer to `x`; on a tie, it
gives the nearest even integer::
>>> from mpmath import *
>>> mp.pretty = False
>>> nint(3.2)
mpf('3.0')
>>> nint(3.8)
mpf('4.0')
>>> nint(3.5)
mpf('4.0')
>>> nint(4.5)
mpf('4.0')
The nearest integer function is defined for complex numbers and
acts on the real and imaginary parts separately::
>>> nint(3.25+4.75j)
mpc(real='3.0', imag='5.0')
See notes about rounding for :func:`~mpmath.floor`.
"""
frac = r"""
Gives the fractional part of `x`, defined as
`\mathrm{frac}(x) = x - \lfloor x \rfloor` (see :func:`~mpmath.floor`).
In effect, this computes `x` modulo 1, or `x+n` where
`n \in \mathbb{Z}` is such that `x+n \in [0,1)`::
>>> from mpmath import *
>>> mp.pretty = False
>>> frac(1.25)
mpf('0.25')
>>> frac(3)
mpf('0.0')
>>> frac(-1.25)
mpf('0.75')
For a complex number, the fractional part function applies to
the real and imaginary parts separately::
>>> frac(2.25+3.75j)
mpc(real='0.25', imag='0.75')
Plotted, the fractional part function gives a sawtooth
wave. The Fourier series coefficients have a simple
form::
>>> mp.dps = 15
>>> nprint(fourier(lambda x: frac(x)-0.5, [0,1], 4))
([0.0, 0.0, 0.0, 0.0, 0.0], [0.0, -0.31831, -0.159155, -0.106103, -0.0795775])
>>> nprint([-1/(pi*k) for k in range(1,5)])
[-0.31831, -0.159155, -0.106103, -0.0795775]
.. note::
The fractional part is sometimes defined as a symmetric
function, i.e. returning `-\mathrm{frac}(-x)` if `x < 0`.
This convention is used, for instance, by Mathematica's
``FractionalPart``.
"""
sign = r"""
Returns the sign of `x`, defined as `\mathrm{sign}(x) = x / |x|`
(with the special case `\mathrm{sign}(0) = 0`)::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = False
>>> sign(10)
mpf('1.0')
>>> sign(-10)
mpf('-1.0')
>>> sign(0)
mpf('0.0')
Note that the sign function is also defined for complex numbers,
for which it gives the projection onto the unit circle::
>>> mp.dps = 15; mp.pretty = True
>>> sign(1+j)
(0.707106781186547 + 0.707106781186547j)
"""
arg = r"""
Computes the complex argument (phase) of `x`, defined as the
signed angle between the positive real axis and `x` in the
complex plane::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> arg(3)
0.0
>>> arg(3+3j)
0.785398163397448
>>> arg(3j)
1.5707963267949
>>> arg(-3)
3.14159265358979
>>> arg(-3j)
-1.5707963267949
The angle is defined to satisfy `-\pi < \arg(x) \le \pi`, with
the sign convention that a nonnegative imaginary part
results in a nonnegative argument.
The value returned by :func:`~mpmath.arg` is an ``mpf`` instance.
"""
fabs = r"""
Returns the absolute value of `x`, `|x|`. Unlike :func:`abs`,
:func:`~mpmath.fabs` converts non-mpmath numbers (such as ``int``)
into mpmath numbers::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = False
>>> fabs(3)
mpf('3.0')
>>> fabs(-3)
mpf('3.0')
>>> fabs(3+4j)
mpf('5.0')
"""
re = r"""
Returns the real part of `x`, `\Re(x)`. Unlike ``x.real``,
:func:`~mpmath.re` converts `x` to an mpmath number::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = False
>>> re(3)
mpf('3.0')
>>> re(-1+4j)
mpf('-1.0')
"""
im = r"""
Returns the imaginary part of `x`, `\Im(x)`. Unlike ``x.imag``,
:func:`~mpmath.im` converts `x` to an mpmath number::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = False
>>> im(3)
mpf('0.0')
>>> im(-1+4j)
mpf('4.0')
"""
conj = r"""
Returns the complex conjugate of `x`, `\overline{x}`. Unlike
``x.conjugate()``, :func:`~mpmath.conj` converts `x` to an mpmath number::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = False
>>> conj(3)
mpf('3.0')
>>> conj(-1+4j)
mpc(real='-1.0', imag='-4.0')
"""
polar = r"""
Returns the polar representation of the complex number `z`
as a pair `(r, \phi)` such that `z = r e^{i \phi}`::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> polar(-2)
(2.0, 3.14159265358979)
>>> polar(3-4j)
(5.0, -0.927295218001612)
"""
rect = r"""
Returns the complex number represented by polar
coordinates `(r, \phi)`::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> chop(rect(2, pi))
-2.0
>>> rect(sqrt(2), -pi/4)
(1.0 - 1.0j)
"""
expm1 = r"""
Computes `e^x - 1`, accurately for small `x`.
Unlike the expression ``exp(x) - 1``, ``expm1(x)`` does not suffer from
potentially catastrophic cancellation::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> exp(1e-10)-1; print(expm1(1e-10))
1.00000008274037e-10
1.00000000005e-10
>>> exp(1e-20)-1; print(expm1(1e-20))
0.0
1.0e-20
>>> 1/(exp(1e-20)-1)
Traceback (most recent call last):
...
ZeroDivisionError
>>> 1/expm1(1e-20)
1.0e+20
Evaluation works for extremely tiny values::
>>> expm1(0)
0.0
>>> expm1('1e-10000000')
1.0e-10000000
"""
powm1 = r"""
Computes `x^y - 1`, accurately when `x^y` is very close to 1.
This avoids potentially catastrophic cancellation::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> power(0.99999995, 1e-10) - 1
0.0
>>> powm1(0.99999995, 1e-10)
-5.00000012791934e-18
Powers exactly equal to 1, and only those powers, yield 0 exactly::
>>> powm1(-j, 4)
(0.0 + 0.0j)
>>> powm1(3, 0)
0.0
>>> powm1(fadd(-1, 1e-100, exact=True), 4)
-4.0e-100
Evaluation works for extremely tiny `y`::
>>> powm1(2, '1e-100000')
6.93147180559945e-100001
>>> powm1(j, '1e-1000')
(-1.23370055013617e-2000 + 1.5707963267949e-1000j)
"""
root = r"""
``root(z, n, k=0)`` computes an `n`-th root of `z`, i.e. returns a number
`r` that (up to possible approximation error) satisfies `r^n = z`.
(``nthroot`` is available as an alias for ``root``.)
Every complex number `z \ne 0` has `n` distinct `n`-th roots, which are
equidistant points on a circle with radius `|z|^{1/n}`, centered around the
origin. A specific root may be selected using the optional index
`k`. The roots are indexed counterclockwise, starting with `k = 0` for the root
closest to the positive real half-axis.
The `k = 0` root is the so-called principal `n`-th root, often denoted by
`\sqrt[n]{z}` or `z^{1/n}`, and also given by `\exp(\log(z) / n)`. If `z` is
a positive real number, the principal root is just the unique positive
`n`-th root of `z`. Under some circumstances, non-principal real roots exist:
for positive real `z`, `n` even, there is a negative root given by `k = n/2`;
for negative real `z`, `n` odd, there is a negative root given by `k = (n-1)/2`.
To obtain all roots with a simple expression, use
``[root(z,n,k) for k in range(n)]``.
As an important special case, ``root(1, n, k)`` returns the `k`-th `n`-th root of
unity, `\zeta_k = e^{2 \pi i k / n}`. Alternatively, :func:`~mpmath.unitroots`
provides a slightly more convenient way to obtain the roots of unity,
including the option to compute only the primitive roots of unity.
Both `k` and `n` should be integers; `k` outside of ``range(n)`` will be
reduced modulo `n`. If `n` is negative, `x^{-1/n} = 1/x^{1/n}` (or
the equivalent reciprocal for a non-principal root with `k \ne 0`) is computed.
:func:`~mpmath.root` is implemented to use Newton's method for small
`n`. At high precision, this makes `x^{1/n}` not much more
expensive than the regular exponentiation, `x^n`. For very large
`n`, :func:`~mpmath.nthroot` falls back to use the exponential function.
**Examples**
:func:`~mpmath.nthroot`/:func:`~mpmath.root` is faster and more accurate than raising to a
floating-point fraction::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = False
>>> 16807 ** (mpf(1)/5)
mpf('7.0000000000000009')
>>> root(16807, 5)
mpf('7.0')
>>> nthroot(16807, 5) # Alias
mpf('7.0')
A high-precision root::
>>> mp.dps = 50; mp.pretty = True
>>> nthroot(10, 5)
1.584893192461113485202101373391507013269442133825
>>> nthroot(10, 5) ** 5
10.0
Computing principal and non-principal square and cube roots::
>>> mp.dps = 15
>>> root(10, 2)
3.16227766016838
>>> root(10, 2, 1)
-3.16227766016838
>>> root(-10, 3)
(1.07721734501594 + 1.86579517236206j)
>>> root(-10, 3, 1)
-2.15443469003188
>>> root(-10, 3, 2)
(1.07721734501594 - 1.86579517236206j)
All the 7th roots of a complex number::
>>> for r in [root(3+4j, 7, k) for k in range(7)]:
... print("%s %s" % (r, r**7))
...
(1.24747270589553 + 0.166227124177353j) (3.0 + 4.0j)
(0.647824911301003 + 1.07895435170559j) (3.0 + 4.0j)
(-0.439648254723098 + 1.17920694574172j) (3.0 + 4.0j)
(-1.19605731775069 + 0.391492658196305j) (3.0 + 4.0j)
(-1.05181082538903 - 0.691023585965793j) (3.0 + 4.0j)
(-0.115529328478668 - 1.25318497558335j) (3.0 + 4.0j)
(0.907748109144957 - 0.871672518271819j) (3.0 + 4.0j)
Cube roots of unity::
>>> for k in range(3): print(root(1, 3, k))
...
1.0
(-0.5 + 0.866025403784439j)
(-0.5 - 0.866025403784439j)
Some exact high order roots::
>>> root(75**210, 105)
5625.0
>>> root(1, 128, 96)
(0.0 - 1.0j)
>>> root(4**128, 128, 96)
(0.0 - 4.0j)
"""
unitroots = r"""
``unitroots(n)`` returns `\zeta_0, \zeta_1, \ldots, \zeta_{n-1}`,
all the distinct `n`-th roots of unity, as a list. If the option
*primitive=True* is passed, only the primitive roots are returned.
Every `n`-th root of unity satisfies `(\zeta_k)^n = 1`. There are `n` distinct
roots for each `n` (`\zeta_k` and `\zeta_j` are the same when
`k = j \pmod n`), which form a regular polygon with vertices on the unit
circle. They are ordered counterclockwise with increasing `k`, starting
with `\zeta_0 = 1`.
**Examples**
The roots of unity up to `n = 4`::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> nprint(unitroots(1))
[1.0]
>>> nprint(unitroots(2))
[1.0, -1.0]
>>> nprint(unitroots(3))
[1.0, (-0.5 + 0.866025j), (-0.5 - 0.866025j)]
>>> nprint(unitroots(4))
[1.0, (0.0 + 1.0j), -1.0, (0.0 - 1.0j)]
Roots of unity form a geometric series that sums to 0::
>>> mp.dps = 50
>>> chop(fsum(unitroots(25)))
0.0
Primitive roots up to `n = 4`::
>>> mp.dps = 15
>>> nprint(unitroots(1, primitive=True))
[1.0]
>>> nprint(unitroots(2, primitive=True))
[-1.0]
>>> nprint(unitroots(3, primitive=True))
[(-0.5 + 0.866025j), (-0.5 - 0.866025j)]
>>> nprint(unitroots(4, primitive=True))
[(0.0 + 1.0j), (0.0 - 1.0j)]
There are only four primitive 12th roots::
>>> nprint(unitroots(12, primitive=True))
[(0.866025 + 0.5j), (-0.866025 + 0.5j), (-0.866025 - 0.5j), (0.866025 - 0.5j)]
The `n`-th roots of unity form a group, the cyclic group of order `n`.
Any primitive root `r` is a generator for this group, meaning that
`r^0, r^1, \ldots, r^{n-1}` gives the whole set of unit roots (in
some permuted order)::
>>> for r in unitroots(6): print(r)
...
1.0
(0.5 + 0.866025403784439j)
(-0.5 + 0.866025403784439j)
-1.0
(-0.5 - 0.866025403784439j)
(0.5 - 0.866025403784439j)
>>> r = unitroots(6, primitive=True)[1]
>>> for k in range(6): print(chop(r**k))
...
1.0
(0.5 - 0.866025403784439j)
(-0.5 - 0.866025403784439j)
-1.0
(-0.5 + 0.866025403784438j)
(0.5 + 0.866025403784438j)
The number of primitive roots equals the Euler totient function `\phi(n)`::
>>> [len(unitroots(n, primitive=True)) for n in range(1,20)]
[1, 1, 2, 2, 4, 2, 6, 4, 6, 4, 10, 4, 12, 6, 8, 8, 16, 6, 18]
"""
log = r"""
Computes the base-`b` logarithm of `x`, `\log_b(x)`. If `b` is
unspecified, :func:`~mpmath.log` computes the natural (base `e`) logarithm
and is equivalent to :func:`~mpmath.ln`. In general, the base `b` logarithm
is defined in terms of the natural logarithm as
`\log_b(x) = \ln(x)/\ln(b)`.
By convention, we take `\log(0) = -\infty`.
The natural logarithm is real if `x > 0` and complex if `x < 0` or if
`x` is complex. The principal branch of the complex logarithm is
used, meaning that `-\pi < \Im(\ln(x)) \le \pi`.
**Examples**
Some basic values and limits::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> log(1)
0.0
>>> log(2)
0.693147180559945
>>> log(1000,10)
3.0
>>> log(4, 16)
0.5
>>> log(j)
(0.0 + 1.5707963267949j)
>>> log(-1)
(0.0 + 3.14159265358979j)
>>> log(0)
-inf
>>> log(inf)
+inf
The natural logarithm is the antiderivative of `1/x`::
>>> quad(lambda x: 1/x, [1, 5])
1.6094379124341
>>> log(5)
1.6094379124341
>>> diff(log, 10)
0.1
The Taylor series expansion of the natural logarithm around
`x = 1` has coefficients `(-1)^{n+1}/n`::
>>> nprint(taylor(log, 1, 7))
[0.0, 1.0, -0.5, 0.333333, -0.25, 0.2, -0.166667, 0.142857]
:func:`~mpmath.log` supports arbitrary precision evaluation::
>>> mp.dps = 50
>>> log(pi)
1.1447298858494001741434273513530587116472948129153
>>> log(pi, pi**3)
0.33333333333333333333333333333333333333333333333333
>>> mp.dps = 25
>>> log(3+4j)
(1.609437912434100374600759 + 0.9272952180016122324285125j)
"""
log10 = r"""
Computes the base-10 logarithm of `x`, `\log_{10}(x)`. ``log10(x)``
is equivalent to ``log(x, 10)``.
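For illustration (assuming the standard mpmath setup)::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> log10(1000)
3.0
>>> log10(2)
0.301029995663981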
"""
fmod = r"""
Converts `x` and `y` to mpmath numbers and returns `x \mod y`.
For mpmath numbers, this is equivalent to ``x % y``.
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> fmod(100, pi)
2.61062773871641
You can use :func:`~mpmath.fmod` to compute fractional parts of numbers::
>>> fmod(10.25, 1)
0.25
"""
radians = r"""
Converts the degree angle `x` to radians::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> radians(60)
1.0471975511966
"""
degrees = r"""
Converts the radian angle `x` to a degree angle::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> degrees(pi/3)
60.0
"""
atan2 = r"""
Computes the two-argument arctangent, `\mathrm{atan2}(y, x)`,
giving the signed angle between the positive `x`-axis and the
point `(x, y)` in the 2D plane. This function is defined for
real `x` and `y` only.
The two-argument arctangent essentially computes
`\mathrm{atan}(y/x)`, but accounts for the signs of both
`x` and `y` to give the angle for the correct quadrant. The
following examples illustrate the difference::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> atan2(1,1), atan(1/1.)
(0.785398163397448, 0.785398163397448)
>>> atan2(1,-1), atan(1/-1.)
(2.35619449019234, -0.785398163397448)
>>> atan2(-1,1), atan(-1/1.)
(-0.785398163397448, -0.785398163397448)
>>> atan2(-1,-1), atan(-1/-1.)
(-2.35619449019234, 0.785398163397448)
The angle convention is the same as that used for the complex
argument; see :func:`~mpmath.arg`.
"""
fibonacci = r"""
``fibonacci(n)`` computes the `n`-th Fibonacci number, `F(n)`. The
Fibonacci numbers are defined by the recurrence `F(n) = F(n-1) + F(n-2)`
with the initial values `F(0) = 0`, `F(1) = 1`. :func:`~mpmath.fibonacci`
extends this definition to arbitrary real and complex arguments
using the formula
.. math ::
F(z) = \frac{\phi^z - \cos(\pi z) \phi^{-z}}{\sqrt 5}
where `\phi` is the golden ratio. :func:`~mpmath.fibonacci` also uses this
continuous formula to compute `F(n)` for extremely large `n`, where
calculating the exact integer would be wasteful.
For convenience, :func:`~mpmath.fib` is available as an alias for
:func:`~mpmath.fibonacci`.
**Basic examples**
Some small Fibonacci numbers are::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for i in range(10):
... print(fibonacci(i))
...
0.0
1.0
1.0
2.0
3.0
5.0
8.0
13.0
21.0
34.0
>>> fibonacci(50)
12586269025.0
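As an illustrative check, the continuous formula above reproduces
the integer values::
>>> (phi**10 - cospi(10)*phi**-10)/sqrt(5)
55.0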
The recurrence for `F(n)` extends backwards to negative `n`::
>>> for i in range(10):
... print(fibonacci(-i))
...
0.0
1.0
-1.0
2.0
-3.0
5.0
-8.0
13.0
-21.0
34.0
Large Fibonacci numbers will be computed approximately unless
the precision is set high enough::
>>> fib(200)
2.8057117299251e+41
>>> mp.dps = 45
>>> fib(200)
280571172992510140037611932413038677189525.0
:func:`~mpmath.fibonacci` can compute approximate Fibonacci numbers
of stupendous size::
>>> mp.dps = 15
>>> fibonacci(10**25)
3.49052338550226e+2089876402499787337692720
**Real and complex arguments**
The extended Fibonacci function is an analytic function. The
property `F(z) = F(z-1) + F(z-2)` holds for arbitrary `z`::
>>> mp.dps = 15
>>> fib(pi)
2.1170270579161
>>> fib(pi-1) + fib(pi-2)
2.1170270579161
>>> fib(3+4j)
(-5248.51130728372 - 14195.962288353j)
>>> fib(2+4j) + fib(1+4j)
(-5248.51130728372 - 14195.962288353j)
The Fibonacci function has infinitely many roots on the
negative half-real axis. The first root is at 0, the second is
close to -0.18, and then there are infinitely many roots that
asymptotically approach `-n+1/2`::
>>> findroot(fib, -0.2)
-0.183802359692956
>>> findroot(fib, -2)
-1.57077646820395
>>> findroot(fib, -17)
-16.4999999596115
>>> findroot(fib, -24)
-23.5000000000479
**Mathematical relationships**
For large `n`, `F(n+1)/F(n)` approaches the golden ratio::
>>> mp.dps = 50
>>> fibonacci(101)/fibonacci(100)
1.6180339887498948482045868343656381177203127439638
>>> +phi
1.6180339887498948482045868343656381177203091798058
The sum of reciprocal Fibonacci numbers converges to an irrational
number for which no closed form expression is known::
>>> mp.dps = 15
>>> nsum(lambda n: 1/fib(n), [1, inf])
3.35988566624318
Amazingly, however, the sum of odd-index reciprocal Fibonacci
numbers can be expressed in terms of a Jacobi theta function::
>>> nsum(lambda n: 1/fib(2*n+1), [0, inf])
1.82451515740692
>>> sqrt(5)*jtheta(2,0,(3-sqrt(5))/2)**2/4
1.82451515740692
Some related sums can be done in closed form::
>>> nsum(lambda k: 1/(1+fib(2*k+1)), [0, inf])
1.11803398874989
>>> phi - 0.5
1.11803398874989
>>> f = lambda k:(-1)**(k+1) / sum(fib(n)**2 for n in range(1,int(k+1)))
>>> nsum(f, [1, inf])
0.618033988749895
>>> phi-1
0.618033988749895
**References**
1. http://mathworld.wolfram.com/FibonacciNumber.html
"""
altzeta = r"""
Gives the Dirichlet eta function, `\eta(s)`, also known as the
alternating zeta function. This function is defined in analogy
with the Riemann zeta function as providing the sum of the
alternating series
.. math ::
\eta(s) = \sum_{k=1}^{\infty} \frac{(-1)^{k-1}}{k^s}
= 1-\frac{1}{2^s}+\frac{1}{3^s}-\frac{1}{4^s}+\ldots
The eta function, unlike the Riemann zeta function, is an entire
function, having a finite value for all complex `s`. The special case
`\eta(1) = \log(2)` gives the value of the alternating harmonic series.
The alternating zeta function may be expressed using the Riemann zeta function
as `\eta(s) = (1 - 2^{1-s}) \zeta(s)`. It can also be expressed
in terms of the Hurwitz zeta function, for example using
:func:`~mpmath.dirichlet` (see documentation for that function).
**Examples**
Some special values are::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> altzeta(1)
0.693147180559945
>>> altzeta(0)
0.5
>>> altzeta(-1)
0.25
>>> altzeta(-2)
0.0
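The relation to the Riemann zeta function stated above can be
verified numerically (an illustrative check)::
>>> altzeta(3)
0.901542677369696
>>> (1 - 2**(1-3.0)) * zeta(3)
0.901542677369696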
An example of a sum that can be computed more accurately and
efficiently via :func:`~mpmath.altzeta` than via numerical summation::
>>> sum(-(-1)**n / n**2.5 for n in range(1, 100))
0.86720495150398402
>>> altzeta(2.5)
0.867199889012184
At positive even integers, the Dirichlet eta function
evaluates to a rational multiple of a power of `\pi`::
>>> altzeta(2)
0.822467033424113
>>> pi**2/12
0.822467033424113
Like the Riemann zeta function, `\eta(s)` approaches 1
as `s` approaches positive infinity, although it does
so from below rather than from above::
>>> altzeta(30)
0.999999999068682
>>> altzeta(inf)
1.0
>>> mp.pretty = False
>>> altzeta(1000, rounding='d')
mpf('0.99999999999999989')
>>> altzeta(1000, rounding='u')
mpf('1.0')
**References**
1. http://mathworld.wolfram.com/DirichletEtaFunction.html
2. http://en.wikipedia.org/wiki/Dirichlet_eta_function
"""
factorial = r"""
Computes the factorial, `x!`. For integers `n \ge 0`, we have
`n! = 1 \cdot 2 \cdots (n-1) \cdot n` and more generally the factorial
is defined for real or complex `x` by `x! = \Gamma(x+1)`.
**Examples**
Basic values and limits::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for k in range(6):
... print("%s %s" % (k, fac(k)))
...
0 1.0
1 1.0
2 2.0
3 6.0
4 24.0
5 120.0
>>> fac(inf)
+inf
>>> fac(0.5), sqrt(pi)/2
(0.886226925452758, 0.886226925452758)
For large positive `x`, `x!` can be approximated by
Stirling's formula::
>>> x = 10**10
>>> fac(x)
2.32579620567308e+95657055186
>>> sqrt(2*pi*x)*(x/e)**x
2.32579597597705e+95657055186
:func:`~mpmath.fac` supports evaluation for astronomically large values::
>>> fac(10**30)
6.22311232304258e+29565705518096748172348871081098
Reciprocal factorials appear in the Taylor series of the
exponential function (among many other contexts)::
>>> nsum(lambda k: 1/fac(k), [0, inf]), exp(1)
(2.71828182845905, 2.71828182845905)
>>> nsum(lambda k: pi**k/fac(k), [0, inf]), exp(pi)
(23.1406926327793, 23.1406926327793)
"""
gamma = r"""
Computes the gamma function, `\Gamma(x)`. The gamma function is a
shifted version of the ordinary factorial, satisfying
`\Gamma(n) = (n-1)!` for integers `n > 0`. More generally, it
is defined by
.. math ::
\Gamma(x) = \int_0^{\infty} t^{x-1} e^{-t}\, dt
for any real or complex `x` with `\Re(x) > 0` and for `\Re(x) < 0`
by analytic continuation.
**Examples**
Basic values and limits::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for k in range(1, 6):
... print("%s %s" % (k, gamma(k)))
...
1 1.0
2 1.0
3 2.0
4 6.0
5 24.0
>>> gamma(inf)
+inf
>>> gamma(0)
Traceback (most recent call last):
...
ValueError: gamma function pole
The gamma function of a half-integer is a rational multiple of
`\sqrt{\pi}`::
>>> gamma(0.5), sqrt(pi)
(1.77245385090552, 1.77245385090552)
>>> gamma(1.5), sqrt(pi)/2
(0.886226925452758, 0.886226925452758)
We can check the integral definition::
>>> gamma(3.5)
3.32335097044784
>>> quad(lambda t: t**2.5*exp(-t), [0,inf])
3.32335097044784
:func:`~mpmath.gamma` supports arbitrary-precision evaluation and
complex arguments::
>>> mp.dps = 50
>>> gamma(sqrt(3))
0.91510229697308632046045539308226554038315280564184
>>> mp.dps = 25
>>> gamma(2j)
(0.009902440080927490985955066 - 0.07595200133501806872408048j)
Arguments can also be large. Note that the gamma function grows
very quickly::
>>> mp.dps = 15
>>> gamma(10**20)
1.9328495143101e+1956570551809674817225
"""
psi = r"""
Gives the polygamma function of order `m` of `z`, `\psi^{(m)}(z)`.
Special cases are known as the *digamma function* (`\psi^{(0)}(z)`),
the *trigamma function* (`\psi^{(1)}(z)`), etc. The polygamma
functions are defined as the logarithmic derivatives of the gamma
function:
.. math ::
\psi^{(m)}(z) = \left(\frac{d}{dz}\right)^{m+1} \log \Gamma(z)
In particular, `\psi^{(0)}(z) = \Gamma'(z)/\Gamma(z)`. In the
present implementation of :func:`~mpmath.psi`, the order `m` must be a
nonnegative integer, while the argument `z` may be an arbitrary
complex number (with the exception of the polygamma function's poles
at `z = 0, -1, -2, \ldots`).
**Examples**
For various rational arguments, the polygamma function reduces to
a combination of standard mathematical constants::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> psi(0, 1), -euler
(-0.5772156649015328606065121, -0.5772156649015328606065121)
>>> psi(1, '1/4'), pi**2+8*catalan
(17.19732915450711073927132, 17.19732915450711073927132)
>>> psi(2, '1/2'), -14*apery
(-16.82879664423431999559633, -16.82879664423431999559633)
The polygamma functions are derivatives of each other::
>>> diff(lambda x: psi(3, x), pi), psi(4, pi)
(-0.1105749312578862734526952, -0.1105749312578862734526952)
>>> quad(lambda x: psi(4, x), [2, 3]), psi(3,3)-psi(3,2)
(-0.375, -0.375)
The digamma function diverges logarithmically as `z \to \infty`,
while higher orders tend to zero::
>>> psi(0,inf), psi(1,inf), psi(2,inf)
(+inf, 0.0, 0.0)
Evaluation for a complex argument::
>>> psi(2, -1-2j)
(0.03902435405364952654838445 + 0.1574325240413029954685366j)
Evaluation is supported for large orders `m` and/or large
arguments `z`::
>>> psi(3, 10**100)
2.0e-300
>>> psi(250, 10**30+10**20*j)
(-1.293142504363642687204865e-7010 + 3.232856260909107391513108e-7018j)
**Application to infinite series**
Any infinite series where the summand is a rational function of
the index `k` can be evaluated in closed form in terms of polygamma
functions of the roots and poles of the summand::
>>> a = sqrt(2)
>>> b = sqrt(3)
>>> nsum(lambda k: 1/((k+a)**2*(k+b)), [0, inf])
0.4049668927517857061917531
>>> (psi(0,a)-psi(0,b)-a*psi(1,a)+b*psi(1,a))/(a-b)**2
0.4049668927517857061917531
This follows from the series representation (`m > 0`)
.. math ::
\psi^{(m)}(z) = (-1)^{m+1} m! \sum_{k=0}^{\infty}
\frac{1}{(z+k)^{m+1}}.
Since the roots of a polynomial may be complex, it is sometimes
necessary to use the complex polygamma function to evaluate
an entirely real-valued sum::
>>> nsum(lambda k: 1/(k**2-2*k+3), [0, inf])
1.694361433907061256154665
>>> nprint(polyroots([1,-2,3]))
[(1.0 - 1.41421j), (1.0 + 1.41421j)]
>>> r1 = 1-sqrt(2)*j
>>> r2 = r1.conjugate()
>>> (psi(0,-r2)-psi(0,-r1))/(r1-r2)
(1.694361433907061256154665 + 0.0j)
"""
digamma = r"""
Shortcut for ``psi(0,z)``.
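For illustration (reusing a value shown for :func:`~mpmath.psi`)::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> digamma(1)
-0.5772156649015328606065121
>>> psi(0, 1)
-0.5772156649015328606065121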
"""
harmonic = r"""
If `n` is an integer, ``harmonic(n)`` gives a floating-point
approximation of the `n`-th harmonic number `H(n)`, defined as
.. math ::
H(n) = 1 + \frac{1}{2} + \frac{1}{3} + \ldots + \frac{1}{n}
The first few harmonic numbers are::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for n in range(8):
... print("%s %s" % (n, harmonic(n)))
...
0 0.0
1 1.0
2 1.5
3 1.83333333333333
4 2.08333333333333
5 2.28333333333333
6 2.45
7 2.59285714285714
The infinite harmonic series `1 + 1/2 + 1/3 + \ldots` diverges::
>>> harmonic(inf)
+inf
:func:`~mpmath.harmonic` is evaluated using the digamma function rather
than by summing the harmonic series term by term. It can therefore
be computed quickly for arbitrarily large `n`, and even for
nonintegral arguments::
>>> harmonic(10**100)
230.835724964306
>>> harmonic(0.5)
0.613705638880109
>>> harmonic(3+4j)
(2.24757548223494 + 0.850502209186044j)
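The underlying digamma relation, `H(n) = \psi^{(0)}(n+1) + \gamma`,
can be checked directly (an illustrative identity)::
>>> harmonic(4)
2.08333333333333
>>> psi(0, 5) + euler
2.08333333333333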
:func:`~mpmath.harmonic` supports arbitrary precision evaluation::
>>> mp.dps = 50
>>> harmonic(11)
3.0198773448773448773448773448773448773448773448773
>>> harmonic(pi)
1.8727388590273302654363491032336134987519132374152
The harmonic series diverges, but at a glacial pace. It is possible
to calculate the exact number of terms required before the sum
exceeds a given amount, say 100::
>>> mp.dps = 50
>>> v = 10**findroot(lambda x: harmonic(10**x) - 100, 10)
>>> v
15092688622113788323693563264538101449859496.864101
>>> v = int(ceil(v))
>>> print(v)
15092688622113788323693563264538101449859497
>>> harmonic(v-1)
99.999999999999999999999999999999999999999999942747
>>> harmonic(v)
100.000000000000000000000000000000000000000000009
"""
bernoulli = r"""
Computes the `n`-th Bernoulli number, `B_n`, for any integer `n \ge 0`.
The Bernoulli numbers are rational numbers, but this function
returns a floating-point approximation. To obtain an exact
fraction, use :func:`~mpmath.bernfrac` instead.
**Examples**
Numerical values of the first few Bernoulli numbers::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for n in range(15):
... print("%s %s" % (n, bernoulli(n)))
...
0 1.0
1 -0.5
2 0.166666666666667
3 0.0
4 -0.0333333333333333
5 0.0
6 0.0238095238095238
7 0.0
8 -0.0333333333333333
9 0.0
10 0.0757575757575758
11 0.0
12 -0.253113553113553
13 0.0
14 1.16666666666667
Bernoulli numbers can be approximated with arbitrary precision::
>>> mp.dps = 50
>>> bernoulli(100)
-2.8382249570693706959264156336481764738284680928013e+78
Arbitrarily large `n` are supported::
>>> mp.dps = 15
>>> bernoulli(10**20 + 2)
3.09136296657021e+1876752564973863312327
The Bernoulli numbers are related to the Riemann zeta function
at integer arguments::
>>> -bernoulli(8) * (2*pi)**8 / (2*fac(8))
1.00407735619794
>>> zeta(8)
1.00407735619794
**Algorithm**
For small `n` (`n < 3000`) :func:`~mpmath.bernoulli` uses a recurrence
formula due to Ramanujan. All results in this range are cached,
so sequential computation of small Bernoulli numbers is
guaranteed to be fast.
For larger `n`, `B_n` is evaluated in terms of the Riemann zeta
function.
"""
stieltjes = r"""
For a nonnegative integer `n`, ``stieltjes(n)`` computes the
`n`-th Stieltjes constant `\gamma_n`, defined as the
`n`-th coefficient in the Laurent series expansion of the
Riemann zeta function around the pole at `s = 1`. That is,
we have:
.. math ::
\zeta(s) = \frac{1}{s-1} + \sum_{n=0}^{\infty}
\frac{(-1)^n}{n!} \gamma_n (s-1)^n
More generally, ``stieltjes(n, a)`` gives the corresponding
coefficient `\gamma_n(a)` for the Hurwitz zeta function
`\zeta(s,a)` (with `\gamma_n = \gamma_n(1)`).
**Examples**
The zeroth Stieltjes constant is just Euler's constant `\gamma`::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> stieltjes(0)
0.577215664901533
Some more values are::
>>> stieltjes(1)
-0.0728158454836767
>>> stieltjes(10)
0.000205332814909065
>>> stieltjes(30)
0.00355772885557316
>>> stieltjes(1000)
-1.57095384420474e+486
>>> stieltjes(2000)
2.680424678918e+1109
>>> stieltjes(1, 2.5)
-0.23747539175716
An alternative way to compute `\gamma_1`::
>>> diff(extradps(15)(lambda x: 1/(x-1) - zeta(x)), 1)
-0.0728158454836767
:func:`~mpmath.stieltjes` supports arbitrary precision evaluation::
>>> mp.dps = 50
>>> stieltjes(2)
-0.0096903631928723184845303860352125293590658061013408
**Algorithm**
:func:`~mpmath.stieltjes` numerically evaluates the integral in
the following representation due to Ainsworth, Howell and
Coffey [1], [2]:
.. math ::
\gamma_n(a) = \frac{\log^n a}{2a} - \frac{\log^{n+1}(a)}{n+1} +
\frac{2}{a} \Re \int_0^{\infty}
\frac{(x/a-i)\log^n(a-ix)}{(1+x^2/a^2)(e^{2\pi x}-1)} dx.
For some reference values with `a = 1`, see e.g. [4].
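As an illustrative sketch (not how :func:`~mpmath.stieltjes` is
implemented internally), the `n = 0`, `a = 1` case of this formula
reduces to a classical integral for Euler's constant, which can be
checked by direct numerical integration::
>>> mp.dps = 15; mp.pretty = True
>>> 0.5 + 2*quad(lambda x: x/((1+x**2)*expm1(2*pi*x)), [0, inf])
0.577215664901533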
**References**
1. O. R. Ainsworth & L. W. Howell, "An integral representation of
the generalized Euler-Mascheroni constants", NASA Technical
Paper 2456 (1985),
http://ntrs.nasa.gov/archive/nasa/casi.ntrs.nasa.gov/19850014994_1985014994.pdf
2. M. W. Coffey, "The Stieltjes constants, their relation to the
`\eta_j` coefficients, and representation of the Hurwitz
zeta function", arXiv:0706.0343v1 http://arxiv.org/abs/0706.0343
3. http://mathworld.wolfram.com/StieltjesConstants.html
4. http://pi.lacim.uqam.ca/piDATA/stieltjesgamma.txt
"""
gammaprod = r"""
Given iterables `a` and `b`, ``gammaprod(a, b)`` computes the
product / quotient of gamma functions:
.. math ::
\frac{\Gamma(a_0) \Gamma(a_1) \cdots \Gamma(a_p)}
{\Gamma(b_0) \Gamma(b_1) \cdots \Gamma(b_q)}
Unlike direct calls to :func:`~mpmath.gamma`, :func:`~mpmath.gammaprod` considers
the entire product as a limit and evaluates this limit properly if
any of the numerator or denominator arguments are nonpositive
integers such that poles of the gamma function are encountered.
That is, :func:`~mpmath.gammaprod` evaluates
.. math ::
\lim_{\epsilon \to 0}
\frac{\Gamma(a_0+\epsilon) \Gamma(a_1+\epsilon) \cdots
\Gamma(a_p+\epsilon)}
{\Gamma(b_0+\epsilon) \Gamma(b_1+\epsilon) \cdots
\Gamma(b_q+\epsilon)}
In particular:
* If there are equally many poles in the numerator and the
denominator, the limit is a rational number times the remaining,
regular part of the product.
* If there are more poles in the numerator, :func:`~mpmath.gammaprod`
returns ``+inf``.
* If there are more poles in the denominator, :func:`~mpmath.gammaprod`
returns 0.
**Examples**
The reciprocal gamma function `1/\Gamma(x)` evaluated at `x = 0`::
>>> from mpmath import *
>>> mp.dps = 15
>>> gammaprod([], [0])
0.0
A limit::
>>> gammaprod([-4], [-3])
-0.25
>>> limit(lambda x: gamma(x-1)/gamma(x), -3, direction=1)
-0.25
>>> limit(lambda x: gamma(x-1)/gamma(x), -3, direction=-1)
-0.25
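The pole-counting rules above can also be exercised directly
(illustrative values)::
>>> gammaprod([0], [])
+inf
>>> gammaprod([-1], [-2])
-2.0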
"""
beta = r"""
Computes the beta function,
`B(x,y) = \Gamma(x) \Gamma(y) / \Gamma(x+y)`.
The beta function is also commonly defined by the integral
representation
.. math ::
B(x,y) = \int_0^1 t^{x-1} (1-t)^{y-1} \, dt
**Examples**
For integer and half-integer arguments where all three gamma
functions are finite, the beta function becomes either a rational
number or a rational multiple of `\pi`::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> beta(5, 2)
0.0333333333333333
>>> beta(1.5, 2)
0.266666666666667
>>> 16*beta(2.5, 1.5)
3.14159265358979
Where appropriate, :func:`~mpmath.beta` evaluates limits. A pole
of the beta function is taken to result in ``+inf``::
>>> beta(-0.5, 0.5)
0.0
>>> beta(-3, 3)
-0.333333333333333
>>> beta(-2, 3)
+inf
>>> beta(inf, 1)
0.0
>>> beta(inf, 0)
nan
:func:`~mpmath.beta` supports complex numbers and arbitrary precision
evaluation::
>>> beta(1, 2+j)
(0.4 - 0.2j)
>>> mp.dps = 25
>>> beta(j,0.5)
(1.079424249270925780135675 - 1.410032405664160838288752j)
>>> mp.dps = 50
>>> beta(pi, e)
0.037890298781212201348153837138927165984170287886464
Various integrals can be computed by means of the
beta function::
>>> mp.dps = 15
>>> quad(lambda t: t**2.5*(1-t)**2, [0, 1])
0.0230880230880231
>>> beta(3.5, 3)
0.0230880230880231
>>> quad(lambda t: sin(t)**4 * sqrt(cos(t)), [0, pi/2])
0.319504062596158
>>> beta(2.5, 0.75)/2
0.319504062596158
"""
betainc = r"""
``betainc(a, b, x1=0, x2=1, regularized=False)`` gives the generalized
incomplete beta function,
.. math ::
I_{x_1}^{x_2}(a,b) = \int_{x_1}^{x_2} t^{a-1} (1-t)^{b-1} dt.
When `x_1 = 0, x_2 = 1`, this reduces to the ordinary (complete)
beta function `B(a,b)`; see :func:`~mpmath.beta`.
With the keyword argument ``regularized=True``, :func:`~mpmath.betainc`
computes the regularized incomplete beta function
`I_{x_1}^{x_2}(a,b) / B(a,b)`. This is the cumulative distribution of the
beta distribution with parameters `a`, `b`.
.. note ::
Implementations of the incomplete beta function in some other
software use a different argument order. For example, Mathematica uses the
reversed argument order ``Beta[x1,x2,a,b]``. For the equivalent of SciPy's
three-argument incomplete beta integral (implicitly with `x1 = 0`), use
``betainc(a,b,0,x2,regularized=True)``.
**Examples**
Verifying that :func:`~mpmath.betainc` computes the integral in the
definition::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> x,y,a,b = 3, 4, 0, 6
>>> betainc(x, y, a, b)
-4010.4
>>> quad(lambda t: t**(x-1) * (1-t)**(y-1), [a, b])
-4010.4
The arguments may be arbitrary complex numbers::
>>> betainc(0.75, 1-4j, 0, 2+3j)
(0.2241657956955709603655887 + 0.3619619242700451992411724j)
With regularization::
>>> betainc(1, 2, 0, 0.25, regularized=True)
0.4375
>>> betainc(pi, e, 0, 1, regularized=True) # Complete
1.0
The beta integral satisfies some simple argument transformation
symmetries::
>>> mp.dps = 15
>>> betainc(2,3,4,5), -betainc(2,3,5,4), betainc(3,2,1-5,1-4)
(56.0833333333333, 56.0833333333333, 56.0833333333333)
The beta integral can often be evaluated analytically. For integer and
rational arguments, the incomplete beta function typically reduces to a
simple algebraic-logarithmic expression::
>>> mp.dps = 25
>>> identify(chop(betainc(0, 0, 3, 4)))
'-(log((9/8)))'
>>> identify(betainc(2, 3, 4, 5))
'(673/12)'
>>> identify(betainc(1.5, 1, 1, 2))
'((-12+sqrt(1152))/18)'
"""
binomial = r"""
Computes the binomial coefficient
.. math ::
{n \choose k} = \frac{n!}{k!(n-k)!}.
The binomial coefficient gives the number of ways that `k` items
can be chosen from a set of `n` items. More generally, the binomial
coefficient is a well-defined function of arbitrary real or
complex `n` and `k`, via the gamma function.
**Examples**
Generate Pascal's triangle::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for n in range(5):
... nprint([binomial(n,k) for k in range(n+1)])
...
[1.0]
[1.0, 1.0]
[1.0, 2.0, 1.0]
[1.0, 3.0, 3.0, 1.0]
[1.0, 4.0, 6.0, 4.0, 1.0]
There is 1 way to select 0 items from the empty set, and 0 ways to
select 1 item from the empty set::
>>> binomial(0, 0)
1.0
>>> binomial(0, 1)
0.0
:func:`~mpmath.binomial` supports large arguments::
>>> binomial(10**20, 10**20-5)
8.33333333333333e+97
>>> binomial(10**20, 10**10)
2.60784095465201e+104342944813
Nonintegral binomial coefficients find use in series
expansions::
>>> nprint(taylor(lambda x: (1+x)**0.25, 0, 4))
[1.0, 0.25, -0.09375, 0.0546875, -0.0375977]
>>> nprint([binomial(0.25, k) for k in range(5)])
[1.0, 0.25, -0.09375, 0.0546875, -0.0375977]
An integral representation::
>>> n, k = 5, 3
>>> f = lambda t: exp(-j*k*t)*(1+exp(j*t))**n
>>> chop(quad(f, [-pi,pi])/(2*pi))
10.0
>>> binomial(n,k)
10.0
"""
rf = r"""
Computes the rising factorial or Pochhammer symbol,
.. math ::
x^{(n)} = x (x+1) \cdots (x+n-1) = \frac{\Gamma(x+n)}{\Gamma(x)}
where the rightmost expression is valid for nonintegral `n`.
**Examples**
For integral `n`, the rising factorial is a polynomial::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for n in range(5):
... nprint(taylor(lambda x: rf(x,n), 0, n))
...
[1.0]
[0.0, 1.0]
[0.0, 1.0, 1.0]
[0.0, 2.0, 3.0, 1.0]
[0.0, 6.0, 11.0, 6.0, 1.0]
Evaluation is supported for arbitrary arguments::
>>> rf(2+3j, 5.5)
(-7202.03920483347 - 3777.58810701527j)
"""
ff = r"""
Computes the falling factorial,
.. math ::
(x)_n = x (x-1) \cdots (x-n+1) = \frac{\Gamma(x+1)}{\Gamma(x-n+1)}
where the rightmost expression is valid for nonintegral `n`.
**Examples**
For integral `n`, the falling factorial is a polynomial::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for n in range(5):
... nprint(taylor(lambda x: ff(x,n), 0, n))
...
[1.0]
[0.0, 1.0]
[0.0, -1.0, 1.0]
[0.0, 2.0, -3.0, 1.0]
[0.0, -6.0, 11.0, -6.0, 1.0]
Evaluation is supported for arbitrary arguments::
>>> ff(2+3j, 5.5)
(-720.41085888203 + 316.101124983878j)
"""
fac2 = r"""
Computes the double factorial `x!!`, defined for integers
`x > 0` by
.. math ::
x!! = \begin{cases}
1 \cdot 3 \cdots (x-2) \cdot x & x \;\mathrm{odd} \\
2 \cdot 4 \cdots (x-2) \cdot x & x \;\mathrm{even}
\end{cases}
and more generally by [1]
.. math ::
x!! = 2^{x/2} \left(\frac{\pi}{2}\right)^{(\cos(\pi x)-1)/4}
\Gamma\left(\frac{x}{2}+1\right).
**Examples**
The integer sequence of double factorials begins::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> nprint([fac2(n) for n in range(10)])
[1.0, 1.0, 2.0, 3.0, 8.0, 15.0, 48.0, 105.0, 384.0, 945.0]
For large `x`, double factorials follow a Stirling-like asymptotic
approximation::
>>> x = mpf(10000)
>>> fac2(x)
5.97272691416282e+17830
>>> sqrt(pi)*x**((x+1)/2)*exp(-x/2)
5.97262736954392e+17830
The recurrence formula `x!! = x (x-2)!!` can be reversed to
define the double factorial of negative odd integers (but
not negative even integers)::
>>> fac2(-1), fac2(-3), fac2(-5), fac2(-7)
(1.0, -1.0, 0.333333333333333, -0.0666666666666667)
>>> fac2(-2)
Traceback (most recent call last):
...
ValueError: gamma function pole
With the exception of the poles at negative even integers,
:func:`~mpmath.fac2` supports evaluation for arbitrary complex arguments.
The recurrence formula is valid generally::
>>> fac2(pi+2j)
(-1.3697207890154e-12 + 3.93665300979176e-12j)
>>> (pi+2j)*fac2(pi-2+2j)
(-1.3697207890154e-12 + 3.93665300979176e-12j)
Double factorials should not be confused with nested factorials,
which are immensely larger::
>>> fac(fac(20))
5.13805976125208e+43675043585825292774
>>> fac2(20)
3715891200.0
Double factorials appear, among other things, in series expansions
of Gaussian functions and the error function. Infinite series
include::
>>> nsum(lambda k: 1/fac2(k), [0, inf])
3.05940740534258
>>> sqrt(e)*(1+sqrt(pi/2)*erf(sqrt(2)/2))
3.05940740534258
>>> nsum(lambda k: 2**k/fac2(2*k-1), [1, inf])
4.06015693855741
>>> e * erf(1) * sqrt(pi)
4.06015693855741
A beautiful Ramanujan sum::
>>> nsum(lambda k: (-1)**k*(fac2(2*k-1)/fac2(2*k))**3, [0,inf])
0.90917279454693
>>> (gamma('9/8')/gamma('5/4')/gamma('7/8'))**2
0.90917279454693
**References**
1. http://functions.wolfram.com/GammaBetaErf/Factorial2/27/01/0002/
2. http://mathworld.wolfram.com/DoubleFactorial.html
"""
hyper = r"""
Evaluates the generalized hypergeometric function
.. math ::
\,_pF_q(a_1,\ldots,a_p; b_1,\ldots,b_q; z) =
\sum_{n=0}^\infty \frac{(a_1)_n (a_2)_n \ldots (a_p)_n}
{(b_1)_n(b_2)_n\ldots(b_q)_n} \frac{z^n}{n!}
where `(x)_n` denotes the rising factorial (see :func:`~mpmath.rf`).
The parameter lists ``a_s`` and ``b_s`` may contain integers,
real numbers, complex numbers, as well as exact fractions given in
the form of tuples `(p, q)`. :func:`~mpmath.hyper` is optimized to handle
integers and fractions more efficiently than arbitrary
floating-point parameters (since rational parameters are by
far the most common).
**Examples**
Verifying that :func:`~mpmath.hyper` gives the sum in the definition, by
comparison with :func:`~mpmath.nsum`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> a,b,c,d = 2,3,4,5
>>> x = 0.25
>>> hyper([a,b],[c,d],x)
1.078903941164934876086237
>>> fn = lambda n: rf(a,n)*rf(b,n)/rf(c,n)/rf(d,n)*x**n/fac(n)
>>> nsum(fn, [0, inf])
1.078903941164934876086237
The parameters can be any combination of integers, fractions,
floats and complex numbers::
>>> a, b, c, d, e = 1, (-1,2), pi, 3+4j, (2,3)
>>> x = 0.2j
>>> hyper([a,b],[c,d,e],x)
(0.9923571616434024810831887 - 0.005753848733883879742993122j)
>>> b, e = -0.5, mpf(2)/3
>>> fn = lambda n: rf(a,n)*rf(b,n)/rf(c,n)/rf(d,n)/rf(e,n)*x**n/fac(n)
>>> nsum(fn, [0, inf])
(0.9923571616434024810831887 - 0.005753848733883879742993122j)
The `\,_0F_0` and `\,_1F_0` series are just elementary functions::
>>> a, z = sqrt(2), +pi
>>> hyper([],[],z)
23.14069263277926900572909
>>> exp(z)
23.14069263277926900572909
>>> hyper([a],[],z)
(-0.09069132879922920160334114 + 0.3283224323946162083579656j)
>>> (1-z)**(-a)
(-0.09069132879922920160334114 + 0.3283224323946162083579656j)
If any `a_k` coefficient is a nonpositive integer, the series terminates
into a finite polynomial::
>>> hyper([1,1,1,-3],[2,5],1)
0.7904761904761904761904762
>>> identify(_)
'(83/105)'
If any `b_k` is a nonpositive integer, the function is undefined (unless the
series terminates before the division by zero occurs)::
>>> hyper([1,1,1,-3],[-2,5],1)
Traceback (most recent call last):
...
ZeroDivisionError: pole in hypergeometric series
>>> hyper([1,1,1,-1],[-2,5],1)
1.1
Except for polynomial cases, the radius of convergence `R` of the hypergeometric
series is either `R = \infty` (if `p \le q`), `R = 1` (if `p = q+1`), or
`R = 0` (if `p > q+1`).
The analytic continuations of the functions with `p = q+1`, i.e. `\,_2F_1`,
`\,_3F_2`, `\,_4F_3`, etc., are all implemented and therefore these functions
can be evaluated for `|z| \ge 1`. The shortcuts :func:`~mpmath.hyp2f1`, :func:`~mpmath.hyp3f2`
are available to handle the most common cases (see their documentation),
but functions of higher degree are also supported via :func:`~mpmath.hyper`::
>>> hyper([1,2,3,4], [5,6,7], 1) # 4F3 at finite-valued branch point
1.141783505526870731311423
>>> hyper([4,5,6,7], [1,2,3], 1) # 4F3 at pole
+inf
>>> hyper([1,2,3,4,5], [6,7,8,9], 10) # 5F4
(1.543998916527972259717257 - 0.5876309929580408028816365j)
>>> hyper([1,2,3,4,5,6], [7,8,9,10,11], 1j) # 6F5
(0.9996565821853579063502466 + 0.0129721075905630604445669j)
Near `z = 1` with noninteger parameters::
>>> hyper(['1/3',1,'3/2',2], ['1/5','11/6','41/8'], 1)
2.219433352235586121250027
>>> hyper(['1/3',1,'3/2',2], ['1/5','11/6','5/4'], 1)
+inf
>>> eps1 = extradps(6)(lambda: 1 - mpf('1e-6'))()
>>> hyper(['1/3',1,'3/2',2], ['1/5','11/6','5/4'], eps1)
2923978034.412973409330956
Please note that, as currently implemented, evaluation of `\,_pF_{p-1}`
with `p \ge 3` may be slow or inaccurate when `|z-1|` is small,
for some parameter values.
When `p > q+1`, ``hyper`` computes the (iterated) Borel sum of the divergent
series. For `\,_2F_0` the Borel sum has an analytic solution and can be
computed efficiently (see :func:`~mpmath.hyp2f0`). For higher degrees, the function
is evaluated first by attempting to sum it directly as an asymptotic
series (this only works for tiny `|z|`), and then by evaluating the Borel
regularized sum using numerical integration. Except for
special parameter combinations, this can be extremely slow.
>>> hyper([1,1], [], 0.5) # regularization of 2F0
(1.340965419580146562086448 + 0.8503366631752726568782447j)
>>> hyper([1,1,1,1], [1], 0.5) # regularization of 4F1
(1.108287213689475145830699 + 0.5327107430640678181200491j)
With the following magnitude of argument, the asymptotic series for `\,_3F_1`
gives only a few digits. Using Borel summation, ``hyper`` can produce
a value with full accuracy::
>>> mp.dps = 15
>>> hyper([2,0.5,4], [5.25], '0.08', force_series=True)
Traceback (most recent call last):
...
NoConvergence: Hypergeometric series converges too slowly. Try increasing maxterms.
>>> hyper([2,0.5,4], [5.25], '0.08', asymp_tol=1e-4)
1.0725535790737
>>> hyper([2,0.5,4], [5.25], '0.08')
(1.07269542893559 + 5.54668863216891e-5j)
>>> hyper([2,0.5,4], [5.25], '-0.08', asymp_tol=1e-4)
0.946344925484879
>>> hyper([2,0.5,4], [5.25], '-0.08')
0.946312503737771
>>> mp.dps = 25
>>> hyper([2,0.5,4], [5.25], '-0.08')
0.9463125037377662296700858
Note that with the positive `z` value, the correct result has a small
imaginary part, which falls below the tolerance of the asymptotic series.
"""
hypercomb = r"""
Computes a weighted combination of hypergeometric functions
.. math ::
\sum_{r=1}^N \left[ \prod_{k=1}^{l_r} {w_{r,k}}^{c_{r,k}}
\frac{\prod_{k=1}^{m_r} \Gamma(\alpha_{r,k})}{\prod_{k=1}^{n_r}
\Gamma(\beta_{r,k})}
\,_{p_r}F_{q_r}(a_{r,1},\ldots,a_{r,p}; b_{r,1},
\ldots, b_{r,q}; z_r)\right].
Typically the parameters are linear combinations of a small set of base
parameters; :func:`~mpmath.hypercomb` permits computing a correct value in
the case that some of the `\alpha`, `\beta`, `b` turn out to be
nonpositive integers, or if division by zero occurs for some `w^c`,
assuming that there are opposing singularities that cancel out.
The limit is computed by evaluating the function with the base
parameters perturbed, at a higher working precision.
The first argument should be a function that takes the perturbable
base parameters ``params`` as input and returns `N` tuples
``(w, c, alpha, beta, a, b, z)``, where the coefficients ``w``, ``c``,
gamma factors ``alpha``, ``beta``, and hypergeometric coefficients
``a``, ``b`` each should be lists of numbers, and ``z`` should be a single
number.
**Examples**
The following evaluates
.. math ::
(a-1) \frac{\Gamma(a-3)}{\Gamma(a-4)} \,_1F_1(a,a-1,z) = e^z(a-4)(a+z-1)
with `a=1, z=3`. There is a zero factor, two gamma function poles, and
the 1F1 function is singular; all singularities cancel out to give a finite
value::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> hypercomb(lambda a: [([a-1],[1],[a-3],[a-4],[a],[a-1],3)], [1])
-180.769832308689
>>> -9*exp(3)
-180.769832308689
"""
hyp0f1 = r"""
Gives the hypergeometric function `\,_0F_1`, sometimes known as the
confluent limit function, defined as
.. math ::
\,_0F_1(a,z) = \sum_{k=0}^{\infty} \frac{1}{(a)_k} \frac{z^k}{k!}.
This function satisfies the differential equation `z f''(z) + a f'(z) = f(z)`,
and is related to the Bessel function of the first kind (see :func:`~mpmath.besselj`).
``hyp0f1(a,z)`` is equivalent to ``hyper([],[a],z)``; see documentation for
:func:`~mpmath.hyper` for more information.
**Examples**
Evaluation for arbitrary arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> hyp0f1(2, 0.25)
1.130318207984970054415392
>>> hyp0f1((1,2), 1234567)
6.27287187546220705604627e+964
>>> hyp0f1(3+4j, 1000000j)
(3.905169561300910030267132e+606 + 3.807708544441684513934213e+606j)
Evaluation is supported for arbitrarily large values of `z`,
using asymptotic expansions::
>>> hyp0f1(1, 10**50)
2.131705322874965310390701e+8685889638065036553022565
>>> hyp0f1(1, -10**50)
1.115945364792025420300208e-13
Verifying the differential equation::
>>> a = 2.5
>>> f = lambda z: hyp0f1(a,z)
>>> for z in [0, 10, 3+4j]:
... chop(z*diff(f,z,2) + a*diff(f,z) - f(z))
...
0.0
0.0
0.0
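The connection to the Bessel function noted above can be spot-checked
numerically via `J_{\nu}(z) = \frac{(z/2)^{\nu}}{\Gamma(\nu+1)}
\,_0F_1\left(\nu+1, -\frac{z^2}{4}\right)` (a quick sketch; any `\nu`, `z`
should do)::
>>> v, z = 1.5, 3
>>> chop(besselj(v,z) - (z/2)**v/gamma(v+1)*hyp0f1(v+1, -z**2/4))
0.0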
"""
hyp1f1 = r"""
Gives the confluent hypergeometric function of the first kind,
.. math ::
\,_1F_1(a,b,z) = \sum_{k=0}^{\infty} \frac{(a)_k}{(b)_k} \frac{z^k}{k!},
also known as Kummer's function and sometimes denoted by `M(a,b,z)`. This
function gives one solution to the confluent (Kummer's) differential equation
.. math ::
z f''(z) + (b-z) f'(z) - af(z) = 0.
A second solution is given by the `U` function; see :func:`~mpmath.hyperu`.
Solutions are also given in an alternate form by the Whittaker
functions (:func:`~mpmath.whitm`, :func:`~mpmath.whitw`).
``hyp1f1(a,b,z)`` is equivalent
to ``hyper([a],[b],z)``; see documentation for :func:`~mpmath.hyper` for more
information.
**Examples**
Evaluation for real and complex values of the argument `z`, with
fixed parameters `a = 2, b = -1/3`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> hyp1f1(2, (-1,3), 3.25)
-2815.956856924817275640248
>>> hyp1f1(2, (-1,3), -3.25)
-1.145036502407444445553107
>>> hyp1f1(2, (-1,3), 1000)
-8.021799872770764149793693e+441
>>> hyp1f1(2, (-1,3), -1000)
0.000003131987633006813594535331
>>> hyp1f1(2, (-1,3), 100+100j)
(-3.189190365227034385898282e+48 - 1.106169926814270418999315e+49j)
Parameters may be complex::
>>> hyp1f1(2+3j, -1+j, 10j)
(261.8977905181045142673351 + 160.8930312845682213562172j)
Arbitrarily large values of `z` are supported::
>>> hyp1f1(3, 4, 10**20)
3.890569218254486878220752e+43429448190325182745
>>> hyp1f1(3, 4, -10**20)
6.0e-60
>>> hyp1f1(3, 4, 10**20*j)
(-1.935753855797342532571597e-20 - 2.291911213325184901239155e-20j)
Verifying the differential equation::
>>> a, b = 1.5, 2
>>> f = lambda z: hyp1f1(a,b,z)
>>> for z in [0, -10, 3, 3+4j]:
... chop(z*diff(f,z,2) + (b-z)*diff(f,z) - a*f(z))
...
0.0
0.0
0.0
0.0
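Kummer's transformation `\,_1F_1(a,b,z) = e^z \,_1F_1(b-a,b,-z)` holds
identically, which gives a quick numerical consistency check::
>>> a, b, z = 1.5, 2.25, 3+4j
>>> chop(hyp1f1(a,b,z) - exp(z)*hyp1f1(b-a,b,-z))
0.0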
An integral representation::
>>> a, b = 1.5, 3
>>> z = 1.5
>>> hyp1f1(a,b,z)
2.269381460919952778587441
>>> g = lambda t: exp(z*t)*t**(a-1)*(1-t)**(b-a-1)
>>> gammaprod([b],[a,b-a])*quad(g, [0,1])
2.269381460919952778587441
"""
hyp1f2 = r"""
Gives the hypergeometric function `\,_1F_2(a_1; b_1, b_2; z)`.
The call ``hyp1f2(a1,b1,b2,z)`` is equivalent to
``hyper([a1],[b1,b2],z)``.
Evaluation works for complex and arbitrarily large arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> a, b, c = 1.5, (-1,3), 2.25
>>> hyp1f2(a, b, c, 10**20)
-1.159388148811981535941434e+8685889639
>>> hyp1f2(a, b, c, -10**20)
-12.60262607892655945795907
>>> hyp1f2(a, b, c, 10**20*j)
(4.237220401382240876065501e+6141851464 - 2.950930337531768015892987e+6141851464j)
>>> hyp1f2(2+3j, -2j, 0.5j, 10-20j)
(135881.9905586966432662004 - 86681.95885418079535738828j)
"""
hyp2f2 = r"""
Gives the hypergeometric function `\,_2F_2(a_1,a_2;b_1,b_2; z)`.
The call ``hyp2f2(a1,a2,b1,b2,z)`` is equivalent to
``hyper([a1,a2],[b1,b2],z)``.
Evaluation works for complex and arbitrarily large arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> a, b, c, d = 1.5, (-1,3), 2.25, 4
>>> hyp2f2(a, b, c, d, 10**20)
-5.275758229007902299823821e+43429448190325182663
>>> hyp2f2(a, b, c, d, -10**20)
2561445.079983207701073448
>>> hyp2f2(a, b, c, d, 10**20*j)
(2218276.509664121194836667 - 1280722.539991603850462856j)
>>> hyp2f2(2+3j, -2j, 0.5j, 4j, 10-20j)
(80500.68321405666957342788 - 20346.82752982813540993502j)
"""
hyp2f3 = r"""
Gives the hypergeometric function `\,_2F_3(a_1,a_2;b_1,b_2,b_3; z)`.
The call ``hyp2f3(a1,a2,b1,b2,b3,z)`` is equivalent to
``hyper([a1,a2],[b1,b2,b3],z)``.
Evaluation works for arbitrarily large arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> a1,a2,b1,b2,b3 = 1.5, (-1,3), 2.25, 4, (1,5)
>>> hyp2f3(a1,a2,b1,b2,b3,10**20)
-4.169178177065714963568963e+8685889590
>>> hyp2f3(a1,a2,b1,b2,b3,-10**20)
7064472.587757755088178629
>>> hyp2f3(a1,a2,b1,b2,b3,10**20*j)
(-5.163368465314934589818543e+6141851415 + 1.783578125755972803440364e+6141851416j)
>>> hyp2f3(2+3j, -2j, 0.5j, 4j, -1-j, 10-20j)
(-2280.938956687033150740228 + 13620.97336609573659199632j)
>>> hyp2f3(2+3j, -2j, 0.5j, 4j, -1-j, 10000000-20000000j)
(4.849835186175096516193e+3504 - 3.365981529122220091353633e+3504j)
"""
hyp2f1 = r"""
Gives the Gauss hypergeometric function `\,_2F_1` (often simply referred to as
*the* hypergeometric function), defined for `|z| < 1` as
.. math ::
\,_2F_1(a,b,c,z) = \sum_{k=0}^{\infty}
\frac{(a)_k (b)_k}{(c)_k} \frac{z^k}{k!},
and for `|z| \ge 1` by analytic continuation, with a branch cut on `(1, \infty)`
when necessary.
Special cases of this function include many of the orthogonal polynomials as
well as the incomplete beta function and other functions. Properties of the
Gauss hypergeometric function are documented comprehensively in many references,
for example Abramowitz & Stegun, section 15.
The implementation supports the analytic continuation as well as evaluation
close to the unit circle where `|z| \approx 1`. The syntax ``hyp2f1(a,b,c,z)``
is equivalent to ``hyper([a,b],[c],z)``.
**Examples**
Evaluation with `z` inside, outside and on the unit circle, for
fixed parameters::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> hyp2f1(2, (1,2), 4, 0.75)
1.303703703703703703703704
>>> hyp2f1(2, (1,2), 4, -1.75)
0.7431290566046919177853916
>>> hyp2f1(2, (1,2), 4, 1.75)
(1.418075801749271137026239 - 1.114976146679907015775102j)
>>> hyp2f1(2, (1,2), 4, 1)
1.6
>>> hyp2f1(2, (1,2), 4, -1)
0.8235498012182875315037882
>>> hyp2f1(2, (1,2), 4, j)
(0.9144026291433065674259078 + 0.2050415770437884900574923j)
>>> hyp2f1(2, (1,2), 4, 2+j)
(0.9274013540258103029011549 + 0.7455257875808100868984496j)
>>> hyp2f1(2, (1,2), 4, 0.25j)
(0.9931169055799728251931672 + 0.06154836525312066938147793j)
Evaluation with complex parameter values::
>>> hyp2f1(1+j, 0.75, 10j, 1+5j)
(0.8834833319713479923389638 + 0.7053886880648105068343509j)
Evaluation with `z = 1`::
>>> hyp2f1(-2.5, 3.5, 1.5, 1)
0.0
>>> hyp2f1(-2.5, 3, 4, 1)
0.06926406926406926406926407
>>> hyp2f1(2, 3, 4, 1)
+inf
Evaluation for huge arguments::
>>> hyp2f1((-1,3), 1.75, 4, '1e100')
(7.883714220959876246415651e+32 + 1.365499358305579597618785e+33j)
>>> hyp2f1((-1,3), 1.75, 4, '1e1000000')
(7.883714220959876246415651e+333332 + 1.365499358305579597618785e+333333j)
>>> hyp2f1((-1,3), 1.75, 4, '1e1000000j')
(1.365499358305579597618785e+333333 - 7.883714220959876246415651e+333332j)
An integral representation::
>>> a,b,c,z = -0.5, 1, 2.5, 0.25
>>> g = lambda t: t**(b-1) * (1-t)**(c-b-1) * (1-t*z)**(-a)
>>> gammaprod([c],[b,c-b]) * quad(g, [0,1])
0.9480458814362824478852618
>>> hyp2f1(a,b,c,z)
0.9480458814362824478852618
Verifying the hypergeometric differential equation::
>>> f = lambda z: hyp2f1(a,b,c,z)
>>> chop(z*(1-z)*diff(f,z,2) + (c-(a+b+1)*z)*diff(f,z) - a*b*f(z))
0.0
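Euler's transformation `\,_2F_1(a,b,c,z) = (1-z)^{c-a-b}
\,_2F_1(c-a,c-b,c,z)` provides a similar spot check (reusing the
parameters from above)::
>>> chop(hyp2f1(a,b,c,z) - (1-z)**(c-a-b)*hyp2f1(c-a,c-b,c,z))
0.0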
"""
hyp3f2 = r"""
Gives the generalized hypergeometric function `\,_3F_2`, defined for `|z| < 1`
as
.. math ::
\,_3F_2(a_1,a_2,a_3; b_1,b_2; z) = \sum_{k=0}^{\infty}
\frac{(a_1)_k (a_2)_k (a_3)_k}{(b_1)_k (b_2)_k} \frac{z^k}{k!},
and for `|z| \ge 1` by analytic continuation. The analytic structure of this
function is similar to that of `\,_2F_1`, generally with a singularity at
`z = 1` and a branch cut on `(1, \infty)`.
Evaluation is supported inside, on, and outside
the circle of convergence `|z| = 1`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> hyp3f2(1,2,3,4,5,0.25)
1.083533123380934241548707
>>> hyp3f2(1,2+2j,3,4,5,-10+10j)
(0.1574651066006004632914361 - 0.03194209021885226400892963j)
>>> hyp3f2(1,2,3,4,5,-10)
0.3071141169208772603266489
>>> hyp3f2(1,2,3,4,5,10)
(-0.4857045320523947050581423 - 0.5988311440454888436888028j)
>>> hyp3f2(0.25,1,1,2,1.5,1)
1.157370995096772047567631
>>> (8-pi-2*ln2)/3
1.157370995096772047567631
>>> hyp3f2(1+j,0.5j,2,1,-2j,-1)
(1.74518490615029486475959 + 0.1454701525056682297614029j)
>>> hyp3f2(1+j,0.5j,2,1,-2j,sqrt(j))
(0.9829816481834277511138055 - 0.4059040020276937085081127j)
>>> hyp3f2(-3,2,1,-5,4,1)
1.41
>>> hyp3f2(-3,2,1,-5,4,2)
2.12
Evaluation very close to the unit circle::
>>> hyp3f2(1,2,3,4,5,'1.0001')
(1.564877796743282766872279 - 3.76821518787438186031973e-11j)
>>> hyp3f2(1,2,3,4,5,'1+0.0001j')
(1.564747153061671573212831 + 0.0001305757570366084557648482j)
>>> hyp3f2(1,2,3,4,5,'0.9999')
1.564616644881686134983664
>>> hyp3f2(1,2,3,4,5,'-0.9999')
0.7823896253461678060196207
.. note ::
Evaluation for `|z-1|` small can currently be inaccurate or slow
for some parameter combinations.
For various parameter combinations, `\,_3F_2` admits representation in terms
of hypergeometric functions of lower degree, or in terms of
simpler functions::
>>> for a, b, z in [(1,2,-1), (2,0.5,1)]:
... hyp2f1(a,b,a+b+0.5,z)**2
... hyp3f2(2*a,a+b,2*b,a+b+0.5,2*a+2*b,z)
...
0.4246104461966439006086308
0.4246104461966439006086308
7.111111111111111111111111
7.111111111111111111111111
>>> z = 2+3j
>>> hyp3f2(0.5,1,1.5,2,2,z)
(0.7621440939243342419729144 + 0.4249117735058037649915723j)
>>> 4*(pi-2*ellipe(z))/(pi*z)
(0.7621440939243342419729144 + 0.4249117735058037649915723j)
"""
hyperu = r"""
Gives the Tricomi confluent hypergeometric function `U`, also known as
the Kummer or confluent hypergeometric function of the second kind. This
function gives a second linearly independent solution to the confluent
hypergeometric differential equation (the first is provided by `\,_1F_1` --
see :func:`~mpmath.hyp1f1`).
**Examples**
Evaluation for arbitrary complex arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> hyperu(2,3,4)
0.0625
>>> hyperu(0.25, 5, 1000)
0.1779949416140579573763523
>>> hyperu(0.25, 5, -1000)
(0.1256256609322773150118907 - 0.1256256609322773150118907j)
The `U` function may be singular at `z = 0`::
>>> hyperu(1.5, 2, 0)
+inf
>>> hyperu(1.5, -2, 0)
0.1719434921288400112603671
Verifying the differential equation::
>>> a, b = 1.5, 2
>>> f = lambda z: hyperu(a,b,z)
>>> for z in [-10, 3, 3+4j]:
... chop(z*diff(f,z,2) + (b-z)*diff(f,z) - a*f(z))
...
0.0
0.0
0.0
An integral representation::
>>> a,b,z = 2, 3.5, 4.25
>>> hyperu(a,b,z)
0.06674960718150520648014567
>>> quad(lambda t: exp(-z*t)*t**(a-1)*(1+t)**(b-a-1),[0,inf]) / gamma(a)
0.06674960718150520648014567
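When `b` is not an integer, `U` decomposes into two Kummer functions,
`U(a,b,z) = \frac{\Gamma(1-b)}{\Gamma(a+1-b)} \,_1F_1(a,b,z) +
\frac{\Gamma(b-1)}{\Gamma(a)} z^{1-b} \,_1F_1(a+1-b,2-b,z)`;
a numerical spot check, reusing `a`, `b`, `z` from above::
>>> u1 = gamma(1-b)/gamma(a+1-b)*hyp1f1(a,b,z)
>>> u2 = gamma(b-1)/gamma(a)*z**(1-b)*hyp1f1(a+1-b,2-b,z)
>>> chop(hyperu(a,b,z) - (u1+u2))
0.0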
[1] http://people.math.sfu.ca/~cbm/aands/page_504.htm
"""
hyp2f0 = r"""
Gives the hypergeometric function `\,_2F_0`, defined formally by the
series
.. math ::
\,_2F_0(a,b;;z) = \sum_{n=0}^{\infty} (a)_n (b)_n \frac{z^n}{n!}.
This series usually does not converge. For small enough `z`, it can be viewed
as an asymptotic series that may be summed directly with an appropriate
truncation. When this is not the case, :func:`~mpmath.hyp2f0` gives a regularized sum,
or equivalently, it uses a representation in terms of the
hypergeometric U function [1]. The series also converges when either `a` or `b`
is a nonpositive integer, since it then terminates, reducing to a polynomial
of degree `-a` or `-b`.
**Examples**
Evaluation is supported for arbitrary complex arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> hyp2f0((2,3), 1.25, -100)
0.07095851870980052763312791
>>> hyp2f0((2,3), 1.25, 100)
(-0.03254379032170590665041131 + 0.07269254613282301012735797j)
>>> hyp2f0(-0.75, 1-j, 4j)
(-0.3579987031082732264862155 - 3.052951783922142735255881j)
Even with real arguments, the regularized value of 2F0 is often complex-valued,
but the imaginary part decreases exponentially as `z \to 0`. In the following
example, the first call uses complex evaluation while the second has a small
enough `z` to evaluate using the direct series and thus the returned value
is strictly real (this should be taken to indicate that the imaginary
part is less than ``eps``)::
>>> mp.dps = 15
>>> hyp2f0(1.5, 0.5, 0.05)
(1.04166637647907 + 8.34584913683906e-8j)
>>> hyp2f0(1.5, 0.5, 0.0005)
1.00037535207621
The imaginary part can be retrieved by increasing the working precision::
>>> mp.dps = 80
>>> nprint(hyp2f0(1.5, 0.5, 0.009).imag)
1.23828e-46
In the polynomial case (the series terminating), 2F0 can evaluate exactly::
>>> mp.dps = 15
>>> hyp2f0(-6,-6,2)
291793.0
>>> identify(hyp2f0(-2,1,0.25))
'(5/8)'
The coefficients of the polynomials can be recovered using Taylor expansion::
>>> nprint(taylor(lambda x: hyp2f0(-3,0.5,x), 0, 10))
[1.0, -1.5, 2.25, -1.875, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
>>> nprint(taylor(lambda x: hyp2f0(-4,0.5,x), 0, 10))
[1.0, -2.0, 4.5, -7.5, 6.5625, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
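The U-representation mentioned above can be written
`\,_2F_0(a,b;;z) = w^a \, U(a, a-b+1, w)` with `w = -1/z`; a quick
numerical check, using real `z < 0` to stay clear of branch cut subtleties::
>>> a, b, z = 0.25, 1.5, -0.75
>>> w = -1/z
>>> chop(hyp2f0(a, b, z) - w**a*hyperu(a, a-b+1, w))
0.0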
[1] http://people.math.sfu.ca/~cbm/aands/page_504.htm
"""
gammainc = r"""
``gammainc(z, a=0, b=inf)`` computes the (generalized) incomplete
gamma function with integration limits `[a, b]`:
.. math ::
\Gamma(z,a,b) = \int_a^b t^{z-1} e^{-t} \, dt
The generalized incomplete gamma function reduces to the
following special cases when one or both endpoints are fixed:
* `\Gamma(z,0,\infty)` is the standard ("complete")
gamma function, `\Gamma(z)` (available directly
as the mpmath function :func:`~mpmath.gamma`)
* `\Gamma(z,a,\infty)` is the "upper" incomplete gamma
function, `\Gamma(z,a)`
* `\Gamma(z,0,b)` is the "lower" incomplete gamma
function, `\gamma(z,b)`.
Of course, we have
`\Gamma(z,0,x) + \Gamma(z,x,\infty) = \Gamma(z)`
for all `z` and `x`.
Note however that some authors reverse the order of the
arguments when defining the lower and upper incomplete
gamma function, so one should be careful to get the correct
definition.
If also given the keyword argument ``regularized=True``,
:func:`~mpmath.gammainc` computes the "regularized" incomplete gamma
function
.. math ::
P(z,a,b) = \frac{\Gamma(z,a,b)}{\Gamma(z)}.
**Examples**
We can compare with numerical quadrature to verify that
:func:`~mpmath.gammainc` computes the integral in the definition::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> gammainc(2+3j, 4, 10)
(0.00977212668627705160602312 - 0.0770637306312989892451977j)
>>> quad(lambda t: t**(2+3j-1) * exp(-t), [4, 10])
(0.00977212668627705160602312 - 0.0770637306312989892451977j)
Argument symmetries follow directly from the integral definition::
>>> gammainc(3, 4, 5) + gammainc(3, 5, 4)
0.0
>>> gammainc(3,0,2) + gammainc(3,2,4); gammainc(3,0,4)
1.523793388892911312363331
1.523793388892911312363331
>>> findroot(lambda z: gammainc(2,z,3), 1)
3.0
Evaluation for arbitrarily large arguments::
>>> gammainc(10, 100)
4.083660630910611272288592e-26
>>> gammainc(10, 10000000000000000)
5.290402449901174752972486e-4342944819032375
>>> gammainc(3+4j, 1000000+1000000j)
(-1.257913707524362408877881e-434284 + 2.556691003883483531962095e-434284j)
Evaluation of a generalized incomplete gamma function automatically chooses
the representation that gives a more accurate result, depending on which
parameter is larger::
>>> gammainc(10000000, 3) - gammainc(10000000, 2) # Bad
0.0
>>> gammainc(10000000, 2, 3) # Good
1.755146243738946045873491e+4771204
>>> gammainc(2, 0, 100000001) - gammainc(2, 0, 100000000) # Bad
0.0
>>> gammainc(2, 100000000, 100000001) # Good
4.078258353474186729184421e-43429441
The incomplete gamma functions satisfy simple recurrence
relations::
>>> mp.dps = 25
>>> z, a = mpf(3.5), mpf(2)
>>> gammainc(z+1, a); z*gammainc(z,a) + a**z*exp(-a)
10.60130296933533459267329
10.60130296933533459267329
>>> gammainc(z+1,0,a); z*gammainc(z,0,a) - a**z*exp(-a)
1.030425427232114336470932
1.030425427232114336470932
Evaluation at integers and poles::
>>> gammainc(-3, -4, -5)
(-0.2214577048967798566234192 + 0.0j)
>>> gammainc(-3, 0, 5)
+inf
If `z` is an integer, the recurrence reduces the incomplete gamma
function to `P(a) \exp(-a) + Q(b) \exp(-b)` where `P` and
`Q` are polynomials::
>>> gammainc(1, 2); exp(-2)
0.1353352832366126918939995
0.1353352832366126918939995
>>> mp.dps = 50
>>> identify(gammainc(6, 1, 2), ['exp(-1)', 'exp(-2)'])
'(326*exp(-1) + (-872)*exp(-2))'
The incomplete gamma functions reduce to functions such as
the exponential integral Ei and the error function for special
arguments::
>>> mp.dps = 25
>>> gammainc(0, 4); -ei(-4)
0.00377935240984890647887486
0.00377935240984890647887486
>>> gammainc(0.5, 0, 2); sqrt(pi)*erf(sqrt(2))
1.691806732945198336509541
1.691806732945198336509541
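The ``regularized`` keyword simply divides the result by `\Gamma(z)`,
as a quick consistency check confirms::
>>> chop(gammainc(3.5, 2, regularized=True) - gammainc(3.5, 2)/gamma(3.5))
0.0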
"""
erf = r"""
Computes the error function, `\mathrm{erf}(x)`. The error
function is the normalized antiderivative of the Gaussian function
`\exp(-t^2)`. More precisely,
.. math::
\mathrm{erf}(x) = \frac{2}{\sqrt \pi} \int_0^x \exp(-t^2) \,dt
**Basic examples**
Simple values and limits include::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> erf(0)
0.0
>>> erf(1)
0.842700792949715
>>> erf(-1)
-0.842700792949715
>>> erf(inf)
1.0
>>> erf(-inf)
-1.0
For large real `x`, `\mathrm{erf}(x)` approaches 1 very
rapidly::
>>> erf(3)
0.999977909503001
>>> erf(5)
0.999999999998463
The error function is an odd function::
>>> nprint(chop(taylor(erf, 0, 5)))
[0.0, 1.12838, 0.0, -0.376126, 0.0, 0.112838]
:func:`~mpmath.erf` implements arbitrary-precision evaluation and
supports complex numbers::
>>> mp.dps = 50
>>> erf(0.5)
0.52049987781304653768274665389196452873645157575796
>>> mp.dps = 25
>>> erf(1+j)
(1.316151281697947644880271 + 0.1904534692378346862841089j)
Evaluation is supported for large arguments::
>>> mp.dps = 25
>>> erf('1e1000')
1.0
>>> erf('-1e1000')
-1.0
>>> erf('1e-1000')
1.128379167095512573896159e-1000
>>> erf('1e7j')
(0.0 + 8.593897639029319267398803e+43429448190317j)
>>> erf('1e7+1e7j')
(0.9999999858172446172631323 + 3.728805278735270407053139e-8j)
**Related functions**
See also :func:`~mpmath.erfc`, which is more accurate for large `x`,
and :func:`~mpmath.erfi` which gives the antiderivative of
`\exp(t^2)`.
The Fresnel integrals :func:`~mpmath.fresnels` and :func:`~mpmath.fresnelc`
are also related to the error function.
"""
erfc = r"""
Computes the complementary error function,
`\mathrm{erfc}(x) = 1-\mathrm{erf}(x)`.
This function avoids cancellation that occurs when naively
computing the complementary error function as ``1-erf(x)``::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> 1 - erf(10)
0.0
>>> erfc(10)
2.08848758376254e-45
:func:`~mpmath.erfc` works accurately even for ludicrously large
arguments::
>>> erfc(10**10)
4.3504398860243e-43429448190325182776
Complex arguments are supported::
>>> erfc(500+50j)
(1.19739830969552e-107492 + 1.46072418957528e-107491j)
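Since `\mathrm{erf}(x) + \mathrm{erfc}(x) = 1` by definition, a one-line
sanity check is::
>>> chop(erf(3.5) + erfc(3.5) - 1)
0.0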
"""
erfi = r"""
Computes the imaginary error function, `\mathrm{erfi}(x)`.
The imaginary error function is defined in analogy with the
error function, but with a positive sign in the integrand:
.. math ::
\mathrm{erfi}(x) = \frac{2}{\sqrt \pi} \int_0^x \exp(t^2) \,dt
Whereas the error function rapidly converges to 1 as `x` grows,
the imaginary error function rapidly diverges to infinity.
The functions are related as
`\mathrm{erfi}(x) = -i\,\mathrm{erf}(ix)` for all complex
numbers `x`.
**Examples**
Basic values and limits::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> erfi(0)
0.0
>>> erfi(1)
1.65042575879754
>>> erfi(-1)
-1.65042575879754
>>> erfi(inf)
+inf
>>> erfi(-inf)
-inf
Note the symmetry between erf and erfi::
>>> erfi(3j)
(0.0 + 0.999977909503001j)
>>> erf(3)
0.999977909503001
>>> erf(1+2j)
(-0.536643565778565 - 5.04914370344703j)
>>> erfi(2+1j)
(-5.04914370344703 - 0.536643565778565j)
Large arguments are supported::
>>> erfi(1000)
1.71130938718796e+434291
>>> erfi(10**10)
7.3167287567024e+43429448190325182754
>>> erfi(-10**10)
-7.3167287567024e+43429448190325182754
>>> erfi(1000-500j)
(2.49895233563961e+325717 + 2.6846779342253e+325717j)
>>> erfi(100000j)
(0.0 + 1.0j)
>>> erfi(-100000j)
(0.0 - 1.0j)
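The relation `\mathrm{erfi}(z) = -i\,\mathrm{erf}(iz)` stated above can be
checked at an arbitrary complex point::
>>> z = 2+3j
>>> chop(erfi(z) + j*erf(j*z))
0.0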
"""
erfinv = r"""
Computes the inverse error function, satisfying
.. math ::
\mathrm{erf}(\mathrm{erfinv}(x)) =
\mathrm{erfinv}(\mathrm{erf}(x)) = x.
This function is defined only for `-1 \le x \le 1`.
**Examples**
Special values include::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> erfinv(0)
0.0
>>> erfinv(1)
+inf
>>> erfinv(-1)
-inf
The domain is limited to the standard interval::
>>> erfinv(2)
Traceback (most recent call last):
...
ValueError: erfinv(x) is defined only for -1 <= x <= 1
It is simple to check that :func:`~mpmath.erfinv` computes inverse values of
:func:`~mpmath.erf` as promised::
>>> erf(erfinv(0.75))
0.75
>>> erf(erfinv(-0.995))
-0.995
:func:`~mpmath.erfinv` supports arbitrary-precision evaluation::
>>> mp.dps = 50
>>> x = erf(2)
>>> x
0.99532226501895273416206925636725292861089179704006
>>> erfinv(x)
2.0
A definite integral involving the inverse error function::
>>> mp.dps = 15
>>> quad(erfinv, [0, 1])
0.564189583547756
>>> 1/sqrt(pi)
0.564189583547756
The inverse error function can be used to generate random numbers
with a Gaussian distribution (although this is a relatively
inefficient algorithm)::
>>> nprint([erfinv(2*rand()-1) for n in range(6)]) # doctest: +SKIP
[-0.586747, 1.10233, -0.376796, 0.926037, -0.708142, -0.732012]
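Differentiating the inverse relation gives
`\mathrm{erfinv}'(x) = \frac{\sqrt{\pi}}{2} \exp(\mathrm{erfinv}(x)^2)`,
which is easy to verify numerically::
>>> x = 0.25
>>> chop(diff(erfinv, x) - sqrt(pi)/2*exp(erfinv(x)**2))
0.0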
"""
npdf = r"""
``npdf(x, mu=0, sigma=1)`` evaluates the probability density
function of a normal distribution with mean value `\mu`
and variance `\sigma^2`.
Elementary properties of the probability distribution can
be verified using numerical integration::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> quad(npdf, [-inf, inf])
1.0
>>> quad(lambda x: npdf(x, 3), [3, inf])
0.5
>>> quad(lambda x: npdf(x, 3, 2), [3, inf])
0.5
See also :func:`~mpmath.ncdf`, which gives the cumulative
distribution.
"""
ncdf = r"""
``ncdf(x, mu=0, sigma=1)`` evaluates the cumulative distribution
function of a normal distribution with mean value `\mu`
and variance `\sigma^2`.
See also :func:`~mpmath.npdf`, which gives the probability density.
Elementary properties include::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> ncdf(pi, mu=pi)
0.5
>>> ncdf(-inf)
0.0
>>> ncdf(+inf)
1.0
The cumulative distribution is the antiderivative of the density
function with the same `\mu` and `\sigma`::
>>> mp.dps = 15
>>> diff(ncdf, 2)
0.053990966513188
>>> npdf(2)
0.053990966513188
>>> diff(lambda x: ncdf(x, 1, 0.5), 0)
0.107981933026376
>>> npdf(0, 1, 0.5)
0.107981933026376
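For reference, the standard normal cumulative distribution can also be
written in terms of the error function,
`\Phi(x) = \frac{1}{2}\left(1 + \mathrm{erf}(x/\sqrt{2})\right)`::
>>> chop(ncdf(1.25) - (1+erf(1.25/sqrt(2)))/2)
0.0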
"""
expint = r"""
``expint(n, z)`` gives the generalized exponential integral
or En-function,
.. math ::
\mathrm{E}_n(z) = \int_1^{\infty} \frac{e^{-zt}}{t^n} dt,
where `n` and `z` may both be complex numbers. The case with `n = 1` is
also given by :func:`~mpmath.e1`.
**Examples**
Evaluation at real and complex arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> expint(1, 6.25)
0.0002704758872637179088496194
>>> expint(-3, 2+3j)
(0.00299658467335472929656159 + 0.06100816202125885450319632j)
>>> expint(2+3j, 4-5j)
(0.001803529474663565056945248 - 0.002235061547756185403349091j)
At negative integer values of `n`, `E_n(z)` reduces to a
rational-exponential function::
>>> f = lambda n, z: fac(n)*sum(z**k/fac(k-1) for k in range(1,n+2))/\
... exp(z)/z**(n+2)
>>> n = 3
>>> z = 1/pi
>>> expint(-n,z)
584.2604820613019908668219
>>> f(n,z)
584.2604820613019908668219
>>> n = 5
>>> expint(-n,z)
115366.5762594725451811138
>>> f(n,z)
115366.5762594725451811138
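The `\mathrm{E}_n` functions satisfy the recurrence
`n \mathrm{E}_{n+1}(z) = e^{-z} - z \mathrm{E}_n(z)`; a quick numerical check::
>>> n, z = 3, 2.5
>>> chop(n*expint(n+1, z) - (exp(-z) - z*expint(n, z)))
0.0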
"""
e1 = r"""
Computes the exponential integral `\mathrm{E}_1(z)`, given by
.. math ::
\mathrm{E}_1(z) = \int_z^{\infty} \frac{e^{-t}}{t} dt.
This is equivalent to :func:`~mpmath.expint` with `n = 1`.
**Examples**
Two ways to evaluate this function::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> e1(6.25)
0.0002704758872637179088496194
>>> expint(1,6.25)
0.0002704758872637179088496194
The E1-function is essentially the same as the Ei-function (:func:`~mpmath.ei`)
with negated argument, except for an imaginary branch cut term::
>>> e1(2.5)
0.02491491787026973549562801
>>> -ei(-2.5)
0.02491491787026973549562801
>>> e1(-2.5)
(-7.073765894578600711923552 - 3.141592653589793238462643j)
>>> -ei(2.5)
-7.073765894578600711923552
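Differentiation recovers the negated integrand, `\mathrm{E}_1'(z) = -e^{-z}/z`::
>>> chop(diff(e1, 2.5) + exp(-2.5)/2.5)
0.0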
"""
ei = r"""
Computes the exponential integral or Ei-function, `\mathrm{Ei}(x)`.
The exponential integral is defined as
.. math ::
\mathrm{Ei}(x) = \int_{-\infty}^x \frac{e^t}{t} \, dt.
When the integration range includes `t = 0`, the exponential
integral is interpreted as providing the Cauchy principal value.
For real `x`, the Ei-function behaves roughly like
`\mathrm{Ei}(x) \approx \exp(x) + \log(|x|)`.
The Ei-function is related to the more general family of exponential
integral functions denoted by `E_n`, which are available as :func:`~mpmath.expint`.
**Basic examples**
Some basic values and limits are::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> ei(0)
-inf
>>> ei(1)
1.89511781635594
>>> ei(inf)
+inf
>>> ei(-inf)
0.0
For `x < 0`, the defining integral can be evaluated
numerically as a reference::
>>> ei(-4)
-0.00377935240984891
>>> quad(lambda t: exp(t)/t, [-inf, -4])
-0.00377935240984891
:func:`~mpmath.ei` supports complex arguments and arbitrary
precision evaluation::
>>> mp.dps = 50
>>> ei(pi)
10.928374389331410348638445906907535171566338835056
>>> mp.dps = 25
>>> ei(3+4j)
(-4.154091651642689822535359 + 4.294418620024357476985535j)
**Related functions**
The exponential integral is closely related to the logarithmic
integral. See :func:`~mpmath.li` for additional information.
The exponential integral is related to the hyperbolic
and trigonometric integrals (see :func:`~mpmath.chi`, :func:`~mpmath.shi`,
:func:`~mpmath.ci`, :func:`~mpmath.si`) similarly to how the ordinary
exponential function is related to the hyperbolic and
trigonometric functions::
>>> mp.dps = 15
>>> ei(3)
9.93383257062542
>>> chi(3) + shi(3)
9.93383257062542
>>> chop(ci(3j) - j*si(3j) - pi*j/2)
9.93383257062542
Beware that logarithmic corrections, as in the last example
above, are required to obtain the correct branch in general.
For details, see [1].
The exponential integral is also a special case of the
hypergeometric function `\,_2F_2`::
>>> z = 0.6
>>> z*hyper([1,1],[2,2],z) + (ln(z)-ln(1/z))/2 + euler
0.769881289937359
>>> ei(z)
0.769881289937359
**References**
1. Relations between Ei and other functions:
http://functions.wolfram.com/GammaBetaErf/ExpIntegralEi/27/01/
2. Abramowitz & Stegun, section 5:
http://www.math.sfu.ca/~cbm/aands/page_228.htm
3. Asymptotic expansion for Ei:
http://mathworld.wolfram.com/En-Function.html
"""
li = r"""
Computes the logarithmic integral or li-function
`\mathrm{li}(x)`, defined by
.. math ::
\mathrm{li}(x) = \int_0^x \frac{1}{\log t} \, dt
The logarithmic integral has a singularity at `x = 1`.
Alternatively, ``li(x, offset=True)`` computes the offset
logarithmic integral (used in number theory)
.. math ::
\mathrm{Li}(x) = \int_2^x \frac{1}{\log t} \, dt.
These two functions are related via the simple identity
`\mathrm{Li}(x) = \mathrm{li}(x) - \mathrm{li}(2)`.
The logarithmic integral should also not be confused with
the polylogarithm (also denoted by Li), which is implemented
as :func:`~mpmath.polylog`.
**Examples**
Some basic values and limits::
>>> from mpmath import *
>>> mp.dps = 30; mp.pretty = True
>>> li(0)
0.0
>>> li(1)
-inf
>>> li(2)
1.04516378011749278484458888919
>>> findroot(li, 2)
1.45136923488338105028396848589
>>> li(inf)
+inf
>>> li(2, offset=True)
0.0
>>> li(1, offset=True)
-inf
>>> li(0, offset=True)
-1.04516378011749278484458888919
>>> li(10, offset=True)
5.12043572466980515267839286347
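The identity `\mathrm{Li}(x) = \mathrm{li}(x) - \mathrm{li}(2)` relating
the two versions can be checked directly::
>>> chop(li(10, offset=True) - (li(10) - li(2)))
0.0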
The logarithmic integral can be evaluated for arbitrary
complex arguments::
>>> mp.dps = 20
>>> li(3+4j)
(3.1343755504645775265 + 2.6769247817778742392j)
The logarithmic integral is related to the exponential integral::
>>> ei(log(3))
2.1635885946671919729
>>> li(3)
2.1635885946671919729
The logarithmic integral grows like `O(x/\log(x))`::
>>> mp.dps = 15
>>> x = 10**100
>>> x/log(x)
4.34294481903252e+97
>>> li(x)
4.3619719871407e+97
The prime number theorem states that the number of primes less
than `x` is asymptotic to `\mathrm{Li}(x)` (equivalently
`\mathrm{li}(x)`). For example, it is known that there are
exactly 1,925,320,391,606,803,968,923 prime numbers less than
`10^{23}` [1]. The logarithmic integral provides a very
accurate estimate::
>>> li(10**23, offset=True)
1.92532039161405e+21
A definite integral is::
>>> quad(li, [0, 1])
-0.693147180559945
>>> -ln(2)
-0.693147180559945
**References**
1. http://mathworld.wolfram.com/PrimeCountingFunction.html
2. http://mathworld.wolfram.com/LogarithmicIntegral.html
"""
ci = r"""
Computes the cosine integral,
.. math ::
\mathrm{Ci}(x) = -\int_x^{\infty} \frac{\cos t}{t}\,dt
= \gamma + \log x + \int_0^x \frac{\cos t - 1}{t}\,dt
**Examples**
Some values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> ci(0)
-inf
>>> ci(1)
0.3374039229009681346626462
>>> ci(pi)
0.07366791204642548599010096
>>> ci(inf)
0.0
>>> ci(-inf)
(0.0 + 3.141592653589793238462643j)
>>> ci(2+3j)
(1.408292501520849518759125 - 2.983617742029605093121118j)
The cosine integral behaves roughly like the sinc function
(see :func:`~mpmath.sinc`) for large real `x`::
>>> ci(10**10)
-4.875060251748226537857298e-11
>>> sinc(10**10)
-4.875060250875106915277943e-11
>>> chop(limit(ci, inf))
0.0
It has infinitely many roots on the positive real axis::
>>> findroot(ci, 1)
0.6165054856207162337971104
>>> findroot(ci, 2)
3.384180422551186426397851
Evaluation is supported for `z` anywhere in the complex plane::
>>> ci(10**6*(1+j))
(4.449410587611035724984376e+434287 + 9.75744874290013526417059e+434287j)
We can evaluate the defining integral as a reference::
>>> mp.dps = 15
>>> -quadosc(lambda t: cos(t)/t, [5, inf], omega=1)
-0.190029749656644
>>> ci(5)
-0.190029749656644
Some infinite series can be evaluated using the
cosine integral::
>>> nsum(lambda k: (-1)**k/(fac(2*k)*(2*k)), [1,inf])
-0.239811742000565
>>> ci(1) - euler
-0.239811742000565
"""
si = r"""
Computes the sine integral,
.. math ::
\mathrm{Si}(x) = \int_0^x \frac{\sin t}{t}\,dt.
The sine integral is thus the antiderivative of the sinc
function (see :func:`~mpmath.sinc`).
**Examples**
Some values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> si(0)
0.0
>>> si(1)
0.9460830703671830149413533
>>> si(-1)
-0.9460830703671830149413533
>>> si(pi)
1.851937051982466170361053
>>> si(inf)
1.570796326794896619231322
>>> si(-inf)
-1.570796326794896619231322
>>> si(2+3j)
(4.547513889562289219853204 + 1.399196580646054789459839j)
The sine integral approaches `\pi/2` for large real `x`::
>>> si(10**10)
1.570796326707584656968511
>>> pi/2
1.570796326794896619231322
Evaluation is supported for `z` anywhere in the complex plane::
>>> si(10**6*(1+j))
(-9.75744874290013526417059e+434287 + 4.449410587611035724984376e+434287j)
We can evaluate the defining integral as a reference::
>>> mp.dps = 15
>>> quad(sinc, [0, 5])
1.54993124494467
>>> si(5)
1.54993124494467
Some infinite series can be evaluated using the
sine integral::
>>> nsum(lambda k: (-1)**k/(fac(2*k+1)*(2*k+1)), [0,inf])
0.946083070367183
>>> si(1)
0.946083070367183
"""
chi = r"""
Computes the hyperbolic cosine integral, defined
in analogy with the cosine integral (see :func:`~mpmath.ci`) as
.. math ::
\mathrm{Chi}(x) = -\int_x^{\infty} \frac{\cosh t}{t}\,dt
= \gamma + \log x + \int_0^x \frac{\cosh t - 1}{t}\,dt
Some values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> chi(0)
-inf
>>> chi(1)
0.8378669409802082408946786
>>> chi(inf)
+inf
>>> findroot(chi, 0.5)
0.5238225713898644064509583
>>> chi(2+3j)
(-0.1683628683277204662429321 + 2.625115880451325002151688j)
Evaluation is supported for `z` anywhere in the complex plane::
>>> chi(10**6*(1+j))
(4.449410587611035724984376e+434287 - 9.75744874290013526417059e+434287j)
"""
shi = r"""
Computes the hyperbolic sine integral, defined
in analogy with the sine integral (see :func:`~mpmath.si`) as
.. math ::
\mathrm{Shi}(x) = \int_0^x \frac{\sinh t}{t}\,dt.
Some values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> shi(0)
0.0
>>> shi(1)
1.057250875375728514571842
>>> shi(-1)
-1.057250875375728514571842
>>> shi(inf)
+inf
>>> shi(2+3j)
(-0.1931890762719198291678095 + 2.645432555362369624818525j)
Evaluation is supported for `z` anywhere in the complex plane::
>>> shi(10**6*(1+j))
(4.449410587611035724984376e+434287 - 9.75744874290013526417059e+434287j)
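For positive real arguments, the hyperbolic integrals recombine into the
exponential integral, `\mathrm{Shi}(x) + \mathrm{Chi}(x) = \mathrm{Ei}(x)`
(this is also noted in the documentation of :func:`~mpmath.ei`)::
>>> chop(shi(2) + chi(2) - ei(2))
0.0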
"""
fresnels = r"""
Computes the Fresnel sine integral
.. math ::
S(x) = \int_0^x \sin\left(\frac{\pi t^2}{2}\right) \,dt
Note that some sources define this function
without the normalization factor `\pi/2`.
**Examples**
Some basic values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> fresnels(0)
0.0
>>> fresnels(inf)
0.5
>>> fresnels(-inf)
-0.5
>>> fresnels(1)
0.4382591473903547660767567
>>> fresnels(1+2j)
(36.72546488399143842838788 + 15.58775110440458732748279j)
Comparing with the definition::
>>> fresnels(3)
0.4963129989673750360976123
>>> quad(lambda t: sin(pi*t**2/2), [0,3])
0.4963129989673750360976123
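By the fundamental theorem of calculus, the derivative of `S(x)` is the
integrand itself, which makes for an easy numerical check::
>>> x = 1.25
>>> chop(diff(fresnels, x) - sin(pi*x**2/2))
0.0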
"""
fresnelc = r"""
Computes the Fresnel cosine integral
.. math ::
C(x) = \int_0^x \cos\left(\frac{\pi t^2}{2}\right) \,dt
Note that some sources define this function
without the normalization factor `\pi/2`.
**Examples**
Some basic values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> fresnelc(0)
0.0
>>> fresnelc(inf)
0.5
>>> fresnelc(-inf)
-0.5
>>> fresnelc(1)
0.7798934003768228294742064
>>> fresnelc(1+2j)
(16.08787137412548041729489 - 36.22568799288165021578758j)
Comparing with the definition::
>>> fresnelc(3)
0.6057207892976856295561611
>>> quad(lambda t: cos(pi*t**2/2), [0,3])
0.6057207892976856295561611
"""
airyai = r"""
Computes the Airy function `\operatorname{Ai}(z)`, which is
the solution of the Airy differential equation `f''(z) - z f(z) = 0`
with initial conditions
.. math ::
\operatorname{Ai}(0) =
\frac{1}{3^{2/3}\Gamma\left(\frac{2}{3}\right)}
\operatorname{Ai}'(0) =
-\frac{1}{3^{1/3}\Gamma\left(\frac{1}{3}\right)}.
Other common ways of defining the Ai-function include
integrals such as
.. math ::
\operatorname{Ai}(x) = \frac{1}{\pi}
\int_0^{\infty} \cos\left(\frac{1}{3}t^3+xt\right) dt
\qquad x \in \mathbb{R}
\operatorname{Ai}(z) = \frac{\sqrt{3}}{2\pi}
\int_0^{\infty}
\exp\left(-\frac{t^3}{3}-\frac{z^3}{3t^3}\right) dt.
The Ai-function is an entire function with a turning point,
behaving roughly like a slowly decaying sine wave for `z < 0` and
like a rapidly decreasing exponential for `z > 0`.
A second solution of the Airy differential equation
is given by `\operatorname{Bi}(z)` (see :func:`~mpmath.airybi`).
Optionally, with *derivative=alpha*, :func:`~mpmath.airyai` can compute the
`\alpha`-th order fractional derivative with respect to `z`.
For `\alpha = n = 1,2,3,\ldots` this gives the derivative
`\operatorname{Ai}^{(n)}(z)`, and for `\alpha = -n = -1,-2,-3,\ldots`
this gives the `n`-fold iterated integral
.. math ::
f_0(z) = \operatorname{Ai}(z)
f_n(z) = \int_0^z f_{n-1}(t) dt.
The Ai-function has infinitely many zeros, all located along the
negative half of the real axis. They can be computed with
:func:`~mpmath.airyaizero`.
**Plots**
.. literalinclude :: /modules/mpmath/plots/ai.py
.. image :: /modules/mpmath/plots/ai.png
.. literalinclude :: /modules/mpmath/plots/ai_c.py
.. image :: /modules/mpmath/plots/ai_c.png
**Basic examples**
Limits and values include::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> airyai(0); 1/(power(3,'2/3')*gamma('2/3'))
0.3550280538878172392600632
0.3550280538878172392600632
>>> airyai(1)
0.1352924163128814155241474
>>> airyai(-1)
0.5355608832923521187995166
>>> airyai(inf); airyai(-inf)
0.0
0.0
Evaluation is supported for large magnitudes of the argument::
>>> airyai(-100)
0.1767533932395528780908311
>>> airyai(100)
2.634482152088184489550553e-291
>>> airyai(50+50j)
(-5.31790195707456404099817e-68 - 1.163588003770709748720107e-67j)
>>> airyai(-50+50j)
(1.041242537363167632587245e+158 + 3.347525544923600321838281e+157j)
Huge arguments are also fine::
>>> airyai(10**10)
1.162235978298741779953693e-289529654602171
>>> airyai(-10**10)
0.0001736206448152818510510181
>>> w = airyai(10**10*(1+j))
>>> w.real
5.711508683721355528322567e-186339621747698
>>> w.imag
1.867245506962312577848166e-186339621747697
The first root of the Ai-function is::
>>> findroot(airyai, -2)
-2.338107410459767038489197
>>> airyaizero(1)
-2.338107410459767038489197
**Properties and relations**
Verifying the Airy differential equation::
>>> for z in [-3.4, 0, 2.5, 1+2j]:
... chop(airyai(z,2) - z*airyai(z))
...
0.0
0.0
0.0
0.0
The first few terms of the Taylor series expansion around `z = 0`
(every third term is zero)::
>>> nprint(taylor(airyai, 0, 5))
[0.355028, -0.258819, 0.0, 0.0591713, -0.0215683, 0.0]
The Airy functions satisfy the Wronskian relation
`\operatorname{Ai}(z) \operatorname{Bi}'(z) -
\operatorname{Ai}'(z) \operatorname{Bi}(z) = 1/\pi`::
>>> z = -0.5
>>> airyai(z)*airybi(z,1) - airyai(z,1)*airybi(z)
0.3183098861837906715377675
>>> 1/pi
0.3183098861837906715377675
The Airy functions can be expressed in terms of Bessel
functions of order `\pm 1/3`. For `\Re[z] \le 0`, we have::
>>> z = -3
>>> airyai(z)
-0.3788142936776580743472439
>>> y = 2*power(-z,'3/2')/3
>>> (sqrt(-z) * (besselj('1/3',y) + besselj('-1/3',y)))/3
-0.3788142936776580743472439
**Derivatives and integrals**
Derivatives of the Ai-function (directly and using :func:`~mpmath.diff`)::
>>> airyai(-3,1); diff(airyai,-3)
0.3145837692165988136507873
0.3145837692165988136507873
>>> airyai(-3,2); diff(airyai,-3,2)
1.136442881032974223041732
1.136442881032974223041732
>>> airyai(1000,1); diff(airyai,1000)
-2.943133917910336090459748e-9156
-2.943133917910336090459748e-9156
Several derivatives at `z = 0`::
>>> airyai(0,0); airyai(0,1); airyai(0,2)
0.3550280538878172392600632
-0.2588194037928067984051836
0.0
>>> airyai(0,3); airyai(0,4); airyai(0,5)
0.3550280538878172392600632
-0.5176388075856135968103671
0.0
>>> airyai(0,15); airyai(0,16); airyai(0,17)
1292.30211615165475090663
-3188.655054727379756351861
0.0
The integral of the Ai-function::
>>> airyai(3,-1); quad(airyai, [0,3])
0.3299203760070217725002701
0.3299203760070217725002701
>>> airyai(-10,-1); quad(airyai, [0,-10])
-0.765698403134212917425148
-0.765698403134212917425148
Integrals of high or fractional order::
>>> airyai(-2,0.5); differint(airyai,-2,0.5,0)
(0.0 + 0.2453596101351438273844725j)
(0.0 + 0.2453596101351438273844725j)
>>> airyai(-2,-4); differint(airyai,-2,-4,0)
0.2939176441636809580339365
0.2939176441636809580339365
>>> airyai(0,-1); airyai(0,-2); airyai(0,-3)
0.0
0.0
0.0
Integrals of the Ai-function can be evaluated at limit points::
>>> airyai(-1000000,-1); airyai(-inf,-1)
-0.6666843728311539978751512
-0.6666666666666666666666667
>>> airyai(10,-1); airyai(+inf,-1)
0.3333333332991690159427932
0.3333333333333333333333333
>>> airyai(+inf,-2); airyai(+inf,-3)
+inf
+inf
>>> airyai(-1000000,-2); airyai(-inf,-2)
666666.4078472650651209742
+inf
>>> airyai(-1000000,-3); airyai(-inf,-3)
-333333074513.7520264995733
-inf
**References**
1. [DLMF]_ Chapter 9: Airy and Related Functions
2. [WolframFunctions]_ section: Bessel-Type Functions
"""
airybi = r"""
Computes the Airy function `\operatorname{Bi}(z)`, which is
the solution of the Airy differential equation `f''(z) - z f(z) = 0`
with initial conditions
.. math ::
\operatorname{Bi}(0) =
\frac{1}{3^{1/6}\Gamma\left(\frac{2}{3}\right)}
\operatorname{Bi}'(0) =
\frac{3^{1/6}}{\Gamma\left(\frac{1}{3}\right)}.
Like the Ai-function (see :func:`~mpmath.airyai`), the Bi-function
is oscillatory for `z < 0`, but it grows rather than decreases
for `z > 0`.
Optionally, as for :func:`~mpmath.airyai`, derivatives, integrals
and fractional derivatives can be computed with the *derivative*
parameter.
The Bi-function has infinitely many zeros along the negative
half-axis, as well as complex zeros, which can all be computed
with :func:`~mpmath.airybizero`.
**Plots**
.. literalinclude :: /modules/mpmath/plots/bi.py
.. image :: /modules/mpmath/plots/bi.png
.. literalinclude :: /modules/mpmath/plots/bi_c.py
.. image :: /modules/mpmath/plots/bi_c.png
**Basic examples**
Limits and values include::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> airybi(0); 1/(power(3,'1/6')*gamma('2/3'))
0.6149266274460007351509224
0.6149266274460007351509224
>>> airybi(1)
1.207423594952871259436379
>>> airybi(-1)
0.10399738949694461188869
>>> airybi(inf); airybi(-inf)
+inf
0.0
Evaluation is supported for large magnitudes of the argument::
>>> airybi(-100)
0.02427388768016013160566747
>>> airybi(100)
6.041223996670201399005265e+288
>>> airybi(50+50j)
(-5.322076267321435669290334e+63 + 1.478450291165243789749427e+65j)
>>> airybi(-50+50j)
(-3.347525544923600321838281e+157 + 1.041242537363167632587245e+158j)
Huge arguments::
>>> airybi(10**10)
1.369385787943539818688433e+289529654602165
>>> airybi(-10**10)
0.001775656141692932747610973
>>> w = airybi(10**10*(1+j))
>>> w.real
-6.559955931096196875845858e+186339621747689
>>> w.imag
-6.822462726981357180929024e+186339621747690
The first real root of the Bi-function is::
>>> findroot(airybi, -1); airybizero(1)
-1.17371322270912792491998
-1.17371322270912792491998
**Properties and relations**
Verifying the Airy differential equation::
>>> for z in [-3.4, 0, 2.5, 1+2j]:
... chop(airybi(z,2) - z*airybi(z))
...
0.0
0.0
0.0
0.0
The first few terms of the Taylor series expansion around `z = 0`
(every third term is zero)::
>>> nprint(taylor(airybi, 0, 5))
[0.614927, 0.448288, 0.0, 0.102488, 0.0373574, 0.0]
The Airy functions can be expressed in terms of Bessel
functions of order `\pm 1/3`. For `\Re[z] \le 0`, we have::
>>> z = -3
>>> airybi(z)
-0.1982896263749265432206449
>>> p = 2*power(-z,'3/2')/3
>>> sqrt(-mpf(z)/3)*(besselj('-1/3',p) - besselj('1/3',p))
-0.1982896263749265432206449
**Derivatives and integrals**
Derivatives of the Bi-function (directly and using :func:`~mpmath.diff`)::
>>> airybi(-3,1); diff(airybi,-3)
-0.675611222685258537668032
-0.675611222685258537668032
>>> airybi(-3,2); diff(airybi,-3,2)
0.5948688791247796296619346
0.5948688791247796296619346
>>> airybi(1000,1); diff(airybi,1000)
1.710055114624614989262335e+9156
1.710055114624614989262335e+9156
Several derivatives at `z = 0`::
>>> airybi(0,0); airybi(0,1); airybi(0,2)
0.6149266274460007351509224
0.4482883573538263579148237
0.0
>>> airybi(0,3); airybi(0,4); airybi(0,5)
0.6149266274460007351509224
0.8965767147076527158296474
0.0
>>> airybi(0,15); airybi(0,16); airybi(0,17)
2238.332923903442675949357
5522.912562599140729510628
0.0
The integral of the Bi-function::
>>> airybi(3,-1); quad(airybi, [0,3])
10.06200303130620056316655
10.06200303130620056316655
>>> airybi(-10,-1); quad(airybi, [0,-10])
-0.01504042480614002045135483
-0.01504042480614002045135483
Integrals of high or fractional order::
>>> airybi(-2,0.5); differint(airybi, -2, 0.5, 0)
(0.0 + 0.5019859055341699223453257j)
(0.0 + 0.5019859055341699223453257j)
>>> airybi(-2,-4); differint(airybi,-2,-4,0)
0.2809314599922447252139092
0.2809314599922447252139092
>>> airybi(0,-1); airybi(0,-2); airybi(0,-3)
0.0
0.0
0.0
Integrals of the Bi-function can be evaluated at limit points::
>>> airybi(-1000000,-1); airybi(-inf,-1)
0.000002191261128063434047966873
0.0
>>> airybi(10,-1); airybi(+inf,-1)
147809803.1074067161675853
+inf
>>> airybi(+inf,-2); airybi(+inf,-3)
+inf
+inf
>>> airybi(-1000000,-2); airybi(-inf,-2)
0.4482883750599908479851085
0.4482883573538263579148237
>>> gamma('2/3')*power(3,'2/3')/(2*pi)
0.4482883573538263579148237
>>> airybi(-100000,-3); airybi(-inf,-3)
-44828.52827206932872493133
-inf
>>> airybi(-100000,-4); airybi(-inf,-4)
2241411040.437759489540248
+inf
"""
airyaizero = r"""
Gives the `k`-th zero of the Airy Ai-function,
i.e. the `k`-th number `a_k` ordered by magnitude for which
`\operatorname{Ai}(a_k) = 0`.
Optionally, with *derivative=1*, the corresponding
zero `a'_k` of the derivative function, i.e.
`\operatorname{Ai}'(a'_k) = 0`, is computed.
**Examples**
Some values of `a_k`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> airyaizero(1)
-2.338107410459767038489197
>>> airyaizero(2)
-4.087949444130970616636989
>>> airyaizero(3)
-5.520559828095551059129856
>>> airyaizero(1000)
-281.0315196125215528353364
Some values of `a'_k`::
>>> airyaizero(1,1)
-1.018792971647471089017325
>>> airyaizero(2,1)
-3.248197582179836537875424
>>> airyaizero(3,1)
-4.820099211178735639400616
>>> airyaizero(1000,1)
-280.9378080358935070607097
Verification::
>>> chop(airyai(airyaizero(1)))
0.0
>>> chop(airyai(airyaizero(1,1),1))
0.0
"""
airybizero = r"""
With *complex=False*, gives the `k`-th real zero of the Airy Bi-function,
i.e. the `k`-th number `b_k` ordered by magnitude for which
`\operatorname{Bi}(b_k) = 0`.
With *complex=True*, gives the `k`-th complex zero in the upper
half plane `\beta_k`. Also the conjugate `\overline{\beta_k}`
is a zero.
Optionally, with *derivative=1*, the corresponding
zero `b'_k` or `\beta'_k` of the derivative function, i.e.
`\operatorname{Bi}'(b'_k) = 0` or `\operatorname{Bi}'(\beta'_k) = 0`,
is computed.
**Examples**
Some values of `b_k`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> airybizero(1)
-1.17371322270912792491998
>>> airybizero(2)
-3.271093302836352715680228
>>> airybizero(3)
-4.830737841662015932667709
>>> airybizero(1000)
-280.9378112034152401578834
Some values of `b'_k`::
>>> airybizero(1,1)
-2.294439682614123246622459
>>> airybizero(2,1)
-4.073155089071828215552369
>>> airybizero(3,1)
-5.512395729663599496259593
>>> airybizero(1000,1)
-281.0315164471118527161362
Some values of `\beta_k`::
>>> airybizero(1,complex=True)
(0.9775448867316206859469927 + 2.141290706038744575749139j)
>>> airybizero(2,complex=True)
(1.896775013895336346627217 + 3.627291764358919410440499j)
>>> airybizero(3,complex=True)
(2.633157739354946595708019 + 4.855468179979844983174628j)
>>> airybizero(1000,complex=True)
(140.4978560578493018899793 + 243.3907724215792121244867j)
Some values of `\beta'_k`::
>>> airybizero(1,1,complex=True)
(0.2149470745374305676088329 + 1.100600143302797880647194j)
>>> airybizero(2,1,complex=True)
(1.458168309223507392028211 + 2.912249367458445419235083j)
>>> airybizero(3,1,complex=True)
(2.273760763013482299792362 + 4.254528549217097862167015j)
>>> airybizero(1000,1,complex=True)
(140.4509972835270559730423 + 243.3096175398562811896208j)
Verification::
>>> chop(airybi(airybizero(1)))
0.0
>>> chop(airybi(airybizero(1,1),1))
0.0
>>> u = airybizero(1,complex=True)
>>> chop(airybi(u))
0.0
>>> chop(airybi(conj(u)))
0.0
The complex zeros (in the upper and lower half-planes respectively)
asymptotically approach the rays `z = R \exp(\pm i \pi /3)`::
>>> arg(airybizero(1,complex=True))
1.142532510286334022305364
>>> arg(airybizero(1000,complex=True))
1.047271114786212061583917
>>> arg(airybizero(1000000,complex=True))
1.047197624741816183341355
>>> pi/3
1.047197551196597746154214
"""
ellipk = r"""
Evaluates the complete elliptic integral of the first kind,
`K(m)`, defined by
.. math ::
K(m) = \int_0^{\pi/2} \frac{dt}{\sqrt{1-m \sin^2 t}} \, = \,
\frac{\pi}{2} \,_2F_1\left(\frac{1}{2}, \frac{1}{2}, 1, m\right).
Note that the argument is the parameter `m = k^2`,
not the modulus `k` which is sometimes used.
**Plots**
.. literalinclude :: /modules/mpmath/plots/ellipk.py
.. image :: /modules/mpmath/plots/ellipk.png
**Examples**
Values and limits include::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> ellipk(0)
1.570796326794896619231322
>>> ellipk(inf)
(0.0 + 0.0j)
>>> ellipk(-inf)
0.0
>>> ellipk(1)
+inf
>>> ellipk(-1)
1.31102877714605990523242
>>> ellipk(2)
(1.31102877714605990523242 - 1.31102877714605990523242j)
Verifying the defining integral and hypergeometric
representation::
>>> ellipk(0.5)
1.85407467730137191843385
>>> quad(lambda t: (1-0.5*sin(t)**2)**-0.5, [0, pi/2])
1.85407467730137191843385
>>> pi/2*hyp2f1(0.5,0.5,1,0.5)
1.85407467730137191843385
Evaluation is supported for arbitrary complex `m`::
>>> ellipk(3+4j)
(0.9111955638049650086562171 + 0.6313342832413452438845091j)
A definite integral::
>>> quad(ellipk, [0, 1])
2.0
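`K(m)` is essentially an arithmetic-geometric mean in disguise (see
:func:`~mpmath.agm`), via `K(m) = \pi/(2\,\mathrm{agm}(1, \sqrt{1-m}))`;
a quick numerical check::
>>> m = 0.3
>>> chop(ellipk(m) - pi/(2*agm(1, sqrt(1-m))))
0.0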
"""
agm = r"""
``agm(a, b)`` computes the arithmetic-geometric mean of `a` and
`b`, defined as the limit of the following iteration:
.. math ::
a_0 = a
b_0 = b
a_{n+1} = \frac{a_n+b_n}{2}
b_{n+1} = \sqrt{a_n b_n}
This function can be called with a single argument, computing
`\mathrm{agm}(a,1) = \mathrm{agm}(1,a)`.
**Examples**
It is a well-known theorem that the geometric mean of
two distinct positive numbers is less than the arithmetic
mean. It follows that the arithmetic-geometric mean lies
between the two means::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> a = mpf(3)
>>> b = mpf(4)
>>> sqrt(a*b)
3.46410161513775
>>> agm(a,b)
3.48202767635957
>>> (a+b)/2
3.5
The arithmetic-geometric mean is scale-invariant::
>>> agm(10*e, 10*pi)
29.261085515723
>>> 10*agm(e, pi)
29.261085515723
For large `x`, `\mathrm{agm}(1,x) \approx \pi x/(2 \log(4x))`, so `x`
serves only as a rough order-of-magnitude estimate::
>>> agm(10**10)
643448704.760133
>>> agm(10**50)
1.34814309345871e+48
For tiny `x`, `\mathrm{agm}(1,x) \approx -\pi/(2 \log(x/4))`::
>>> agm('0.01')
0.262166887202249
>>> -pi/2/log('0.0025')
0.262172347753122
The arithmetic-geometric mean can also be computed for complex
numbers::
>>> agm(3, 2+j)
(2.51055133276184 + 0.547394054060638j)
The AGM iteration converges very quickly (each step doubles
the number of correct digits), so :func:`~mpmath.agm` supports efficient
high-precision evaluation::
>>> mp.dps = 10000
>>> a = agm(1,2)
>>> str(a)[-10:]
'1679581912'
**Mathematical relations**
The arithmetic-geometric mean may be used to evaluate the
following two parametric definite integrals:
.. math ::
I_1 = \int_0^{\infty}
\frac{1}{\sqrt{(x^2+a^2)(x^2+b^2)}} \,dx
I_2 = \int_0^{\pi/2}
\frac{1}{\sqrt{a^2 \cos^2(x) + b^2 \sin^2(x)}} \,dx
We have::
>>> mp.dps = 15
>>> a = 3
>>> b = 4
>>> f1 = lambda x: ((x**2+a**2)*(x**2+b**2))**-0.5
>>> f2 = lambda x: ((a*cos(x))**2 + (b*sin(x))**2)**-0.5
>>> quad(f1, [0, inf])
0.451115405388492
>>> quad(f2, [0, pi/2])
0.451115405388492
>>> pi/(2*agm(a,b))
0.451115405388492
A formula for `\Gamma(1/4)`::
>>> gamma(0.25)
3.62560990822191
>>> sqrt(2*sqrt(2*pi**3)/agm(1,sqrt(2)))
3.62560990822191
**Possible issues**
The branch cut chosen for complex `a` and `b` is somewhat
arbitrary.
"""
gegenbauer = r"""
Evaluates the Gegenbauer polynomial, or ultraspherical polynomial,
.. math ::
C_n^{(a)}(z) = {n+2a-1 \choose n} \,_2F_1\left(-n, n+2a;
a+\frac{1}{2}; \frac{1}{2}(1-z)\right).
When `n` is a nonnegative integer, this formula gives a polynomial
in `z` of degree `n`, but all parameters are permitted to be
complex numbers. With `a = 1/2`, the Gegenbauer polynomial
reduces to a Legendre polynomial.
**Examples**
Evaluation for arbitrary arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> gegenbauer(3, 0.5, -10)
-2485.0
>>> gegenbauer(1000, 10, 100)
3.012757178975667428359374e+2322
>>> gegenbauer(2+3j, -0.75, -1000j)
(-5038991.358609026523401901 + 9414549.285447104177860806j)
Evaluation at negative integer orders::
>>> gegenbauer(-4, 2, 1.75)
-1.0
>>> gegenbauer(-4, 3, 1.75)
0.0
>>> gegenbauer(-4, 2j, 1.75)
0.0
>>> gegenbauer(-7, 0.5, 3)
8989.0
The Gegenbauer polynomials solve the differential equation::
>>> n, a = 4.5, 1+2j
>>> f = lambda z: gegenbauer(n, a, z)
>>> for z in [0, 0.75, -0.5j]:
... chop((1-z**2)*diff(f,z,2) - (2*a+1)*z*diff(f,z) + n*(n+2*a)*f(z))
...
0.0
0.0
0.0
The Gegenbauer polynomials have generating function
`(1-2zt+t^2)^{-a}`::
>>> a, z = 2.5, 1
>>> taylor(lambda t: (1-2*z*t+t**2)**(-a), 0, 3)
[1.0, 5.0, 15.0, 35.0]
>>> [gegenbauer(n,a,z) for n in range(4)]
[1.0, 5.0, 15.0, 35.0]
The Gegenbauer polynomials are orthogonal on `[-1, 1]` with respect
to the weight `(1-z^2)^{a-\frac{1}{2}}`::
>>> a, n, m = 2.5, 4, 5
>>> Cn = lambda z: gegenbauer(n, a, z, zeroprec=1000)
>>> Cm = lambda z: gegenbauer(m, a, z, zeroprec=1000)
>>> chop(quad(lambda z: Cn(z)*Cm(z)*(1-z**2)**(a-0.5), [-1, 1]))
0.0
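As noted above, `a = 1/2` recovers the Legendre polynomials
(see :func:`~mpmath.legendre`); a spot check::
>>> chop(gegenbauer(4, 0.5, 0.3) - legendre(4, 0.3))
0.0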
"""
laguerre = r"""
Gives the generalized (associated) Laguerre polynomial, defined by
.. math ::
L_n^a(z) = \frac{\Gamma(n+a+1)}{\Gamma(a+1) \Gamma(n+1)}
\,_1F_1(-n, a+1, z).
With `a = 0` and `n` a nonnegative integer, this reduces to an ordinary
Laguerre polynomial, the sequence of which begins
`L_0(z) = 1, L_1(z) = 1-z, L_2(z) = \frac{1}{2}(z^2-4z+2), \ldots`.
The Laguerre polynomials are orthogonal with respect to the weight
`z^a e^{-z}` on `[0, \infty)`.
**Plots**
.. literalinclude :: /modules/mpmath/plots/laguerre.py
.. image :: /modules/mpmath/plots/laguerre.png
**Examples**
Evaluation for arbitrary arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> laguerre(5, 0, 0.25)
0.03726399739583333333333333
>>> laguerre(1+j, 0.5, 2+3j)
(4.474921610704496808379097 - 11.02058050372068958069241j)
>>> laguerre(2, 0, 10000)
49980001.0
>>> laguerre(2.5, 0, 10000)
-9.327764910194842158583189e+4328
The first few Laguerre polynomials, normalized to have integer
coefficients::
>>> for n in range(7):
... chop(taylor(lambda z: fac(n)*laguerre(n, 0, z), 0, n))
...
[1.0]
[1.0, -1.0]
[2.0, -4.0, 1.0]
[6.0, -18.0, 9.0, -1.0]
[24.0, -96.0, 72.0, -16.0, 1.0]
[120.0, -600.0, 600.0, -200.0, 25.0, -1.0]
[720.0, -4320.0, 5400.0, -2400.0, 450.0, -36.0, 1.0]
Verifying orthogonality::
>>> Lm = lambda t: laguerre(m,a,t)
>>> Ln = lambda t: laguerre(n,a,t)
>>> a, n, m = 2.5, 2, 3
>>> chop(quad(lambda t: exp(-t)*t**a*Lm(t)*Ln(t), [0,inf]))
0.0
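The three-term recurrence `(n+1) L_{n+1}^a(z) = (2n+1+a-z) L_n^a(z) -
(n+a) L_{n-1}^a(z)` provides another consistency check::
>>> n, a, z = 3, 2.5, 0.75
>>> lhs = (n+1)*laguerre(n+1, a, z)
>>> rhs = (2*n+1+a-z)*laguerre(n, a, z) - (n+a)*laguerre(n-1, a, z)
>>> chop(lhs - rhs)
0.0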
"""
hermite = r"""
Evaluates the Hermite polynomial `H_n(z)`, which may be defined using
the recurrence
.. math ::
H_0(z) = 1
H_1(z) = 2z
H_{n+1}(z) = 2z H_n(z) - 2n H_{n-1}(z).
The Hermite polynomials are orthogonal on `(-\infty, \infty)` with
respect to the weight `e^{-z^2}`. More generally, allowing arbitrary complex
values of `n`, the Hermite function `H_n(z)` is defined as
.. math ::
H_n(z) = (2z)^n \,_2F_0\left(-\frac{n}{2}, \frac{1-n}{2},
-\frac{1}{z^2}\right)
for `\Re{z} > 0`, or generally
.. math ::
H_n(z) = 2^n \sqrt{\pi} \left(
\frac{1}{\Gamma\left(\frac{1-n}{2}\right)}
\,_1F_1\left(-\frac{n}{2}, \frac{1}{2}, z^2\right) -
\frac{2z}{\Gamma\left(-\frac{n}{2}\right)}
\,_1F_1\left(\frac{1-n}{2}, \frac{3}{2}, z^2\right)
\right).
**Plots**
.. literalinclude :: /modules/mpmath/plots/hermite.py
.. image :: /modules/mpmath/plots/hermite.png
**Examples**
Evaluation for arbitrary arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> hermite(0, 10)
1.0
>>> hermite(1, 10); hermite(2, 10)
20.0
398.0
>>> hermite(10000, 2)
4.950440066552087387515653e+19334
>>> hermite(3, -10**8)
-7999999999999998800000000.0
>>> hermite(-3, -10**8)
1.675159751729877682920301e+4342944819032534
>>> hermite(2+3j, -1+2j)
(-0.07652130602993513389421901 - 0.1084662449961914580276007j)
Coefficients of the first few Hermite polynomials are::
>>> for n in range(7):
... chop(taylor(lambda z: hermite(n, z), 0, n))
...
[1.0]
[0.0, 2.0]
[-2.0, 0.0, 4.0]
[0.0, -12.0, 0.0, 8.0]
[12.0, 0.0, -48.0, 0.0, 16.0]
[0.0, 120.0, 0.0, -160.0, 0.0, 32.0]
[-120.0, 0.0, 720.0, 0.0, -480.0, 0.0, 64.0]
Values at `z = 0`::
>>> for n in range(-5, 9):
... hermite(n, 0)
...
0.02769459142039868792653387
0.08333333333333333333333333
0.2215567313631895034122709
0.5
0.8862269254527580136490837
1.0
0.0
-2.0
0.0
12.0
0.0
-120.0
0.0
1680.0
Hermite functions satisfy the differential equation::
>>> n = 4
>>> f = lambda z: hermite(n, z)
>>> z = 1.5
>>> chop(diff(f,z,2) - 2*z*diff(f,z) + 2*n*f(z))
0.0
Verifying orthogonality::
>>> chop(quad(lambda t: hermite(2,t)*hermite(4,t)*exp(-t**2), [-inf,inf]))
0.0
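The three-term recurrence from the definition can likewise be
checked numerically::
>>> n, z = 5, 0.75
>>> chop(hermite(n+1, z) - 2*z*hermite(n, z) + 2*n*hermite(n-1, z))
0.0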
"""
jacobi = r"""
``jacobi(n, a, b, x)`` evaluates the Jacobi polynomial
`P_n^{(a,b)}(x)`. The Jacobi polynomials are a special
case of the hypergeometric function `\,_2F_1` given by:
.. math ::
P_n^{(a,b)}(x) = {n+a \choose n}
\,_2F_1\left(-n,1+a+b+n,a+1,\frac{1-x}{2}\right).
Note that this definition generalizes to nonintegral values
of `n`. When `n` is an integer, the hypergeometric series
terminates after a finite number of terms, giving
a polynomial in `x`.
**Evaluation of Jacobi polynomials**
A special evaluation is `P_n^{(a,b)}(1) = {n+a \choose n}`::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> jacobi(4, 0.5, 0.25, 1)
2.4609375
>>> binomial(4+0.5, 4)
2.4609375
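At the other endpoint, one similarly has
`P_n^{(a,b)}(-1) = (-1)^n {n+b \choose n}`, which is easy to
verify numerically::
>>> n, a, b = 4, 0.5, 0.25
>>> chop(jacobi(n, a, b, -1) - (-1)**n * binomial(n+b, n))
0.0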
A Jacobi polynomial of degree `n` is equal to its
Taylor polynomial of degree `n`. The explicit
coefficients of Jacobi polynomials can therefore
be recovered easily using :func:`~mpmath.taylor`::
>>> for n in range(5):
... nprint(taylor(lambda x: jacobi(n,1,2,x), 0, n))
...
[1.0]
[-0.5, 2.5]
[-0.75, -1.5, 5.25]
[0.5, -3.5, -3.5, 10.5]
[0.625, 2.5, -11.25, -7.5, 20.625]
For nonintegral `n`, the Jacobi "polynomial" is no longer
a polynomial::
>>> nprint(taylor(lambda x: jacobi(0.5,1,2,x), 0, 4))
[0.309983, 1.84119, -1.26933, 1.26699, -1.34808]
**Orthogonality**
The Jacobi polynomials are orthogonal on the interval
`[-1, 1]` with respect to the weight function
`w(x) = (1-x)^a (1+x)^b`. That is,
`w(x) P_n^{(a,b)}(x) P_m^{(a,b)}(x)` integrates to
zero if `m \ne n` and to a nonzero number if `m = n`.
The orthogonality is easy to verify using numerical
quadrature::
>>> P = jacobi
>>> f = lambda x: (1-x)**a * (1+x)**b * P(m,a,b,x) * P(n,a,b,x)
>>> a = 2
>>> b = 3
>>> m, n = 3, 4
>>> chop(quad(f, [-1, 1]), 1)
0.0
>>> m, n = 4, 4
>>> quad(f, [-1, 1])
1.9047619047619
**Differential equation**
The Jacobi polynomials are solutions of the differential
equation
.. math ::
(1-x^2) y'' + (b-a-(a+b+2)x) y' + n (n+a+b+1) y = 0.
We can verify that :func:`~mpmath.jacobi` approximately satisfies
this equation::
>>> from mpmath import *
>>> mp.dps = 15
>>> a = 2.5
>>> b = 4
>>> n = 3
>>> y = lambda x: jacobi(n,a,b,x)
>>> x = pi
>>> A0 = n*(n+a+b+1)*y(x)
>>> A1 = (b-a-(a+b+2)*x)*diff(y,x)
>>> A2 = (1-x**2)*diff(y,x,2)
>>> nprint(A2 + A1 + A0, 1)
4.0e-12
The difference of order `10^{-12}` is as close to zero as
it could be at 15-digit working precision, since the terms
are large::
>>> A0, A1, A2
(26560.2328981879, -21503.7641037294, -5056.46879445852)
"""
legendre = r"""
``legendre(n, x)`` evaluates the Legendre polynomial `P_n(x)`.
The Legendre polynomials are given by the formula
.. math ::
P_n(x) = \frac{1}{2^n n!} \frac{d^n}{dx^n} (x^2 -1)^n.
Alternatively, they can be computed recursively using
.. math ::
P_0(x) = 1
P_1(x) = x
(n+1) P_{n+1}(x) = (2n+1) x P_n(x) - n P_{n-1}(x).
A third definition is in terms of the hypergeometric function
`\,_2F_1`, whereby they can be generalized to arbitrary `n`:
.. math ::
P_n(x) = \,_2F_1\left(-n, n+1, 1, \frac{1-x}{2}\right)
**Plots**
.. literalinclude :: /modules/mpmath/plots/legendre.py
.. image :: /modules/mpmath/plots/legendre.png
**Basic evaluation**
The Legendre polynomials assume fixed values at the points
`x = -1` and `x = 1`::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> nprint([legendre(n, 1) for n in range(6)])
[1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
>>> nprint([legendre(n, -1) for n in range(6)])
[1.0, -1.0, 1.0, -1.0, 1.0, -1.0]
The coefficients of Legendre polynomials can be recovered
using degree-`n` Taylor expansion::
>>> for n in range(5):
... nprint(chop(taylor(lambda x: legendre(n, x), 0, n)))
...
[1.0]
[0.0, 1.0]
[-0.5, 0.0, 1.5]
[0.0, -1.5, 0.0, 2.5]
[0.375, 0.0, -3.75, 0.0, 4.375]
The roots of Legendre polynomials are located symmetrically
on the interval `[-1, 1]`::
>>> for n in range(5):
... nprint(polyroots(taylor(lambda x: legendre(n, x), 0, n)[::-1]))
...
[]
[0.0]
[-0.57735, 0.57735]
[-0.774597, 0.0, 0.774597]
[-0.861136, -0.339981, 0.339981, 0.861136]
An example of an evaluation for arbitrary `n`::
>>> legendre(0.75, 2+4j)
(1.94952805264875 + 2.1071073099422j)
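The three-term recurrence stated above can be verified numerically::
>>> n, x = 4, 0.3
>>> chop((n+1)*legendre(n+1,x) - (2*n+1)*x*legendre(n,x) + n*legendre(n-1,x))
0.0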
**Orthogonality**
The Legendre polynomials are orthogonal on `[-1, 1]` with respect
to the trivial weight `w(x) = 1`. That is, `P_m(x) P_n(x)`
integrates to zero if `m \ne n` and to `2/(2n+1)` if `m = n`::
>>> m, n = 3, 4
>>> quad(lambda x: legendre(m,x)*legendre(n,x), [-1, 1])
0.0
>>> m, n = 4, 4
>>> quad(lambda x: legendre(m,x)*legendre(n,x), [-1, 1])
0.222222222222222
**Differential equation**
The Legendre polynomials satisfy the differential equation
.. math ::
((1-x^2) y')' + n(n+1) y = 0.
We can verify this numerically::
>>> n = 3.6
>>> x = 0.73
>>> P = legendre
>>> A = diff(lambda t: (1-t**2)*diff(lambda u: P(n,u), t), x)
>>> B = n*(n+1)*P(n,x)
>>> nprint(A+B,1)
9.0e-16
"""
legenp = r"""
Calculates the (associated) Legendre function of the first kind of
degree *n* and order *m*, `P_n^m(z)`. Taking `m = 0` gives the ordinary
Legendre function of the first kind, `P_n(z)`. The parameters may be
complex numbers.
In terms of the Gauss hypergeometric function, the (associated) Legendre
function is defined as
.. math ::
P_n^m(z) = \frac{1}{\Gamma(1-m)} \frac{(1+z)^{m/2}}{(1-z)^{m/2}}
\,_2F_1\left(-n, n+1, 1-m, \frac{1-z}{2}\right).
With *type=3* instead of *type=2*, the alternative
definition
.. math ::
\hat{P}_n^m(z) = \frac{1}{\Gamma(1-m)} \frac{(z+1)^{m/2}}{(z-1)^{m/2}}
\,_2F_1\left(-n, n+1, 1-m, \frac{1-z}{2}\right).
is used. These functions correspond respectively to ``LegendreP[n,m,2,z]``
and ``LegendreP[n,m,3,z]`` in Mathematica.
The general solution of the (associated) Legendre differential equation
.. math ::
(1-z^2) f''(z) - 2zf'(z) + \left(n(n+1)-\frac{m^2}{1-z^2}\right)f(z) = 0
is given by `C_1 P_n^m(z) + C_2 Q_n^m(z)` for arbitrary constants
`C_1`, `C_2`, where `Q_n^m(z)` is a Legendre function of the
second kind as implemented by :func:`~mpmath.legenq`.
**Examples**
Evaluation for arbitrary parameters and arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> legenp(2, 0, 10); legendre(2, 10)
149.5
149.5
>>> legenp(-2, 0.5, 2.5)
(1.972260393822275434196053 - 1.972260393822275434196053j)
>>> legenp(2+3j, 1-j, -0.5+4j)
(-3.335677248386698208736542 - 5.663270217461022307645625j)
>>> chop(legenp(3, 2, -1.5, type=2))
28.125
>>> chop(legenp(3, 2, -1.5, type=3))
-28.125
Verifying the associated Legendre differential equation::
>>> n, m = 2, -0.5
>>> C1, C2 = 1, -3
>>> f = lambda z: C1*legenp(n,m,z) + C2*legenq(n,m,z)
>>> deq = lambda z: (1-z**2)*diff(f,z,2) - 2*z*diff(f,z) + \
... (n*(n+1)-m**2/(1-z**2))*f(z)
>>> for z in [0, 2, -1.5, 0.5+2j]:
... chop(deq(mpmathify(z)))
...
0.0
0.0
0.0
0.0
"""
legenq = r"""
Calculates the (associated) Legendre function of the second kind of
degree *n* and order *m*, `Q_n^m(z)`. Taking `m = 0` gives the ordinary
Legendre function of the second kind, `Q_n(z)`. The parameters may be
complex numbers.
The Legendre functions of the second kind give a second set of
solutions to the (associated) Legendre differential equation.
(See :func:`~mpmath.legenp`.)
Unlike the Legendre functions of the first kind, they are not
polynomials in `z` for integer `n`, `m`, but rational or logarithmic
functions with poles at `z = \pm 1`.
There are various ways to define Legendre functions of
the second kind, giving rise to different branch structures.
A version can be selected using the *type* keyword argument.
The *type=2* and *type=3* functions are given respectively by
.. math ::
Q_n^m(z) = \frac{\pi}{2 \sin(\pi m)}
\left( \cos(\pi m) P_n^m(z) -
\frac{\Gamma(1+m+n)}{\Gamma(1-m+n)} P_n^{-m}(z)\right)
\hat{Q}_n^m(z) = \frac{\pi}{2 \sin(\pi m)} e^{\pi i m}
\left( \hat{P}_n^m(z) -
\frac{\Gamma(1+m+n)}{\Gamma(1-m+n)} \hat{P}_n^{-m}(z)\right)
where `P` and `\hat{P}` are the *type=2* and *type=3* Legendre functions
of the first kind. The formulas above should be understood as limits
when `m` is an integer.
These functions correspond to ``LegendreQ[n,m,2,z]`` (or ``LegendreQ[n,m,z]``)
and ``LegendreQ[n,m,3,z]`` in Mathematica. The *type=3* function
is essentially the same as the function defined in
Abramowitz & Stegun (eq. 8.1.3) but with `(z+1)^{m/2}(z-1)^{m/2}` instead
of `(z^2-1)^{m/2}`, giving slightly different branches.
**Examples**
Evaluation for arbitrary parameters and arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> legenq(2, 0, 0.5)
-0.8186632680417568557122028
>>> legenq(-1.5, -2, 2.5)
(0.6655964618250228714288277 + 0.3937692045497259717762649j)
>>> legenq(2-j, 3+4j, -6+5j)
(-10001.95256487468541686564 - 6011.691337610097577791134j)
Different versions of the function::
>>> legenq(2, 1, 0.5)
0.7298060598018049369381857
>>> legenq(2, 1, 1.5)
(-7.902916572420817192300921 + 0.1998650072605976600724502j)
>>> legenq(2, 1, 0.5, type=3)
(2.040524284763495081918338 - 0.7298060598018049369381857j)
>>> chop(legenq(2, 1, 1.5, type=3))
-0.1998650072605976600724502
"""
chebyt = r"""
``chebyt(n, x)`` evaluates the Chebyshev polynomial of the first
kind `T_n(x)`, defined by the identity
.. math ::
T_n(\cos x) = \cos(n x).
The Chebyshev polynomials of the first kind are a special
case of the Jacobi polynomials, and by extension of the
hypergeometric function `\,_2F_1`. They can thus also be
evaluated for nonintegral `n`.
**Plots**
.. literalinclude :: /modules/mpmath/plots/chebyt.py
.. image :: /modules/mpmath/plots/chebyt.png
**Basic evaluation**
The coefficients of the `n`-th polynomial can be recovered
using degree-`n` Taylor expansion::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for n in range(5):
... nprint(chop(taylor(lambda x: chebyt(n, x), 0, n)))
...
[1.0]
[0.0, 1.0]
[-1.0, 0.0, 2.0]
[0.0, -3.0, 0.0, 4.0]
[1.0, 0.0, -8.0, 0.0, 8.0]
**Orthogonality**
The Chebyshev polynomials of the first kind are orthogonal
on the interval `[-1, 1]` with respect to the weight
function `w(x) = 1/\sqrt{1-x^2}`::
>>> f = lambda x: chebyt(m,x)*chebyt(n,x)/sqrt(1-x**2)
>>> m, n = 3, 4
>>> nprint(quad(f, [-1, 1]),1)
0.0
>>> m, n = 4, 4
>>> quad(f, [-1, 1])
1.57079632596448
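The defining identity `T_n(\cos x) = \cos(n x)` provides another
quick numerical check::
>>> n, x = 7, 0.6
>>> chop(chebyt(n, cos(x)) - cos(n*x))
0.0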
"""
chebyu = r"""
``chebyu(n, x)`` evaluates the Chebyshev polynomial of the second
kind `U_n(x)`, defined by the identity
.. math ::
U_n(\cos x) = \frac{\sin((n+1)x)}{\sin(x)}.
The Chebyshev polynomials of the second kind are a special
case of the Jacobi polynomials, and by extension of the
hypergeometric function `\,_2F_1`. They can thus also be
evaluated for nonintegral `n`.
**Plots**
.. literalinclude :: /modules/mpmath/plots/chebyu.py
.. image :: /modules/mpmath/plots/chebyu.png
**Basic evaluation**
The coefficients of the `n`-th polynomial can be recovered
using degree-`n` Taylor expansion::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for n in range(5):
... nprint(chop(taylor(lambda x: chebyu(n, x), 0, n)))
...
[1.0]
[0.0, 2.0]
[-1.0, 0.0, 4.0]
[0.0, -4.0, 0.0, 8.0]
[1.0, 0.0, -12.0, 0.0, 16.0]
**Orthogonality**
The Chebyshev polynomials of the second kind are orthogonal
on the interval `[-1, 1]` with respect to the weight
function `w(x) = \sqrt{1-x^2}`::
>>> f = lambda x: chebyu(m,x)*chebyu(n,x)*sqrt(1-x**2)
>>> m, n = 3, 4
>>> quad(f, [-1, 1])
0.0
>>> m, n = 4, 4
>>> quad(f, [-1, 1])
1.5707963267949
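The two kinds of Chebyshev polynomials are related by
`T_n'(x) = n U_{n-1}(x)`, which can be verified numerically::
>>> n, x = 6, 0.4
>>> chop(diff(lambda t: chebyt(n, t), x) - n*chebyu(n-1, x))
0.0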
"""
besselj = r"""
``besselj(n, x, derivative=0)`` gives the Bessel function of the first kind
`J_n(x)`. Bessel functions of the first kind are defined as
solutions of the differential equation
.. math ::
x^2 y'' + x y' + (x^2 - n^2) y = 0
which appears, among other things, when solving the radial
part of Laplace's equation in cylindrical coordinates. This
equation has two solutions for given `n`, where the
`J_n`-function is the solution that is nonsingular at `x = 0`.
For positive integer `n`, `J_n(x)` behaves roughly like a sine
(odd `n`) or cosine (even `n`) multiplied by a magnitude factor
that decays slowly as `x \to \pm\infty`.
Generally, `J_n` is a special case of the hypergeometric
function `\,_0F_1`:
.. math ::
J_n(x) = \frac{x^n}{2^n \Gamma(n+1)}
\,_0F_1\left(n+1,-\frac{x^2}{4}\right)
With *derivative* = `m \ne 0`, the `m`-th derivative
.. math ::
\frac{d^m}{dx^m} J_n(x)
is computed.
**Plots**
.. literalinclude :: /modules/mpmath/plots/besselj.py
.. image :: /modules/mpmath/plots/besselj.png
.. literalinclude :: /modules/mpmath/plots/besselj_c.py
.. image :: /modules/mpmath/plots/besselj_c.png
**Examples**
Evaluation is supported for arbitrary arguments, and at
arbitrary precision::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> besselj(2, 1000)
-0.024777229528606
>>> besselj(4, 0.75)
0.000801070086542314
>>> besselj(2, 1000j)
(-2.48071721019185e+432 + 6.41567059811949e-437j)
>>> mp.dps = 25
>>> besselj(0.75j, 3+4j)
(-2.778118364828153309919653 - 1.5863603889018621585533j)
>>> mp.dps = 50
>>> besselj(1, pi)
0.28461534317975275734531059968613140570981118184947
Arguments may be large::
>>> mp.dps = 25
>>> besselj(0, 10000)
-0.007096160353388801477265164
>>> besselj(0, 10**10)
0.000002175591750246891726859055
>>> besselj(2, 10**100)
7.337048736538615712436929e-51
>>> besselj(2, 10**5*j)
(-3.540725411970948860173735e+43426 + 4.4949812409615803110051e-43433j)
The Bessel functions of the first kind satisfy simple
symmetries around `x = 0`::
>>> mp.dps = 15
>>> nprint([besselj(n,0) for n in range(5)])
[1.0, 0.0, 0.0, 0.0, 0.0]
>>> nprint([besselj(n,pi) for n in range(5)])
[-0.304242, 0.284615, 0.485434, 0.333458, 0.151425]
>>> nprint([besselj(n,-pi) for n in range(5)])
[-0.304242, -0.284615, 0.485434, -0.333458, 0.151425]
Roots of Bessel functions are often used::
>>> nprint([findroot(j0, k) for k in [2, 5, 8, 11, 14]])
[2.40483, 5.52008, 8.65373, 11.7915, 14.9309]
>>> nprint([findroot(j1, k) for k in [3, 7, 10, 13, 16]])
[3.83171, 7.01559, 10.1735, 13.3237, 16.4706]
The roots are not periodic, but the distance between successive
roots asymptotically approaches `\pi` (compare the roots listed
above). Bessel functions of
the first kind have the following normalization::
>>> quadosc(j0, [0, inf], period=2*pi)
1.0
>>> quadosc(j1, [0, inf], period=2*pi)
1.0
For `n = 1/2` or `n = -1/2`, the Bessel function reduces to a
trigonometric function::
>>> x = 10
>>> besselj(0.5, x), sqrt(2/(pi*x))*sin(x)
(-0.13726373575505, -0.13726373575505)
>>> besselj(-0.5, x), sqrt(2/(pi*x))*cos(x)
(-0.211708866331398, -0.211708866331398)
Derivatives of any order can be computed (negative orders
correspond to integration)::
>>> mp.dps = 25
>>> besselj(0, 7.5, 1)
-0.1352484275797055051822405
>>> diff(lambda x: besselj(0,x), 7.5)
-0.1352484275797055051822405
>>> besselj(0, 7.5, 10)
-0.1377811164763244890135677
>>> diff(lambda x: besselj(0,x), 7.5, 10)
-0.1377811164763244890135677
>>> besselj(0,7.5,-1) - besselj(0,3.5,-1)
-0.1241343240399987693521378
>>> quad(j0, [3.5, 7.5])
-0.1241343240399987693521378
Differentiation with a noninteger order gives the fractional derivative
in the sense of the Riemann-Liouville differintegral, as computed by
:func:`~mpmath.differint`::
>>> mp.dps = 15
>>> besselj(1, 3.5, 0.75)
-0.385977722939384
>>> differint(lambda x: besselj(1, x), 3.5, 0.75)
-0.385977722939384
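The recurrence `J_{n-1}(x) + J_{n+1}(x) = (2n/x) J_n(x)` offers
another quick consistency check::
>>> n, x = 2, 3.5
>>> chop(besselj(n-1,x) + besselj(n+1,x) - 2*n/x*besselj(n,x))
0.0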
"""
besseli = r"""
``besseli(n, x, derivative=0)`` gives the modified Bessel function of the
first kind,
.. math ::
I_n(x) = i^{-n} J_n(ix).
With *derivative* = `m \ne 0`, the `m`-th derivative
.. math ::
\frac{d^m}{dx^m} I_n(x)
is computed.
**Plots**
.. literalinclude :: /modules/mpmath/plots/besseli.py
.. image :: /modules/mpmath/plots/besseli.png
.. literalinclude :: /modules/mpmath/plots/besseli_c.py
.. image :: /modules/mpmath/plots/besseli_c.png
**Examples**
Some values of `I_n(x)`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> besseli(0,0)
1.0
>>> besseli(1,0)
0.0
>>> besseli(0,1)
1.266065877752008335598245
>>> besseli(3.5, 2+3j)
(-0.2904369752642538144289025 - 0.4469098397654815837307006j)
Arguments may be large::
>>> besseli(2, 1000)
2.480717210191852440616782e+432
>>> besseli(2, 10**10)
4.299602851624027900335391e+4342944813
>>> besseli(2, 6000+10000j)
(-2.114650753239580827144204e+2603 + 4.385040221241629041351886e+2602j)
For integers `n`, the following integral representation holds::
>>> mp.dps = 15
>>> n = 3
>>> x = 2.3
>>> quad(lambda t: exp(x*cos(t))*cos(n*t), [0,pi])/pi
0.349223221159309
>>> besseli(n,x)
0.349223221159309
Derivatives and antiderivatives of any order can be computed::
>>> mp.dps = 25
>>> besseli(2, 7.5, 1)
195.8229038931399062565883
>>> diff(lambda x: besseli(2,x), 7.5)
195.8229038931399062565883
>>> besseli(2, 7.5, 10)
153.3296508971734525525176
>>> diff(lambda x: besseli(2,x), 7.5, 10)
153.3296508971734525525176
>>> besseli(2,7.5,-1) - besseli(2,3.5,-1)
202.5043900051930141956876
>>> quad(lambda x: besseli(2,x), [3.5, 7.5])
202.5043900051930141956876
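The defining relation to the ordinary Bessel function can be
confirmed numerically::
>>> n, x = 2, 1.25
>>> chop(besseli(n,x) - j**(-n) * besselj(n, j*x))
0.0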
"""
bessely = r"""
``bessely(n, x, derivative=0)`` gives the Bessel function of the second kind,
.. math ::
Y_n(x) = \frac{J_n(x) \cos(\pi n) - J_{-n}(x)}{\sin(\pi n)}.
For `n` an integer, this formula should be understood as a
limit. With *derivative* = `m \ne 0`, the `m`-th derivative
.. math ::
\frac{d^m}{dx^m} Y_n(x)
is computed.
**Plots**
.. literalinclude :: /modules/mpmath/plots/bessely.py
.. image :: /modules/mpmath/plots/bessely.png
.. literalinclude :: /modules/mpmath/plots/bessely_c.py
.. image :: /modules/mpmath/plots/bessely_c.png
**Examples**
Some values of `Y_n(x)`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> bessely(0,0), bessely(1,0), bessely(2,0)
(-inf, -inf, -inf)
>>> bessely(1, pi)
0.3588729167767189594679827
>>> bessely(0.5, 3+4j)
(9.242861436961450520325216 - 3.085042824915332562522402j)
Arguments may be large::
>>> bessely(0, 10000)
0.00364780555898660588668872
>>> bessely(2.5, 10**50)
-4.8952500412050989295774e-26
>>> bessely(2.5, -10**50)
(0.0 + 4.8952500412050989295774e-26j)
Derivatives and antiderivatives of any order can be computed::
>>> bessely(2, 3.5, 1)
0.3842618820422660066089231
>>> diff(lambda x: bessely(2, x), 3.5)
0.3842618820422660066089231
>>> bessely(0.5, 3.5, 1)
-0.2066598304156764337900417
>>> diff(lambda x: bessely(0.5, x), 3.5)
-0.2066598304156764337900417
>>> diff(lambda x: bessely(2, x), 0.5, 10)
-208173867409.5547350101511
>>> bessely(2, 0.5, 10)
-208173867409.5547350101511
>>> bessely(2, 100.5, 100)
0.02668487547301372334849043
>>> quad(lambda x: bessely(2,x), [1,3])
-1.377046859093181969213262
>>> bessely(2,3,-1) - bessely(2,1,-1)
-1.377046859093181969213262
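Like `J_n`, the function reduces to a trigonometric expression for
half-integer orders, e.g. `Y_{1/2}(x) = -\sqrt{2/(\pi x)} \cos(x)`::
>>> x = mpf(3.5)
>>> chop(bessely(0.5, x) + sqrt(2/(pi*x))*cos(x))
0.0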
"""
besselk = r"""
``besselk(n, x)`` gives the modified Bessel function of the
second kind,
.. math ::
K_n(x) = \frac{\pi}{2} \frac{I_{-n}(x)-I_{n}(x)}{\sin(\pi n)}.
For `n` an integer, this formula should be understood as a
limit.
**Plots**
.. literalinclude :: /modules/mpmath/plots/besselk.py
.. image :: /modules/mpmath/plots/besselk.png
.. literalinclude :: /modules/mpmath/plots/besselk_c.py
.. image :: /modules/mpmath/plots/besselk_c.png
**Examples**
Evaluation is supported for arbitrary complex arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> besselk(0,1)
0.4210244382407083333356274
>>> besselk(0, -1)
(0.4210244382407083333356274 - 3.97746326050642263725661j)
>>> besselk(3.5, 2+3j)
(-0.02090732889633760668464128 + 0.2464022641351420167819697j)
>>> besselk(2+3j, 0.5)
(0.9615816021726349402626083 + 0.1918250181801757416908224j)
Arguments may be large::
>>> besselk(0, 100)
4.656628229175902018939005e-45
>>> besselk(1, 10**6)
4.131967049321725588398296e-434298
>>> besselk(1, 10**6*j)
(0.001140348428252385844876706 - 0.0005200017201681152909000961j)
>>> besselk(4.5, fmul(10**50, j, exact=True))
(1.561034538142413947789221e-26 + 1.243554598118700063281496e-25j)
The point `x = 0` is a singularity (logarithmic if `n = 0`)::
>>> besselk(0,0)
+inf
>>> besselk(1,0)
+inf
>>> for n in range(-4, 5):
... print(besselk(n, '1e-1000'))
...
4.8e+4001
8.0e+3000
2.0e+2000
1.0e+1000
2302.701024509704096466802
1.0e+1000
2.0e+2000
8.0e+3000
4.8e+4001
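For half-integer orders, `K_n` has elementary closed forms, e.g.
`K_{1/2}(x) = \sqrt{\pi/(2x)} \, e^{-x}`::
>>> x = mpf(3)
>>> chop(besselk(0.5, x) - sqrt(pi/(2*x))*exp(-x))
0.0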
"""
hankel1 = r"""
``hankel1(n,x)`` computes the Hankel function of the first kind,
which is the complex combination of Bessel functions given by
.. math ::
H_n^{(1)}(x) = J_n(x) + i Y_n(x).
**Plots**
.. literalinclude :: /modules/mpmath/plots/hankel1.py
.. image :: /modules/mpmath/plots/hankel1.png
.. literalinclude :: /modules/mpmath/plots/hankel1_c.py
.. image :: /modules/mpmath/plots/hankel1_c.png
**Examples**
The Hankel function is generally complex-valued::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> hankel1(2, pi)
(0.4854339326315091097054957 - 0.0999007139290278787734903j)
>>> hankel1(3.5, pi)
(0.2340002029630507922628888 - 0.6419643823412927142424049j)
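The defining combination of Bessel functions is easy to verify::
>>> n, x = 2, 3.5
>>> chop(hankel1(n, x) - (besselj(n, x) + j*bessely(n, x)))
0.0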
"""
hankel2 = r"""
``hankel2(n,x)`` computes the Hankel function of the second kind,
which is the complex combination of Bessel functions given by
.. math ::
H_n^{(2)}(x) = J_n(x) - i Y_n(x).
**Plots**
.. literalinclude :: /modules/mpmath/plots/hankel2.py
.. image :: /modules/mpmath/plots/hankel2.png
.. literalinclude :: /modules/mpmath/plots/hankel2_c.py
.. image :: /modules/mpmath/plots/hankel2_c.png
**Examples**
The Hankel function is generally complex-valued::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> hankel2(2, pi)
(0.4854339326315091097054957 + 0.0999007139290278787734903j)
>>> hankel2(3.5, pi)
(0.2340002029630507922628888 + 0.6419643823412927142424049j)
"""
lambertw = r"""
The Lambert W function `W(z)` is defined as the inverse function
of `w \exp(w)`. In other words, the value of `W(z)` is such that
`z = W(z) \exp(W(z))` for any complex number `z`.
The Lambert W function is a multivalued function with infinitely
many branches `W_k(z)`, indexed by `k \in \mathbb{Z}`. Each branch
gives a different solution `w` of the equation `z = w \exp(w)`.
All branches are supported by :func:`~mpmath.lambertw`:
* ``lambertw(z)`` gives the principal solution (branch 0)
* ``lambertw(z, k)`` gives the solution on branch `k`
The Lambert W function has two partially real branches: the
principal branch (`k = 0`) is real for real `z > -1/e`, and the
`k = -1` branch is real for `-1/e < z < 0`. All branches except
`k = 0` have a logarithmic singularity at `z = 0`.
The definition, implementation and choice of branches
is based on [Corless]_.
**Plots**
.. literalinclude :: /modules/mpmath/plots/lambertw.py
.. image :: /modules/mpmath/plots/lambertw.png
.. literalinclude :: /modules/mpmath/plots/lambertw_c.py
.. image :: /modules/mpmath/plots/lambertw_c.png
**Basic examples**
The Lambert W function is the inverse of `w \exp(w)`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> w = lambertw(1)
>>> w
0.5671432904097838729999687
>>> w*exp(w)
1.0
Any branch gives a valid inverse::
>>> w = lambertw(1, k=3)
>>> w
(-2.853581755409037807206819 + 17.11353553941214591260783j)
>>> w = lambertw(1, k=25)
>>> w
(-5.047020464221569709378686 + 155.4763860949415867162066j)
>>> chop(w*exp(w))
1.0
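The `k = -1` branch is real-valued on the interval `(-1/e, 0)`
and likewise inverts `w e^w`::
>>> w = lambertw(-0.25, k=-1)
>>> chop(w*exp(w) + 0.25)
0.0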
**Applications to equation-solving**
The Lambert W function may be used to solve various kinds of
equations, such as finding the value of the infinite power
tower `z^{z^{z^{\ldots}}}`::
>>> def tower(z, n):
... if n == 0:
... return z
... return z ** tower(z, n-1)
...
>>> tower(mpf(0.5), 100)
0.6411857445049859844862005
>>> -lambertw(-log(0.5))/log(0.5)
0.6411857445049859844862005
**Properties**
The Lambert W function grows roughly like the natural logarithm
for large arguments::
>>> lambertw(1000); log(1000)
5.249602852401596227126056
6.907755278982137052053974
>>> lambertw(10**100); log(10**100)
224.8431064451185015393731
230.2585092994045684017991
The principal branch of the Lambert W function has a rational
Taylor series expansion around `z = 0`::
>>> nprint(taylor(lambertw, 0, 6), 10)
[0.0, 1.0, -1.0, 1.5, -2.666666667, 5.208333333, -10.8]
Some special values and limits are::
>>> lambertw(0)
0.0
>>> lambertw(1)
0.5671432904097838729999687
>>> lambertw(e)
1.0
>>> lambertw(inf)
+inf
>>> lambertw(0, k=-1)
-inf
>>> lambertw(0, k=3)
-inf
>>> lambertw(inf, k=2)
(+inf + 12.56637061435917295385057j)
>>> lambertw(inf, k=3)
(+inf + 18.84955592153875943077586j)
>>> lambertw(-inf, k=3)
(+inf + 21.9911485751285526692385j)
The `k = 0` and `k = -1` branches join at `z = -1/e` where
`W(z) = -1` for both branches. Since `-1/e` can only be represented
approximately with binary floating-point numbers, evaluating the
Lambert W function at this point only gives `-1` approximately::
>>> lambertw(-1/e, 0)
-0.9999999999998371330228251
>>> lambertw(-1/e, -1)
-1.000000000000162866977175
If `-1/e` happens to round in the negative direction, there might be
a small imaginary part::
>>> mp.dps = 15
>>> lambertw(-1/e)
(-1.0 + 8.22007971483662e-9j)
>>> lambertw(-1/e+eps)
-0.999999966242188
**References**
1. [Corless]_
"""
barnesg = r"""
Evaluates the Barnes G-function, which generalizes the
superfactorial (:func:`~mpmath.superfac`) and by extension also the
hyperfactorial (:func:`~mpmath.hyperfac`) to the complex numbers
in an analogous way to how the gamma function generalizes
the ordinary factorial.
The Barnes G-function may be defined in terms of a Weierstrass
product:
.. math ::
G(z+1) = (2\pi)^{z/2} e^{-[z(z+1)+\gamma z^2]/2}
\prod_{n=1}^\infty
\left[\left(1+\frac{z}{n}\right)^ne^{-z+z^2/(2n)}\right]
For positive integers `n`, we have the relation to superfactorials
`G(n) = \mathrm{sf}(n-2) = 0! \cdot 1! \cdots (n-2)!`.
**Examples**
Some elementary values and limits of the Barnes G-function::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> barnesg(1), barnesg(2), barnesg(3)
(1.0, 1.0, 1.0)
>>> barnesg(4)
2.0
>>> barnesg(5)
12.0
>>> barnesg(6)
288.0
>>> barnesg(7)
34560.0
>>> barnesg(8)
24883200.0
>>> barnesg(inf)
+inf
>>> barnesg(0), barnesg(-1), barnesg(-2)
(0.0, 0.0, 0.0)
Closed-form values are known for some rational arguments::
>>> barnesg('1/2')
0.603244281209446
>>> sqrt(exp(0.25+log(2)/12)/sqrt(pi)/glaisher**3)
0.603244281209446
>>> barnesg('1/4')
0.29375596533861
>>> nthroot(exp('3/8')/exp(catalan/pi)/
... gamma(0.25)**3/sqrt(glaisher)**9, 4)
0.29375596533861
The Barnes G-function satisfies the functional equation
`G(z+1) = \Gamma(z) G(z)`::
>>> z = pi
>>> barnesg(z+1)
2.39292119327948
>>> gamma(z)*barnesg(z)
2.39292119327948
The asymptotic growth rate of the Barnes G-function is related to
the Glaisher-Kinkelin constant::
>>> limit(lambda n: barnesg(n+1)/(n**(n**2/2-mpf(1)/12)*
... (2*pi)**(n/2)*exp(-3*n**2/4)), inf)
0.847536694177301
>>> exp('1/12')/glaisher
0.847536694177301
The Barnes G-function can be differentiated in closed form::
>>> z = 3
>>> diff(barnesg, z)
0.264507203401607
>>> barnesg(z)*((z-1)*psi(0,z)-z+(log(2*pi)+1)/2)
0.264507203401607
Evaluation is supported for arbitrary arguments and at arbitrary
precision::
>>> barnesg(6.5)
2548.7457695685
>>> barnesg(-pi)
0.00535976768353037
>>> barnesg(3+4j)
(-0.000676375932234244 - 4.42236140124728e-5j)
>>> mp.dps = 50
>>> barnesg(1/sqrt(2))
0.81305501090451340843586085064413533788206204124732
>>> q = barnesg(10j)
>>> q.real
0.000000000021852360840356557241543036724799812371995850552234
>>> q.imag
-0.00000000000070035335320062304849020654215545839053210041457588
>>> mp.dps = 15
>>> barnesg(100)
3.10361006263698e+6626
>>> barnesg(-101)
0.0
>>> barnesg(-10.5)
5.94463017605008e+25
>>> barnesg(-10000.5)
-6.14322868174828e+167480422
>>> barnesg(1000j)
(5.21133054865546e-1173597 + 4.27461836811016e-1173597j)
>>> barnesg(-1000+1000j)
(2.43114569750291e+1026623 + 2.24851410674842e+1026623j)
**References**
1. Whittaker & Watson, *A Course of Modern Analysis*,
Cambridge University Press, 4th edition (1927), p.264
2. http://en.wikipedia.org/wiki/Barnes_G-function
3. http://mathworld.wolfram.com/BarnesG-Function.html
"""
superfac = r"""
Computes the superfactorial, defined as the product of
consecutive factorials
.. math ::
\mathrm{sf}(n) = \prod_{k=1}^n k!
For general complex `z`, `\mathrm{sf}(z)` is defined
in terms of the Barnes G-function (see :func:`~mpmath.barnesg`).
**Examples**
The first few superfactorials are (OEIS A000178)::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for n in range(10):
... print("%s %s" % (n, superfac(n)))
...
0 1.0
1 1.0
2 2.0
3 12.0
4 288.0
5 34560.0
6 24883200.0
7 125411328000.0
8 5.05658474496e+15
9 1.83493347225108e+21
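The product definition can be checked directly using
:func:`~mpmath.fprod` and :func:`~mpmath.fac`::
>>> n = 6
>>> fprod(fac(k) for k in range(1, n+1)); superfac(n)
24883200.0
24883200.0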
Superfactorials grow very rapidly::
>>> superfac(1000)
3.24570818422368e+1177245
>>> superfac(10**10)
2.61398543581249e+467427913956904067453
Evaluation is supported for arbitrary arguments::
>>> mp.dps = 25
>>> superfac(pi)
17.20051550121297985285333
>>> superfac(2+3j)
(-0.005915485633199789627466468 + 0.008156449464604044948738263j)
>>> diff(superfac, 1)
0.2645072034016070205673056
**References**
1. http://www.research.att.com/~njas/sequences/A000178
"""
hyperfac = r"""
Computes the hyperfactorial, defined for integers as the product
.. math ::
H(n) = \prod_{k=1}^n k^k.
The hyperfactorial satisfies the recurrence formula `H(z) = z^z H(z-1)`.
It can be defined more generally in terms of the Barnes G-function (see
:func:`~mpmath.barnesg`) and the gamma function by the formula
.. math ::
H(z) = \frac{\Gamma(z+1)^z}{G(z)}.
The extension to complex numbers can also be done via
the integral representation
.. math ::
H(z) = (2\pi)^{-z/2} \exp \left[
{z+1 \choose 2} + \int_0^z \log(t!)\,dt
\right].
**Examples**
The rapidly-growing sequence of hyperfactorials begins
(OEIS A002109)::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for n in range(10):
... print("%s %s" % (n, hyperfac(n)))
...
0 1.0
1 1.0
2 4.0
3 108.0
4 27648.0
5 86400000.0
6 4031078400000.0
7 3.3197663987712e+18
8 5.56964379417266e+25
9 2.15779412229419e+34
Some even larger hyperfactorials are::
>>> hyperfac(1000)
5.46458120882585e+1392926
>>> hyperfac(10**10)
4.60408207642219e+489142638002418704309
The hyperfactorial can be evaluated for arbitrary arguments::
>>> hyperfac(0.5)
0.880449235173423
>>> diff(hyperfac, 1)
0.581061466795327
>>> hyperfac(pi)
205.211134637462
>>> hyperfac(-10+1j)
(3.01144471378225e+46 - 2.45285242480185e+46j)
The recurrence property of the hyperfactorial holds
generally::
>>> z = 3-4*j
>>> hyperfac(z)
(-4.49795891462086e-7 - 6.33262283196162e-7j)
>>> z**z * hyperfac(z-1)
(-4.49795891462086e-7 - 6.33262283196162e-7j)
>>> z = mpf(-0.6)
>>> chop(z**z * hyperfac(z-1))
1.28170142849352
>>> hyperfac(z)
1.28170142849352
The hyperfactorial may also be computed using the integral
definition::
>>> z = 2.5
>>> hyperfac(z)
15.9842119922237
>>> (2*pi)**(-z/2)*exp(binomial(z+1,2) +
... quad(lambda t: loggamma(t+1), [0, z]))
15.9842119922237
:func:`~mpmath.hyperfac` supports arbitrary-precision evaluation::
>>> mp.dps = 50
>>> hyperfac(10)
215779412229418562091680268288000000000000000.0
>>> hyperfac(1/sqrt(2))
0.89404818005227001975423476035729076375705084390942
**References**
1. http://www.research.att.com/~njas/sequences/A002109
2. http://mathworld.wolfram.com/Hyperfactorial.html
"""
rgamma = r"""
Computes the reciprocal of the gamma function, `1/\Gamma(z)`. This
function evaluates to zero at the poles
of the gamma function, `z = 0, -1, -2, \ldots`.
**Examples**
Basic examples::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> rgamma(1)
1.0
>>> rgamma(4)
0.1666666666666666666666667
>>> rgamma(0); rgamma(-1)
0.0
0.0
>>> rgamma(1000)
2.485168143266784862783596e-2565
>>> rgamma(inf)
0.0
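Away from the poles, the product with :func:`~mpmath.gamma` is
identically 1::
>>> z = 3+4j
>>> chop(rgamma(z)*gamma(z) - 1)
0.0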
A definite integral that can be evaluated in terms of elementary
integrals::
>>> quad(rgamma, [0,inf])
2.807770242028519365221501
>>> e + quad(lambda t: exp(-t)/(pi**2+log(t)**2), [0,inf])
2.807770242028519365221501
"""
loggamma = r"""
Computes the principal branch of the log-gamma function,
`\ln \Gamma(z)`. Unlike `\ln(\Gamma(z))`, which has infinitely many
complex branch cuts, the principal log-gamma function only has a single
branch cut along the negative half-axis. The principal branch
continuously matches the asymptotic Stirling expansion
.. math ::
\ln \Gamma(z) \sim \frac{\ln(2 \pi)}{2} +
\left(z-\frac{1}{2}\right) \ln(z) - z + O(z^{-1}).
The real parts of both functions agree, but their imaginary
parts generally differ by `2 n \pi` for some `n \in \mathbb{Z}`.
They coincide for `z \in \mathbb{R}, z > 0`.
Computationally, it is advantageous to use :func:`~mpmath.loggamma`
instead of :func:`~mpmath.gamma` for extremely large arguments.
**Examples**
Comparing with `\ln(\Gamma(z))`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> loggamma('13.2'); log(gamma('13.2'))
20.49400419456603678498394
20.49400419456603678498394
>>> loggamma(3+4j)
(-1.756626784603784110530604 + 4.742664438034657928194889j)
>>> log(gamma(3+4j))
(-1.756626784603784110530604 - 1.540520869144928548730397j)
>>> log(gamma(3+4j)) + 2*pi*j
(-1.756626784603784110530604 + 4.742664438034657928194889j)
Note the imaginary parts for negative arguments::
>>> loggamma(-0.5); loggamma(-1.5); loggamma(-2.5)
(1.265512123484645396488946 - 3.141592653589793238462643j)
(0.8600470153764810145109327 - 6.283185307179586476925287j)
(-0.05624371649767405067259453 - 9.42477796076937971538793j)
Some special values::
>>> loggamma(1); loggamma(2)
0.0
0.0
>>> loggamma(3); +ln2
0.6931471805599453094172321
0.6931471805599453094172321
>>> loggamma(3.5); log(15*sqrt(pi)/8)
1.200973602347074224816022
1.200973602347074224816022
>>> loggamma(inf)
+inf
Huge arguments are permitted::
>>> loggamma('1e30')
6.807755278982137052053974e+31
>>> loggamma('1e300')
6.897755278982137052053974e+302
>>> loggamma('1e3000')
6.906755278982137052053974e+3003
>>> loggamma('1e100000000000000000000')
2.302585092994045684007991e+100000000000000000020
>>> loggamma('1e30j')
(-1.570796326794896619231322e+30 + 6.807755278982137052053974e+31j)
>>> loggamma('1e300j')
(-1.570796326794896619231322e+300 + 6.897755278982137052053974e+302j)
>>> loggamma('1e3000j')
(-1.570796326794896619231322e+3000 + 6.906755278982137052053974e+3003j)
The log-gamma function can be integrated analytically
on any interval of unit length::
>>> z = 0
>>> quad(loggamma, [z,z+1]); log(2*pi)/2
0.9189385332046727417803297
0.9189385332046727417803297
>>> z = 3+4j
>>> quad(loggamma, [z,z+1]); (log(z)-1)*z + log(2*pi)/2
(-0.9619286014994750641314421 + 5.219637303741238195688575j)
(-0.9619286014994750641314421 + 5.219637303741238195688575j)
The derivatives of the log-gamma function are given by the
polygamma function (:func:`~mpmath.psi`)::
>>> diff(loggamma, -4+3j); psi(0, -4+3j)
(1.688493531222971393607153 + 2.554898911356806978892748j)
(1.688493531222971393607153 + 2.554898911356806978892748j)
>>> diff(loggamma, -4+3j, 2); psi(1, -4+3j)
(-0.1539414829219882371561038 - 0.1020485197430267719746479j)
(-0.1539414829219882371561038 - 0.1020485197430267719746479j)
The log-gamma function satisfies an additive form of the
recurrence relation for the ordinary gamma function::
>>> z = 2+3j
>>> loggamma(z); loggamma(z+1) - log(z)
(-2.092851753092733349564189 + 2.302396543466867626153708j)
(-2.092851753092733349564189 + 2.302396543466867626153708j)
"""
siegeltheta = r"""
Computes the Riemann-Siegel theta function,
.. math ::
\theta(t) = \frac{
\log\Gamma\left(\frac{1+2it}{4}\right) -
\log\Gamma\left(\frac{1-2it}{4}\right)
}{2i} - \frac{\log \pi}{2} t.
The Riemann-Siegel theta function is important in
providing the phase factor for the Z-function
(see :func:`~mpmath.siegelz`). Evaluation is supported for real and
complex arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> siegeltheta(0)
0.0
>>> siegeltheta(inf)
+inf
>>> siegeltheta(-inf)
-inf
>>> siegeltheta(1)
-1.767547952812290388302216
>>> siegeltheta(10+0.25j)
(-3.068638039426838572528867 + 0.05804937947429712998395177j)
Arbitrary derivatives may be computed using the optional
*derivative* keyword argument::
>>> siegeltheta(1234, derivative=2)
0.0004051864079114053109473741
>>> diff(siegeltheta, 1234, n=2)
0.0004051864079114053109473741
The Riemann-Siegel theta function has odd symmetry around `t = 0`,
two local extreme points and three real roots including 0 (located
symmetrically)::
>>> nprint(chop(taylor(siegeltheta, 0, 5)))
[0.0, -2.68609, 0.0, 2.69433, 0.0, -6.40218]
>>> findroot(diffun(siegeltheta), 7)
6.28983598883690277966509
>>> findroot(siegeltheta, 20)
17.84559954041086081682634
For large `t`, there is a famous asymptotic formula
for `\theta(t)`, to first order given by::
>>> t = mpf(10**6)
>>> siegeltheta(t)
5488816.353078403444882823
>>> -t*log(2*pi/t)/2-t/2
5488816.745777464310273645
"""
grampoint = r"""
Gives the `n`-th Gram point `g_n`, defined as the solution
to the equation `\theta(g_n) = \pi n` where `\theta(t)`
is the Riemann-Siegel theta function (:func:`~mpmath.siegeltheta`).
The first few Gram points are::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> grampoint(0)
17.84559954041086081682634
>>> grampoint(1)
23.17028270124630927899664
>>> grampoint(2)
27.67018221781633796093849
>>> grampoint(3)
31.71797995476405317955149
Checking the definition::
>>> siegeltheta(grampoint(3))
9.42477796076937971538793
>>> 3*pi
9.42477796076937971538793
A large Gram point::
>>> grampoint(10**10)
3293531632.728335454561153
Gram points are useful when studying the Z-function
(:func:`~mpmath.siegelz`). See the documentation of that function
for additional examples.
:func:`~mpmath.grampoint` can solve the defining equation for
nonintegral `n`. There is a fixed point where `g(x) = x`::
>>> findroot(lambda x: grampoint(x) - x, 10000)
9146.698193171459265866198
**References**
1. http://mathworld.wolfram.com/GramPoint.html
"""
siegelz = r"""
Computes the Z-function, also known as the Riemann-Siegel Z function,
.. math ::
Z(t) = e^{i \theta(t)} \zeta(1/2+it)
where `\zeta(s)` is the Riemann zeta function (:func:`~mpmath.zeta`)
and where `\theta(t)` denotes the Riemann-Siegel theta function
(see :func:`~mpmath.siegeltheta`).
Evaluation is supported for real and complex arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> siegelz(1)
-0.7363054628673177346778998
>>> siegelz(3+4j)
(-0.1852895764366314976003936 - 0.2773099198055652246992479j)
The first four derivatives are supported, using the
optional *derivative* keyword argument::
>>> siegelz(1234567, derivative=3)
56.89689348495089294249178
>>> diff(siegelz, 1234567, n=3)
56.89689348495089294249178
The Z-function has a Maclaurin expansion::
>>> nprint(chop(taylor(siegelz, 0, 4)))
[-1.46035, 0.0, 2.73588, 0.0, -8.39357]
The Z-function `Z(t)` is equal to `\pm |\zeta(s)|` on the
critical line `s = 1/2+it` (i.e. for real arguments `t`
to `Z`). Its zeros coincide with those of the Riemann zeta
function::
>>> findroot(siegelz, 14)
14.13472514173469379045725
>>> findroot(siegelz, 20)
21.02203963877155499262848
>>> findroot(zeta, 0.5+14j)
(0.5 + 14.13472514173469379045725j)
>>> findroot(zeta, 0.5+20j)
(0.5 + 21.02203963877155499262848j)
Since the Z-function is real-valued on the critical line
(and, unlike `|\zeta(s)|`, analytic), it is useful for
investigating the zeros of the Riemann zeta function.
For example, one can use a root-finding algorithm based
on sign changes::
>>> findroot(siegelz, [100, 200], solver='bisect')
176.4414342977104188888926
To locate roots, Gram points `g_n` which can be computed
by :func:`~mpmath.grampoint` are useful. If `(-1)^n Z(g_n)` is
positive for two consecutive `n`, then `Z(t)` must have
a zero between those points::
>>> g10 = grampoint(10)
>>> g11 = grampoint(11)
>>> (-1)**10 * siegelz(g10) > 0
True
>>> (-1)**11 * siegelz(g11) > 0
True
>>> findroot(siegelz, [g10, g11], solver='bisect')
56.44624769706339480436776
>>> g10, g11
(54.67523744685325626632663, 57.54516517954725443703014)
"""
riemannr = r"""
Evaluates the Riemann R function, a smooth approximation of the
prime counting function `\pi(x)` (see :func:`~mpmath.primepi`). The Riemann
R function gives a fast numerical approximation useful e.g. to
roughly estimate the number of primes in a given interval.
The Riemann R function is computed using the rapidly convergent Gram
series,
.. math ::
R(x) = 1 + \sum_{k=1}^{\infty}
\frac{\log^k x}{k k! \zeta(k+1)}.
From the Gram series, one sees that the Riemann R function is a
well-defined analytic function (except for a branch cut along
the negative real half-axis); it can be evaluated for arbitrary
real or complex arguments.
The Riemann R function gives a very accurate approximation
of the prime counting function. For example, it is wrong by at
most 2 for `x < 1000`, and for `x = 10^9` differs from the exact
value of `\pi(x)` by 79, or less than two parts in a million.
It is about 10 times more accurate than the logarithmic integral
estimate (see :func:`~mpmath.li`), which, however, is even faster to evaluate.
It is orders of magnitude more accurate than the extremely
fast `x/\log x` estimate.
**Examples**
For small arguments, the Riemann R function almost exactly
gives the prime counting function if rounded to the nearest
integer::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> primepi(50), riemannr(50)
(15, 14.9757023241462)
>>> max(abs(primepi(n)-int(round(riemannr(n)))) for n in range(100))
1
>>> max(abs(primepi(n)-int(round(riemannr(n)))) for n in range(300))
2
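The Gram series can also be summed directly with :func:`~mpmath.nsum`,
which should reproduce the function values (here for `x = 10`,
matching the tabulated value further below)::
>>> x = 10
>>> 1 + nsum(lambda k: log(x)**k/(k*fac(k)*zeta(k+1)), [1, inf])
4.56458314100509
>>> riemannr(x)
4.56458314100509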
The Riemann R function can be evaluated for arguments far too large
for exact determination of `\pi(x)` to be computationally
feasible with any presently known algorithm::
>>> riemannr(10**30)
1.46923988977204e+28
>>> riemannr(10**100)
4.3619719871407e+97
>>> riemannr(10**1000)
4.3448325764012e+996
A comparison of the Riemann R function and logarithmic integral estimates
for `\pi(x)` using exact values of `\pi(10^n)` up to `n = 9`.
The fractional error is shown in parentheses::
>>> exact = [4,25,168,1229,9592,78498,664579,5761455,50847534]
>>> for n, p in enumerate(exact):
... n += 1
... r, l = riemannr(10**n), li(10**n)
... rerr, lerr = nstr((r-p)/p,3), nstr((l-p)/p,3)
... print("%i %i %s(%s) %s(%s)" % (n, p, r, rerr, l, lerr))
...
1 4 4.56458314100509(0.141) 6.1655995047873(0.541)
2 25 25.6616332669242(0.0265) 30.1261415840796(0.205)
3 168 168.359446281167(0.00214) 177.609657990152(0.0572)
4 1229 1226.93121834343(-0.00168) 1246.13721589939(0.0139)
5 9592 9587.43173884197(-0.000476) 9629.8090010508(0.00394)
6 78498 78527.3994291277(0.000375) 78627.5491594622(0.00165)
7 664579 664667.447564748(0.000133) 664918.405048569(0.000511)
8 5761455 5761551.86732017(1.68e-5) 5762209.37544803(0.000131)
9 50847534 50847455.4277214(-1.55e-6) 50849234.9570018(3.35e-5)
The derivative of the Riemann R function gives the approximate
probability for a number of magnitude `x` to be prime::
>>> diff(riemannr, 1000)
0.141903028110784
>>> mpf(primepi(1050) - primepi(950)) / 100
0.15
Evaluation is supported for arbitrary arguments and at arbitrary
precision::
>>> mp.dps = 30
>>> riemannr(7.5)
3.72934743264966261918857135136
>>> riemannr(-4+2j)
(-0.551002208155486427591793957644 + 2.16966398138119450043195899746j)
"""
primepi = r"""
Evaluates the prime counting function, `\pi(x)`, which gives
the number of primes less than or equal to `x`. The argument
`x` may be fractional.
The prime counting function is very expensive to evaluate
precisely for large `x`, and the present implementation is
not optimized in any way. For numerical approximation of the
prime counting function, it is better to use :func:`~mpmath.primepi2`
or :func:`~mpmath.riemannr`.
Some values of the prime counting function::
>>> from mpmath import *
>>> [primepi(k) for k in range(20)]
[0, 0, 1, 2, 2, 3, 3, 4, 4, 4, 4, 5, 5, 6, 6, 6, 6, 7, 7, 8]
>>> primepi(3.5)
2
>>> primepi(100000)
9592
"""
primepi2 = r"""
Returns an interval (as an ``mpi`` instance) providing bounds
for the value of the prime counting function `\pi(x)`. For small
`x`, :func:`~mpmath.primepi2` returns an exact interval based on
the output of :func:`~mpmath.primepi`. For `x > 2656`, a loose interval
based on Schoenfeld's inequality
.. math ::
|\pi(x) - \mathrm{li}(x)| < \frac{\sqrt x \log x}{8 \pi}
is returned. This estimate is rigorous assuming the truth of
the Riemann hypothesis, and can be computed very quickly.
**Examples**
Exact values of the prime counting function for small `x`::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> iv.dps = 15; iv.pretty = True
>>> primepi2(10)
[4.0, 4.0]
>>> primepi2(100)
[25.0, 25.0]
>>> primepi2(1000)
[168.0, 168.0]
Loose intervals are generated for moderately large `x`::
>>> primepi2(10000), primepi(10000)
([1209.0, 1283.0], 1229)
>>> primepi2(50000), primepi(50000)
([5070.0, 5263.0], 5133)
As `x` increases, the absolute error gets worse while the relative
error improves. The exact value of `\pi(10^{23})` is
1925320391606803968923, and :func:`~mpmath.primepi2` gives 9 significant
digits::
>>> p = primepi2(10**23)
>>> p
[1.9253203909477020467e+21, 1.925320392280406229e+21]
>>> mpf(p.delta) / mpf(p.a)
6.9219865355293e-10
A more precise, nonrigorous estimate for `\pi(x)` can be
obtained using the Riemann R function (:func:`~mpmath.riemannr`).
For large enough `x`, the value returned by :func:`~mpmath.primepi2`
essentially amounts to a small perturbation of the value returned by
:func:`~mpmath.riemannr`::
>>> primepi2(10**100)
[4.3619719871407024816e+97, 4.3619719871407032404e+97]
>>> riemannr(10**100)
4.3619719871407e+97
"""
primezeta = r"""
Computes the prime zeta function, which is defined
in analogy with the Riemann zeta function (:func:`~mpmath.zeta`)
as
.. math ::
P(s) = \sum_p \frac{1}{p^s}
where the sum is taken over all prime numbers `p`. Although
this sum only converges for `\mathrm{Re}(s) > 1`, the
function is defined by analytic continuation in the
half-plane `\mathrm{Re}(s) > 0`.
**Examples**
Arbitrary-precision evaluation for real and complex arguments is
supported::
>>> from mpmath import *
>>> mp.dps = 30; mp.pretty = True
>>> primezeta(2)
0.452247420041065498506543364832
>>> primezeta(pi)
0.15483752698840284272036497397
>>> mp.dps = 50
>>> primezeta(3)
0.17476263929944353642311331466570670097541212192615
>>> mp.dps = 20
>>> primezeta(3+4j)
(-0.12085382601645763295 - 0.013370403397787023602j)
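For `\mathrm{Re}(s) > 1`, the prime zeta function is related to the
Riemann zeta function by Moebius inversion,
`P(s) = \sum_{k=1}^{\infty} \mu(k) \log \zeta(ks) / k`, which
provides an independent consistency check::
>>> s = 2
>>> chop(primezeta(s) - nsum(lambda k: moebius(k)*log(zeta(k*s))/k, [1,inf]))
0.0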
The prime zeta function has a logarithmic pole at `s = 1`,
with residue equal to the difference of the Mertens and
Euler constants::
>>> primezeta(1)
+inf
>>> extradps(25)(lambda x: primezeta(1+x)+log(x))(+eps)
-0.31571845205389007685
>>> mertens-euler
-0.31571845205389007685
The analytic continuation to `0 < \mathrm{Re}(s) \le 1`
is implemented. In this strip the function exhibits
very complex behavior; on the unit interval, it has poles at
`1/n` for every squarefree integer `n`::
>>> primezeta(0.5) # Pole at s = 1/2
(-inf + 3.1415926535897932385j)
>>> primezeta(0.25)
(-1.0416106801757269036 + 0.52359877559829887308j)
>>> primezeta(0.5+10j)
(0.54892423556409790529 + 0.45626803423487934264j)
Although evaluation works in principle for any `\mathrm{Re}(s) > 0`,
it should be noted that the evaluation time increases exponentially
as `s` approaches the imaginary axis.
For large `\mathrm{Re}(s)`, `P(s)` is asymptotic to `2^{-s}`::
>>> primezeta(inf)
0.0
>>> primezeta(10), mpf(2)**-10
(0.00099360357443698021786, 0.0009765625)
>>> primezeta(1000)
9.3326361850321887899e-302
>>> primezeta(1000+1000j)
(-3.8565440833654995949e-302 - 8.4985390447553234305e-302j)
**References**
Carl-Erik Froberg, "On the prime zeta function",
BIT 8 (1968), pp. 187-202.
"""
bernpoly = r"""
Evaluates the Bernoulli polynomial `B_n(z)`.
The first few Bernoulli polynomials are::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for n in range(6):
... nprint(chop(taylor(lambda x: bernpoly(n,x), 0, n)))
...
[1.0]
[-0.5, 1.0]
[0.166667, -1.0, 1.0]
[0.0, 0.5, -1.5, 1.0]
[-0.0333333, 0.0, 1.0, -2.0, 1.0]
[0.0, -0.166667, 0.0, 1.66667, -2.5, 1.0]
At `z = 0`, the Bernoulli polynomial evaluates to a
Bernoulli number (see :func:`~mpmath.bernoulli`)::
>>> bernpoly(12, 0), bernoulli(12)
(-0.253113553113553, -0.253113553113553)
>>> bernpoly(13, 0), bernoulli(13)
(0.0, 0.0)
Evaluation is accurate for large `n` and small `z`::
>>> mp.dps = 25
>>> bernpoly(100, 0.5)
2.838224957069370695926416e+78
>>> bernpoly(1000, 10.5)
5.318704469415522036482914e+1769
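The difference equation `B_n(x+1) - B_n(x) = n x^{n-1}` can be
verified numerically::
>>> n, x = 6, 1.5
>>> chop(bernpoly(n, x+1) - bernpoly(n, x) - n*x**(n-1))
0.0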
"""
polylog = r"""
Computes the polylogarithm, defined by the sum
.. math ::
\mathrm{Li}_s(z) = \sum_{k=1}^{\infty} \frac{z^k}{k^s}.
This series is convergent only for `|z| < 1`, so elsewhere
the analytic continuation is implied.
The polylogarithm should not be confused with the logarithmic
integral (also denoted by Li or li), which is implemented
as :func:`~mpmath.li`.
**Examples**
The polylogarithm satisfies a huge number of functional identities.
A sample of polylogarithm evaluations is shown below::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> polylog(1,0.5), log(2)
(0.693147180559945, 0.693147180559945)
>>> polylog(2,0.5), (pi**2-6*log(2)**2)/12
(0.582240526465012, 0.582240526465012)
>>> polylog(2,-phi), -log(phi)**2-pi**2/10
(-1.21852526068613, -1.21852526068613)
>>> polylog(3,0.5), 7*zeta(3)/8-pi**2*log(2)/12+log(2)**3/6
(0.53721319360804, 0.53721319360804)
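One such identity is the duplication (square) formula
`\mathrm{Li}_s(z) + \mathrm{Li}_s(-z) = 2^{1-s} \mathrm{Li}_s(z^2)`::
>>> s, z = 2.5, 0.75
>>> chop(polylog(s,z) + polylog(s,-z) - 2**(1-s)*polylog(s,z**2))
0.0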
:func:`~mpmath.polylog` can evaluate the analytic continuation of the
polylogarithm when `s` is an integer::
>>> polylog(2, 10)
(0.536301287357863 - 7.23378441241546j)
>>> polylog(2, -10)
-4.1982778868581
>>> polylog(2, 10j)
(-3.05968879432873 + 3.71678149306807j)
>>> polylog(-2, 10)
-0.150891632373114
>>> polylog(-2, -10)
0.067618332081142
>>> polylog(-2, 10j)
(0.0384353698579347 + 0.0912451798066779j)
Some more examples, with arguments on the unit circle (note that
the series definition cannot be used for computation here)::
>>> polylog(2,j)
(-0.205616758356028 + 0.915965594177219j)
>>> j*catalan-pi**2/48
(-0.205616758356028 + 0.915965594177219j)
>>> polylog(3,exp(2*pi*j/3))
(-0.534247512515375 + 0.765587078525922j)
>>> -4*zeta(3)/9 + 2*j*pi**3/81
(-0.534247512515375 + 0.765587078525921j)
Polylogarithms of different order are related by integration
and differentiation::
>>> s, z = 3, 0.5
>>> polylog(s+1, z)
0.517479061673899
>>> quad(lambda t: polylog(s,t)/t, [0, z])
0.517479061673899
>>> z*diff(lambda t: polylog(s+2,t), z)
0.517479061673899
Taylor series expansions around `z = 0` are::
>>> for n in range(-3, 4):
... nprint(taylor(lambda x: polylog(n,x), 0, 5))
...
[0.0, 1.0, 8.0, 27.0, 64.0, 125.0]
[0.0, 1.0, 4.0, 9.0, 16.0, 25.0]
[0.0, 1.0, 2.0, 3.0, 4.0, 5.0]
[0.0, 1.0, 1.0, 1.0, 1.0, 1.0]
[0.0, 1.0, 0.5, 0.333333, 0.25, 0.2]
[0.0, 1.0, 0.25, 0.111111, 0.0625, 0.04]
[0.0, 1.0, 0.125, 0.037037, 0.015625, 0.008]
The series defining the polylogarithm is simultaneously
a Taylor series and an L-series. For certain values of `z`, the
polylogarithm reduces to a pure zeta function::
>>> polylog(pi, 1), zeta(pi)
(1.17624173838258, 1.17624173838258)
>>> polylog(pi, -1), -altzeta(pi)
(-0.909670702980385, -0.909670702980385)
Evaluation for arbitrary, nonintegral `s` is supported
for `z` within the unit circle::
>>> polylog(3+4j, 0.25)
(0.24258605789446 - 0.00222938275488344j)
>>> nsum(lambda k: 0.25**k / k**(3+4j), [1,inf])
(0.24258605789446 - 0.00222938275488344j)
It is also currently supported outside of the unit circle for `z`
not too large in magnitude::
>>> polylog(1+j, 20+40j)
(-7.1421172179728 - 3.92726697721369j)
>>> polylog(1+j, 200+400j)
Traceback (most recent call last):
...
NotImplementedError: polylog for arbitrary s and z
**References**
1. Richard Crandall, "Note on fast polylogarithm computation"
http://people.reed.edu/~crandall/papers/Polylog.pdf
2. http://en.wikipedia.org/wiki/Polylogarithm
3. http://mathworld.wolfram.com/Polylogarithm.html
"""
bell = r"""
For `n` a nonnegative integer, ``bell(n,x)`` evaluates the Bell
polynomial `B_n(x)`, the first few of which are
.. math ::
B_0(x) = 1
B_1(x) = x
B_2(x) = x^2+x
B_3(x) = x^3+3x^2+x
If `x = 1` or :func:`~mpmath.bell` is called with only one argument, it
gives the `n`-th Bell number `B_n`, which is the number of
partitions of a set with `n` elements. By setting the precision to
at least `\log_{10} B_n` digits, :func:`~mpmath.bell` provides fast
calculation of exact Bell numbers.
In general, :func:`~mpmath.bell` computes
.. math ::
B_n(x) = e^{-x} \left(\mathrm{sinc}(\pi n) + E_n(x)\right)
where `E_n(x)` is the generalized exponential function implemented
by :func:`~mpmath.polyexp`. This is an extension of Dobinski's formula [1],
where the modification is the sinc term ensuring that `B_n(x)` is
continuous in `n`; :func:`~mpmath.bell` can thus be evaluated,
differentiated, etc for arbitrary complex arguments.
**Examples**
Simple evaluations::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> bell(0, 2.5)
1.0
>>> bell(1, 2.5)
2.5
>>> bell(2, 2.5)
8.75
Evaluation for arbitrary complex arguments::
>>> bell(5.75+1j, 2-3j)
(-10767.71345136587098445143 - 15449.55065599872579097221j)
The first few Bell polynomials::
>>> for k in range(7):
... nprint(taylor(lambda x: bell(k,x), 0, k))
...
[1.0]
[0.0, 1.0]
[0.0, 1.0, 1.0]
[0.0, 1.0, 3.0, 1.0]
[0.0, 1.0, 7.0, 6.0, 1.0]
[0.0, 1.0, 15.0, 25.0, 10.0, 1.0]
[0.0, 1.0, 31.0, 90.0, 65.0, 15.0, 1.0]
The first few Bell numbers and complementary Bell numbers::
>>> [int(bell(k)) for k in range(10)]
[1, 1, 2, 5, 15, 52, 203, 877, 4140, 21147]
>>> [int(bell(k,-1)) for k in range(10)]
[1, -1, 0, 1, 1, -2, -9, -9, 50, 267]
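The Bell numbers satisfy the recurrence
`B_{n+1} = \sum_{k=0}^{n} {n \choose k} B_k`, easily checked with
:func:`~mpmath.nsum`::
>>> n = 9
>>> chop(nsum(lambda k: binomial(n,k)*bell(k), [0,n]) - bell(n+1))
0.0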
Large Bell numbers::
>>> mp.dps = 50
>>> bell(50)
185724268771078270438257767181908917499221852770.0
>>> bell(50,-1)
-29113173035759403920216141265491160286912.0
Some even larger values::
>>> mp.dps = 25
>>> bell(1000,-1)
-1.237132026969293954162816e+1869
>>> bell(1000)
2.989901335682408421480422e+1927
>>> bell(1000,2)
6.591553486811969380442171e+1987
>>> bell(1000,100.5)
9.101014101401543575679639e+2529
A determinant identity satisfied by Bell numbers::
>>> mp.dps = 15
>>> N = 8
>>> det([[bell(k+j) for j in range(N)] for k in range(N)])
125411328000.0
>>> superfac(N-1)
125411328000.0
**References**
1. http://mathworld.wolfram.com/DobinskisFormula.html
"""
polyexp = r"""
Evaluates the polyexponential function, defined for arbitrary
complex `s`, `z` by the series
.. math ::
E_s(z) = \sum_{k=1}^{\infty} \frac{k^s}{k!} z^k.
`E_s(z)` is constructed from the exponential function analogously
to how the polylogarithm is constructed from the ordinary
logarithm; as a function of `s` (with `z` fixed), `E_s` is an L-series.
It is an entire function of both `s` and `z`.
The polyexponential function provides a generalization of the
Bell polynomials `B_n(x)` (see :func:`~mpmath.bell`) to noninteger orders `n`.
In terms of the Bell polynomials,
.. math ::
E_s(z) = e^z B_s(z) - \mathrm{sinc}(\pi s).
Note that `B_n(x)` and `e^{-x} E_n(x)` are identical if `n`
is a nonzero integer, but not otherwise. In particular, they differ
at `n = 0`.
**Examples**
Evaluating a series::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> nsum(lambda k: sqrt(k)/fac(k), [1,inf])
2.101755547733791780315904
>>> polyexp(0.5,1)
2.101755547733791780315904
Evaluation for arbitrary arguments::
>>> polyexp(-3-4j, 2.5+2j)
(2.351660261190434618268706 + 1.202966666673054671364215j)
Evaluation is accurate for tiny function values::
>>> polyexp(4, -100)
3.499471750566824369520223e-36
If `n` is a nonpositive integer, `E_n` reduces to a special
instance of the hypergeometric function `\,_pF_q`::
>>> n = 3
>>> x = pi
>>> polyexp(-n,x)
4.042192318847986561771779
>>> x*hyper([1]*(n+1), [2]*(n+1), x)
4.042192318847986561771779
"""
cyclotomic = r"""
Evaluates the cyclotomic polynomial `\Phi_n(x)`, defined by
.. math ::
\Phi_n(x) = \prod_{\zeta} (x - \zeta)
where `\zeta` ranges over all primitive `n`-th roots of unity
(see :func:`~mpmath.unitroots`). An equivalent representation, used
for computation, is
.. math ::
\Phi_n(x) = \prod_{d\mid n}(x^d-1)^{\mu(n/d)}
where `\mu(m)` denotes the Moebius function. The cyclotomic
polynomials are integer polynomials, the first of which can be
written explicitly as
.. math ::
\Phi_0(x) = 1
\Phi_1(x) = x - 1
\Phi_2(x) = x + 1
\Phi_3(x) = x^2 + x + 1
\Phi_4(x) = x^2 + 1
\Phi_5(x) = x^4 + x^3 + x^2 + x + 1
\Phi_6(x) = x^2 - x + 1
**Examples**
The coefficients of low-order cyclotomic polynomials can be recovered
using Taylor expansion::
>>> from mpmath import *
>>> mp.dps = 15; mp.pretty = True
>>> for n in range(9):
... p = chop(taylor(lambda x: cyclotomic(n,x), 0, 10))
... print("%s %s" % (n, nstr(p[:10+1-p[::-1].index(1)])))
...
0 [1.0]
1 [-1.0, 1.0]
2 [1.0, 1.0]
3 [1.0, 1.0, 1.0]
4 [1.0, 0.0, 1.0]
5 [1.0, 1.0, 1.0, 1.0, 1.0]
6 [1.0, -1.0, 1.0]
7 [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]
8 [1.0, 0.0, 0.0, 0.0, 1.0]
The definition as a product over primitive roots may be checked
by computing the product explicitly (for a real argument, this
method will generally introduce numerical noise in the imaginary
part)::
>>> mp.dps = 25
>>> z = 3+4j
>>> cyclotomic(10, z)
(-419.0 - 360.0j)
>>> fprod(z-r for r in unitroots(10, primitive=True))
(-419.0 - 360.0j)
>>> z = 3
>>> cyclotomic(10, z)
61.0
>>> fprod(z-r for r in unitroots(10, primitive=True))
(61.0 - 3.146045605088568607055454e-25j)
Up to permutation, the roots of a given cyclotomic polynomial
can be checked to agree with the list of primitive roots::
>>> p = taylor(lambda x: cyclotomic(6,x), 0, 6)[:3]
>>> for r in polyroots(p[::-1]):
... print(r)
...
(0.5 - 0.8660254037844386467637232j)
(0.5 + 0.8660254037844386467637232j)
>>>
>>> for r in unitroots(6, primitive=True):
... print(r)
...
(0.5 + 0.8660254037844386467637232j)
(0.5 - 0.8660254037844386467637232j)
"""
meijerg = r"""
Evaluates the Meijer G-function, defined as
.. math ::
G^{m,n}_{p,q} \left( \left. \begin{matrix}
a_1, \dots, a_n ; a_{n+1} \dots a_p \\
b_1, \dots, b_m ; b_{m+1} \dots b_q
\end{matrix}\; \right| \; z ; r \right) =
\frac{1}{2 \pi i} \int_L
\frac{\prod_{j=1}^m \Gamma(b_j+s) \prod_{j=1}^n\Gamma(1-a_j-s)}
{\prod_{j=n+1}^{p}\Gamma(a_j+s) \prod_{j=m+1}^q \Gamma(1-b_j-s)}
z^{-s/r} ds
for an appropriate choice of the contour `L` (see references).
There are `p` elements `a_j`.
The argument *a_s* should be a pair of lists, the first containing the
`n` elements `a_1, \ldots, a_n` and the second containing
the `p-n` elements `a_{n+1}, \ldots a_p`.
There are `q` elements `b_j`.
The argument *b_s* should be a pair of lists, the first containing the
`m` elements `b_1, \ldots, b_m` and the second containing
the `q-m` elements `b_{m+1}, \ldots b_q`.
The implicit tuple `(m, n, p, q)` constitutes the order or degree of the
Meijer G-function, and is determined by the lengths of the coefficient
vectors. Confusingly, the indices in this tuple appear in a different order
from the coefficients, but this notation is standard. The many examples
given below should hopefully clear up any potential confusion.
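To make the list convention concrete, here is a small sketch (the
parameter values are arbitrary choices for illustration) showing how
the degree follows from the list lengths::

    from mpmath import mp, meijerg

    mp.dps = 15
    a_s = [[0.5], [1.25, 2.0]]   # n = 1, p = 3
    b_s = [[0.75, 1.5], [0.25]]  # m = 2, q = 3
    # This evaluates G^{2,1}_{3,3}(... | z) at z = 0.5.
    print(meijerg(a_s, b_s, 0.5))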
**Algorithm**
The Meijer G-function is evaluated as a combination of hypergeometric series.
There are two versions of the function, which can be selected with
the optional *series* argument.
*series=1* uses a sum of `m` `\,_pF_{q-1}` functions of `z`
*series=2* uses a sum of `n` `\,_qF_{p-1}` functions of `1/z`
The default series is chosen based on the degree and `|z|` in order
to be consistent with Mathematica's. This definition of the Meijer G-function
has a discontinuity at `|z| = 1` for some orders, which can
be avoided by explicitly specifying a series.
Keyword arguments are forwarded to :func:`~mpmath.hypercomb`.
**Examples**
Many standard functions are special cases of the Meijer G-function
(possibly rescaled and/or with branch cut corrections). We define
some test parameters::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> a = mpf(0.75)
>>> b = mpf(1.5)
>>> z = mpf(2.25)
The exponential function:
`e^z = G^{1,0}_{0,1} \left( \left. \begin{matrix} - \\ 0 \end{matrix} \;
\right| \; -z \right)`
>>> meijerg([[],[]], [[0],[]], -z)
9.487735836358525720550369
>>> exp(z)
9.487735836358525720550369
The natural logarithm:
`\log(1+z) = G^{1,2}_{2,2} \left( \left. \begin{matrix} 1, 1 \\ 1, 0
\end{matrix} \; \right| \; -z \right)`
>>> meijerg([[1,1],[]], [[1],[0]], z)
1.178654996341646117219023
>>> log(1+z)
1.178654996341646117219023
A rational function:
`\frac{z}{z+1} = G^{1,2}_{2,2} \left( \left. \begin{matrix} 1, 1 \\ 1, 1
\end{matrix} \; \right| \; z \right)`
>>> meijerg([[1,1],[]], [[1],[1]], z)
0.6923076923076923076923077
>>> z/(z+1)
0.6923076923076923076923077
The sine and cosine functions:
`\frac{1}{\sqrt \pi} \sin(2 \sqrt z) = G^{1,0}_{0,2} \left( \left. \begin{matrix}
- \\ \frac{1}{2}, 0 \end{matrix} \; \right| \; z \right)`
`\frac{1}{\sqrt \pi} \cos(2 \sqrt z) = G^{1,0}_{0,2} \left( \left. \begin{matrix}
- \\ 0, \frac{1}{2} \end{matrix} \; \right| \; z \right)`
>>> meijerg([[],[]], [[0.5],[0]], (z/2)**2)
0.4389807929218676682296453
>>> sin(z)/sqrt(pi)
0.4389807929218676682296453
>>> meijerg([[],[]], [[0],[0.5]], (z/2)**2)
-0.3544090145996275423331762
>>> cos(z)/sqrt(pi)
-0.3544090145996275423331762
Bessel functions:
`J_a(2 \sqrt z) = G^{1,0}_{0,2} \left( \left.
\begin{matrix} - \\ \frac{a}{2}, -\frac{a}{2}
\end{matrix} \; \right| \; z \right)`
`Y_a(2 \sqrt z) = G^{2,0}_{1,3} \left( \left.
\begin{matrix} \frac{-a-1}{2} \\ \frac{a}{2}, -\frac{a}{2}, \frac{-a-1}{2}
\end{matrix} \; \right| \; z \right)`
`(-z)^{a/2} z^{-a/2} I_a(2 \sqrt z) = G^{1,0}_{0,2} \left( \left.
\begin{matrix} - \\ \frac{a}{2}, -\frac{a}{2}
\end{matrix} \; \right| \; -z \right)`
`2 K_a(2 \sqrt z) = G^{2,0}_{0,2} \left( \left.
\begin{matrix} - \\ \frac{a}{2}, -\frac{a}{2}
\end{matrix} \; \right| \; z \right)`
As the example with the Bessel *I* function shows, a branch
factor is required for some arguments when inverting the square root.
>>> meijerg([[],[]], [[a/2],[-a/2]], (z/2)**2)
0.5059425789597154858527264
>>> besselj(a,z)
0.5059425789597154858527264
>>> meijerg([[],[(-a-1)/2]], [[a/2,-a/2],[(-a-1)/2]], (z/2)**2)
0.1853868950066556941442559
>>> bessely(a, z)
0.1853868950066556941442559
>>> meijerg([[],[]], [[a/2],[-a/2]], -(z/2)**2)
(0.8685913322427653875717476 + 2.096964974460199200551738j)
>>> (-z)**(a/2) / z**(a/2) * besseli(a, z)
(0.8685913322427653875717476 + 2.096964974460199200551738j)
>>> 0.5*meijerg([[],[]], [[a/2,-a/2],[]], (z/2)**2)
0.09334163695597828403796071
>>> besselk(a,z)
0.09334163695597828403796071
Error functions:
`\sqrt{\pi} z^{2(a-1)} \mathrm{erfc}(z) = G^{2,0}_{1,2} \left( \left.
\begin{matrix} a \\ a-1, a-\frac{1}{2}
\end{matrix} \; \right| \; z, \frac{1}{2} \right)`
>>> meijerg([[],[a]], [[a-1,a-0.5],[]], z, 0.5)
0.00172839843123091957468712
>>> sqrt(pi) * z**(2*a-2) * erfc(z)
0.00172839843123091957468712
A Meijer G-function of higher degree, (1,1,2,3):
>>> meijerg([[a],[b]], [[a],[b,a-1]], z)
1.55984467443050210115617
>>> sin((b-a)*pi)/pi*(exp(z)-1)*z**(a-1)
1.55984467443050210115617
A Meijer G-function of still higher degree, (4,1,2,4), that can
be expanded as a messy combination of exponential integrals:
>>> meijerg([[a],[2*b-a]], [[b,a,b-0.5,-1-a+2*b],[]], z)
0.3323667133658557271898061
>>> chop(4**(a-b+1)*sqrt(pi)*gamma(2*b-2*a)*z**a*\
... expint(2*b-2*a, -2*sqrt(-z))*expint(2*b-2*a, 2*sqrt(-z)))
0.3323667133658557271898061
In the following case, different series give different values::
>>> chop(meijerg([[1],[0.25]],[[3],[0.5]],-2))
-0.06417628097442437076207337
>>> meijerg([[1],[0.25]],[[3],[0.5]],-2,series=1)
0.1428699426155117511873047
>>> chop(meijerg([[1],[0.25]],[[3],[0.5]],-2,series=2))
-0.06417628097442437076207337
**References**
1. http://en.wikipedia.org/wiki/Meijer_G-function
2. http://mathworld.wolfram.com/MeijerG-Function.html
3. http://functions.wolfram.com/HypergeometricFunctions/MeijerG/
4. http://functions.wolfram.com/HypergeometricFunctions/MeijerG1/
"""
clsin = r"""
Computes the Clausen sine function, defined formally by the series
.. math ::
\mathrm{Cl}_s(z) = \sum_{k=1}^{\infty} \frac{\sin(kz)}{k^s}.
The special case `\mathrm{Cl}_2(z)` (i.e. ``clsin(2,z)``) is the classical
"Clausen function". More generally, the Clausen function is defined for
complex `s` and `z`, even when the series does not converge. The
Clausen function is related to the polylogarithm (:func:`~mpmath.polylog`) as
.. math ::
\mathrm{Cl}_s(z) = \frac{1}{2i}\left(\mathrm{Li}_s\left(e^{iz}\right) -
\mathrm{Li}_s\left(e^{-iz}\right)\right)
= \mathrm{Im}\left[\mathrm{Li}_s(e^{iz})\right] \quad (s, z \in \mathbb{R}),
and this representation can be taken to provide the analytic continuation of the
series. The complementary function :func:`~mpmath.clcos` gives the corresponding
cosine sum.
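The polylogarithm representation is easy to check numerically; a
minimal sketch with arbitrary test values::

    from mpmath import mp, mpf, j, exp, polylog, clsin, chop

    mp.dps = 25
    s, z = mpf('2.5'), mpf('0.75')
    print(clsin(s, z))
    print(chop((polylog(s, exp(j*z)) - polylog(s, exp(-j*z)))/(2*j)))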
**Examples**
Evaluation for arbitrarily chosen `s` and `z`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> s, z = 3, 4
>>> clsin(s, z); nsum(lambda k: sin(z*k)/k**s, [1,inf])
-0.6533010136329338746275795
-0.6533010136329338746275795
Using `z + \pi` instead of `z` gives an alternating series::
>>> clsin(s, z+pi)
0.8860032351260589402871624
>>> nsum(lambda k: (-1)**k*sin(z*k)/k**s, [1,inf])
0.8860032351260589402871624
With `s = 1`, the sum can be expressed in closed form
using elementary functions::
>>> z = 1 + sqrt(3)
>>> clsin(1, z)
0.2047709230104579724675985
>>> chop((log(1-exp(-j*z)) - log(1-exp(j*z)))/(2*j))
0.2047709230104579724675985
>>> nsum(lambda k: sin(k*z)/k, [1,inf])
0.2047709230104579724675985
The classical Clausen function `\mathrm{Cl}_2(\theta)` gives the
value of the integral `\int_0^{\theta} -\ln(2\sin(x/2)) dx` for
`0 < \theta < 2 \pi`::
>>> cl2 = lambda t: clsin(2, t)
>>> cl2(3.5)
-0.2465045302347694216534255
>>> -quad(lambda x: ln(2*sin(0.5*x)), [0, 3.5])
-0.2465045302347694216534255
This function is symmetric about `\theta = \pi` with zeros and extreme
points::
>>> cl2(0); cl2(pi/3); chop(cl2(pi)); cl2(5*pi/3); chop(cl2(2*pi))
0.0
1.014941606409653625021203
0.0
-1.014941606409653625021203
0.0
Catalan's constant is a special value::
>>> cl2(pi/2)
0.9159655941772190150546035
>>> +catalan
0.9159655941772190150546035
The Clausen sine function can be expressed in closed form when
`s` is an odd integer (becoming zero when `s < 0`)::
>>> z = 1 + sqrt(2)
>>> clsin(1, z); (pi-z)/2
0.3636895456083490948304773
0.3636895456083490948304773
>>> clsin(3, z); pi**2/6*z - pi*z**2/4 + z**3/12
0.5661751584451144991707161
0.5661751584451144991707161
>>> clsin(-1, z)
0.0
>>> clsin(-3, z)
0.0
It can also be expressed in closed form for even integer `s \le 0`,
providing a finite sum for series such as
`\sin(z) + \sin(2z) + \sin(3z) + \ldots`::
>>> z = 1 + sqrt(2)
>>> clsin(0, z)
0.1903105029507513881275865
>>> cot(z/2)/2
0.1903105029507513881275865
>>> clsin(-2, z)
-0.1089406163841548817581392
>>> -cot(z/2)*csc(z/2)**2/4
-0.1089406163841548817581392
Call with ``pi=True`` to multiply `z` by `\pi` exactly::
>>> clsin(3, 3*pi)
-8.892316224968072424732898e-26
>>> clsin(3, 3, pi=True)
0.0
Evaluation for complex `s`, `z` in a nonconvergent case::
>>> s, z = -1-j, 1+2j
>>> clsin(s, z)
(-0.593079480117379002516034 + 0.9038644233367868273362446j)
>>> extraprec(20)(nsum)(lambda k: sin(k*z)/k**s, [1,inf])
(-0.593079480117379002516034 + 0.9038644233367868273362446j)
"""
clcos = r"""
Computes the Clausen cosine function, defined formally by the series
.. math ::
\mathrm{\widetilde{Cl}}_s(z) = \sum_{k=1}^{\infty} \frac{\cos(kz)}{k^s}.
This function is complementary to the Clausen sine function
:func:`~mpmath.clsin`. In terms of the polylogarithm,
.. math ::
\mathrm{\widetilde{Cl}}_s(z) =
\frac{1}{2}\left(\mathrm{Li}_s\left(e^{iz}\right) +
\mathrm{Li}_s\left(e^{-iz}\right)\right)
= \mathrm{Re}\left[\mathrm{Li}_s(e^{iz})\right] \quad (s, z \in \mathbb{R}).
**Examples**
Evaluation for arbitrarily chosen `s` and `z`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> s, z = 3, 4
>>> clcos(s, z); nsum(lambda k: cos(z*k)/k**s, [1,inf])
-0.6518926267198991308332759
-0.6518926267198991308332759
Using `z + \pi` instead of `z` gives an alternating series::
>>> s, z = 3, 0.5
>>> clcos(s, z+pi)
-0.8155530586502260817855618
>>> nsum(lambda k: (-1)**k*cos(z*k)/k**s, [1,inf])
-0.8155530586502260817855618
With `s = 1`, the sum can be expressed in closed form
using elementary functions::
>>> z = 1 + sqrt(3)
>>> clcos(1, z)
-0.6720334373369714849797918
>>> chop(-0.5*(log(1-exp(j*z))+log(1-exp(-j*z))))
-0.6720334373369714849797918
>>> -log(abs(2*sin(0.5*z))) # Equivalent to above when z is real
-0.6720334373369714849797918
>>> nsum(lambda k: cos(k*z)/k, [1,inf])
-0.6720334373369714849797918
It can also be expressed in closed form when `s` is an even integer.
For example,
>>> clcos(2,z)
-0.7805359025135583118863007
>>> pi**2/6 - pi*z/2 + z**2/4
-0.7805359025135583118863007
The case `s = 0` gives the renormalized sum of
`\cos(z) + \cos(2z) + \cos(3z) + \ldots` (which happens to be the same for
any value of `z`)::
>>> clcos(0, z)
-0.5
>>> nsum(lambda k: cos(k*z), [1,inf])
-0.5
Also the sums
.. math ::
\cos(z) + 2\cos(2z) + 3\cos(3z) + \ldots
and
.. math ::
\cos(z) + 2^n \cos(2z) + 3^n \cos(3z) + \ldots
for higher integer powers `n = -s` can be done in closed form. They are zero
when `n` is positive and even (`s` negative and even)::
>>> clcos(-1, z); 1/(2*cos(z)-2)
-0.2607829375240542480694126
-0.2607829375240542480694126
>>> clcos(-3, z); (2+cos(z))*csc(z/2)**4/8
0.1472635054979944390848006
0.1472635054979944390848006
>>> clcos(-2, z); clcos(-4, z); clcos(-6, z)
0.0
0.0
0.0
With `z = \pi`, the series reduces to that of the Riemann zeta function
(more generally, if `z = p \pi/q`, it is a finite sum over Hurwitz zeta
function values)::
>>> clcos(2.5, 0); zeta(2.5)
1.34148725725091717975677
1.34148725725091717975677
>>> clcos(2.5, pi); -altzeta(2.5)
-0.8671998890121841381913472
-0.8671998890121841381913472
Call with ``pi=True`` to multiply `z` by `\pi` exactly::
>>> clcos(-3, 2*pi)
2.997921055881167659267063e+102
>>> clcos(-3, 2, pi=True)
0.008333333333333333333333333
Evaluation for complex `s`, `z` in a nonconvergent case::
>>> s, z = -1-j, 1+2j
>>> clcos(s, z)
(0.9407430121562251476136807 + 0.715826296033590204557054j)
>>> extraprec(20)(nsum)(lambda k: cos(k*z)/k**s, [1,inf])
(0.9407430121562251476136807 + 0.715826296033590204557054j)
"""
whitm = r"""
Evaluates the Whittaker function `M(k,m,z)`, which gives a solution
to the Whittaker differential equation
.. math ::
\frac{d^2f}{dz^2} + \left(-\frac{1}{4}+\frac{k}{z}+
\frac{(\frac{1}{4}-m^2)}{z^2}\right) f = 0.
A second solution is given by :func:`~mpmath.whitw`.
The Whittaker functions are defined in Abramowitz & Stegun, section 13.1.
They are alternate forms of the confluent hypergeometric functions
`\,_1F_1` and `U`:
.. math ::
M(k,m,z) = e^{-\frac{1}{2}z} z^{\frac{1}{2}+m}
\,_1F_1(\tfrac{1}{2}+m-k, 1+2m, z)
W(k,m,z) = e^{-\frac{1}{2}z} z^{\frac{1}{2}+m}
U(\tfrac{1}{2}+m-k, 1+2m, z).
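The `\,_1F_1` representation can be checked directly (a sketch with
arbitrarily chosen parameters)::

    from mpmath import mp, mpf, exp, hyp1f1, whitm

    mp.dps = 25
    k, m, z = mpf('0.5'), mpf('0.25'), mpf('2')
    print(whitm(k, m, z))
    print(exp(-z/2) * z**(mpf(1)/2 + m) * hyp1f1(mpf(1)/2 + m - k, 1 + 2*m, z))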
**Examples**
Evaluation for arbitrary real and complex arguments is supported::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> whitm(1, 1, 1)
0.7302596799460411820509668
>>> whitm(1, 1, -1)
(0.0 - 1.417977827655098025684246j)
>>> whitm(j, j/2, 2+3j)
(3.245477713363581112736478 - 0.822879187542699127327782j)
>>> whitm(2, 3, 100000)
4.303985255686378497193063e+21707
Evaluation at zero::
>>> whitm(1,-1,0); whitm(1,-0.5,0); whitm(1,0,0)
+inf
nan
0.0
We can verify that :func:`~mpmath.whitm` numerically satisfies the
differential equation for arbitrarily chosen values::
>>> k = mpf(0.25)
>>> m = mpf(1.5)
>>> f = lambda z: whitm(k,m,z)
>>> for z in [-1, 2.5, 3, 1+2j]:
... chop(diff(f,z,2) + (-0.25 + k/z + (0.25-m**2)/z**2)*f(z))
...
0.0
0.0
0.0
0.0
An integral involving both :func:`~mpmath.whitm` and :func:`~mpmath.whitw`,
verifying evaluation along the real axis::
>>> quad(lambda x: exp(-x)*whitm(3,2,x)*whitw(1,-2,x), [0,inf])
3.438869842576800225207341
>>> 128/(21*sqrt(pi))
3.438869842576800225207341
"""
whitw = r"""
Evaluates the Whittaker function `W(k,m,z)`, which gives a second
solution to the Whittaker differential equation. (See :func:`~mpmath.whitm`.)
**Examples**
Evaluation for arbitrary real and complex arguments is supported::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> whitw(1, 1, 1)
1.19532063107581155661012
>>> whitw(1, 1, -1)
(-0.9424875979222187313924639 - 0.2607738054097702293308689j)
>>> whitw(j, j/2, 2+3j)
(0.1782899315111033879430369 - 0.01609578360403649340169406j)
>>> whitw(2, 3, 100000)
1.887705114889527446891274e-21705
>>> whitw(-1, -1, 100)
1.905250692824046162462058e-24
Evaluation at zero::
>>> for m in [-1, -0.5, 0, 0.5, 1]:
... whitw(1, m, 0)
...
+inf
nan
0.0
nan
+inf
We can verify that :func:`~mpmath.whitw` numerically satisfies the
differential equation for arbitrarily chosen values::
>>> k = mpf(0.25)
>>> m = mpf(1.5)
>>> f = lambda z: whitw(k,m,z)
>>> for z in [-1, 2.5, 3, 1+2j]:
... chop(diff(f,z,2) + (-0.25 + k/z + (0.25-m**2)/z**2)*f(z))
...
0.0
0.0
0.0
0.0
"""
ber = r"""
Computes the Kelvin function ber, which for real arguments gives the real part
of the Bessel J function of a rotated argument
.. math ::
J_n\left(x e^{3\pi i/4}\right) = \mathrm{ber}_n(x) + i \mathrm{bei}_n(x).
The imaginary part is given by :func:`~mpmath.bei`.
**Plots**
.. literalinclude :: /modules/mpmath/plots/ber.py
.. image :: /modules/mpmath/plots/ber.png
**Examples**
Verifying the defining relation::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> n, x = 2, 3.5
>>> ber(n,x)
1.442338852571888752631129
>>> bei(n,x)
-0.948359035324558320217678
>>> besselj(n, x*root(1,8,3))
(1.442338852571888752631129 - 0.948359035324558320217678j)
The ber and bei functions are also defined by analytic continuation
for complex arguments::
>>> ber(1+j, 2+3j)
(4.675445984756614424069563 - 15.84901771719130765656316j)
>>> bei(1+j, 2+3j)
(15.83886679193707699364398 + 4.684053288183046528703611j)
"""
bei = r"""
Computes the Kelvin function bei, which for real arguments gives the
imaginary part of the Bessel J function of a rotated argument.
See :func:`~mpmath.ber`.
"""
ker = r"""
Computes the Kelvin function ker, which for real arguments gives the real part
of the (rescaled) Bessel K function of a rotated argument
.. math ::
e^{-\pi i/2} K_n\left(x e^{3\pi i/4}\right) = \mathrm{ker}_n(x) + i \mathrm{kei}_n(x).
The imaginary part is given by :func:`~mpmath.kei`.
**Plots**
.. literalinclude :: /modules/mpmath/plots/ker.py
.. image :: /modules/mpmath/plots/ker.png
**Examples**
Verifying the defining relation::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> n, x = 2, 4.5
>>> ker(n,x)
0.02542895201906369640249801
>>> kei(n,x)
-0.02074960467222823237055351
>>> exp(-n*pi*j/2) * besselk(n, x*root(1,8,1))
(0.02542895201906369640249801 - 0.02074960467222823237055351j)
The ker and kei functions are also defined by analytic continuation
for complex arguments::
>>> ker(1+j, 3+4j)
(1.586084268115490421090533 - 2.939717517906339193598719j)
>>> kei(1+j, 3+4j)
(-2.940403256319453402690132 - 1.585621643835618941044855j)
"""
kei = r"""
Computes the Kelvin function kei, which for real arguments gives the
imaginary part of the (rescaled) Bessel K function of a rotated argument.
See :func:`~mpmath.ker`.
"""
struveh = r"""
Gives the Struve function
.. math ::
\,\mathbf{H}_n(z) =
\sum_{k=0}^\infty \frac{(-1)^k}{\Gamma(k+\frac{3}{2})
\Gamma(k+n+\frac{3}{2})} {\left({\frac{z}{2}}\right)}^{2k+n+1}
which is a solution to the Struve differential equation
.. math ::
z^2 f''(z) + z f'(z) + (z^2-n^2) f(z) = \frac{2 z^{n+1}}{\pi (2n-1)!!}.
**Examples**
Evaluation for arbitrary real and complex arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> struveh(0, 3.5)
0.3608207733778295024977797
>>> struveh(-1, 10)
-0.255212719726956768034732
>>> struveh(1, -100.5)
0.5819566816797362287502246
>>> struveh(2.5, 10000000000000)
3153915652525200060.308937
>>> struveh(2.5, -10000000000000)
(0.0 - 3153915652525200060.308937j)
>>> struveh(1+j, 1000000+4000000j)
(-3.066421087689197632388731e+1737173 - 1.596619701076529803290973e+1737173j)
A Struve function of half-integer order is elementary; for example:
>>> z = 3
>>> struveh(0.5, 3)
0.9167076867564138178671595
>>> sqrt(2/(pi*z))*(1-cos(z))
0.9167076867564138178671595
Numerically verifying the differential equation::
>>> z = mpf(4.5)
>>> n = 3
>>> f = lambda z: struveh(n,z)
>>> lhs = z**2*diff(f,z,2) + z*diff(f,z) + (z**2-n**2)*f(z)
>>> rhs = 2*z**(n+1)/fac2(2*n-1)/pi
>>> lhs
17.40359302709875496632744
>>> rhs
17.40359302709875496632744
"""
struvel = r"""
Gives the modified Struve function
.. math ::
\,\mathbf{L}_n(z) = -i e^{-n\pi i/2} \mathbf{H}_n(i z)
which solves the modified Struve differential equation
.. math ::
z^2 f''(z) + z f'(z) - (z^2+n^2) f(z) = \frac{2 z^{n+1}}{\pi (2n-1)!!}.
**Examples**
Evaluation for arbitrary real and complex arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> struvel(0, 3.5)
7.180846515103737996249972
>>> struvel(-1, 10)
2670.994904980850550721511
>>> struvel(1, -100.5)
1.757089288053346261497686e+42
>>> struvel(2.5, 10000000000000)
4.160893281017115450519948e+4342944819025
>>> struvel(2.5, -10000000000000)
(0.0 - 4.160893281017115450519948e+4342944819025j)
>>> struvel(1+j, 700j)
(-0.1721150049480079451246076 + 0.1240770953126831093464055j)
>>> struvel(1+j, 1000000+4000000j)
(-2.973341637511505389128708e+434290 - 5.164633059729968297147448e+434290j)
Numerically verifying the differential equation::
>>> z = mpf(3.5)
>>> n = 3
>>> f = lambda z: struvel(n,z)
>>> lhs = z**2*diff(f,z,2) + z*diff(f,z) - (z**2+n**2)*f(z)
>>> rhs = 2*z**(n+1)/fac2(2*n-1)/pi
>>> lhs
6.368850306060678353018165
>>> rhs
6.368850306060678353018165
"""
appellf1 = r"""
Gives the Appell F1 hypergeometric function of two variables,
.. math ::
F_1(a,b_1,b_2,c,x,y) = \sum_{m=0}^{\infty} \sum_{n=0}^{\infty}
\frac{(a)_{m+n} (b_1)_m (b_2)_n}{(c)_{m+n}}
\frac{x^m y^n}{m! n!}.
This series is only generally convergent when `|x| < 1` and `|y| < 1`,
although :func:`~mpmath.appellf1` can evaluate an analytic continuation
with respect to either variable, and sometimes both.
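Inside the region of convergence, the double series can be summed
naively as a cross-check; a minimal sketch with an arbitrary truncation
order::

    from mpmath import mp, mpf, rf, fac, fsum, appellf1

    mp.dps = 25
    a, b1, b2, c = 1, 2, mpf('0.5'), 3
    x, y = mpf('0.2'), mpf('-0.1')
    N = 40  # truncation order; the terms decay geometrically here
    s = fsum(rf(a, m+n)*rf(b1, m)*rf(b2, n)/rf(c, m+n)
             * x**m * y**n / (fac(m)*fac(n))
             for m in range(N) for n in range(N))
    print(s)
    print(appellf1(a, b1, b2, c, x, y))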
**Examples**
Evaluation is supported for real and complex parameters::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> appellf1(1,0,0.5,1,0.5,0.25)
1.154700538379251529018298
>>> appellf1(1,1+j,0.5,1,0.5,0.5j)
(1.138403860350148085179415 + 1.510544741058517621110615j)
For some integer parameters, the F1 series reduces to a polynomial::
>>> appellf1(2,-4,-3,1,2,5)
-816.0
>>> appellf1(-5,1,2,1,4,5)
-20528.0
The analytic continuation with respect to either `x` or `y`,
and sometimes with respect to both, can be evaluated::
>>> appellf1(2,3,4,5,100,0.5)
(0.0006231042714165329279738662 + 0.0000005769149277148425774499857j)
>>> appellf1('1.1', '0.3', '0.2+2j', '0.4', '0.2', 1.5+3j)
(-0.1782604566893954897128702 + 0.002472407104546216117161499j)
>>> appellf1(1,2,3,4,10,12)
-0.07122993830066776374929313
For certain arguments, F1 reduces to an ordinary hypergeometric function::
>>> appellf1(1,2,3,5,0.5,0.25)
1.547902270302684019335555
>>> 4*hyp2f1(1,2,5,'1/3')/3
1.547902270302684019335555
>>> appellf1(1,2,3,4,0,1.5)
(-1.717202506168937502740238 - 2.792526803190927323077905j)
>>> hyp2f1(1,3,4,1.5)
(-1.717202506168937502740238 - 2.792526803190927323077905j)
The F1 function satisfies a system of partial differential equations::
>>> a,b1,b2,c,x,y = map(mpf, [1,0.5,0.25,1.125,0.25,-0.25])
>>> F = lambda x,y: appellf1(a,b1,b2,c,x,y)
>>> chop(x*(1-x)*diff(F,(x,y),(2,0)) +
... y*(1-x)*diff(F,(x,y),(1,1)) +
... (c-(a+b1+1)*x)*diff(F,(x,y),(1,0)) -
... b1*y*diff(F,(x,y),(0,1)) -
... a*b1*F(x,y))
0.0
>>>
>>> chop(y*(1-y)*diff(F,(x,y),(0,2)) +
... x*(1-y)*diff(F,(x,y),(1,1)) +
... (c-(a+b2+1)*y)*diff(F,(x,y),(0,1)) -
... b2*x*diff(F,(x,y),(1,0)) -
... a*b2*F(x,y))
0.0
The Appell F1 function allows for closed-form evaluation of various
integrals, such as any integral of the form
`\int x^r (x+a)^p (x+b)^q dx`::
>>> def integral(a,b,p,q,r,x1,x2):
... a,b,p,q,r,x1,x2 = map(mpmathify, [a,b,p,q,r,x1,x2])
... f = lambda x: x**r * (x+a)**p * (x+b)**q
... def F(x):
... v = x**(r+1)/(r+1) * (a+x)**p * (b+x)**q
... v *= (1+x/a)**(-p)
... v *= (1+x/b)**(-q)
... v *= appellf1(r+1,-p,-q,2+r,-x/a,-x/b)
... return v
... print("Num. quad: %s" % quad(f, [x1,x2]))
... print("Appell F1: %s" % (F(x2)-F(x1)))
...
>>> integral('1/5','4/3','-2','3','1/2',0,1)
Num. quad: 9.073335358785776206576981
Appell F1: 9.073335358785776206576981
>>> integral('3/2','4/3','-2','3','1/2',0,1)
Num. quad: 1.092829171999626454344678
Appell F1: 1.092829171999626454344678
>>> integral('3/2','4/3','-2','3','1/2',12,25)
Num. quad: 1106.323225040235116498927
Appell F1: 1106.323225040235116498927
Also incomplete elliptic integrals fall into this category [1]::
>>> def E(z, m):
... if (pi/2).ae(z):
... return ellipe(m)
... return 2*round(re(z)/pi)*ellipe(m) + mpf(-1)**round(re(z)/pi)*\
... sin(z)*appellf1(0.5,0.5,-0.5,1.5,sin(z)**2,m*sin(z)**2)
...
>>> z, m = 1, 0.5
>>> E(z,m); quad(lambda t: sqrt(1-m*sin(t)**2), [0,pi/4,3*pi/4,z])
0.9273298836244400669659042
0.9273298836244400669659042
>>> z, m = 3, 2
>>> E(z,m); quad(lambda t: sqrt(1-m*sin(t)**2), [0,pi/4,3*pi/4,z])
(1.057495752337234229715836 + 1.198140234735592207439922j)
(1.057495752337234229715836 + 1.198140234735592207439922j)
**References**
1. [WolframFunctions]_ http://functions.wolfram.com/EllipticIntegrals/EllipticE2/26/01/
2. [SrivastavaKarlsson]_
3. [CabralRosetti]_
4. [Vidunas]_
5. [Slater]_
"""
angerj = r"""
Gives the Anger function
.. math ::
\mathbf{J}_{\nu}(z) = \frac{1}{\pi}
\int_0^{\pi} \cos(\nu t - z \sin t) dt
which is an entire function of both the parameter `\nu` and
the argument `z`. It solves the inhomogeneous Bessel differential
equation
.. math ::
f''(z) + \frac{1}{z}f'(z) + \left(1-\frac{\nu^2}{z^2}\right) f(z)
= \frac{(z-\nu)}{\pi z^2} \sin(\pi \nu).
**Examples**
Evaluation for real and complex parameter and argument::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> angerj(2,3)
0.4860912605858910769078311
>>> angerj(-3+4j, 2+5j)
(-5033.358320403384472395612 + 585.8011892476145118551756j)
>>> angerj(3.25, 1e6j)
(4.630743639715893346570743e+434290 - 1.117960409887505906848456e+434291j)
>>> angerj(-1.5, 1e6)
0.0002795719747073879393087011
The Anger function coincides with the Bessel J-function when `\nu`
is an integer::
>>> angerj(1,3); besselj(1,3)
0.3390589585259364589255146
0.3390589585259364589255146
>>> angerj(1.5,3); besselj(1.5,3)
0.4088969848691080859328847
0.4777182150870917715515015
Verifying the differential equation::
>>> v,z = mpf(2.25), 0.75
>>> f = lambda z: angerj(v,z)
>>> diff(f,z,2) + diff(f,z)/z + (1-(v/z)**2)*f(z)
-0.6002108774380707130367995
>>> (z-v)/(pi*z**2) * sinpi(v)
-0.6002108774380707130367995
Verifying the integral representation::
>>> angerj(v,z)
0.1145380759919333180900501
>>> quad(lambda t: cos(v*t-z*sin(t))/pi, [0,pi])
0.1145380759919333180900501
**References**
1. [DLMF]_ section 11.10: Anger-Weber Functions
"""
webere = r"""
Gives the Weber function
.. math ::
\mathbf{E}_{\nu}(z) = \frac{1}{\pi}
\int_0^{\pi} \sin(\nu t - z \sin t) dt
which is an entire function of both the parameter `\nu` and
the argument `z`. It solves the inhomogeneous Bessel differential
equation
.. math ::
f''(z) + \frac{1}{z}f'(z) + \left(1-\frac{\nu^2}{z^2}\right) f(z)
= -\frac{1}{\pi z^2} (z+\nu+(z-\nu)\cos(\pi \nu)).
**Examples**
Evaluation for real and complex parameter and argument::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> webere(2,3)
-0.1057668973099018425662646
>>> webere(-3+4j, 2+5j)
(-585.8081418209852019290498 - 5033.314488899926921597203j)
>>> webere(3.25, 1e6j)
(-1.117960409887505906848456e+434291 - 4.630743639715893346570743e+434290j)
>>> webere(3.25, 1e6)
-0.00002812518265894315604914453
Up to addition of a rational function of `z`, the Weber function coincides
with the Struve H-function when `\nu` is an integer::
>>> webere(1,3); 2/pi-struveh(1,3)
-0.3834897968188690177372881
-0.3834897968188690177372881
>>> webere(5,3); 26/(35*pi)-struveh(5,3)
0.2009680659308154011878075
0.2009680659308154011878075
Verifying the differential equation::
>>> v,z = mpf(2.25), 0.75
>>> f = lambda z: webere(v,z)
>>> diff(f,z,2) + diff(f,z)/z + (1-(v/z)**2)*f(z)
-1.097441848875479535164627
>>> -(z+v+(z-v)*cospi(v))/(pi*z**2)
-1.097441848875479535164627
Verifying the integral representation::
>>> webere(v,z)
0.1486507351534283744485421
>>> quad(lambda t: sin(v*t-z*sin(t))/pi, [0,pi])
0.1486507351534283744485421
**References**
1. [DLMF]_ section 11.10: Anger-Weber Functions
"""
lommels1 = r"""
Gives the Lommel function `s_{\mu,\nu}` or `s^{(1)}_{\mu,\nu}`
.. math ::
s_{\mu,\nu}(z) = \frac{z^{\mu+1}}{(\mu-\nu+1)(\mu+\nu+1)}
\,_1F_2\left(1; \frac{\mu-\nu+3}{2}, \frac{\mu+\nu+3}{2};
-\frac{z^2}{4} \right)
which solves the inhomogeneous Bessel equation
.. math ::
z^2 f''(z) + z f'(z) + (z^2-\nu^2) f(z) = z^{\mu+1}.
A second solution is given by :func:`~mpmath.lommels2`.
**Plots**
.. literalinclude :: /modules/mpmath/plots/lommels1.py
.. image :: /modules/mpmath/plots/lommels1.png
**Examples**
An integral representation::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> u,v,z = 0.25, 0.125, mpf(0.75)
>>> lommels1(u,v,z)
0.4276243877565150372999126
>>> (bessely(v,z)*quad(lambda t: t**u*besselj(v,t), [0,z]) - \
... besselj(v,z)*quad(lambda t: t**u*bessely(v,t), [0,z]))*(pi/2)
0.4276243877565150372999126
A special value::
>>> lommels1(v,v,z)
0.5461221367746048054932553
>>> gamma(v+0.5)*sqrt(pi)*power(2,v-1)*struveh(v,z)
0.5461221367746048054932553
Verifying the differential equation::
>>> f = lambda z: lommels1(u,v,z)
>>> z**2*diff(f,z,2) + z*diff(f,z) + (z**2-v**2)*f(z)
0.6979536443265746992059141
>>> z**(u+1)
0.6979536443265746992059141
**References**
1. [GradshteynRyzhik]_
2. [Weisstein]_ http://mathworld.wolfram.com/LommelFunction.html
"""
lommels2 = r"""
Gives the second Lommel function `S_{\mu,\nu}` or `s^{(2)}_{\mu,\nu}`
.. math ::
S_{\mu,\nu}(z) = s_{\mu,\nu}(z) + 2^{\mu-1}
\Gamma\left(\tfrac{1}{2}(\mu-\nu+1)\right)
\Gamma\left(\tfrac{1}{2}(\mu+\nu+1)\right) \times
\left[\sin(\tfrac{1}{2}(\mu-\nu)\pi) J_{\nu}(z) -
\cos(\tfrac{1}{2}(\mu-\nu)\pi) Y_{\nu}(z)
\right]
which solves the same differential equation as
:func:`~mpmath.lommels1`.
**Plots**
.. literalinclude :: /modules/mpmath/plots/lommels2.py
.. image :: /modules/mpmath/plots/lommels2.png
**Examples**
For large `|z|`, `S_{\mu,\nu} \sim z^{\mu-1}`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> lommels2(10,2,30000)
1.968299831601008419949804e+40
>>> power(30000,9)
1.9683e+40
A special value::
>>> u,v,z = 0.5, 0.125, mpf(0.75)
>>> lommels2(v,v,z)
0.9589683199624672099969765
>>> (struveh(v,z)-bessely(v,z))*power(2,v-1)*sqrt(pi)*gamma(v+0.5)
0.9589683199624672099969765
Verifying the differential equation::
>>> f = lambda z: lommels2(u,v,z)
>>> z**2*diff(f,z,2) + z*diff(f,z) + (z**2-v**2)*f(z)
0.6495190528383289850727924
>>> z**(u+1)
0.6495190528383289850727924
**References**
1. [GradshteynRyzhik]_
2. [Weisstein]_ http://mathworld.wolfram.com/LommelFunction.html
"""
appellf2 = r"""
Gives the Appell F2 hypergeometric function of two variables
.. math ::
F_2(a,b_1,b_2,c_1,c_2,x,y) = \sum_{m=0}^{\infty} \sum_{n=0}^{\infty}
\frac{(a)_{m+n} (b_1)_m (b_2)_n}{(c_1)_m (c_2)_n}
\frac{x^m y^n}{m! n!}.
The series is generally absolutely convergent for `|x| + |y| < 1`.
**Examples**
Evaluation for real and complex arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> appellf2(1,2,3,4,5,0.25,0.125)
1.257417193533135344785602
>>> appellf2(1,-3,-4,2,3,2,3)
-42.8
>>> appellf2(0.5,0.25,-0.25,2,3,0.25j,0.25)
(0.9880539519421899867041719 + 0.01497616165031102661476978j)
>>> chop(appellf2(1,1+j,1-j,3j,-3j,0.25,0.25))
1.201311219287411337955192
>>> appellf2(1,1,1,4,6,0.125,16)
(-0.09455532250274744282125152 - 0.7647282253046207836769297j)
A transformation formula::
>>> a,b1,b2,c1,c2,x,y = map(mpf, [1,2,0.5,0.25,1.625,-0.125,0.125])
>>> appellf2(a,b1,b2,c1,c2,x,y)
0.2299211717841180783309688
>>> (1-x)**(-a)*appellf2(a,c1-b1,b2,c1,c2,x/(x-1),y/(1-x))
0.2299211717841180783309688
A system of partial differential equations satisfied by F2::
>>> a,b1,b2,c1,c2,x,y = map(mpf, [1,0.5,0.25,1.125,1.5,0.0625,-0.0625])
>>> F = lambda x,y: appellf2(a,b1,b2,c1,c2,x,y)
>>> chop(x*(1-x)*diff(F,(x,y),(2,0)) -
... x*y*diff(F,(x,y),(1,1)) +
... (c1-(a+b1+1)*x)*diff(F,(x,y),(1,0)) -
... b1*y*diff(F,(x,y),(0,1)) -
... a*b1*F(x,y))
0.0
>>> chop(y*(1-y)*diff(F,(x,y),(0,2)) -
... x*y*diff(F,(x,y),(1,1)) +
... (c2-(a+b2+1)*y)*diff(F,(x,y),(0,1)) -
... b2*x*diff(F,(x,y),(1,0)) -
... a*b2*F(x,y))
0.0
**References**
See references for :func:`~mpmath.appellf1`.
"""
appellf3 = r"""
Gives the Appell F3 hypergeometric function of two variables
.. math ::
F_3(a_1,a_2,b_1,b_2,c,x,y) = \sum_{m=0}^{\infty} \sum_{n=0}^{\infty}
\frac{(a_1)_m (a_2)_n (b_1)_m (b_2)_n}{(c)_{m+n}}
\frac{x^m y^n}{m! n!}.
The series is generally absolutely convergent for `|x| < 1, |y| < 1`.
**Examples**
Evaluation for various parameters and variables::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> appellf3(1,2,3,4,5,0.5,0.25)
2.221557778107438938158705
>>> appellf3(1,2,3,4,5,6,0); hyp2f1(1,3,5,6)
(-0.5189554589089861284537389 - 0.1454441043328607980769742j)
(-0.5189554589089861284537389 - 0.1454441043328607980769742j)
>>> appellf3(1,-2,-3,1,1,4,6)
-17.4
>>> appellf3(1,2,-3,1,1,4,6)
(17.7876136773677356641825 + 19.54768762233649126154534j)
>>> appellf3(1,2,-3,1,1,6,4)
(85.02054175067929402953645 + 148.4402528821177305173599j)
>>> chop(appellf3(1+j,2,1-j,2,3,0.25,0.25))
1.719992169545200286696007
Many transformations and evaluations for special combinations
of the parameters are possible, e.g.:
>>> a,b,c,x,y = map(mpf, [0.5,0.25,0.125,0.125,-0.125])
>>> appellf3(a,c-a,b,c-b,c,x,y)
1.093432340896087107444363
>>> (1-y)**(a+b-c)*hyp2f1(a,b,c,x+y-x*y)
1.093432340896087107444363
>>> x**2*appellf3(1,1,1,1,3,x,-x)
0.01568646277445385390945083
>>> polylog(2,x**2)
0.01568646277445385390945083
>>> a1,a2,b1,b2,c,x = map(mpf, [0.5,0.25,0.125,0.5,4.25,0.125])
>>> appellf3(a1,a2,b1,b2,c,x,1)
1.03947361709111140096947
>>> gammaprod([c,c-a2-b2],[c-a2,c-b2])*hyp3f2(a1,b1,c-a2-b2,c-a2,c-b2,x)
1.03947361709111140096947
The Appell F3 function satisfies a pair of partial
differential equations::
>>> a1,a2,b1,b2,c,x,y = map(mpf, [0.5,0.25,0.125,0.5,0.625,0.0625,-0.0625])
>>> F = lambda x,y: appellf3(a1,a2,b1,b2,c,x,y)
>>> chop(x*(1-x)*diff(F,(x,y),(2,0)) +
... y*diff(F,(x,y),(1,1)) +
... (c-(a1+b1+1)*x)*diff(F,(x,y),(1,0)) -
... a1*b1*F(x,y))
0.0
>>> chop(y*(1-y)*diff(F,(x,y),(0,2)) +
... x*diff(F,(x,y),(1,1)) +
... (c-(a2+b2+1)*y)*diff(F,(x,y),(0,1)) -
... a2*b2*F(x,y))
0.0
**References**
See references for :func:`~mpmath.appellf1`.
"""
appellf4 = r"""
Gives the Appell F4 hypergeometric function of two variables
.. math ::
F_4(a,b,c_1,c_2,x,y) = \sum_{m=0}^{\infty} \sum_{n=0}^{\infty}
\frac{(a)_{m+n} (b)_{m+n}}{(c_1)_m (c_2)_n}
\frac{x^m y^n}{m! n!}.
The series is generally absolutely convergent for
`\sqrt{|x|} + \sqrt{|y|} < 1`.
**Examples**
Evaluation for various parameters and arguments::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> appellf4(1,1,2,2,0.25,0.125)
1.286182069079718313546608
>>> appellf4(-2,-3,4,5,4,5)
34.8
>>> appellf4(5,4,2,3,0.25j,-0.125j)
(-0.2585967215437846642163352 + 2.436102233553582711818743j)
Reduction to `\,_2F_1` in a special case::
>>> a,b,c,x,y = map(mpf, [0.5,0.25,0.125,0.125,-0.125])
>>> appellf4(a,b,c,a+b-c+1,x*(1-y),y*(1-x))
1.129143488466850868248364
>>> hyp2f1(a,b,c,x)*hyp2f1(a,b,a+b-c+1,y)
1.129143488466850868248364
A system of partial differential equations satisfied by F4::
>>> a,b,c1,c2,x,y = map(mpf, [1,0.5,0.25,1.125,0.0625,-0.0625])
>>> F = lambda x,y: appellf4(a,b,c1,c2,x,y)
>>> chop(x*(1-x)*diff(F,(x,y),(2,0)) -
... y**2*diff(F,(x,y),(0,2)) -
... 2*x*y*diff(F,(x,y),(1,1)) +
... (c1-(a+b+1)*x)*diff(F,(x,y),(1,0)) -
... ((a+b+1)*y)*diff(F,(x,y),(0,1)) -
... a*b*F(x,y))
0.0
>>> chop(y*(1-y)*diff(F,(x,y),(0,2)) -
... x**2*diff(F,(x,y),(2,0)) -
... 2*x*y*diff(F,(x,y),(1,1)) +
... (c2-(a+b+1)*y)*diff(F,(x,y),(0,1)) -
... ((a+b+1)*x)*diff(F,(x,y),(1,0)) -
... a*b*F(x,y))
0.0
**References**
See references for :func:`~mpmath.appellf1`.
"""
zeta = r"""
Computes the Riemann zeta function
.. math ::
\zeta(s) = 1+\frac{1}{2^s}+\frac{1}{3^s}+\frac{1}{4^s}+\ldots
or, with `a \ne 1`, the more general Hurwitz zeta function
.. math ::
\zeta(s,a) = \sum_{k=0}^\infty \frac{1}{(a+k)^s}.
Optionally, ``zeta(s, a, n)`` computes the `n`-th derivative with
respect to `s`,
.. math ::
\zeta^{(n)}(s,a) = (-1)^n \sum_{k=0}^\infty \frac{\log^n(a+k)}{(a+k)^s}.
Although these series only converge for `\Re(s) > 1`, the Riemann and Hurwitz
zeta functions are defined through analytic continuation for arbitrary
complex `s \ne 1` (`s = 1` is a pole).
The implementation uses three algorithms: the Borwein algorithm for
the Riemann zeta function when `s` is close to the real line;
the Riemann-Siegel formula for the Riemann zeta function when `s` is
large imaginary, and Euler-Maclaurin summation in all other cases.
The reflection formula for `\Re(s) < 0` is implemented in some cases.
The algorithm can be chosen with ``method = 'borwein'``,
``method='riemann-siegel'`` or ``method = 'euler-maclaurin'``.
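A short sketch of forcing a particular algorithm; both calls should
agree to working precision::

    from mpmath import mp, zeta

    mp.dps = 25
    print(zeta(3.5, method='borwein'))
    print(zeta(3.5, method='euler-maclaurin'))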
The parameter `a` is usually a rational number `a = p/q`, and may be specified
as such by passing an integer tuple `(p, q)`. Evaluation is supported for
arbitrary complex `a`, but may be slow and/or inaccurate when `\Re(s) < 0` for
nonrational `a` or when computing derivatives.
**Examples**
Some values of the Riemann zeta function::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> zeta(2); pi**2 / 6
1.644934066848226436472415
1.644934066848226436472415
>>> zeta(0)
-0.5
>>> zeta(-1)
-0.08333333333333333333333333
>>> zeta(-2)
0.0
For large positive `s`, `\zeta(s)` rapidly approaches 1::
>>> zeta(50)
1.000000000000000888178421
>>> zeta(100)
1.0
>>> zeta(inf)
1.0
>>> 1-sum((zeta(k)-1)/k for k in range(2,85)); +euler
0.5772156649015328606065121
0.5772156649015328606065121
>>> nsum(lambda k: zeta(k)-1, [2, inf])
1.0
Evaluation is supported for complex `s` and `a`:
>>> zeta(-3+4j)
(-0.03373057338827757067584698 + 0.2774499251557093745297677j)
>>> zeta(2+3j, -1+j)
(389.6841230140842816370741 + 295.2674610150305334025962j)
The Riemann zeta function has so-called nontrivial zeros on
the critical line `s = 1/2 + it`::
>>> findroot(zeta, 0.5+14j); zetazero(1)
(0.5 + 14.13472514173469379045725j)
(0.5 + 14.13472514173469379045725j)
>>> findroot(zeta, 0.5+21j); zetazero(2)
(0.5 + 21.02203963877155499262848j)
(0.5 + 21.02203963877155499262848j)
>>> findroot(zeta, 0.5+25j); zetazero(3)
(0.5 + 25.01085758014568876321379j)
(0.5 + 25.01085758014568876321379j)
>>> chop(zeta(zetazero(10)))
0.0
Evaluation on and near the critical line is supported for large
heights `t` by means of the Riemann-Siegel formula (currently
for `a = 1`, `n \le 4`)::
>>> zeta(0.5+100000j)
(1.073032014857753132114076 + 5.780848544363503984261041j)
>>> zeta(0.75+1000000j)
(0.9535316058375145020351559 + 0.9525945894834273060175651j)
>>> zeta(0.5+10000000j)
(11.45804061057709254500227 - 8.643437226836021723818215j)
>>> zeta(0.5+100000000j, derivative=1)
(51.12433106710194942681869 + 43.87221167872304520599418j)
>>> zeta(0.5+100000000j, derivative=2)
(-444.2760822795430400549229 - 896.3789978119185981665403j)
>>> zeta(0.5+100000000j, derivative=3)
(3230.72682687670422215339 + 14374.36950073615897616781j)
>>> zeta(0.5+100000000j, derivative=4)
(-11967.35573095046402130602 - 218945.7817789262839266148j)
>>> zeta(1+10000000j) # off the line
(2.859846483332530337008882 + 0.491808047480981808903986j)
>>> zeta(1+10000000j, derivative=1)
(-4.333835494679647915673205 - 0.08405337962602933636096103j)
>>> zeta(1+10000000j, derivative=4)
(453.2764822702057701894278 - 581.963625832768189140995j)
For investigation of the zeta function zeros, the Riemann-Siegel
Z-function is often more convenient than working with the Riemann
zeta function directly (see :func:`~mpmath.siegelz`).
Some values of the Hurwitz zeta function::
>>> zeta(2, 3); -5./4 + pi**2/6
0.3949340668482264364724152
0.3949340668482264364724152
>>> zeta(2, (3,4)); pi**2 - 8*catalan
2.541879647671606498397663
2.541879647671606498397663
For positive integer values of `s`, the Hurwitz zeta function is
equivalent to a polygamma function (except for a normalizing factor)::
>>> zeta(4, (1,5)); psi(3, '1/5')/6
625.5408324774542966919938
625.5408324774542966919938
Evaluation of derivatives::
>>> zeta(0, 3+4j, 1); loggamma(3+4j) - ln(2*pi)/2
(-2.675565317808456852310934 + 4.742664438034657928194889j)
(-2.675565317808456852310934 + 4.742664438034657928194889j)
>>> zeta(2, 1, 20)
2432902008176640000.000242
>>> zeta(3+4j, 5.5+2j, 4)
(-0.140075548947797130681075 - 0.3109263360275413251313634j)
>>> zeta(0.5+100000j, 1, 4)
(-10407.16081931495861539236 + 13777.78669862804508537384j)
>>> zeta(-100+0.5j, (1,3), derivative=4)
(4.007180821099823942702249e+79 + 4.916117957092593868321778e+78j)
Generating a Taylor series at `s = 2` using derivatives::
>>> for k in range(11): print("%s * (s-2)^%i" % (zeta(2,1,k)/fac(k), k))
...
1.644934066848226436472415 * (s-2)^0
-0.9375482543158437537025741 * (s-2)^1
0.9946401171494505117104293 * (s-2)^2
-1.000024300473840810940657 * (s-2)^3
1.000061933072352565457512 * (s-2)^4
-1.000006869443931806408941 * (s-2)^5
1.000000173233769531820592 * (s-2)^6
-0.9999999569989868493432399 * (s-2)^7
0.9999999937218844508684206 * (s-2)^8
-0.9999999996355013916608284 * (s-2)^9
1.000000000004610645020747 * (s-2)^10
Evaluation at zero and for negative integer `s`::
>>> zeta(0, 10)
-9.5
>>> zeta(-2, (2,3)); mpf(1)/81
0.01234567901234567901234568
0.01234567901234567901234568
>>> zeta(-3+4j, (5,4))
(0.2899236037682695182085988 + 0.06561206166091757973112783j)
>>> zeta(-3.25, 1/pi)
-0.0005117269627574430494396877
>>> zeta(-3.5, pi, 1)
11.156360390440003294709
>>> zeta(-100.5, (8,3))
-4.68162300487989766727122e+77
>>> zeta(-10.5, (-8,3))
(-0.01521913704446246609237979 + 29907.72510874248161608216j)
>>> zeta(-1000.5, (-8,3))
(1.031911949062334538202567e+1770 + 1.519555750556794218804724e+426j)
>>> zeta(-1+j, 3+4j)
(-16.32988355630802510888631 - 22.17706465801374033261383j)
>>> zeta(-1+j, 3+4j, 2)
(32.48985276392056641594055 - 51.11604466157397267043655j)
>>> diff(lambda s: zeta(s, 3+4j), -1+j, 2)
(32.48985276392056641594055 - 51.11604466157397267043655j)
**References**
1. http://mathworld.wolfram.com/RiemannZetaFunction.html
2. http://mathworld.wolfram.com/HurwitzZetaFunction.html
3. http://www.cecm.sfu.ca/personal/pborwein/PAPERS/P155.pdf
"""
dirichlet = r"""
Evaluates the Dirichlet L-function
.. math ::
L(s,\chi) = \sum_{k=1}^\infty \frac{\chi(k)}{k^s},
where `\chi` is a periodic sequence of length `q` which should be supplied
in the form of a list `[\chi(0), \chi(1), \ldots, \chi(q-1)]`.
Strictly, `\chi` should be a Dirichlet character, but any periodic
sequence will work.
For example, ``dirichlet(s, [1])`` gives the ordinary
Riemann zeta function and ``dirichlet(s, [-1,1])`` gives
the alternating zeta function (Dirichlet eta function).
Also the derivative with respect to `s` (currently only a first
derivative) can be evaluated.
**Examples**
The ordinary Riemann zeta function::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> dirichlet(3, [1]); zeta(3)
1.202056903159594285399738
1.202056903159594285399738
>>> dirichlet(1, [1])
+inf
The alternating zeta function::
>>> dirichlet(1, [-1,1]); ln(2)
0.6931471805599453094172321
0.6931471805599453094172321
The following defines the Dirichlet beta function
`\beta(s) = \sum_{k=0}^\infty \frac{(-1)^k}{(2k+1)^s}` and verifies
several values of this function::
>>> B = lambda s, d=0: dirichlet(s, [0, 1, 0, -1], d)
>>> B(0); 1./2
0.5
0.5
>>> B(1); pi/4
0.7853981633974483096156609
0.7853981633974483096156609
>>> B(2); +catalan
0.9159655941772190150546035
0.9159655941772190150546035
>>> B(2,1); diff(B, 2)
0.08158073611659279510291217
0.08158073611659279510291217
>>> B(-1,1); 2*catalan/pi
0.5831218080616375602767689
0.5831218080616375602767689
>>> B(0,1); log(gamma(0.25)**2/(2*pi*sqrt(2)))
0.3915943927068367764719453
0.3915943927068367764719454
>>> B(1,1); 0.25*pi*(euler+2*ln2+3*ln(pi)-4*ln(gamma(0.25)))
0.1929013167969124293631898
0.1929013167969124293631898
A custom L-series of period 3::
>>> dirichlet(2, [2,0,1])
0.7059715047839078092146831
>>> 2*nsum(lambda k: (3*k)**-2, [1,inf]) + \
... nsum(lambda k: (3*k+2)**-2, [0,inf])
0.7059715047839078092146831
"""
coulombf = r"""
Calculates the regular Coulomb wave function
.. math ::
F_l(\eta,z) = C_l(\eta) z^{l+1} e^{-iz} \,_1F_1(l+1-i\eta, 2l+2, 2iz)
where the normalization constant `C_l(\eta)` is as calculated by
:func:`~mpmath.coulombc`. This function solves the differential equation
.. math ::
f''(z) + \left(1-\frac{2\eta}{z}-\frac{l(l+1)}{z^2}\right) f(z) = 0.
A second linearly independent solution is given by the irregular
Coulomb wave function `G_l(\eta,z)` (see :func:`~mpmath.coulombg`)
and thus the general solution is
`f(z) = C_1 F_l(\eta,z) + C_2 G_l(\eta,z)` for arbitrary
constants `C_1`, `C_2`.
Physically, the Coulomb wave functions give the radial solution
to the Schrodinger equation for a point particle in a `1/z` potential; `z` is
then the radius and `l`, `\eta` are quantum numbers.
The Coulomb wave functions with real parameters are defined
in Abramowitz & Stegun, section 14. However, all parameters are permitted
to be complex in this implementation (see references).
**Plots**
.. literalinclude :: /modules/mpmath/plots/coulombf.py
.. image :: /modules/mpmath/plots/coulombf.png
.. literalinclude :: /modules/mpmath/plots/coulombf_c.py
.. image :: /modules/mpmath/plots/coulombf_c.png
**Examples**
Evaluation is supported for arbitrary magnitudes of `z`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> coulombf(2, 1.5, 3.5)
0.4080998961088761187426445
>>> coulombf(-2, 1.5, 3.5)
0.7103040849492536747533465
>>> coulombf(2, 1.5, '1e-10')
4.143324917492256448770769e-33
>>> coulombf(2, 1.5, 1000)
0.4482623140325567050716179
>>> coulombf(2, 1.5, 10**10)
-0.066804196437694360046619
Verifying the differential equation::
>>> l, eta, z = 2, 3, mpf(2.75)
>>> A, B = 1, 2
>>> f = lambda z: A*coulombf(l,eta,z) + B*coulombg(l,eta,z)
>>> chop(diff(f,z,2) + (1-2*eta/z - l*(l+1)/z**2)*f(z))
0.0
A Wronskian relation satisfied by the Coulomb wave functions::
>>> l = 2
>>> eta = 1.5
>>> F = lambda z: coulombf(l,eta,z)
>>> G = lambda z: coulombg(l,eta,z)
>>> for z in [3.5, -1, 2+3j]:
... chop(diff(F,z)*G(z) - F(z)*diff(G,z))
...
1.0
1.0
1.0
Another Wronskian relation::
>>> F = coulombf
>>> G = coulombg
>>> for z in [3.5, -1, 2+3j]:
... chop(F(l-1,eta,z)*G(l,eta,z)-F(l,eta,z)*G(l-1,eta,z) - l/sqrt(l**2+eta**2))
...
0.0
0.0
0.0
An integral identity connecting the regular and irregular wave functions::
>>> l, eta, z = 4+j, 2-j, 5+2j
>>> coulombf(l,eta,z) + j*coulombg(l,eta,z)
(0.7997977752284033239714479 + 0.9294486669502295512503127j)
>>> g = lambda t: exp(-t)*t**(l-j*eta)*(t+2*j*z)**(l+j*eta)
>>> j*exp(-j*z)*z**(-l)/fac(2*l+1)/coulombc(l,eta)*quad(g, [0,inf])
(0.7997977752284033239714479 + 0.9294486669502295512503127j)
Some test case with complex parameters, taken from Michel [2]::
>>> mp.dps = 15
>>> coulombf(1+0.1j, 50+50j, 100.156)
(-1.02107292320897e+15 - 2.83675545731519e+15j)
>>> coulombg(1+0.1j, 50+50j, 100.156)
(2.83675545731519e+15 - 1.02107292320897e+15j)
>>> coulombf(1e-5j, 10+1e-5j, 0.1+1e-6j)
(4.30566371247811e-14 - 9.03347835361657e-19j)
>>> coulombg(1e-5j, 10+1e-5j, 0.1+1e-6j)
(778709182061.134 + 18418936.2660553j)
The following reproduces a table in Abramowitz & Stegun, at twice
the precision::
>>> mp.dps = 10
>>> eta = 2; z = 5
>>> for l in [5, 4, 3, 2, 1, 0]:
... print("%s %s %s" % (l, coulombf(l,eta,z),
... diff(lambda z: coulombf(l,eta,z), z)))
...
5 0.09079533488 0.1042553261
4 0.2148205331 0.2029591779
3 0.4313159311 0.320534053
2 0.7212774133 0.3952408216
1 0.9935056752 0.3708676452
0 1.143337392 0.2937960375
**References**
1. I.J. Thompson & A.R. Barnett, "Coulomb and Bessel Functions of Complex
Arguments and Order", J. Comp. Phys., vol 64, no. 2, June 1986.
2. N. Michel, "Precise Coulomb wave functions for a wide range of
complex `l`, `\eta` and `z`", http://arxiv.org/abs/physics/0702051v1
"""
coulombg = r"""
Calculates the irregular Coulomb wave function
.. math ::
G_l(\eta,z) = \frac{F_l(\eta,z) \cos(\chi) - F_{-l-1}(\eta,z)}{\sin(\chi)}
where `\chi = \sigma_l - \sigma_{-l-1} - (l+1/2) \pi`
and `\sigma_l(\eta) = (\ln \Gamma(1+l+i\eta)-\ln \Gamma(1+l-i\eta))/(2i)`.
See :func:`~mpmath.coulombf` for additional information.
**Plots**
.. literalinclude :: /modules/mpmath/plots/coulombg.py
.. image :: /modules/mpmath/plots/coulombg.png
.. literalinclude :: /modules/mpmath/plots/coulombg_c.py
.. image :: /modules/mpmath/plots/coulombg_c.png
**Examples**
Evaluation is supported for arbitrary magnitudes of `z`::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> coulombg(-2, 1.5, 3.5)
1.380011900612186346255524
>>> coulombg(2, 1.5, 3.5)
1.919153700722748795245926
>>> coulombg(-2, 1.5, '1e-10')
201126715824.7329115106793
>>> coulombg(-2, 1.5, 1000)
0.1802071520691149410425512
>>> coulombg(-2, 1.5, 10**10)
0.652103020061678070929794
The following reproduces a table in Abramowitz & Stegun,
at twice the precision::
>>> mp.dps = 10
>>> eta = 2; z = 5
>>> for l in [1, 2, 3, 4, 5]:
... print("%s %s %s" % (l, coulombg(l,eta,z),
... -diff(lambda z: coulombg(l,eta,z), z)))
...
1 1.08148276 0.6028279961
2 1.496877075 0.5661803178
3 2.048694714 0.7959909551
4 3.09408669 1.731802374
5 5.629840456 4.549343289
Evaluation close to the singularity at `z = 0`::
>>> mp.dps = 15
>>> coulombg(0,10,1)
3088184933.67358
>>> coulombg(0,10,'1e-10')
5554866000719.8
>>> coulombg(0,10,'1e-100')
5554866221524.1
Evaluation with a half-integer value for `l`::
>>> coulombg(1.5, 1, 10)
0.852320038297334
"""
coulombc = r"""
Gives the normalizing Gamow constant for Coulomb wave functions,
.. math ::
C_l(\eta) = 2^l \exp\left(-\pi \eta/2 + [\ln \Gamma(1+l+i\eta) +
\ln \Gamma(1+l-i\eta)]/2 - \ln \Gamma(2l+2)\right),
where the log gamma function with continuous imaginary part
away from the negative half axis (see :func:`~mpmath.loggamma`) is implied.
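A brief usage sketch (the values of `l` and `\eta` are arbitrary)::

    from mpmath import mp, coulombc

    mp.dps = 25
    print(coulombc(2, 1.5))  # C_l(eta) with l = 2, eta = 1.5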
This function is used internally for the calculation of
Coulomb wave functions, and automatically cached to make multiple
evaluations with fixed `l`, `\eta` fast.
"""
ellipfun = r"""
Computes any of the Jacobi elliptic functions, defined
in terms of Jacobi theta functions as
.. math ::
\mathrm{sn}(u,m) = \frac{\vartheta_3(0,q)}{\vartheta_2(0,q)}
\frac{\vartheta_1(t,q)}{\vartheta_4(t,q)}
\mathrm{cn}(u,m) = \frac{\vartheta_4(0,q)}{\vartheta_2(0,q)}
\frac{\vartheta_2(t,q)}{\vartheta_4(t,q)}
\mathrm{dn}(u,m) = \frac{\vartheta_4(0,q)}{\vartheta_3(0,q)}
\frac{\vartheta_3(t,q)}{\vartheta_4(t,q)},
or more generally computes a ratio of two such functions. Here
`t = u/\vartheta_3(0,q)^2`, and `q = q(m)` denotes the nome (see
:func:`~mpmath.nome`). Optionally, you can specify the nome directly
instead of `m` by passing ``q=<value>``, or you can directly
specify the elliptic parameter `k` with ``k=<value>``.
The first argument should be a two-character string specifying the
function using any combination of ``'s'``, ``'c'``, ``'d'``, ``'n'``. These
letters respectively denote the basic functions
`\mathrm{sn}(u,m)`, `\mathrm{cn}(u,m)`, `\mathrm{dn}(u,m)`, and `1`.
The identifier specifies the ratio of two such functions.
For example, ``'ns'`` identifies the function
.. math ::
\mathrm{ns}(u,m) = \frac{1}{\mathrm{sn}(u,m)}
and ``'cd'`` identifies the function
.. math ::
\mathrm{cd}(u,m) = \frac{\mathrm{cn}(u,m)}{\mathrm{dn}(u,m)}.
If called with only the first argument, a function object
evaluating the chosen function for given arguments is returned.
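A small sketch of the naming and keyword conventions described above;
the last call assumes the standard relation `m = k^2` between the
parameter and the modulus::

    from mpmath import mp, mpf, sqrt, ellipfun

    mp.dps = 25
    u, m = mpf('2.5'), mpf('0.3')
    print(ellipfun('ns', u, m))          # 'ns' = 1/sn by definition
    print(1/ellipfun('sn', u, m))
    print(ellipfun('sn', u, k=sqrt(m)))  # modulus instead of parameter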
**Examples**
Basic evaluation::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> ellipfun('cd', 3.5, 0.5)
-0.9891101840595543931308394
>>> ellipfun('cd', 3.5, q=0.25)
0.07111979240214668158441418
The sn-function is doubly periodic in the complex plane with periods
`4 K(m)` and `2 i K(1-m)` (see :func:`~mpmath.ellipk`)::
>>> sn = ellipfun('sn')
>>> sn(2, 0.25)
0.9628981775982774425751399
>>> sn(2+4*ellipk(0.25), 0.25)
0.9628981775982774425751399
>>> chop(sn(2+2*j*ellipk(1-0.25), 0.25))
0.9628981775982774425751399
The cn-function is doubly periodic with periods `4 K(m)` and `4 i K(1-m)`::
>>> cn = ellipfun('cn')
>>> cn(2, 0.25)
-0.2698649654510865792581416
>>> cn(2+4*ellipk(0.25), 0.25)
-0.2698649654510865792581416
>>> chop(cn(2+4*j*ellipk(1-0.25), 0.25))
-0.2698649654510865792581416
The dn-function is doubly periodic with periods `2 K(m)` and `4 i K(1-m)`::
>>> dn = ellipfun('dn')
>>> dn(2, 0.25)
0.8764740583123262286931578
>>> dn(2+2*ellipk(0.25), 0.25)
0.8764740583123262286931578
>>> chop(dn(2+4*j*ellipk(1-0.25), 0.25))
0.8764740583123262286931578
"""
jtheta = r"""
Computes the Jacobi theta function `\vartheta_n(z, q)`, where
`n = 1, 2, 3, 4`, defined by the infinite series:
.. math ::
\vartheta_1(z,q) = 2 q^{1/4} \sum_{n=0}^{\infty}
(-1)^n q^{n^2+n\,} \sin((2n+1)z)
\vartheta_2(z,q) = 2 q^{1/4} \sum_{n=0}^{\infty}
q^{n^{2\,} + n} \cos((2n+1)z)
\vartheta_3(z,q) = 1 + 2 \sum_{n=1}^{\infty}
q^{n^2\,} \cos(2 n z)
\vartheta_4(z,q) = 1 + 2 \sum_{n=1}^{\infty}
(-q)^{n^2\,} \cos(2 n z)
The theta functions are functions of two variables:
* `z` is the *argument*, an arbitrary real or complex number
* `q` is the *nome*, which must be a real or complex number
in the unit disk (i.e. `|q| < 1`). For `|q| \ll 1`, the
series converge very quickly, so the Jacobi theta functions
can efficiently be evaluated to high precision.
The compact notations `\vartheta_n(q) = \vartheta_n(0,q)`
and `\vartheta_n = \vartheta_n(0,q)` are also frequently
encountered. Finally, Jacobi theta functions are frequently
considered as functions of the half-period ratio `\tau`
and then usually denoted by `\vartheta_n(z|\tau)`.
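A sketch of evaluating a theta function in terms of `\tau` rather than
the nome, using the correspondence `q = e^{\pi i \tau}` (the same
relation appears in the quasiperiodicity example below)::

    from mpmath import mp, j, pi, exp, jtheta

    mp.dps = 25
    tau = 0.5j            # half-period ratio, Im(tau) > 0
    q = exp(pi*j*tau)     # corresponding nome, |q| < 1
    print(jtheta(3, 0.25, q))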
Optionally, ``jtheta(n, z, q, derivative=d)`` with `d > 0` computes
a `d`-th derivative with respect to `z`.
**Examples and basic properties**
Considered as functions of `z`, the Jacobi theta functions may be
viewed as generalizations of the ordinary trigonometric functions
cos and sin. They are periodic functions::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> jtheta(1, 0.25, '0.2')
0.2945120798627300045053104
>>> jtheta(1, 0.25 + 2*pi, '0.2')
0.2945120798627300045053104
Indeed, the series defining the theta functions are essentially
trigonometric Fourier series. The coefficients can be retrieved
using :func:`~mpmath.fourier`::
>>> mp.dps = 10
>>> nprint(fourier(lambda x: jtheta(2, x, 0.5), [-pi, pi], 4))
([0.0, 1.68179, 0.0, 0.420448, 0.0], [0.0, 0.0, 0.0, 0.0, 0.0])
The Jacobi theta functions are also so-called quasiperiodic
functions of `z` and `\tau`, meaning that for fixed `\tau`,
`\vartheta_n(z, q)` and `\vartheta_n(z+\pi \tau, q)` are the same
except for an exponential factor::
>>> mp.dps = 25
>>> tau = 3*j/10
>>> q = exp(pi*j*tau)
>>> z = 10
>>> jtheta(4, z+tau*pi, q)
(-0.682420280786034687520568 + 1.526683999721399103332021j)
>>> -exp(-2*j*z)/q * jtheta(4, z, q)
(-0.682420280786034687520568 + 1.526683999721399103332021j)
The Jacobi theta functions satisfy a huge number of other
functional equations, such as the following identity (valid for
any `q`)::
>>> q = mpf(3)/10
>>> jtheta(3,0,q)**4
6.823744089352763305137427
>>> jtheta(2,0,q)**4 + jtheta(4,0,q)**4
6.823744089352763305137427
Extensive listings of identities satisfied by the Jacobi theta
functions can be found in standard reference works.
The Jacobi theta functions are related to the gamma function
for special arguments::
>>> jtheta(3, 0, exp(-pi))
1.086434811213308014575316
>>> pi**(1/4.) / gamma(3/4.)
1.086434811213308014575316
:func:`~mpmath.jtheta` supports arbitrary precision evaluation and complex
arguments::
>>> mp.dps = 50
>>> jtheta(4, sqrt(2), 0.5)
2.0549510717571539127004115835148878097035750653737
>>> mp.dps = 25
>>> jtheta(4, 1+2j, (1+j)/5)
(7.180331760146805926356634 - 1.634292858119162417301683j)
Evaluation of derivatives::
>>> mp.dps = 25
>>> jtheta(1, 7, 0.25, 1); diff(lambda z: jtheta(1, z, 0.25), 7)
1.209857192844475388637236
1.209857192844475388637236
>>> jtheta(1, 7, 0.25, 2); diff(lambda z: jtheta(1, z, 0.25), 7, 2)
-0.2598718791650217206533052
-0.2598718791650217206533052
>>> jtheta(2, 7, 0.25, 1); diff(lambda z: jtheta(2, z, 0.25), 7)
-1.150231437070259644461474
-1.150231437070259644461474
>>> jtheta(2, 7, 0.25, 2); diff(lambda z: jtheta(2, z, 0.25), 7, 2)
-0.6226636990043777445898114
-0.6226636990043777445898114
>>> jtheta(3, 7, 0.25, 1); diff(lambda z: jtheta(3, z, 0.25), 7)
-0.9990312046096634316587882
-0.9990312046096634316587882
>>> jtheta(3, 7, 0.25, 2); diff(lambda z: jtheta(3, z, 0.25), 7, 2)
-0.1530388693066334936151174
-0.1530388693066334936151174
>>> jtheta(4, 7, 0.25, 1); diff(lambda z: jtheta(4, z, 0.25), 7)
0.9820995967262793943571139
0.9820995967262793943571139
>>> jtheta(4, 7, 0.25, 2); diff(lambda z: jtheta(4, z, 0.25), 7, 2)
0.3936902850291437081667755
0.3936902850291437081667755
**Possible issues**
For `|q| \ge 1` or `\Im(\tau) \le 0`, :func:`~mpmath.jtheta` raises
``ValueError``. This exception is also raised for `|q|` extremely
close to 1 (or equivalently `\tau` very close to 0), since the
series would converge too slowly::
>>> jtheta(1, 10, 0.99999999 * exp(0.5*j))
Traceback (most recent call last):
...
ValueError: abs(q) > THETA_Q_LIM = 1.000000
"""
eulernum = r"""
Gives the `n`-th Euler number, defined as the `n`-th derivative of
`\mathrm{sech}(t) = 1/\cosh(t)` evaluated at `t = 0`. Equivalently, the
Euler numbers give the coefficients of the Taylor series
.. math ::
\mathrm{sech}(t) = \sum_{n=0}^{\infty} \frac{E_n}{n!} t^n.
The Euler numbers are closely related to Bernoulli numbers
and Bernoulli polynomials. They can also be evaluated in terms of
Euler polynomials (see :func:`~mpmath.eulerpoly`) as `E_n = 2^n E_n(1/2)`.
**Examples**
Computing the first few Euler numbers and verifying that they
agree with the Taylor series::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> [eulernum(n) for n in range(11)]
[1.0, 0.0, -1.0, 0.0, 5.0, 0.0, -61.0, 0.0, 1385.0, 0.0, -50521.0]
>>> chop(diffs(sech, 0, 10))
[1.0, 0.0, -1.0, 0.0, 5.0, 0.0, -61.0, 0.0, 1385.0, 0.0, -50521.0]
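The stated relation to Euler polynomials, `E_n = 2^n E_n(1/2)`, can also
be checked directly (a small consistency sketch, using
:func:`~mpmath.eulerpoly` as documented below)::
>>> 2**8 * eulerpoly(8, mpf(1)/2); eulernum(8)
1385.0
1385.0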
Euler numbers grow very rapidly. :func:`~mpmath.eulernum` efficiently
computes numerical approximations for large indices::
>>> eulernum(50)
-6.053285248188621896314384e+54
>>> eulernum(1000)
3.887561841253070615257336e+2371
>>> eulernum(10**20)
4.346791453661149089338186e+1936958564106659551331
Comparing with an asymptotic formula for the Euler numbers::
>>> n = 10**5
>>> (-1)**(n//2) * 8 * sqrt(n/(2*pi)) * (2*n/(pi*e))**n
3.69919063017432362805663e+436961
>>> eulernum(n)
3.699193712834466537941283e+436961
Pass ``exact=True`` to obtain exact values of Euler numbers as integers::
>>> print(eulernum(50, exact=True))
-6053285248188621896314383785111649088103498225146815121
>>> print(eulernum(200, exact=True) % 10**10)
1925859625
>>> eulernum(1001, exact=True)
0
"""
eulerpoly = r"""
Evaluates the Euler polynomial `E_n(z)`, defined by the generating function
representation
.. math ::
\frac{2e^{zt}}{e^t+1} = \sum_{n=0}^\infty E_n(z) \frac{t^n}{n!}.
The Euler polynomials may also be represented in terms of
Bernoulli polynomials (see :func:`~mpmath.bernpoly`) using various formulas, for
example
.. math ::
E_n(z) = \frac{2}{n+1} \left(
B_{n+1}(z)-2^{n+1}B_{n+1}\left(\frac{z}{2}\right)
\right).
Special values include the Euler numbers `E_n = 2^n E_n(1/2)` (see
:func:`~mpmath.eulernum`).
**Examples**
Computing the coefficients of the first few Euler polynomials::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> for n in range(6):
... chop(taylor(lambda z: eulerpoly(n,z), 0, n))
...
[1.0]
[-0.5, 1.0]
[0.0, -1.0, 1.0]
[0.25, 0.0, -1.5, 1.0]
[0.0, 1.0, 0.0, -2.0, 1.0]
[-0.5, 0.0, 2.5, 0.0, -2.5, 1.0]
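The Bernoulli polynomial representation given above can be checked
numerically (a quick consistency sketch; the residual is pure rounding
error, removed here by ``chop``)::
>>> n = 6; z = mpf(3)/7
>>> lhs = eulerpoly(n, z)
>>> rhs = 2*(bernpoly(n+1, z) - 2**(n+1)*bernpoly(n+1, z/2))/(n+1)
>>> chop(lhs - rhs)
0.0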
Evaluation for arbitrary `z`::
>>> eulerpoly(2,3)
6.0
>>> eulerpoly(5,4)
423.5
>>> eulerpoly(35, 11111111112)
3.994957561486776072734601e+351
>>> eulerpoly(4, 10+20j)
(-47990.0 - 235980.0j)
>>> eulerpoly(2, '-3.5e-5')
0.000035001225
>>> eulerpoly(3, 0.5)
0.0
>>> eulerpoly(55, -10**80)
-1.0e+4400
>>> eulerpoly(5, -inf)
-inf
>>> eulerpoly(6, -inf)
+inf
Computing Euler numbers::
>>> 2**26 * eulerpoly(26,0.5)
-4087072509293123892361.0
>>> eulernum(26)
-4087072509293123892361.0
Evaluation is accurate for large `n` and small `z`::
>>> eulerpoly(100, 0.5)
2.29047999988194114177943e+108
>>> eulerpoly(1000, 10.5)
3.628120031122876847764566e+2070
>>> eulerpoly(10000, 10.5)
1.149364285543783412210773e+30688
"""
spherharm = r"""
Evaluates the spherical harmonic `Y_l^m(\theta,\phi)`,
.. math ::
Y_l^m(\theta,\phi) = \sqrt{\frac{2l+1}{4\pi}\frac{(l-m)!}{(l+m)!}}
P_l^m(\cos \theta) e^{i m \phi}
where `P_l^m` is an associated Legendre function (see :func:`~mpmath.legenp`).
Here `\theta \in [0, \pi]` denotes the polar coordinate (ranging
from the north pole to the south pole) and `\phi \in [0, 2 \pi]` denotes the
azimuthal coordinate on a sphere. Note that many different
conventions for the spherical coordinate variables are in use.
Usually spherical harmonics are considered for `l \in \mathbb{N}`,
`m \in \mathbb{Z}`, `|m| \le l`. More generally, `l,m,\theta,\phi`
are permitted to be complex numbers.
.. note ::
:func:`~mpmath.spherharm` returns a complex number, even if the value is
purely real.
**Plots**
.. literalinclude :: /modules/mpmath/plots/spherharm40.py
`Y_{4,0}`:
.. image :: /modules/mpmath/plots/spherharm40.png
`Y_{4,1}`:
.. image :: /modules/mpmath/plots/spherharm41.png
`Y_{4,2}`:
.. image :: /modules/mpmath/plots/spherharm42.png
`Y_{4,3}`:
.. image :: /modules/mpmath/plots/spherharm43.png
`Y_{4,4}`:
.. image :: /modules/mpmath/plots/spherharm44.png
**Examples**
Some low-order spherical harmonics with reference values::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> theta = pi/4
>>> phi = pi/3
>>> spherharm(0,0,theta,phi); 0.5*sqrt(1/pi)*expj(0)
(0.2820947917738781434740397 + 0.0j)
(0.2820947917738781434740397 + 0.0j)
>>> spherharm(1,-1,theta,phi); 0.5*sqrt(3/(2*pi))*expj(-phi)*sin(theta)
(0.1221506279757299803965962 - 0.2115710938304086076055298j)
(0.1221506279757299803965962 - 0.2115710938304086076055298j)
>>> spherharm(1,0,theta,phi); 0.5*sqrt(3/pi)*cos(theta)*expj(0)
(0.3454941494713354792652446 + 0.0j)
(0.3454941494713354792652446 + 0.0j)
>>> spherharm(1,1,theta,phi); -0.5*sqrt(3/(2*pi))*expj(phi)*sin(theta)
(-0.1221506279757299803965962 - 0.2115710938304086076055298j)
(-0.1221506279757299803965962 - 0.2115710938304086076055298j)
With the normalization convention used, the spherical harmonics are orthonormal
on the unit sphere::
>>> sphere = [0,pi], [0,2*pi]
>>> dS = lambda t,p: fp.sin(t) # differential element
>>> Y1 = lambda t,p: fp.spherharm(l1,m1,t,p)
>>> Y2 = lambda t,p: fp.conj(fp.spherharm(l2,m2,t,p))
>>> l1 = l2 = 3; m1 = m2 = 2
>>> print(fp.quad(lambda t,p: Y1(t,p)*Y2(t,p)*dS(t,p), *sphere))
(1+0j)
>>> m2 = 1 # m1 != m2
>>> print(fp.chop(fp.quad(lambda t,p: Y1(t,p)*Y2(t,p)*dS(t,p), *sphere)))
0.0
Evaluation is accurate for large orders::
>>> spherharm(1000,750,0.5,0.25)
(3.776445785304252879026585e-102 - 5.82441278771834794493484e-102j)
Evaluation works with complex parameter values::
>>> spherharm(1+j, 2j, 2+3j, -0.5j)
(64.44922331113759992154992 + 1981.693919841408089681743j)
"""
scorergi = r"""
Evaluates the Scorer function
.. math ::
\operatorname{Gi}(z) =
\operatorname{Ai}(z) \int_0^z \operatorname{Bi}(t) dt +
\operatorname{Bi}(z) \int_z^{\infty} \operatorname{Ai}(t) dt
which gives a particular solution to the inhomogeneous Airy
differential equation `f''(z) - z f(z) = 1/\pi`. Another
particular solution is given by the Scorer Hi-function
(:func:`~mpmath.scorerhi`). The two functions are related as
`\operatorname{Gi}(z) + \operatorname{Hi}(z) = \operatorname{Bi}(z)`.
**Plots**
.. literalinclude :: /modules/mpmath/plots/gi.py
.. image :: /modules/mpmath/plots/gi.png
.. literalinclude :: /modules/mpmath/plots/gi_c.py
.. image :: /modules/mpmath/plots/gi_c.png
**Examples**
Some values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> scorergi(0); 1/(power(3,'7/6')*gamma('2/3'))
0.2049755424820002450503075
0.2049755424820002450503075
>>> diff(scorergi, 0); 1/(power(3,'5/6')*gamma('1/3'))
0.1494294524512754526382746
0.1494294524512754526382746
>>> scorergi(+inf); scorergi(-inf)
0.0
0.0
>>> scorergi(1)
0.2352184398104379375986902
>>> scorergi(-1)
-0.1166722172960152826494198
Evaluation for large arguments::
>>> scorergi(10)
0.03189600510067958798062034
>>> scorergi(100)
0.003183105228162961476590531
>>> scorergi(1000000)
0.0000003183098861837906721743873
>>> 1/(pi*1000000)
0.0000003183098861837906715377675
>>> scorergi(-1000)
-0.08358288400262780392338014
>>> scorergi(-100000)
0.02886866118619660226809581
>>> scorergi(50+10j)
(0.0061214102799778578790984 - 0.001224335676457532180747917j)
>>> scorergi(-50-10j)
(5.236047850352252236372551e+29 - 3.08254224233701381482228e+29j)
>>> scorergi(100000j)
(-8.806659285336231052679025e+6474077 + 8.684731303500835514850962e+6474077j)
Verifying the connection between Gi and Hi::
>>> z = 0.25
>>> scorergi(z) + scorerhi(z)
0.7287469039362150078694543
>>> airybi(z)
0.7287469039362150078694543
Verifying the differential equation::
>>> for z in [-3.4, 0, 2.5, 1+2j]:
... chop(diff(scorergi,z,2) - z*scorergi(z))
...
-0.3183098861837906715377675
-0.3183098861837906715377675
-0.3183098861837906715377675
-0.3183098861837906715377675
Verifying the integral representation::
>>> z = 0.5
>>> scorergi(z)
0.2447210432765581976910539
>>> Ai,Bi = airyai,airybi
>>> Bi(z)*(Ai(inf,-1)-Ai(z,-1)) + Ai(z)*(Bi(z,-1)-Bi(0,-1))
0.2447210432765581976910539
**References**
1. [DLMF]_ section 9.12: Scorer Functions
"""
scorerhi = r"""
Evaluates the second Scorer function
.. math ::
\operatorname{Hi}(z) =
\operatorname{Bi}(z) \int_{-\infty}^z \operatorname{Ai}(t) dt -
\operatorname{Ai}(z) \int_{-\infty}^z \operatorname{Bi}(t) dt
which gives a particular solution to the inhomogeneous Airy
differential equation `f''(z) - z f(z) = 1/\pi`. See also
:func:`~mpmath.scorergi`.
**Plots**
.. literalinclude :: /modules/mpmath/plots/hi.py
.. image :: /modules/mpmath/plots/hi.png
.. literalinclude :: /modules/mpmath/plots/hi_c.py
.. image :: /modules/mpmath/plots/hi_c.png
**Examples**
Some values and limits::
>>> from mpmath import *
>>> mp.dps = 25; mp.pretty = True
>>> scorerhi(0); 2/(power(3,'7/6')*gamma('2/3'))
0.4099510849640004901006149
0.4099510849640004901006149
>>> diff(scorerhi,0); 2/(power(3,'5/6')*gamma('1/3'))
0.2988589049025509052765491
0.2988589049025509052765491
>>> scorerhi(+inf); scorerhi(-inf)
+inf
0.0
>>> scorerhi(1)
0.9722051551424333218376886
>>> scorerhi(-1)
0.2206696067929598945381098
Evaluation for large arguments::
>>> scorerhi(10)
455641153.5163291358991077
>>> scorerhi(100)
6.041223996670201399005265e+288
>>> scorerhi(1000000)
7.138269638197858094311122e+289529652
>>> scorerhi(-10)
0.0317685352825022727415011
>>> scorerhi(-100)
0.003183092495767499864680483
>>> scorerhi(100j)
(-6.366197716545672122983857e-9 + 0.003183098861710582761688475j)
>>> scorerhi(50+50j)
(-5.322076267321435669290334e+63 + 1.478450291165243789749427e+65j)
>>> scorerhi(-1000-1000j)
(0.0001591549432510502796565538 - 0.000159154943091895334973109j)
Verifying the differential equation::
>>> for z in [-3.4, 0, 2, 1+2j]:
... chop(diff(scorerhi,z,2) - z*scorerhi(z))
...
0.3183098861837906715377675
0.3183098861837906715377675
0.3183098861837906715377675
0.3183098861837906715377675
Verifying the integral representation::
>>> z = 0.5
>>> scorerhi(z)
0.6095559998265972956089949
>>> Ai,Bi = airyai,airybi
>>> Bi(z)*(Ai(z,-1)-Ai(-inf,-1)) - Ai(z)*(Bi(z,-1)-Bi(-inf,-1))
0.6095559998265972956089949
"""
|
lidavidm/mathics-heroku
|
venv/lib/python2.7/site-packages/sympy/mpmath/function_docs.py
|
Python
|
gpl-3.0
| 276,842
|
[
"Gaussian"
] |
4d229ff6cf4ea85ee09d111a5f549f4c60e14a0f2995534def76865a3e5c2edb
|
import numpy as np
from time import time
from os.path import isfile
from ase import Atoms
from gpaw import GPAW, Mixer
from gpaw.lcao.projected_wannier import ProjectedWannierFunctions, get_phs
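# run the ground-state calculation only once; later invocations reuse the saved al8.gpw restart file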
if not isfile('al8.gpw'):
atoms = Atoms('Al', cell=(2.42, 7, 7), pbc=True)
atoms*=(8, 1, 1)
calc = GPAW(h=0.2, basis='szp', kpts=(1, 1, 1),
convergence={'bands':4*8}, width=0.1,
maxiter=200, mixer=Mixer(0.1, 7, weight=100.))
atoms.set_calculator(calc)
atoms.get_potential_energy()
calc.write('al8.gpw', 'all')
calc = GPAW('al8.gpw', txt=None, basis='sz')
ibzk_kc = calc.wfs.ibzk_kc
nk = len(ibzk_kc)
Ef = calc.get_fermi_level()
eps_kn = np.asarray([calc.get_eigenvalues(kpt=k) for k in range(nk)])
eps_kn -= Ef
V_knM, H_kMM, S_kMM, P_aqMi = get_phs(calc, s=0)
H_kMM -= S_kMM * Ef
pwf = ProjectedWannierFunctions(V_knM,
h_lcao=H_kMM,
s_lcao=S_kMM,
eigenvalues=eps_kn,
fixedenergy=0.0,
kpoints=ibzk_kc)
t1 = time()
h_kMM, s_kMM = pwf.get_hamiltonian_and_overlap_matrix(useibl=True)
t2 = time()
print "\nTime to construct PWF: %.3f seconds " % (t2 - t1)
|
qsnake/gpaw
|
oldtest/pwftest_Al_gamma.py
|
Python
|
gpl-3.0
| 1,276
|
[
"ASE",
"GPAW"
] |
5ba275248a014cc129ccd3aeb332bda58f74ec84cd40619e33274553f479e6a6
|
try: paraview.simple
except: from paraview.simple import *
import numpy as np
from mpi4py import MPI
import os
import csv
from scipy import interpolate
import gc
import sys
gc.enable()
comm = MPI.COMM_WORLD
#label = 'm_10_1'
#labelo = 'm_10_1'
tt = int(sys.argv[1]) - 1
labelo = sys.argv[2]
label = sys.argv[2]
basename = sys.argv[3]
resx = int(sys.argv[4])
resy = int(sys.argv[5])
path = '/scratch/jmensa/'+label+'/'
Xlist = np.linspace(0,10000,resx)
Ylist = np.linspace(0,4000,resy)
#Xlist = np.linspace(0,10000,resx)
#Ylist = np.linspace(0,4000,resy)
Zlist = np.linspace(0,-50,51)
dl = [0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1]
Zlist = np.cumsum(dl)
[X,Y] = np.meshgrid(Xlist,Ylist)
size = MPI.COMM_WORLD.Get_size()
rank = MPI.COMM_WORLD.Get_rank()
nl = len(Zlist)/size
ll = len(Zlist)%size
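# split the vertical levels across MPI ranks: every rank extracts nl slices, and rank 0 covers the ll leftover levels as well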
mli_pvtu = XMLPartitionedUnstructuredGridReader( FileName=[path+basename+'_'+str(tt)+'.pvtu'] )
mli_pvtu.PointArrayStatus = ['Velocity_CG']
sliceFilter = Slice(mli_pvtu)
sliceFilter.SliceType.Normal = [0,0,1]
if rank == 0:
U = np.zeros((len(Ylist),len(Xlist),len(Zlist),3))
for n in range(nl+ll):
layer = n+rank*nl
print 'layer:', rank, layer
sliceFilter.SliceType.Origin = [0,0,-1*Zlist[layer]]
DataSliceFile = paraview.servermanager.Fetch(sliceFilter)
points = DataSliceFile.GetPoints()
numPoints = DataSliceFile.GetNumberOfPoints()
#
data=np.zeros((numPoints,3))
coords=np.zeros((numPoints,3))
#
for x in xrange(numPoints):
data[x,:] = DataSliceFile.GetPointData().GetArray('Velocity_CG').GetTuple(x)
coords[x] = points.GetPoint(x)
#
U[:,:,layer,0] = interpolate.griddata((coords[:,0],coords[:,1]),data[:,0],(X,Y),method='linear')
U[:,:,layer,1] = interpolate.griddata((coords[:,0],coords[:,1]),data[:,1],(X,Y),method='linear')
U[:,:,layer,2] = interpolate.griddata((coords[:,0],coords[:,1]),data[:,2],(X,Y),method='linear')
# print rank, U[:,:,:]
if rank > 0:
U = np.zeros((len(Ylist),len(Xlist),nl,3))
for n in xrange(nl):
layer = n+rank*nl
print 'layer:', rank, layer
sliceFilter.SliceType.Origin = [0,0,-1*Zlist[layer]]
DataSliceFile = paraview.servermanager.Fetch(sliceFilter)
points = DataSliceFile.GetPoints()
numPoints = DataSliceFile.GetNumberOfPoints()
#
data=np.zeros((numPoints,3))
coords=np.zeros((numPoints,3))
#
for x in xrange(numPoints):
data[x,:] = DataSliceFile.GetPointData().GetArray('Velocity_CG').GetTuple(x)
coords[x] = points.GetPoint(x)
U[:,:,n,0] = interpolate.griddata((coords[:,0],coords[:,1]),data[:,0],(X,Y),method='linear')
U[:,:,n,1] = interpolate.griddata((coords[:,0],coords[:,1]),data[:,1],(X,Y),method='linear')
U[:,:,n,2] = interpolate.griddata((coords[:,0],coords[:,1]),data[:,2],(X,Y),method='linear')
# print rank, U[:,:,:]
comm.send(nl*rank+ll, dest=0, tag=10)
comm.send(U, dest=0, tag=11)
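# rank 0 assembles the workers' layer blocks and writes one CSV file per velocity component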
if rank == 0:
for s in range(size-1):
print 's', s+1
l = comm.recv(source=s+1, tag=10)
print 'l', l
U[:,:,l:l+nl,:] = comm.recv(source=s+1, tag=11)
fd0 = open('./csv/Velocity_CG_0_'+labelo+'_'+str(tt)+'.csv','w')
fd1 = open('./csv/Velocity_CG_1_'+labelo+'_'+str(tt)+'.csv','w')
fd2 = open('./csv/Velocity_CG_2_'+labelo+'_'+str(tt)+'.csv','w')
print U[:,:,:]
for z in xrange(len(Zlist)):
print z
for j in xrange(len(Ylist)):
for i in xrange(len(Xlist)):
fd0.write(str(U[j,i,z,0])+', ')
fd1.write(str(U[j,i,z,1])+', ')
fd2.write(str(U[j,i,z,2])+', ')
fd0.write('\n')
fd1.write('\n')
fd2.write('\n')
fd0.close()
fd1.close()
fd2.close()
del mli_pvtu, U, coords, data, numPoints, points, DataSliceFile, sliceFilter
gc.collect()
|
jungla/ICOM-fluidity-toolbox
|
2D/U/extract_Velocity_CG_temp.py
|
Python
|
gpl-2.0
| 3,678
|
[
"ParaView"
] |
a467ec2ee9e06587cc491f2c5275046c81f3c4d9afa3fb1567fad226dbfd28db
|
# -*- coding: utf-8 -*-
import pytest
import sys
from test_base_class import TestBaseClass
try:
from collections import Counter
except ImportError:
from counter26 import Counter
aerospike = pytest.importorskip("aerospike")
try:
from aerospike.exception import *
except:
print "Please install aerospike python client."
sys.exit(1)
class TestExistsMany(TestBaseClass):
def setup_class(cls):
"""
Setup method.
"""
hostlist, user, password = TestBaseClass.get_hosts()
config = {'hosts': hostlist}
if user == None and password == None:
TestExistsMany.client = aerospike.client(config).connect()
else:
TestExistsMany.client = aerospike.client(config).connect(user,
password)
def teardown_class(cls):
TestExistsMany.client.close()
def setup_method(self, method):
self.keys = []
for i in xrange(5):
key = ('test', 'demo', i)
rec = {'name': 'name%s' % (str(i)), 'age': i}
TestExistsMany.client.put(key, rec)
self.keys.append(key)
def teardown_method(self, method):
"""
Teardown method.
"""
for i in xrange(5):
key = ('test', 'demo', i)
TestExistsMany.client.remove(key)
def test_exists_many_without_any_parameter(self):
with pytest.raises(TypeError) as typeError:
TestExistsMany.client.exists_many()
assert "Required argument 'keys' (pos 1) not found" in typeError.value
def test_exists_many_without_policy(self):
records = TestExistsMany.client.exists_many(self.keys)
assert type(records) == list
assert len(records) == 5
def test_exists_many_with_proper_parameters(self):
records = TestExistsMany.client.exists_many(self.keys, {'timeout': 1200})
assert type(records) == list
assert len(records) == 5
assert Counter([x[0][2] for x in records]) == Counter([0, 1, 2, 3,
4])
def test_exists_many_with_none_policy(self):
records = TestExistsMany.client.exists_many(self.keys, None)
assert type(records) == list
assert len(records) == 5
assert Counter([x[0][2] for x in records]) == Counter([0, 1, 2, 3,
4])
def test_exists_many_with_none_keys(self):
try:
TestExistsMany.client.exists_many( None, {} )
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "Keys should be specified as a list or tuple."
def test_exists_many_with_non_existent_keys(self):
self.keys.append(('test', 'demo', 'some_key'))
records = TestExistsMany.client.exists_many(self.keys)
assert type(records) == list
assert len(records) == 6
assert Counter([x[0][2] for x in records]) == Counter([0, 1, 2, 3,
4, 'some_key'])
for x in records:
if x[0][2] == 'some_key':
assert x[1] == None
def test_exists_many_with_all_non_existent_keys(self):
keys = [('test', 'demo', 'key')]
records = TestExistsMany.client.exists_many(keys)
assert len(records) == 1
for x in records:
if x[0][2] == 'key':
assert x[1] == None
def test_exists_many_with_invalid_key(self):
try:
records = TestExistsMany.client.exists_many( "key" )
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "Keys should be specified as a list or tuple."
def test_exists_many_with_invalid_timeout(self):
policies = { 'timeout' : 0.2 }
try:
records = TestExistsMany.client.exists_many(self.keys, policies)
except ParamError as exception:
assert exception.code == -2
assert exception.msg == "timeout is invalid"
def test_exists_many_with_initkey_as_digest(self):
keys = []
key = ("test", "demo", None, bytearray("asd;as[d'as;djk;uyfl"))
rec = {'name': 'name1', 'age': 1}
TestExistsMany.client.put(key, rec)
keys.append(key)
key = ("test", "demo", None, bytearray("ase;as[d'as;djk;uyfl"))
rec = {'name': 'name2', 'age': 2}
TestExistsMany.client.put(key, rec)
keys.append(key)
records = TestExistsMany.client.exists_many(keys)
for key in keys:
TestExistsMany.client.remove(key)
assert type(records) == list
assert len(records) == 2
i = 0
for x in records:
if i:
assert x[0][3] == bytearray(b"ase;as[d'as;djk;uyfl")
else:
assert x[0][3] == bytearray(b"asd;as[d'as;djk;uyfl")
i += 1
def test_exists_many_with_non_existent_keys_in_middle(self):
self.keys.append(('test', 'demo', 'some_key'))
for i in xrange(15, 20):
key = ('test', 'demo', i)
rec = {'name': 'name%s' % (str(i)), 'age': i}
TestExistsMany.client.put(key, rec)
self.keys.append(key)
records = TestExistsMany.client.exists_many(self.keys)
for i in xrange(15, 20):
key = ('test', 'demo', i)
TestExistsMany.client.remove(key)
assert type(records) == list
assert len(records) == 11
assert Counter([x[0][2] for x in records]) == Counter([0, 1, 2, 3,
4, 'some_key', 15, 16, 17, 18, 19])
for x in records:
if x[0][2] == 'some_key':
assert x[1] == None
def test_exists_many_with_proper_parameters_without_connection(self):
config = {'hosts': [('127.0.0.1', 3000)]}
client1 = aerospike.client(config)
try:
records = client1.exists_many( self.keys, { 'timeout': 20 } )
except ClusterError as exception:
assert exception.code == 11L
assert exception.msg == 'No connection to aerospike cluster'
|
trupty/aerospike-client-python
|
test/test_exists_many.py
|
Python
|
apache-2.0
| 6,125
|
[
"ASE"
] |
3c7f638317ae191b58dfd3ca004633e898cff3125657e5bd39b948983c62ff5b
|
import pysam
import click
import gc
import gzip
from sidr import common
from Bio.SeqUtils import GC # for GC content
from Bio.SeqIO.FastaIO import FastaIterator
def readFasta(fastaFile):
"""
Reads a FASTA file and parses contigs for GC content.
Args:
fastaFile: The path to the FASTA file.
Returns:
contigs A dictionary mapping contigIDs to sidr.common.Contig objects with GC content as a variable.
"""
contigs = []
if ".gz" in fastaFile: # should support .fa.gz files in a seamless (if slow) way
openFunc = gzip.open
else:
openFunc = open
with openFunc(fastaFile) as data:
click.echo("Reading %s" % fastaFile)
with click.progressbar(FastaIterator(data)) as fi:
for record in fi: # TODO: conditional formatting
contigs.append(common.Contig(record.id.split(' ')[0], variables={"GC": GC(record.seq)}))
if len(contigs) != len(set([x.contigid for x in contigs])): # exit if duplicate contigs, https://stackoverflow.com/questions/5278122/checking-if-all-elements-in-a-list-are-unique
raise ValueError("Input FASTA contains duplicate contigIDs, exiting")
return dict((x.contigid, x) for x in contigs) # https://stackoverflow.com/questions/3070242/reduce-python-list-of-objects-to-dict-object-id-object
def readBAM(BAMFile, contigs):
"""
Parses an aligned BAM file for coverage.
Args:
BAMFile: The BAM file to parse.
contigs: List of sidr.common.Contigs taken from input FASTA.
Returns:
contigs: Input contigs updated with coverage, measured as an
average over the whole contig.
"""
alignment = pysam.AlignmentFile(BAMFile, "rb")
click.echo("Reading BAM file")
with click.progressbar(contigs) as ci:
for contig in ci:
covArray = [] # coverage over contig = sum(coverage per base)/number of bases
for pile in alignment.pileup(region=str(contig)):
covArray.append(pile.nsegments)
try:
contigs[contig].variables["Coverage"] = (sum(covArray) / len(covArray))
except ZeroDivisionError: # should only occur if 0 coverage recorded
contigs[contig].variables["Coverage"] = 0
return contigs
def readBLAST(classification, taxdump, classificationLevel, contigs):
"""
Reads a BLAST result file and combines it with other known information about the contigs.
Args:
classification: A string containing the filename of the BLAST results. The BLAST
results must be in the format -outfmt '6 qseqid staxids', additional information
can be added but the first two fields must be qseqid and staxids.
taxdump: The NCBI taxdump as processed by parseTaxdump()
classificationLevel: The level of classification to save into the corpus. Defaults to phylum.
contigs: List of sidr.common.Contigs taken from input FASTA
Returns:
contigs: Input list of contigs updated with classification from BLAST
classMap: A dictionary mapping class names to their class id used by scikit-learn.
classList: A list of class names.
"""
classList = []
classMap = {}
with open(classification) as data:
click.echo("Reading %s" % classification)
with click.progressbar(data) as dt:
for line in dt:
record = line.split("\t")
contig = record[0]
taxid = record[1].strip()
taxonomy = common.taxidToLineage(taxid, taxdump, classificationLevel)
taxonomy = taxonomy.lower()
try:
if not contigs[contig].classification: # assume that the first hit in blast output is best
contigs[contig].classification = taxonomy
if taxonomy not in classList:
classList.append(taxonomy)
except KeyError: # a contig appears in the BLAST output but not in the FASTA dict (should be impossible, but skip it defensively)
continue
for idx, className in enumerate(classList):
classMap[className] = idx
return contigs, classMap, classList
def runAnalysis(bam, fasta, blastresults, taxdump, modelOutput, output, tokeep, toremove, binary, target, level):
taxdump, taxidDict = common.parseTaxdump(taxdump, False)
gc.collect()
click.echo("Taxdump parsed, %d taxIDs loaded" % len(taxdump))
contigs = readFasta(fasta)
gc.collect()
click.echo("FASTA loaded, %d contigs returned" % len(contigs))
contigs = readBAM(bam, contigs)
gc.collect()
click.echo("BAM loaded")
contigs, classMap, classList = readBLAST(blastresults,
taxdump, level.lower(), contigs)
gc.collect()
click.echo("BLAST results loaded")
corpus, testdata, features = common.constructCorpus(list(contigs.values()), classMap, binary, target)
gc.collect()
click.echo("Corpus constucted, %d contigs in corpus and %d contigs in test data" % (len(corpus), len(testdata)))
classifier = common.constructModel(corpus, classList, features, modelOutput)
result = common.classifyData(classifier, testdata, classMap)
common.generateOutput(tokeep, toremove, result, contigs.values(), target, output)
|
damurdock/SIDR
|
sidr/default.py
|
Python
|
mit
| 5,335
|
[
"BLAST",
"pysam"
] |
1806ae62c955fd500fe2faffcb15d68800b1085f55c1f46ebc7d93d70c064050
|
import click
from parsec.cli import pass_context, json_loads
from parsec.decorators import custom_exception, json_output
@click.command('upload_file_from_server')
@click.argument("library_id", type=str)
@click.argument("server_dir", type=str)
@click.option(
"--folder_id",
help="id of the folder where to place the uploaded files. If not provided, the root folder will be used",
type=str
)
@click.option(
"--file_type",
help="Galaxy file format name",
default="auto",
show_default=True,
type=str
)
@click.option(
"--dbkey",
help="Dbkey",
default="?",
show_default=True,
type=str
)
@click.option(
"--link_data_only",
help="either 'copy_files' (default) or 'link_to_files'. Setting to 'link_to_files' symlinks instead of copying the files",
type=str
)
@click.option(
"--roles",
help="???",
type=str
)
@click.option(
"--preserve_dirs",
help="Indicate whether to preserve the directory structure when importing dir",
is_flag=True
)
@click.option(
"--tag_using_filenames",
help="Indicate whether to generate dataset tags from filenames.",
is_flag=True
)
@click.option(
"--tags",
help="A list of tags to add to the datasets",
type=str,
multiple=True
)
@pass_context
@custom_exception
@json_output
def cli(ctx, library_id, server_dir, folder_id="", file_type="auto", dbkey="?", link_data_only="", roles="", preserve_dirs=False, tag_using_filenames=False, tags=""):
"""Upload all files in the specified subdirectory of the Galaxy library import directory to a library.
Output:
List with a single dictionary containing information about the LDDA
"""
return ctx.gi.libraries.upload_file_from_server(library_id, server_dir, folder_id=folder_id, file_type=file_type, dbkey=dbkey, link_data_only=link_data_only, roles=roles, preserve_dirs=preserve_dirs, tag_using_filenames=tag_using_filenames, tags=tags)
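# Hypothetical invocation (assuming parsec's usual module-to-subcommand
# layout; the IDs are placeholders):
# parsec libraries upload_file_from_server <library_id> <server_dir> --file_type auto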
|
galaxy-iuc/parsec
|
parsec/commands/libraries/upload_file_from_server.py
|
Python
|
apache-2.0
| 1,929
|
[
"Galaxy"
] |
e4420f4739978b15135687a92321ef6430b7d55b0529da6dc8faafe9d0941576
|
from functools import partial
import re
import logging
from sets import Set
import lib.visit as v
import lib.const as C
from .. import util
from ..meta import methods, classes, class_lookup
from ..meta.template import Template
from ..meta.clazz import Clazz
from ..meta.method import Method, find_formals
from ..meta.field import Field
from ..meta.statement import Statement, to_statements
from ..meta.expression import Expression
class AccessorUni(object):
__aux_name = C.ACC.AUX+"Uni"
regex_log = r"log::check_log::(-)(\d+)"
_invoked = Set()
@staticmethod
def is_method_log(msg):
return re.match(AccessorUni.regex_log, msg)
def add_invoked(self, msg):
m = re.match(AccessorUni.regex_log, msg)
self._invoked.add(int(m.group(2)))
## hole assignments for roles
## glblInit_accessor_????,StmtAssign,accessor_???? = n
regex_role = r"(({})_\S+_{})__.* = (\d+)$".format('|'.join(C.acc_roles), __aux_name)
@staticmethod
def simple_role_of_interest(msg):
return re.match(AccessorUni.regex_role, msg)
# add a mapping from role variable to its value chosen by sketch
def add_simple_role(self, msg):
m = re.match(AccessorUni.regex_role, msg)
v, n = m.group(1), m.group(3)
self._role[v] = n
# initializer
def __init__(self, cmd, output_path, acc_conf):
self._cmd = cmd
self._output = output_path
self._demo = util.pure_base(output_path)
self._acc_conf = acc_conf
self._cur_mtd = None
self._role = {} # { v : n }
# class roles
self._accessors = {} # { Aux... : {key1 : accessor1, key2 : accessor2} }
self._implicits = {} # { Aux... : {key1 : implicit1, key2 : implicit2} }
# method roles
self._getters = {} # { Aux... : {key1 : getter1, key2 : getter2 ...} }
self._setters = {} # { Aux... : {key1 : setter1, key2 : setter2 ...} }
self._cons = {} # { Aux... : {key1 : cons1, key2 : cons2 ...} }
# getter/setter fields
self._gs = {}
# interpret the synthesis result
with open(self._output, 'r') as f:
for line in f:
line = line.strip()
try:
if AccessorUni.is_method_log(line): self.add_invoked(line)
items = line.split(',')
func, kind, msg = items[0], items[1], ','.join(items[2:])
#if func == "AuxAccessorUni": print items
if AccessorUni.simple_role_of_interest(msg): self.add_simple_role(msg)
except IndexError: # not a line generated by custom codegen
pass # if "Total time" in line: logging.info(line)
@property
def demo(self):
return self._demo
@v.on("node")
def visit(self, node):
"""
This is the generic method to initialize the dynamic dispatcher
"""
# add a private field
@staticmethod
def add_prvt_fld(acc, k, typ, num):
name = u'_'.join([C.ACC.prvt, unicode(num), k, u"for", acc.name])
fld = acc.fld_by_name(name)
if fld and fld.typ != typ:
fld.typ = typ
if not fld:
logging.debug("adding private field {} for {} of type {}".format(name, acc.name, typ))
fld = Field(clazz=acc, typ=typ, name=name)
acc.add_fld(fld)
return fld
# getter code
@staticmethod
def def_getter(mtd, fld):
logging.debug("adding getter code into {}".format(repr(mtd)))
get = u"return {};".format(fld.name)
mtd.body = to_statements(mtd, get)
# setter code
@staticmethod
def def_setter(mtd, fld, typ):
arg = find_formals(mtd.params, [typ])[0]
logging.debug("adding setter code into {}".format(repr(mtd)))
set = u"{} = {};".format(fld.name, arg)
mtd.body = to_statements(mtd, set)
# constructor code
@staticmethod
def def_constructor(mtd, flds, imp):
if len(mtd.params) > len(flds): return
logging.debug("adding constructor code into {}".format(repr(mtd)))
for (_, nm), fld in zip(mtd.params, flds[:len(mtd.params)]):
init = u"{} = {};".format(fld.name, nm)
mtd.body += to_statements(mtd, init)
for i in range(len(imp)-len(mtd.params)):
hidden = imp[len(mtd.params)+i]
fldnm = flds[len(mtd.params)+i].name
init = u"{} = new {}();".format(fldnm, hidden.name)
mtd.body += to_statements(mtd, init)
@v.when(Template)
def visit(self, node):
def find_role(lst, aux_name, role):
try:
_id = self._role['_'.join([role, aux_name])]
return lst[int(_id)]
except KeyError:
# ignore what Sketch thought not critical for log conformity
return None
aux_name = self.__aux_name
aux = class_lookup(aux_name)
# find and store class roles
find_cls_role = partial(find_role, classes(), aux_name)
# find and store method roles
find_mtd_role = partial(find_role, methods(), aux_name)
cons = {}
for key in self._acc_conf.iterkeys():
cons[key] = find_mtd_role('_'.join([C.ACC.CONS, key]))
#cons_params = []
#for key in self._acc_conf.iterkeys():
# if self._acc_conf[key][0] >= 0:
# cons_params += map(find_mtd_role, map(lambda x: '_'.join([C.ACC.CONS, key, x]), range(self._acc_conf[key][0])))
implicits = {}
for key in self._acc_conf.iterkeys():
implicits[key] = {}
for x in xrange(self._acc_conf[key][0]):
implicits[key][x] = find_cls_role('_'.join([C.ACC.IMP, key, str(x)]))
getters = {}
for key in self._acc_conf.iterkeys():
getters[key] = {}
for x in xrange(self._acc_conf[key][1]):
getters[key][x] = find_mtd_role('_'.join([C.ACC.GET, key, str(x)]))
setters = {}
for key in self._acc_conf.iterkeys():
setters[key] = {}
for x in xrange(self._acc_conf[key][2]):
setters[key][x] = find_mtd_role('_'.join([C.ACC.SET, key, str(x)]))
gs = {}
for key in self._acc_conf.iterkeys():
gs[key] = {}
for x in xrange(max(self._acc_conf[key][1], self._acc_conf[key][2])):
try:
gs[key][x] = self._role['_'.join([C.ACC.GS, key, str(x), aux_name])]
except KeyError:
# ignore what Sketch thought not critical for log conformity
pass
self._cons[aux.name] = cons
self._implicits[aux.name] = implicits
self._getters[aux.name] = getters
self._setters[aux.name] = setters
self._gs[aux.name] = gs
# add private fields for constructors
for k in cons.iterkeys():
c = cons[k]
i = implicits[k]
if not c: continue
if util.exists(lambda m: m.id in self._invoked, c.clazz.mtds):
flds = []
for n, t in enumerate(c.param_typs):
fld = AccessorUni.add_prvt_fld(c.clazz, k, t, n)
flds.append(fld)
for dif in range(len(i.keys()) - len(c.params)):
fld = AccessorUni.add_prvt_fld(c.clazz, k, i[len(c.params)+dif].name, len(c.params)+dif)
flds.append(fld)
AccessorUni.def_constructor(c, flds, i)
# add private fields for getters/setters
# insert or move code snippets from Aux classes to actual participants
for k in gs.iterkeys():
for e in gs[k].iterkeys():
getr = getters[k][e]
setr = setters[k][e] if e in setters[k].keys() else None
effective = getr.id in self._invoked
if not effective: effective = (setr != None and setr.id in self._invoked)
if effective:
fld = AccessorUni.add_prvt_fld(getr.clazz, k, getr.typ, int(gs[k][e]))
logging.debug("getter: {}_{}: {}".format(k, e, repr(getr)))
AccessorUni.def_getter(getr, fld)
if setr != None:
fld = AccessorUni.add_prvt_fld(setr.clazz, k, setr.param_typs[0], int(gs[k][e]))
logging.debug("setter: {}_{}: {}".format(k, e, repr(setr)))
AccessorUni.def_setter(setr, fld, setr.param_typs[0])
# remove Aux class
node.classes.remove(aux)
@v.when(Clazz)
def visit(self, node): pass
@v.when(Field)
def visit(self, node): pass
@v.when(Method)
def visit(self, node):
self._cur_mtd = node
@v.when(Statement)
def visit(self, node):
if node.kind == C.S.EXP and node.e.kind == C.E.CALL:
call = unicode(node)
if call.startswith(C.ACC.AUX+"Uni"):
logging.debug("removing {}".format(call))
if "setterInOne" in call or "SetterInOne" in call:
## Aux.....setterInOne(...);
return []
else:
## Aux...constructor...
return []
if node.kind == C.S.RETURN:
call = unicode(node)
## return Aux....getterInOne(...);
if call.startswith(u"return " + C.ACC.AUX+"Uni") and "etterInOne" in call:
logging.debug("removing {}".format(call))
return []
return [node]
@v.when(Expression)
def visit(self, node): return node
|
plum-umd/pasket
|
pasket/decode/accessor_uni.py
|
Python
|
mit
| 8,646
|
[
"VisIt"
] |
03c89c0eafe0c656ab7585591dd30486d50ae6a1b8d21e6c2145571fbf29ea6e
|
#!/usr/bin/env python
#----------------------------------------------------------------------
# Copyright (c) 2011-2015 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
from __future__ import absolute_import
""" The OMNI client
This client is a GENI API client that is capable of connecting
to multiple slice authorities (clearinghouses) for slice creation and deletion.
See README-omni.txt
Be sure to create an omni config file (typically ~/.gcf/omni_config)
and supply valid paths to your per control framework user certs and keys.
See gcf/omni_config.sample for an example, and src/omni-configure.py
for a script to configure omni for you.
Typical usage:
omni.py sfa listresources
The currently supported control frameworks (clearinghouse implementations)
are SFA (i.e. PlanetLab), PG and GCF.
Extending Omni to support additional frameworks with their own
clearinghouse APIs requires adding a new Framework extension class.
Return Values and Arguments of various omni commands:
Aggregate functions:
Most aggregate functions return 2 items: A string describing the result, and an object for tool use.
In AM APIV3+ functions, that object is a dictionary by aggregate URL containing the full AM API v3+ return struct
(code, value, output).
[string dictionary] = omni.py getversion # dict is keyed by AM url
[string dictionary] = omni.py listresources # dict is keyed by AM url,urn
[string dictionary] = omni.py listresources SLICENAME # AM API V1&2 only; dict is keyed by AM url,urn
[string dictionary] = omni.py describe SLICENAME # AM API V3+ only
[string rspec] = omni.py createsliver SLICENAME RSPEC_FILENAME # AM API V1&2 only
[string dictionary] = omni.py allocate SLICENAME RSPEC_FILENAME # AM API V3+ only
[string dictionary] = omni.py provision SLICENAME # AM API V3+ only
[string dictionary] = omni.py performoperationalaction SLICENAME ACTION # AM API V3+ only
[string dictionary] = omni.py poa SLICENAME ACTION # AM API V3+ only; alias for performoperationalaction
[string dictionary] = omni.py sliverstatus SLICENAME # AM API V1&2 only
[string dictionary] = omni.py status SLICENAME # AM API V3+ only
[string (successList of AM URLs, failList)] = omni.py renewsliver SLICENAME # AM API V1&2 only
[string dictionary] = omni.py renew SLICENAME # AM API V3+ only
[string (successList of AM URLs, failList)] = omni.py deletesliver SLICENAME # AM API V1&2 only
[string dictionary] = omni.py delete SLICENAME # AM API V3+ only
In AM API v1&2:
[string (successList, failList)] = omni.py shutdown SLICENAME
In AM API v3:
[string dictionary] = omni.py shutdown SLICENAME
[string dictionary] = omni.py update SLICENAME RSPEC_FILENAME # Some AM API V3+ AMs only
[string dictionary] = omni.py cancel SLICENAME # Some AM API V3+ AMs only
Non-AM API functions exported by aggregates, supported by Omni:
From ProtoGENI/InstaGENI:
[string dictionary] = omni.py createimage SLICENAME IMAGENAME [false] -u <SLIVER URN>
[string dictionary] = omni.py snapshotimage SLICENAME IMAGENAME [false] -u <SLIVER URN> ; alias for createimage
[string dictionary] = omni.py deleteimage IMAGEURN [CREATORURN]
[string dictionary] = omni.py listimages [CREATORURN]
Clearinghouse functions:
[string dictionary] = omni.py get_ch_version # dict of CH specific version information
[string dictionary urn->url] = omni.py listaggregates
On success: [string sliceurnstring] = omni.py createslice SLICENAME
On fail: [string None] = omni.py createslice SLICENAME
[stringCred stringCred] = omni.py getslicecred SLICENAME
On success: [string dateTimeRenewedTo] = omni.py renewslice SLICENAME
On fail: [string None] = omni.py renewslice SLICENAME
[string Boolean] = omni.py deleteslice SLICENAME
[string listOfSliceURNs] = omni.py listslices USER
[string listOfSliceURNs] = omni.py listmyslices USER
[string listOfProjectDictionaries (PROJECT_URN, PROJECT_UID, PROJECT_ROLE, EXPIRED)] = omni.py listprojects USER
[string listOfProjectDictionaries (PROJECT_URN, PROJECT_UID, PROJECT_ROLE, EXPIRED)] = omni.py listmyprojects USER
[string listOfSSHKeyPairs] = omni.py listmykeys
[string listOfSSHKeyPairs] = omni.py listkeys USER
[string stringCred] = omni.py getusercred
[string string] = omni.py print_slice_expiration SLICENAME
[string dictionary AM URN->dict by sliver URN of silver info] = omni.py listslivers SLICENAME
[string listOfMemberDictionaries (PROJECT_MEMBER (URN), EMAIL, PROJECT_ROLE, PROJECT_MEMBER_UID)] = omni.py listprojectmembers PROJECTNAME
[string listOfMemberDictionaries (KEYS, URN, EMAIL, ROLE)] = omni.py listslicemembers SLICENAME
[string Boolean] = omni.py addslicemember SLICENAME USER [ROLE]
[string Boolean] = omni.py removeslicemember SLICENAME USER
Other functions:
[string dictionary] = omni.py nicknames # List aggregate and rspec nicknames
[string dictionary] = omni.py print_sliver_expirations SLICENAME
"""
import ConfigParser
from copy import deepcopy
import datetime
import inspect
import logging.config
import optparse
import os
import shutil
import sys
import urllib
from .omnilib.util import OmniError, AMAPIError
from .omnilib.handler import CallHandler
from .omnilib.util.handler_utils import validate_url, printNicknames
# Explicitly import framework files so py2exe is happy
from .omnilib.frameworks import framework_apg
from .omnilib.frameworks import framework_base
from .omnilib.frameworks import framework_gcf
from .omnilib.frameworks import framework_gch
from .omnilib.frameworks import framework_gib
from .omnilib.frameworks import framework_of
from .omnilib.frameworks import framework_pg
from .omnilib.frameworks import framework_pgch
from .omnilib.frameworks import framework_sfa
from .omnilib.frameworks import framework_chapi
from .gcf_version import GCF_VERSION
#DEFAULT_RSPEC_LOCATION = "http://www.gpolab.bbn.com/experiment-support"
#DEFAULT_RSPEC_EXTENSION = "xml"
def countSuccess( successList, failList ):
"""Intended to be used with 'renewsliver', 'deletesliver', and
'shutdown' which return a two item tuple as their second
argument. The first item is a list of urns/urls for which it
successfully performed the operation. The second item is a
list of the urns/urls for which it did not successfully
perform the operation. Failure could be due to an actual
error or just simply that there were no such resources
allocated to this sliver at that aggregates. In this context
this method returns a tuple containing the number of items
which succeeded and the number of items attempted.
"""
succNum = len( successList )
return (succNum, succNum + len( failList ) )
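# Minimal usage sketch (hypothetical variables), applied to the second
# return item of e.g. 'deletesliver':
# (succeeded, attempted) = countSuccess(successList, failList)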
def load_agg_nick_config(opts, logger):
"""Load the agg_nick_cache file.
Search path:
- filename from commandline
"""
if opts.noCacheFiles:
logger.debug("Not loading agg_nick_config per option noCacheFiles")
config = {}
if not config.has_key('aggregate_nicknames'):
config['aggregate_nicknames'] = {}
if not config.has_key('omni_defaults'):
config['omni_defaults'] = {}
return config
# the directory of this file
curr_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = curr_dir.rsplit(os.sep,2)[0]
# Load up the config file
configfiles = [os.path.join(parent_dir, 'agg_nick_cache.base')]
aggNickCacheExists = False
# if aggNickCacheName defined on commandline exists, check it first
if os.path.exists( opts.aggNickCacheName ):
configfiles.insert(0, opts.aggNickCacheName)
aggNickCacheExists = True
# get date of current file
if aggNickCacheExists:
aggNickCacheDate = os.path.getmtime(opts.aggNickCacheName)
aggNickCacheTimestamp = datetime.datetime.fromtimestamp(aggNickCacheDate)
else:
aggNickCacheTimestamp = None
# update the file if necessary
if opts.noAggNickCache or (not aggNickCacheTimestamp and not opts.useAggNickCache) or (aggNickCacheTimestamp and aggNickCacheTimestamp < opts.AggNickCacheOldestDate and not opts.useAggNickCache):
update_agg_nick_cache( opts, logger )
# aggNickCacheName may now exist. If so, add it to the front of the list.
if not aggNickCacheExists and os.path.exists( opts.aggNickCacheName ):
configfiles.insert(0, opts.aggNickCacheName)
readConfigFile = False
# Find the first valid config file
for cf in configfiles:
filename = os.path.expanduser(cf)
if os.path.exists(filename):
config = {}
# Did we find a valid config file?
if not os.path.exists(filename):
prtStr = "Could not find agg_nick_cache file: %s"%filename
logger.info( prtStr )
continue
# return config
logger.info("Loading agg_nick_cache file '%s'", filename)
confparser = ConfigParser.RawConfigParser()
try:
confparser.read(filename)
readConfigFile = True
break
except ConfigParser.Error as exc:
logger.error("agg_nick_cache file %s could not be parsed: %s"% (filename, str(exc)))
if not readConfigFile:
logger.error("Failed to read any possible agg_nick_cache file; Check your network connection and/or permissions to read/write '%s'.", opts.aggNickCacheName)
return {}
config = load_aggregate_nicknames( config, confparser, filename, logger, opts )
config = load_omni_defaults( config, confparser, filename, logger, opts )
return config
def locate_config( opts, logger, config={}):
"""Locate the omni config file.
Search path:
- filename from commandline
- in current directory
- in ~/.gcf
- omni_config in current directory
- omni_config in ~/.gcf
"""
# Load up the config file
configfiles = ['omni_config','~/.gcf/omni_config']
if opts.configfile:
# if configfile defined on commandline does not exist, fail
if os.path.exists( opts.configfile ):
configfiles.insert(0, opts.configfile)
else:
# Check maybe the default directory for the file
configfile = os.path.join( os.path.join('~','.gcf'), opts.configfile )
configfile = os.path.normpath(os.path.expanduser( configfile ))
if os.path.exists( configfile ):
configfiles.insert(0, configfile)
else:
logger.error("Config file '%s' or '%s' does not exist"
% (opts.configfile, configfile))
raise OmniError, "Config file '%s' or '%s' does not exist" % (opts.configfile, configfile)
# Find the first valid config file
for cf in configfiles:
filename = os.path.normpath(os.path.expanduser(cf))
if os.path.exists(filename):
break
# Did we find a valid config file?
if not os.path.exists(filename):
prtStr = """ Could not find an omni configuration file in local directory or in ~/.gcf/omni_config
An example config file can be found in the source tarball or on the wiki"""
logger.error( prtStr )
raise OmniError, prtStr
return filename
def load_config(opts, logger, config={}, filename=None):
"""Load the omni_config file specified by the `filename` option.
"""
if filename is None:
filename = locate_config(opts, logger, config)
logger.info("Loading config file '%s'", filename)
confparser = ConfigParser.RawConfigParser()
try:
confparser.read(filename)
except ConfigParser.Error as exc:
logger.error("Config file '%s' could not be parsed: %s"% (filename, str(exc)))
raise OmniError, "Config file '%s' could not be parsed: %s"% (filename, str(exc))
# Load up the omni options
config['logger'] = logger
config['omni'] = {}
for (key,val) in confparser.items('omni'):
config['omni'][key] = val
# Load up the users the user wants us to see
config['users'] = []
if 'users' in config['omni']:
if config['omni']['users'].strip() != '':
for user in config['omni']['users'].split(','):
if user.strip() != '':
d = {}
for (key,val) in confparser.items(user.strip()):
d[key] = val
config['users'].append(d)
config = load_aggregate_nicknames( config, confparser, filename, logger, opts )
config = load_omni_defaults( config, confparser, filename, logger, opts )
# Find rspec nicknames
config['rspec_nicknames'] = {}
# config['default_rspec_location'] = DEFAULT_RSPEC_LOCATION
# config['default_rspec_extension'] = DEFAULT_RSPEC_EXTENSION
if confparser.has_section('rspec_nicknames'):
for (key,val) in confparser.items('rspec_nicknames'):
key = key.strip()
temp = val.strip()
if temp == "":
continue
if key == "default_rspec_location":
config['default_rspec_location'] = temp
elif key == "default_rspec_extension":
config['default_rspec_extension'] = temp
else:
config['rspec_nicknames'][key] = temp
# Load up the framework section
if not opts.framework:
if config['omni'].has_key('default_cf'):
opts.framework = config['omni']['default_cf']
else:
logger.info("No 'default_cf' defined in omni_config. Using 'portal'")
opts.framework = "portal"
# Fill in the project if it is configured
if hasattr(opts,'project') and not opts.project:
if config['omni'].has_key('default_project'):
opts.project = config['omni']['default_project']
# Config of useslicemembers some value of true or false sets the option
if hasattr(opts,'useSliceMembers') and config['omni'].has_key('useslicemembers'):
usm = config['omni']['useslicemembers'].strip().lower()
if usm in ('t', 'true', 'y', 'yes', '1', 'on'):
usm = True
if not opts.useSliceMembers:
logger.info("Setting option 'useSliceMembers' True based on omni_config setting")
opts.useSliceMembers = True
elif usm in ('f', 'false', 'n', 'no', '0', 'off'):
usm = False
if opts.useSliceMembers:
logger.info("Un-Setting option 'useSliceMembers' (set False) based on omni_config setting")
opts.useSliceMembers = False
# Config of ignoreconfigusers some value of true sets the option
if hasattr(opts,'ignoreConfigUsers') and config['omni'].has_key('ignoreconfigusers'):
usm = config['omni']['ignoreconfigusers'].strip().lower()
if usm in ('t', 'true', 'y', 'yes', '1', 'on'):
usm = True
if not opts.ignoreConfigUsers:
logger.info("Setting option 'ignoreConfigUsers' based on omni_config setting")
opts.ignoreConfigUsers = True
logger.info("Using control framework %s" % opts.framework)
# Find the control framework
cf = opts.framework.strip()
if not confparser.has_section(cf):
logger.error("Missing framework '%s' in configuration file" % cf )
raise OmniError, "Missing framework '%s' in configuration file" % cf
# Copy the control framework into a dictionary
config['selected_framework'] = {}
for (key,val) in confparser.items(cf):
config['selected_framework'][key] = val
# This portion of the config is only of interest for `omni-configure`
# but is included here for completeness
if confparser.has_section('omni_configure'):
for (key,val) in confparser.items('omni_configure'):
key = key.strip()
temp = val.strip()
if key == "version":
config['omni_configure_version'] = temp
elif key == "date":
config['omni_configure_date'] = temp
elif key == "files":
files1 = temp.split("\n")
files2 = []
for item in files1:
fdesc,fname,oktodelete = item.split(",")
files2.append((fdesc.strip(),fname.strip(),oktodelete.strip()))
config['omni_configure_files'] = files2
return config
def load_aggregate_nicknames( config, confparser, filename, logger, opts ):
# Find aggregate nicknames
if not config.has_key('aggregate_nicknames'):
config['aggregate_nicknames'] = {}
if confparser.has_section('aggregate_nicknames'):
for (key,val) in confparser.items('aggregate_nicknames'):
temp = val.split(',')
for i in range(len(temp)):
temp[i] = temp[i].strip()
if len(temp) != 2:
logger.warn("Malformed definition of aggregate nickname '%s'. Should be <URN>,<URL> where URN may be empty. Got: %s", key, val)
if len(temp) == 0:
continue
if len(temp) == 1:
# Got 1 entry - if its a valid URL, use it
res = validate_url(temp[0])
if res is None or res.startswith("WARN:"):
t = temp[0]
temp = ["",t]
else:
# not a valid URL. Skip it
logger.warn("Skipping aggregate nickname '%s': '%s' doesn't look like a URL", key, temp[0])
continue
# If temp len > 2: try to use it as is
if config['aggregate_nicknames'].has_key(key):
if config['aggregate_nicknames'][key] == temp:
#logger.debug("AM nickname %s from %s defined identically already", key, filename)
continue
elif temp[0] == "" and config['aggregate_nicknames'][key][1] == temp[1]:
#logger.debug("AM nickname %s from %s already defined and with a URN", key, filename)
continue
else:
logger.debug("Aggregate nickname '%s' being redefined using value from '%s'", key, filename)
logger.debug(" Old: %s=%s. New: %s=%s", config['aggregate_nicknames'][key][0], config['aggregate_nicknames'][key][1], temp[0], temp[1])
# else:
# logger.debug("Loaded aggregate nickname '%s' from file '%s'." % (key, filename))
config['aggregate_nicknames'][key] = temp
return config
def load_omni_defaults( config, confparser, filename, logger, opts ):
# Find Omni defaults in the omni_config
# These are values that should over-ride any hard coded defaults
# But if the incoming config already has a value for this, don't replace that.
# In practice this should mean that values in the agg_nick_cache
# over-ride values in a user's custom omni_config.
# So this should only be used for setup values
# That can be over-ridden with other omni_config settings
# or commandline options.
if not config.has_key('omni_defaults'):
config['omni_defaults'] = {}
if confparser.has_section('omni_defaults'):
for (key,val) in confparser.items('omni_defaults'):
val = val.strip()
key = key.strip()
if config['omni_defaults'].has_key(key):
if config['omni_defaults'][key] == val:
# logger.debug("Ignoring omni_default '%s' from '%s': using earlier identical config setting", key, filename)
continue
else:
logger.debug("Ignoring omni_default '%s' from '%s': using earlier different config setting", key, filename)
logger.debug(" Current: %s=%s. Ignored: %s", key, config['omni_defaults'][key], val)
continue
# else:
# logger.debug("Loaded omni default '%s' from file '%s'." % (key, filename))
config['omni_defaults'][key] = val
return config
def load_framework(config, opts):
"""Select the Control Framework to use from the config, and instantiate the proper class."""
cf_type = config['selected_framework']['type']
config['logger'].debug('Using framework type %s', cf_type)
# Compute the module path leading up to where we will find frameworks, so that we can live
# inside the standard omni/gcf distribution, or deeper inside a larger package
prefix = ".".join(__name__.split(".")[:-1])
framework_mod = __import__('%s.omnilib.frameworks.framework_%s' % (prefix, cf_type), fromlist=['%s.omnilib.frameworks' % (prefix)])
config['selected_framework']['logger'] = config['logger']
framework = framework_mod.Framework(config['selected_framework'], opts)
return framework
def update_agg_nick_cache( opts, logger ):
"""Try to download the definitive version of `agg_nick_cache` and
store in the specified place."""
tmpcache = None
try:
import tempfile
handle, tmpcache = tempfile.mkstemp()
os.close(handle)
# make sure the directory containing --aggNickCacheName exists
# wget `agg_nick_cache`
# cp `agg_nick_cache` opts.aggNickCacheName
directory = os.path.dirname(opts.aggNickCacheName)
if not os.path.exists( directory ):
os.makedirs( directory )
urllib.urlretrieve( opts.aggNickDefinitiveLocation, tmpcache )
good = False
if os.path.exists(tmpcache) and os.path.getsize(tmpcache) > 0:
if os.path.exists(opts.aggNickCacheName) and os.path.getsize(opts.aggNickCacheName) > 0:
tmpsize = os.path.getsize(tmpcache)
oldsize = os.path.getsize(opts.aggNickCacheName)
if tmpsize / oldsize > 10 or oldsize / tmpsize > 10:
# If the size changed dramatically, then assume the new one is broken.
# Of course, it could be that the old one is broken...
logger.info("Download of latest `agg_nick_cache` from '%s' seems broken (size is wrong). Keeping old cache.", opts.aggNickDefinitiveLocation)
logger.debug("Old cache '%s' size: %d. New temp '%s' size: %d", opts.aggNickCacheName, oldsize, tmpcache, tmpsize)
else:
# Size didn't change dramatically
good = True
else:
# No previous cache - use the new one
good = True
else:
logger.info("Download of latest `agg_nick_cache` from '%s' seems broken (no or empty file). Keeping old cache.", opts.aggNickDefinitiveLocation)
logger.debug("Temp file: '%s'. Exists? %s", tmpcache, os.path.exists(tmpcache))
if good:
# On Windows, rename doesn't delete any existing file, so explicitly delete the old one first
# And shutil.move also wants the destination to be gone
try:
os.unlink(opts.aggNickCacheName)
except:
pass
shutil.move(tmpcache, opts.aggNickCacheName)
logger.info("Downloaded latest `agg_nick_cache` from '%s' and copied to '%s'." % (opts.aggNickDefinitiveLocation, opts.aggNickCacheName))
except Exception, e:
logger.info("Attempted to download latest `agg_nick_cache` from '%s' but could not." % opts.aggNickDefinitiveLocation )
logger.debug(e)
finally:
try:
os.unlink(tmpcache)
except:
pass
# Check if there is a newer version of Omni available.
# Look for an entry "latest_omni_version" under "omni_defaults" in the omni_config (or really, agg_nick_cache).
# Expected format is "#,Message" EG: "2.8,Omni 2.8 was release 2/1/2015". No commas in the message.
# If a newer version is available, log a message at INFO level.
def checkForUpdates(config, logger):
if not config or not config.has_key('omni_defaults') or not config['omni_defaults'].has_key('latest_omni_version') or config['omni_defaults']['latest_omni_version'] is None:
logger.debug("No latest Omni version found in config")
return False
latestStr = str(config['omni_defaults']['latest_omni_version']).strip()
latestVals = latestStr.split(',')
if len(latestVals) == 0:
logger.debug("Failed to find any values in latest_omni_version: %s", latestStr)
return False
if latestVals[0].strip() == GCF_VERSION.strip():
logger.debug("Already running latest GCF: %s", GCF_VERSION)
return False
import re
def natSort(s, _nsre=re.compile('([0-9]+)')):
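# natural-sort key: split into digit and non-digit runs so that e.g. '2.10' compares greater than '2.9'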
return [int(text) if text.isdigit() else text.lower()
for text in re.split(_nsre, s)]
latest = max(latestVals[0].strip(), GCF_VERSION, key=natSort)
if latest == GCF_VERSION.strip():
logger.debug("Running a newer version of Omni than the last release. Running %s > %s", GCF_VERSION, latestVals[0])
return False
logger.debug("New Omni version available: %s > %s", latestVals[0], GCF_VERSION)
if len(latestVals) > 1:
logger.info(latestVals[1])
else:
logger.info("A new version of Omni is available: Version %s", latestVals[0])
return True
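# --- Editor's note: the helper below is an illustrative sketch, not part of
# the original tool; its name is hypothetical. It shows why checkForUpdates
# compares versions with a natural-sort key (natSort) rather than plain
# string comparison, which mis-orders dotted versions like '2.9' vs '2.10'.
def _demo_natural_version_compare():
    import re
    def natSort(s, _nsre=re.compile('([0-9]+)')):
        return [int(text) if text.isdigit() else text.lower()
                for text in re.split(_nsre, s)]
    # Natural sort treats digit runs numerically, so 10 > 9
    assert max('2.9', '2.10', key=natSort) == '2.10'
    # Plain lexical max gets it wrong: character '9' > character '1'
    assert max('2.9', '2.10') == '2.9'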
def initialize(argv, options=None, dictLoggingConfig=None ):
"""Parse argv (list) into the given optional optparse.Values object options.
(Supplying an existing options object allows pre-setting certain values not in argv.)
Then configure logging per those options.
Then load the omni_config file
Then initialize the control framework.
Return the framework, config, args list, and optparse.Values struct."""
opts, args = parse_args(argv, options)
logger = configure_logging(opts, dictLoggingConfig)
if "--useSliceMembers" in argv:
logger.info("Option --useSliceMembers is no longer necessary and is now deprecated, as that behavior is now the default. This option will be removed in a future release.")
config = load_agg_nick_config(opts, logger)
# Load custom config _after_ system agg_nick_cache,
# which also sets omni_defaults
config = load_config(opts, logger, config)
checkForUpdates(config, logger)
framework = load_framework(config, opts)
logger.debug('User Cert File: %s', framework.cert)
return framework, config, args, opts
####
def call(argv, options=None, verbose=False, dictLoggingConfig=None):
"""Method to use when calling omni as a library
argv is a list ala sys.argv
options is an optional optparse.Values structure like you get from parser.parse_args
Use this to pre-set certain values, or allow your caller to get omni options from its commandline
Verbose option allows printing the command and summary, or suppressing it.
dictLoggingConfig is a Python logging configuration dictionary for configuring logging. If
not supplied, any logging config filename provided using the option --logconfig will be applied.
Callers can control omni logs (suppressing console printing for example) using python logging.
Return is a list of 2 items: a human readable string summarizing the result
(possibly an error message), and the result object (may be None on error). The result
object type varies by underlying command called.
Can call functions like this:
User does: myscript.py -f my_sfa --myScriptPrivateOption describe ahtest-describe-emulab-net.json
Your myscript.py code does:
import os
import pprint
import re
import sys
import gcf.oscript as omni
from .omnilib.util.files import *
from .omnilib.util.omnierror import OmniError
################################################################################
# Requires that you have omni installed or the path to gcf/src in your
# PYTHONPATH.
#
# For example put the following in your bashrc:
# export PYTHONPATH=${PYTHONPATH}:path/to/gcf/src
#
################################################################################
def main(argv=None):
##############################################################################
# Get a parser from omni that understands omni options
##############################################################################
parser = omni.getParser()
# update usage for help message
omni_usage = parser.get_usage()
parser.set_usage(omni_usage+"\nmyscript.py supports additional commands.\n\n\tCommands and their arguments are:\n\t\t\t[add stuff here]")
##############################################################################
# Add additional optparse.OptionParser style options for your
# script as needed.
# Be sure not to re-use options already in use by omni for
# different meanings, otherwise you'll raise an OptionConflictError
##############################################################################
parser.add_option("--myScriptPrivateOption",
help="A non-omni option added by %s"%sys.argv[0],
action="store_true", default=False)
# options is an optparse.Values object, and args is a list
options, args = omni.parse_args(sys.argv[1:], parser=parser)
if options.myScriptPrivateOption:
# do something special for your private script's options
print "Got myScriptOption"
##############################################################################
# Try to read 2nd argument as an RSpec filename. Pull the AM URL and
# and maybe slice name from that file.
# Then construct omni args appropriately: command, slicename, action or rspecfile or datetime
##############################################################################
omniargs = []
if args and len(args)>1:
sliceurn = None
# Try to read args[1] as an RSpec filename to read
rspecfile = args[1]
rspec = None
if rspecfile:
print "Looking for slice name and AM URL in RSpec file %s" % rspecfile
try:
rspec = readFile(rspecfile)
except:
print "Failed to read rspec from %s" % rspecfile
if rspec:
# Now parse the comments, which look like this:
#<!-- Resources at AM:
# URN: unspecified_AM_URN
# URL: https://localhost:8001
# -->
# Reserved resources for:\n\tSlice: %s
# at AM:\n\tURN: %s\n\tURL: %s
if not ("Resources at AM" in rspec or "Reserved resources for" in rspec):
sys.exit("Could not find slice name or AM URL in RSpec %s" % rspec)
amurn = None
amurl = None
# Pull out the AM URN and URL
match = re.search(r"at AM:\n\tURN: (\S+)\n\tURL: (\S+)\n", rspec)
if match:
amurn = match.group(1)
amurl = match.group(2)
print " Found AM %s (%s)" % (amurn, amurl)
omniargs.append("-a")
omniargs.append(amurl)
# Pull out the slice name or URN if any
if "Reserved resources for" in rspec:
match = re.search(r"Reserved resources for:\n\tSlice: (\S+)\n\t", rspec)
if match:
sliceurn = match.group(1)
print " Found slice %s" % sliceurn
command = args[0]
rest = []
if len(args) > 2:
rest = args[2:]
# If the command requires a slice and we didn't get a readable rspec from the rspecfile,
# Then treat that as the slice
if not sliceurn and rspecfile and not rspec:
sliceurn = rspecfile
rspecfile = None
# construct the args in order
omniargs.append(command)
if sliceurn:
omniargs.append(sliceurn)
if rspecfile and command.lower() in ('createsliver', 'allocate'):
omniargs.append(rspecfile)
for arg in rest:
omniargs.append(arg)
elif len(args) == 1:
omniargs = args
else:
print "Got no command or rspecfile. Run '%s -h' for more information."%sys.argv[0]
return
##############################################################################
# And now call omni, and omni sees your parsed options and arguments
##############################################################################
print "Call Omni with args %s:\n" % omniargs
try:
text, retItem = omni.call(omniargs, options)
except OmniError, oe:
sys.exit("\nOmni call failed: %s" % oe)
print "\nGot Result from Omni:\n"
# Process the dictionary returned in some way
if isinstance(retItem, dict):
import json
print json.dumps(retItem, ensure_ascii=True, indent=2)
else:
print pprint.pformat(retItem)
# Give the text back to the user
print text
if type(retItem) == type({}):
numItems = len(retItem.keys())
elif type(retItem) == type([]):
numItems = len(retItem)
elif retItem is None:
numItems = 0
else:
numItems = 1
if numItems:
print "\nThere were %d item(s) returned." % numItems
if __name__ == "__main__":
sys.exit(main())
This is equivalent to: ./omni.py -a <AM URL> describe <slicename>
"""
if options is not None and not options.__class__==optparse.Values:
raise OmniError("Invalid options argument to call: must be an optparse.Values object")
if argv is None or not type(argv) == list:
raise OmniError("Invalid argv argument to call: must be a list")
framework, config, args, opts = initialize(argv, options, dictLoggingConfig)
# process the user's call
return API_call( framework, config, args, opts, verbose=verbose )
def getOptsUsed(parser, opts, logger=None):
'''Get string to print out the options supplied'''
#sys.argv when called as a library is
# uninteresting/misleading. So args is better, but this misses
# the options.
# We print here all non-default options
nondef = ""
for attr in dir(opts):
import types
if attr.startswith("_"):
continue
if isinstance(getattr(opts, attr), types.MethodType):
continue
# if the parser has no option with a dest==attr,
# then continue
# This means that the user supplied an option the parser didn't
# handle, and typically there would have been an error,
# but let's not complain here
has = False
for opt in parser.option_list:
if opt.dest == attr:
has=True
break
if has == False:
for group in parser.option_groups:
for opt in group.option_list:
if opt.dest == attr:
has = True
break
if has:
break
if not has:
continue
if (not parser.defaults.has_key(attr)) or (parser.defaults[attr] != getattr(opts, attr)):
# If default is a relative path we expanded,
# then it looks like it changed here. So try expanding
# any defaults to see if that makes it match
try:
defVal = parser.defaults[attr]
defVal = os.path.normcase(os.path.expanduser(defVal))
if defVal == getattr(opts, attr):
continue
except:
pass
# non-default value
nondef += "\n\t\t" + attr + ": " + str(getattr(opts, attr))
if nondef != "":
nondef = "\n Options as run:" + nondef + "\n\n "
return nondef
def API_call( framework, config, args, opts, verbose=False ):
"""Call the function from the given args list.
Apply the options from the given optparse.Values opts argument
If verbose, print the command and the summary.
Return is a list of 2 items: a human readable string summarizing the result
(possibly an error message), and the result object (may be None on error). The result
object type varies by underlying command called.
"""
logger = config['logger']
if opts.debug:
logger.info(getSystemInfo() + "\nOmni: " + getOmniVersion())
if len(args) > 0 and args[0].lower() == "nicknames":
result = printNicknames(config, opts)
else:
# Process the user's call
handler = CallHandler(framework, config, opts)
# Returns string, item
result = handler._handle(args)
if result is None:
retVal = None
retItem = None
elif len(result)==2:
retVal, retItem = result
else:
retVal = result
retItem = None
# Print the summary of the command result
if verbose:
nondef = getOptsUsed(getParser(), opts, logger)
cmd = None
if len(args) > 0:
cmd = args[0]
s = "Completed " + cmd + ":\n" + nondef + "Args: "+" ".join(args)+"\n\n Result Summary: " + str(retVal)
headerLen = (70 - (len(s) + 2)) / 4
header = "- "*headerLen+" "+s+" "+"- "*headerLen
logger.info( " " + "-"*54 )
logger.info( header )
# printed not logged so can redirect output to a file
#logger.info(retVal)
# logger.info( " " + "="*54 )
# print retItem
logger.info( " " + "="*54 )
# end of if verbose
return retVal, retItem
def configure_logging(opts, dictConfig=None):
"""Configure logging. If a logging config dictionary is supplied, configuring Logging using that.
Else, if a log config filename is supplied with the -l option,
and the file is non-empty, configure logging from that file. For details on this,
see the applyLogConfig documentation.
Otherwise, use a basic config, with INFO level by default,
DEBUG level if opts.debug, INFO if opts.info, etc.
Return a logger for 'omni'."""
# Warning: If Omni is used as a library, and the caller did some logging configuration,
# then the call here to logging.basicConfig(level) will do nothing. In particular, it will not reset
# the log level based on the options supplied to Omni. The caller should supply a separate logging config
# file, or use e.g. logging.disable(logging.INFO) before calling omni. and logging.disable(logging.NOTSET) after
level = logging.INFO
optlevel = 'INFO'
# If log level was specified in options, use it. Most verbose
# level is used. Note that at ERROR and WARN levels, command
# outputs (like manifests) are not printed: use -o.
if opts.error:
level = logging.ERROR
optlevel = 'ERROR'
if opts.warn:
level = logging.WARN
optlevel = 'WARNING'
if opts.info:
level = logging.INFO
optlevel = 'INFO'
if opts.debug:
level = logging.DEBUG
optlevel = 'DEBUG'
deft = {}
# Add the ability to use %(logfilename)s in the logging config
# file
deft['logfilename'] = opts.logoutput
error = None # error raised configuring from given dictionary
if not opts.noLoggingConfiguration:
if dictConfig is not None:
# Try to configure logging from the given object
# Note this raises an exception if it fails (a ValueError, TypeError, AttributeError or ImportError)
# Also note this only works in python2.7+
logging.config.dictConfig(dictConfig)
elif opts.logconfig:
deft['optlevel'] = optlevel
applyLogConfig(opts.logconfig, defaults=deft)
else:
# Ticket 296: Add timestamps to log messages
# fmt = '%(asctime)s %(levelname)-8s %(name)s: %(message)s'
fmt = '%(asctime)s %(levelname)-8s: %(message)s'
logging.basicConfig(level=level,format=fmt,datefmt='%H:%M:%S')
logger = logging.getLogger("omni")
if dictConfig is not None and not opts.noLoggingConfiguration:
logger.debug("Configured logging from dictionary")
return logger
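# --- Editor's note: the helper below is an illustrative sketch, not part of
# the original tool; its name and the sample 'getversion' invocation are
# hypothetical. It shows how a caller using Omni as a library might pass a
# logging dictionary via call()'s dictLoggingConfig parameter, so that
# configure_logging above applies logging.config.dictConfig (python2.7+)
# instead of basicConfig.
def _demo_call_with_dict_logging():
    log_cfg = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {'brief': {'format': '%(levelname)-8s: %(message)s'}},
        'handlers': {'console': {'class': 'logging.StreamHandler',
                                 'formatter': 'brief'}},
        'root': {'level': 'INFO', 'handlers': ['console']},
    }
    # Returns the usual (summary text, result object) pair from call()
    return call(['getversion'], dictLoggingConfig=log_cfg)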
def applyLogConfig(logConfigFilename, defaults={'optlevel': 'INFO'}):
"""Change the logging configuration to that in the specified file, if found.
Affects all uses of python logging in this process.
Existing loggers are not modified, unless they are explicitly named
in the logging config file (they or their ancestor, not 'root').
Tries hard to find the file, and does nothing if not found.
'defaults' is a dictionary in ConfigParser format that sets variables
for use in the config files. Specifically,
use this to set 'optlevel' to the basic logging level desired: INFO is the default.
For help creating a logging config file,
see http://docs.python.org/library/logging.config.html#configuration-file-format
and see the sample 'omni_log_conf_sample.conf'
From a script, you can override the -l argument to change the log level.
Alternatively, you can call this function during omni operations.
Sample usage from a script:
# Configure logging based on command line options, using any -l specified file
framework, config, args, opts = omni.initialize(omniargs, options)
text, retItem = omni.API_call( framework, config, args, opts )
# Without changing commandline args, reset the logging config
omni.applyLogConfig("examples/myLogConfig.conf")
# <Here your script resets 'args' to give a different command>
# Then make the call for the new command, using the new log level
text, retItem = omni.API_call( framework, config, args, opts )
"""
fns = [logConfigFilename, os.path.join('src', logConfigFilename), os.path.expanduser(logConfigFilename), os.path.join('.', logConfigFilename), os.path.abspath(logConfigFilename)]
found = False
for fn in fns:
if os.path.exists(fn) and os.path.getsize(fn) > 0:
# Only new loggers get the parameters in the config file.
# If disable_existing is True(default), then existing loggers are disabled,
# unless they (or ancestors, not 'root') are explicitly listed in the config file.
logging.config.fileConfig(fn, defaults=defaults, disable_existing_loggers=False)
logging.info("Configured logging from file %s", fn)
found = True
break
if not found:
logging.warn("Failed to find log config file %s", logConfigFilename)
def getSystemInfo():
import platform
pver = platform.python_implementation() + " " + platform.python_version()
osinfo = platform.platform()
return "Python: " + pver + "\nOS: " + osinfo
def getOmniVersion():
version ="GENI Omni Command Line Aggregate Manager Tool Version %s" % GCF_VERSION
version +="\nCopyright (c) 2011-2015 Raytheon BBN Technologies"
return version
def getParser():
"""Construct an Options Parser for parsing omni arguments.
Do not actually parse anything"""
usage = "\n" + getOmniVersion() + "\n\n%prog [options] [--project <proj_name>] <command and arguments> \n\
\n \t Commands and their arguments are: \n\
\t\tAM API functions: \n\
\t\t\t getversion \n\
\t\t\t listresources [In AM API V1 and V2 optional: slicename] \n\
\t\t\t describe slicename [AM API V3 only] \n\
\t\t\t createsliver <slicename> <rspec URL, filename, or nickname> [AM API V1&2 only] \n\
\t\t\t allocate <slicename> <rspec URL, filename, or nickname> [AM API V3 only] \n\
\t\t\t provision <slicename> [AM API V3 only] \n\
\t\t\t performoperationalaction <slicename> <action> [AM API V3 only] \n\
\t\t\t poa <slicename> <action> \n\
\t\t\t\t [alias for 'performoperationalaction'; AM API V3 only] \n\
\t\t\t sliverstatus <slicename> [AM API V1&2 only]\n\
\t\t\t status <slicename> [AM API V3 only]\n\
\t\t\t renewsliver <slicename> <new expiration time in UTC> [AM API V1&2 only] \n\
\t\t\t renew <slicename> <new expiration time in UTC> [AM API V3 only] \n\
\t\t\t deletesliver <slicename> [AM API V1&2 only] \n\
\t\t\t delete <slicename> [AM API V3 only] \n\
\t\t\t shutdown <slicename> \n\
\t\t\t update <slicename> <rspec URL, filename, or nickname> [Some AM API V3 AMs only] \n\
\t\t\t cancel <slicename> [Some AM API V3 AMs only] \n\
\t\tNon AM API aggregate functions (supported by some aggregates): \n\
\t\t\t createimage <slicename> <imagename> [optional: false (keep image private)] -u <sliver urn> [ProtoGENI/InstaGENI only] \n\
\t\t\t snapshotimage <slicename> <imagename> [optional: false (keep image private)] -u <sliver urn> [ProtoGENI/InstaGENI only] \n\
\t\t\t\t [alias for 'createimage'] \n\
\t\t\t deleteimage <imageurn> [optional: creatorurn] [ProtoGENI/InstaGENI only] \n\
\t\t\t listimages [optional: creatorurn] [ProtoGENI/InstaGENI only] \n\
\t\tClearinghouse / Slice Authority functions: \n\
\t\t\t get_ch_version \n\
\t\t\t listaggregates \n\
\t\t\t createslice <slicename> \n\
\t\t\t getslicecred <slicename> \n\
\t\t\t renewslice <slicename> <new expiration time in UTC> \n\
\t\t\t deleteslice <slicename> \n\
\t\t\t listslices [optional: username] [Alias for listmyslices]\n\
\t\t\t listmyslices [optional: username] \n\
\t\t\t listprojects [optional: username] [Alias for listmyprojects]\n\
\t\t\t listmyprojects [optional: username] \n\
\t\t\t listmykeys [optional: username] [Alias for listkeys]\n\
\t\t\t listkeys [optional: username]\n\
\t\t\t getusercred \n\
\t\t\t print_slice_expiration <slicename> \n\
\t\t\t listslivers <slicename> \n\
\t\t\t listprojectmembers <projectname> \n\
\t\t\t listslicemembers <slicename> \n\
\t\t\t addslicemember <slicename> <username> [optional: role] \n\
\t\t\t removeslicemember <slicename> <username> \n\
\t\tOther functions: \n\
\t\t\t nicknames \n\
\t\t\t print_sliver_expirations <slicename> \n\
\n\t See README-omni.txt for details.\n\
\t And see the Omni website at http://trac.gpolab.bbn.com/gcf"
parser = optparse.OptionParser(usage=usage, version="%prog: " + getOmniVersion())
# Basics
basicgroup = optparse.OptionGroup( parser, "Basic and Most Used Options")
basicgroup.add_option("-a", "--aggregate", metavar="AGGREGATE_URL", action="append",
help="Communicate with a specific aggregate")
basicgroup.add_option("--available", dest='geni_available',
default=False, action="store_true",
help="Only return available resources")
basicgroup.add_option("-c", "--configfile",
help="Config file name (aka `omni_config`)", metavar="FILE")
basicgroup.add_option("-f", "--framework", default=os.getenv("GENI_FRAMEWORK", ""),
help="Control framework to use for creation/deletion of slices")
basicgroup.add_option("-r", "--project",
help="Name of project. (For use with pgch framework.)")
basicgroup.add_option("--alap", action="store_true", default=False,
help="Request slivers be renewed as close to the requested time as possible, instead of failing if the requested time is not possible. Default is False.")
# Note that type and version are case-insensitive strings.
# This causes setting options.explicitRSpecVersion as well
basicgroup.add_option("-t", "--rspectype", nargs=2, default=["GENI", '3'], metavar="RSPEC-TYPE RSPEC-VERSION",
help="RSpec type and version to return, default: '%default'")
# This goes in options.api_version. Also causes setting options.explicitAPIVersion
basicgroup.add_option("-V", "--api-version", type="int", default=2,
help="Specify version of AM API to use (default v%default)")
basicgroup.add_option("--useSliceAggregates", default=False, action="store_true",
help="Perform the slice action at all aggregates the given slice is known to use according to clearinghouse records. Default is %default.")
parser.add_option_group( basicgroup )
# AM API v3 specific
v3group = optparse.OptionGroup( parser, "AM API v3+",
"Options used in AM API v3 or later" )
v3group.add_option("--best-effort", dest='geni_best_effort',
default=False, action="store_true",
help="Should AMs attempt to complete the operation on only some slivers, if others fail")
v3group.add_option("--cred", action='append', metavar="CRED_FILENAME",
help="Send credential in given filename with any call that takes a list of credentials")
v3group.add_option("--end-time", dest='geni_end_time',
help="Requested end time for any newly allocated or provisioned slivers - may be ignored by the AM")
v3group.add_option("--start-time", dest='geni_start_time',
help="Requested start time for any allocated slivers - NOW if not provided, could be for future reservations")
# Sample options file content:
#{
# "option_name_1": "value",
# "option_name_2": {"complicated_dict" : 37},
# "option_name_3": 67
#}
v3group.add_option("--optionsfile", metavar="JSON_OPTIONS_FILENAME",
help="Send all options defined in named JSON format file to methods that take options")
v3group.add_option("--speaksfor", metavar="USER_URN",
help="Supply given URN as user we are speaking for in Speaks For option")
v3group.add_option("-u", "--sliver-urn", dest="slivers", action="append",
help="Sliver URN (not name) on which to act. Supply this option multiple times for multiple slivers, or not at all to apply to the entire slice")
# For Update. See http://groups.geni.net/geni/wiki/GAPI_AM_API_DRAFT/Adopted#ChangestoDescribe
v3group.add_option("--cancelled", action="store_true", default=False,
help="Should Describe show sliver state of only geni_provisioned slivers, ignoring any geni_updating and geni_allocated slivers (default %default)")
parser.add_option_group( v3group )
# logging levels
loggroup = optparse.OptionGroup( parser, "Logging and Verboseness",
"Control the amount of output to the screen and/or to a log" )
loggroup.add_option("-q", "--quiet", default=True, action="store_false", dest="verbose",
help="Turn off verbose command summary for omni commandline tool")
loggroup.add_option("-v", "--verbose", default=True, action="store_true",
help="Turn on verbose command summary for omni commandline tool")
loggroup.add_option("--debug", action="store_true", default=False,
help="Enable debugging output. If multiple loglevel are set from commandline (e.g. --debug, --info) the more verbose one will be preferred.")
loggroup.add_option("--info", action="store_true", default=False,
help="Set logging to INFO.If multiple loglevel are set from commandline (e.g. --debug, --info) the more verbose one will be preferred.")
loggroup.add_option("--warn", action="store_true", default=False,
help="Set log level to WARN. This won't print the command outputs, e.g. manifest rspec, so use the -o or the --outputfile options to save it to a file. If multiple loglevel are set from commandline (e.g. --debug, --info) the more verbose one will be preferred.")
loggroup.add_option("--error", action="store_true", default=False,
help="Set log level to ERROR. This won't print the command outputs, e.g. manifest rspec, so use the -o or the --outputfile options to save it to a file.If multiple loglevel are set from commandline (e.g. --debug, --info) the more verbose one will be preferred.")
loggroup.add_option("--verbosessl", default=False, action="store_true",
help="Turn on verbose SSL / XMLRPC logging")
loggroup.add_option("-l", "--logconfig", default=None,
help="Python logging config file. Default: '%default'")
loggroup.add_option("--logoutput", default='omni.log',
help="Python logging output file [use %(logfilename)s in logging config file]. Default: '%default'")
loggroup.add_option("--tostdout", default=False, action="store_true",
help="Print results like rspecs to STDOUT instead of to log stream")
loggroup.add_option("--noLoggingConfiguration", default=False, action="store_true",
help="Do not configure python logging; for use by other tools.")
parser.add_option_group( loggroup )
# output to files
filegroup = optparse.OptionGroup( parser, "File Output",
"Control name of output file and whether to output to a file" )
filegroup.add_option("-o", "--output", default=False, action="store_true",
help="Write output of many functions (getversion, listresources, allocate, status, getslicecred,...) , to a file (Omni picks the name)")
filegroup.add_option("-p", "--prefix", default=None, metavar="FILENAME_PREFIX",
help="Filename prefix when saving results (used with -o, not --usercredfile, --slicecredfile, or --outputfile)")
# If this next option is set, then options.output is also set
filegroup.add_option("--outputfile", default=None, metavar="OUTPUT_FILENAME",
help="Name of file to write output to (instead of Omni picked name). '%a' will be replaced by servername, '%s' by slicename if any. Implies -o. Note that for multiple aggregates, without a '%a' in the name, only the last aggregate output will remain in the file. Will ignore -p.")
filegroup.add_option("--usercredfile", default=os.getenv("GENI_USERCRED", None), metavar="USER_CRED_FILENAME",
help="Name of user credential file to read from if it exists, or save to when running like '--usercredfile " +
"myUserCred.xml -o getusercred'. Defaults to value of 'GENI_USERCRED' environment variable if defined.")
filegroup.add_option("--slicecredfile", default=os.getenv("GENI_SLICECRED", None), metavar="SLICE_CRED_FILENAME",
help="Name of slice credential file to read from if it exists, or save to when running like '--slicecredfile " +
"mySliceCred.xml -o getslicecred mySliceName'. Defaults to value of 'GENI_SLICECRED' environment variable if defined.")
parser.add_option_group( filegroup )
# GetVersion
gvgroup = optparse.OptionGroup( parser, "GetVersion Cache",
"Control GetVersion Cache" )
gvgroup.add_option("--NoGetVersionCache", dest='noGetVersionCache',
default=False, action="store_true",
help="Disable using cached GetVersion results (forces refresh of cache)")
gvgroup.add_option("--ForceUseGetVersionCache", dest='useGetVersionCache',
default=False, action="store_true",
help="Require using the GetVersion cache if possible (default false)")
# This causes setting options.GetVersionCacheOldestDate
gvgroup.add_option("--GetVersionCacheAge", dest='GetVersionCacheAge',
default=7,
help="Age in days of GetVersion cache info before refreshing (default is %default)")
gvgroup.add_option("--GetVersionCacheName", dest='getversionCacheName',
default="~/.gcf/get_version_cache.json",
help="File where GetVersion info will be cached, default is %default")
gvgroup.add_option("--noCacheFiles", default=False, action="store_true",
help="Disable both GetVersion and Aggregate Nickname cache functionality completely; no files are downloaded, saved, or loaded.")
parser.add_option_group( gvgroup )
# AggNick
angroup = optparse.OptionGroup( parser, "Aggregate Nickname Cache",
"Control Aggregate Nickname Cache" )
angroup.add_option("--NoAggNickCache", dest='noAggNickCache',
default=False, action="store_true",
help="Disable using cached AggNick results and force refresh of cache (default is %default)")
angroup.add_option("--ForceUseAggNickCache", dest='useAggNickCache',
default=False, action="store_true",
help="Require using the AggNick cache if possible (default %default)")
# This causes setting options.AggNickCacheOldestDate
angroup.add_option("--AggNickCacheAge", dest='AggNickCacheAge',
default=1,
help="Age in days of AggNick cache info before refreshing (default is %default)")
angroup.add_option("--AggNickCacheName", dest='aggNickCacheName',
default="~/.gcf/agg_nick_cache",
help="File where AggNick info will be cached, default is %default")
angroup.add_option("--AggNickDefinitiveLocation", dest='aggNickDefinitiveLocation',
default="http://trac.gpolab.bbn.com/gcf/raw-attachment/wiki/Omni/agg_nick_cache",
help="Website with latest agg_nick_cache, default is %default. To force Omni to read this cache, delete your local AggNickCache or use --NoAggNickCache.")
parser.add_option_group( angroup )
# Development / Advanced
devgroup = optparse.OptionGroup( parser, "For Developers / Advanced Users",
"Features only needed by developers or advanced users" )
devgroup.add_option("--useSliceMembers", default=True, action="store_true",
help="DEPRECATED - this option no longer has any effect. The option is always true, unless you specify --noSliceMembers.")
devgroup.add_option("--noSliceMembers", default=False, action="store_true",
help="Reverse of --useSliceMembers. Do NOT create accounts or install slice members' SSH keys on reserved resources in createsliver, provision or performoperationalaction. Default is %default. " + \
"When specified, only users from your omni_config are used (unless --ignoreConfigUsers).")
devgroup.add_option("--ignoreConfigUsers", default=False, action="store_true",
help="Ignore users and SSH keys listed in your omni_config when installing SSH keys on resources in createsliver or provision or " + \
"performoperationalaction. Default is false - your omni_config users are read and used.")
devgroup.add_option("--ssltimeout", default=360, action="store", type="float",
help="Seconds to wait before timing out AM and CH calls. Default is %default seconds.")
devgroup.add_option("--noExtraCHCalls", default=False, action="store_true",
help="Disable extra Clearinghouse calls like reporting slivers. Default is %default.")
devgroup.add_option("--devmode", default=False, action="store_true",
help="Run in developer mode: more verbose, less error checking of inputs")
devgroup.add_option("--raise-error-on-v2-amapi-error", dest='raiseErrorOnV2AMAPIError',
default=False, action="store_true",
help="In AM API v2, if an AM returns a non-0 (failure) result code, raise an AMAPIError. Default is %default. For use by scripts.")
devgroup.add_option("--maxBusyRetries", default=4, action="store", type="int",
help="Max times to retry AM or CH calls on getting a 'busy' error. Default: %default")
devgroup.add_option("--no-compress", dest='geni_compressed',
default=True, action="store_false",
help="Do not compress returned values")
devgroup.add_option("--abac", default=False, action="store_true",
help="Use ABAC authorization")
devgroup.add_option("--arbitrary-option", dest='arbitrary_option',
default=False, action="store_true",
help="Add an arbitrary option to ListResources (for testing purposes)")
devgroup.add_option("--no-ssl", dest="ssl", action="store_false",
default=True, help="do not use ssl")
devgroup.add_option("--no-tz", default=False, action="store_true",
help="Do not send timezone on RenewSliver")
devgroup.add_option("--orca-slice-id", dest="orca_slice_id",
help="Use the given Orca slice id")
parser.add_option_group( devgroup )
return parser
def parse_args(argv, options=None, parser=None):
"""Parse the given argv list using the Omni optparse.OptionParser, or the parser supplied if given.
Fill options into the given option optparse.Values object if supplied.
"""
if options is not None and not options.__class__==optparse.Values:
raise OmniError("Invalid options argument to parse_args: must be an optparse.Values object")
elif options is not None:
# The caller, presumably a script, gave us an optparse.Values storage object.
# Passing this object to parser.parse_args replaces the storage - it is passed
# by reference. Callers may not expect that. In particular, multiple calls in
# separate threads will conflict.
# Make a deep copy
options = deepcopy(options)
if parser is not None and not isinstance(parser, optparse.OptionParser):
raise OmniError("parse_args got invalid parser: %s." % parser)
if parser is None:
parser = getParser()
if argv is None:
# prints to stderr
parser.print_help()
return None, []
(options, args) = parser.parse_args(argv, options)
# Set an option indicating if the user explicitly requested the RSpec version
options.ensure_value('explicitRSpecVersion', False)
options.explicitRSpecVersion = ('-t' in argv or '--rspectype' in argv)
# Set an option indicating if the user explicitly requested the API version
options.ensure_value('explicitAPIVersion', False)
# FIXME: Do something more extensible...
options.explicitAPIVersion = ('-V' in argv or '--api-version' in argv or '-V1' in argv or '-V2' in argv or '-V3' in argv or '-V4' in argv or '-V5' in argv)
# Validate options here if we want to be careful that options are of the right types...
# particularly if the user passed in an options argument
# Validate the API version. The parser has already converted the argument to
# an integer, so check against a list of valid versions.
supported_versions = [1, 2, 3]
if options.api_version not in supported_versions:
parser.error('API version "%s" is not a supported version. Valid versions are: %r.'
% (options.api_version, supported_versions))
# From GetVersionCacheAge (int days) produce options.GetVersionCacheOldestDate as a datetime.datetime
indays = -1
try:
indays = int(options.GetVersionCacheAge)
except Exception, e:
raise OmniError, "Failed to parse GetVersionCacheAge: %s" % e
options.GetVersionCacheOldestDate = datetime.datetime.utcnow() - datetime.timedelta(days=indays)
options.getversionCacheName = os.path.normcase(os.path.expanduser(options.getversionCacheName))
if options.noGetVersionCache and options.useGetVersionCache:
parser.error("Cannot both force not using the GetVersion cache and force TO use it.")
# From AggNickCacheAge (int days) produce options.AggNickCacheOldestDate as a datetime.datetime
indays = -1
try:
indays = int(options.AggNickCacheAge)
except Exception, e:
raise OmniError, "Failed to parse AggNickCacheAge: %s" % e
options.AggNickCacheOldestDate = datetime.datetime.utcnow() - datetime.timedelta(days=indays)
options.aggNickCacheName = os.path.normcase(os.path.expanduser(options.aggNickCacheName))
if options.noAggNickCache and options.useAggNickCache:
parser.error("Cannot both force not using the AggNick cache and force TO use it.")
if options.outputfile:
options.output = True
if options.usercredfile:
options.usercredfile = os.path.normpath(os.path.normcase(os.path.expanduser(options.usercredfile)))
if options.slicecredfile:
options.slicecredfile = os.path.normpath(os.path.normcase(os.path.expanduser(options.slicecredfile)))
# noSliceMembers forces useSliceMembers to be false
# Note you can also force it false with an omni_config setting of useslicemembers=False in the omni section
if options.noSliceMembers:
options.useSliceMembers = False
return options, args
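# --- Editor's note: the helper below is an illustrative sketch, not part of
# the original tool; its name is hypothetical. It demonstrates the deepcopy
# behavior documented above: an optparse.Values object supplied by a caller
# is copied, so parse_args never mutates the caller's object.
def _demo_parse_args_copies_options():
    # First parse with no options object, so all defaults are populated
    base_opts, _ = parse_args(['getversion'])
    base_opts.api_version = 3
    # Second parse works on a deep copy; base_opts keeps its value
    opts, args = parse_args(['-V', '2', 'listresources'], options=base_opts)
    assert opts.api_version == 2 and base_opts.api_version == 3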
def main(argv=None):
# do initial setup & process the user's call
if argv is None:
argv = sys.argv[1:]
try:
framework, config, args, opts = initialize(argv)
API_call(framework, config, args, opts, verbose=opts.verbose)
except AMAPIError, ae:
if ae.returnstruct and isinstance(ae.returnstruct, dict) and ae.returnstruct.has_key('code'):
if isinstance(ae.returnstruct['code'], int) or isinstance(ae.returnstruct['code'], str):
sys.exit(int(ae.returnstruct['code']))
if isinstance(ae.returnstruct['code'], dict) and ae.returnstruct['code'].has_key('geni_code'):
sys.exit(int(ae.returnstruct['code']['geni_code']))
sys.exit(ae)
except OmniError, oe:
sys.exit(oe)
|
plantigrade/geni-tools
|
src/gcf/oscript.py
|
Python
|
mit
| 66,572
|
[
"ORCA"
] |
ebac4be7ef9b78b7aa464268079c24dfae012c305d1fdfd90234881d7ea11fe2
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import unicode_literals
import numpy as np
import unittest
import os
from pymatgen.analysis.structure_analyzer import VoronoiCoordFinder, \
solid_angle, contains_peroxide, RelaxationAnalyzer, VoronoiConnectivity, \
oxide_type, sulfide_type, OrderParameters, average_coordination_number, \
VoronoiAnalyzer, JMolCoordFinder, get_dimensionality
from pymatgen.io.vasp.inputs import Poscar
from pymatgen.io.vasp.outputs import Xdatcar
from pymatgen import Element, Structure, Lattice
from pymatgen.util.testing import PymatgenTest
test_dir = os.path.join(os.path.dirname(__file__), "..", "..", "..",
'test_files')
class VoronoiCoordFinderTest(PymatgenTest):
def setUp(self):
s = self.get_structure('LiFePO4')
self.finder = VoronoiCoordFinder(s, [Element("O")])
def test_get_voronoi_polyhedra(self):
self.assertEqual(len(self.finder.get_voronoi_polyhedra(0).items()), 8)
def test_get_coordination_number(self):
self.assertAlmostEqual(self.finder.get_coordination_number(0),
5.809265748999465, 7)
def test_get_coordinated_sites(self):
self.assertEqual(len(self.finder.get_coordinated_sites(0)), 8)
class VoronoiAnalyzerTest(PymatgenTest):
def setUp(self):
self.ss = Xdatcar(os.path.join(test_dir, 'XDATCAR.MD')).structures
self.s = self.ss[1]
self.va = VoronoiAnalyzer(cutoff=4.0)
def test_analyze(self):
# Check for the Voronoi index of site i in Structure
single_structure = self.va.analyze(self.s, n=5)
self.assertIn(single_structure.view(),
np.array([4, 3, 3, 4, 2, 2, 1, 0]).view(),
"Cannot find the right polyhedron.")
# Check for the presence of a Voronoi index and its frequency in
# an ensemble (list) of Structures
ensemble = self.va.analyze_structures(self.ss, step_freq=2,
most_frequent_polyhedra=10)
self.assertIn(('[1 3 4 7 1 0 0 0]', 3),
ensemble, "Cannot find the right polyhedron in ensemble.")
class JMolCoordFinderTest(PymatgenTest):
def test_get_coordination_number(self):
s = self.get_structure('LiFePO4')
# test the default coordination finder
finder = JMolCoordFinder()
nsites_checked = 0
for site_idx, site in enumerate(s):
if site.specie == Element("Li"):
self.assertEqual(finder.get_coordination_number(s, site_idx), 0)
nsites_checked += 1
elif site.specie == Element("Fe"):
self.assertEqual(finder.get_coordination_number(s, site_idx), 6)
nsites_checked += 1
elif site.specie == Element("P"):
self.assertEqual(finder.get_coordination_number(s, site_idx), 4)
nsites_checked += 1
self.assertEqual(nsites_checked, 12)
# test a user override that would cause Li to show up as 6-coordinated
finder = JMolCoordFinder({"Li": 1})
self.assertEqual(finder.get_coordination_number(s, 0), 6)
# verify get_coordinated_sites function works
self.assertEqual(len(finder.get_coordinated_sites(s, 0)), 6)
class GetDimensionalityTest(PymatgenTest):
def test_get_dimensionality(self):
s = self.get_structure('LiFePO4')
self.assertEqual(get_dimensionality(s), 3)
s = self.get_structure('Graphite')
self.assertEqual(get_dimensionality(s), 2)
def test_get_dimensionality_with_bonds(self):
s = self.get_structure('CsCl')
self.assertEqual(get_dimensionality(s), 1)
self.assertEqual(get_dimensionality(s, bonds={("Cs", "Cl"): 3.7}), 3)
class RelaxationAnalyzerTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR.Li2O'),
check_for_POTCAR=False)
s1 = p.structure
p = Poscar.from_file(os.path.join(test_dir, 'CONTCAR.Li2O'),
check_for_POTCAR=False)
s2 = p.structure
self.analyzer = RelaxationAnalyzer(s1, s2)
def test_vol_and_para_changes(self):
for k, v in self.analyzer.get_percentage_lattice_parameter_changes().items():
self.assertAlmostEqual(-0.0092040921155279731, v)
latt_change = v
vol_change = self.analyzer.get_percentage_volume_change()
self.assertAlmostEqual(-0.0273589101391,
vol_change)
# This is a simple cubic cell, so the latt and vol change are simply
# related. So let's test that.
self.assertAlmostEqual((1 + latt_change) ** 3 - 1, vol_change)
def test_get_percentage_bond_dist_changes(self):
for k, v in self.analyzer.get_percentage_bond_dist_changes().items():
for k2, v2 in v.items():
self.assertAlmostEqual(-0.009204092115527862, v2)
class VoronoiConnectivityTest(PymatgenTest):
def test_connectivity_array(self):
vc = VoronoiConnectivity(self.get_structure("LiFePO4"))
ca = vc.connectivity_array
np.set_printoptions(threshold=np.NAN, linewidth=np.NAN, suppress=np.NAN)
expected = np.array([0, 1.96338392, 0, 0.04594495])
self.assertTrue(np.allclose(ca[15, :4, ca.shape[2] // 2], expected))
expected = np.array([0, 0, 0])
self.assertTrue(np.allclose(ca[1, -3:, 51], expected))
site = vc.get_sitej(27, 51)
self.assertEqual(site.specie, Element('O'))
expected = np.array([-0.29158, 0.74889, 0.95684])
self.assertTrue(np.allclose(site.frac_coords, expected))
class MiscFunctionTest(PymatgenTest):
def test_average_coordination_number(self):
xdatcar = Xdatcar(os.path.join(test_dir, 'XDATCAR.MD'))
coordination_numbers = average_coordination_number(xdatcar.structures,
freq=1)
self.assertAlmostEqual(coordination_numbers['Fe'], 4.771903318390836, 5,
"Coordination number not calculated properly.")
def test_solid_angle(self):
center = [2.294508207929496, 4.4078057081404, 2.299997773791287]
coords = [[1.627286218099362, 3.081185538926995, 3.278749383217061],
[1.776793751092763, 2.93741167455471, 3.058701096568852],
[3.318412187495734, 2.997331084033472, 2.022167590167672],
[3.874524708023352, 4.425301459451914, 2.771990305592935],
[2.055778446743566, 4.437449313863041, 4.061046832034642]]
self.assertAlmostEqual(solid_angle(center, coords), 1.83570965938, 7,
"Wrong result returned by solid_angle")
def test_contains_peroxide(self):
for f in ['LiFePO4', 'NaFePO4', 'Li3V2(PO4)3', 'Li2O']:
self.assertFalse(contains_peroxide(self.get_structure(f)))
for f in ['Li2O2', "K2O2"]:
self.assertTrue(contains_peroxide(self.get_structure(f)))
def test_oxide_type(self):
el_li = Element("Li")
el_o = Element("O")
latt = Lattice([[3.985034, 0.0, 0.0],
[0.0, 4.881506, 0.0],
[0.0, 0.0, 2.959824]])
elts = [el_li, el_li, el_o, el_o, el_o, el_o]
coords = list()
coords.append([0.500000, 0.500000, 0.500000])
coords.append([0.0, 0.0, 0.0])
coords.append([0.632568, 0.085090, 0.500000])
coords.append([0.367432, 0.914910, 0.500000])
coords.append([0.132568, 0.414910, 0.000000])
coords.append([0.867432, 0.585090, 0.000000])
struct = Structure(latt, elts, coords)
self.assertEqual(oxide_type(struct, 1.1), "superoxide")
el_li = Element("Li")
el_o = Element("O")
elts = [el_li, el_o, el_o, el_o]
latt = Lattice.from_parameters(3.999911, 3.999911, 3.999911, 133.847504,
102.228244, 95.477342)
coords = [[0.513004, 0.513004, 1.000000],
[0.017616, 0.017616, 0.000000],
[0.649993, 0.874790, 0.775203],
[0.099587, 0.874790, 0.224797]]
struct = Structure(latt, elts, coords)
self.assertEqual(oxide_type(struct, 1.1), "ozonide")
latt = Lattice.from_parameters(3.159597, 3.159572, 7.685205, 89.999884,
89.999674, 60.000510)
el_li = Element("Li")
el_o = Element("O")
elts = [el_li, el_li, el_li, el_li, el_o, el_o, el_o, el_o]
coords = [[0.666656, 0.666705, 0.750001],
[0.333342, 0.333378, 0.250001],
[0.000001, 0.000041, 0.500001],
[0.000001, 0.000021, 0.000001],
[0.333347, 0.333332, 0.649191],
[0.333322, 0.333353, 0.850803],
[0.666666, 0.666686, 0.350813],
[0.666665, 0.666684, 0.149189]]
struct = Structure(latt, elts, coords)
self.assertEqual(oxide_type(struct, 1.1), "peroxide")
el_li = Element("Li")
el_o = Element("O")
el_h = Element("H")
latt = Lattice.from_parameters(3.565276, 3.565276, 4.384277, 90.000000,
90.000000, 90.000000)
elts = [el_h, el_h, el_li, el_li, el_o, el_o]
coords = [[0.000000, 0.500000, 0.413969],
[0.500000, 0.000000, 0.586031],
[0.000000, 0.000000, 0.000000],
[0.500000, 0.500000, 0.000000],
[0.000000, 0.500000, 0.192672],
[0.500000, 0.000000, 0.807328]]
struct = Structure(latt, elts, coords)
self.assertEqual(oxide_type(struct, 1.1), "hydroxide")
el_li = Element("Li")
el_n = Element("N")
el_h = Element("H")
latt = Lattice.from_parameters(3.565276, 3.565276, 4.384277, 90.000000,
90.000000, 90.000000)
elts = [el_h, el_h, el_li, el_li, el_n, el_n]
coords = [[0.000000, 0.500000, 0.413969],
[0.500000, 0.000000, 0.586031],
[0.000000, 0.000000, 0.000000],
[0.500000, 0.500000, 0.000000],
[0.000000, 0.500000, 0.192672],
[0.500000, 0.000000, 0.807328]]
struct = Structure(latt, elts, coords)
self.assertEqual(oxide_type(struct, 1.1), "None")
el_o = Element("O")
latt = Lattice.from_parameters(4.389828, 5.369789, 5.369789, 70.786622,
69.244828, 69.244828)
elts = [el_o, el_o, el_o, el_o, el_o, el_o, el_o, el_o]
coords = [[0.844609, 0.273459, 0.786089],
[0.155391, 0.213911, 0.726541],
[0.155391, 0.726541, 0.213911],
[0.844609, 0.786089, 0.273459],
[0.821680, 0.207748, 0.207748],
[0.178320, 0.792252, 0.792252],
[0.132641, 0.148222, 0.148222],
[0.867359, 0.851778, 0.851778]]
struct = Structure(latt, elts, coords)
self.assertEqual(oxide_type(struct, 1.1), "None")
def test_sulfide_type(self):
# NaS2 -> polysulfide
latt = Lattice.tetragonal(9.59650, 11.78850)
species = ["Na"] * 2 + ["S"] * 2
coords = [[0.00000, 0.00000, 0.17000],
[0.27600, 0.25000, 0.12500],
[0.03400, 0.25000, 0.29600],
[0.14700, 0.11600, 0.40000]]
struct = Structure.from_spacegroup(122, latt, species, coords)
self.assertEqual(sulfide_type(struct), "polysulfide")
# NaCl type NaS -> sulfide
latt = Lattice.cubic(5.75)
species = ["Na", "S"]
coords = [[0.00000, 0.00000, 0.00000],
[0.50000, 0.50000, 0.50000]]
struct = Structure.from_spacegroup(225, latt, species, coords)
self.assertEqual(sulfide_type(struct), "sulfide")
# Na2S2O3 -> None (sulfate)
latt = Lattice.monoclinic(6.40100, 8.10000, 8.47400, 96.8800)
species = ["Na"] * 2 + ["S"] * 2 + ["O"] * 3
coords = [[0.29706, 0.62396, 0.08575],
[0.37673, 0.30411, 0.45416],
[0.52324, 0.10651, 0.21126],
[0.29660, -0.04671, 0.26607],
[0.17577, 0.03720, 0.38049],
[0.38604, -0.20144, 0.33624],
[0.16248, -0.08546, 0.11608]]
struct = Structure.from_spacegroup(14, latt, species, coords)
self.assertEqual(sulfide_type(struct), None)
# Na3PS3O -> sulfide
latt = Lattice.orthorhombic(9.51050, 11.54630, 5.93230)
species = ["Na"] * 2 + ["S"] * 2 + ["P", "O"]
coords = [[0.19920, 0.11580, 0.24950],
[0.00000, 0.36840, 0.29380],
[0.32210, 0.36730, 0.22530],
[0.50000, 0.11910, 0.27210],
[0.50000, 0.29400, 0.35500],
[0.50000, 0.30300, 0.61140]]
struct = Structure.from_spacegroup(36, latt, species, coords)
self.assertEqual(sulfide_type(struct), "sulfide")
class OrderParametersTest(PymatgenTest):
def setUp(self):
self.single_bond = Structure(
Lattice.from_lengths_and_angles(
[10, 10, 10], [90, 90, 90]),
["H", "H", "H"], [[1, 0, 0], [0, 0, 0], [6, 0, 0]],
validate_proximity=False,
to_unit_cell=False, coords_are_cartesian=True,
site_properties=None)
self.linear = Structure(
Lattice.from_lengths_and_angles(
[10, 10, 10], [90, 90, 90]),
["H", "H", "H"], [[1, 0, 0], [0, 0, 0], [2, 0, 0]],
validate_proximity=False,
to_unit_cell=False, coords_are_cartesian=True,
site_properties=None)
self.bent45 = Structure(
Lattice.from_lengths_and_angles(
[10, 10, 10], [90, 90, 90]), ["H", "H", "H"],
[[0, 0, 0], [0.707, 0.707, 0], [0.707, 0, 0]],
validate_proximity=False,
to_unit_cell=False, coords_are_cartesian=True,
site_properties=None)
self.cubic = Structure(
Lattice.from_lengths_and_angles(
[1, 1, 1], [90, 90, 90]),
["H"], [[0, 0, 0]], validate_proximity=False,
to_unit_cell=False, coords_are_cartesian=False,
site_properties=None)
self.bcc = Structure(
Lattice.from_lengths_and_angles(
[1, 1, 1], [90, 90, 90]),
["H", "H"], [[0, 0, 0], [0.5, 0.5, 0.5]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=False, site_properties=None)
self.fcc = Structure(
Lattice.from_lengths_and_angles(
[1, 1, 1], [90, 90, 90]), ["H", "H", "H", "H"],
[[0, 0, 0], [0, 0.5, 0.5], [0.5, 0, 0.5], [0.5, 0.5, 0]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=False, site_properties=None)
self.hcp = Structure(
Lattice.from_lengths_and_angles(
[1, 1, 1.633], [90, 90, 120]), ["H", "H"],
[[0.3333, 0.6667, 0.25], [0.6667, 0.3333, 0.75]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=False, site_properties=None)
self.diamond = Structure(
Lattice.from_lengths_and_angles(
[1, 1, 1], [90, 90, 90]), ["H", "H", "H", "H", "H", "H", "H", "H"],
[[0, 0, 0.5], [0.75, 0.75, 0.75], [0, 0.5, 0], [0.75, 0.25, 0.25],
[0.5, 0, 0], [0.25, 0.75, 0.25], [0.5, 0.5, 0.5],
[0.25, 0.25, 0.75]], validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=False, site_properties=None)
self.trigonal_off_plane = Structure(
Lattice.from_lengths_and_angles(
[100, 100, 100], [90, 90, 90]),
["H", "H", "H", "H"],
[[0.50, 0.50, 0.50], [0.25, 0.75, 0.25], \
[0.25, 0.25, 0.75], [0.75, 0.25, 0.25]], \
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.regular_triangle = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), ["H", "H", "H", "H"],
[[15, 15.28867, 15.65], [14.5, 15, 15], [15.5, 15, 15], \
[15, 15.866, 15]], validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.trigonal_planar = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), ["H", "H", "H", "H"],
[[15, 15.28867, 15], [14.5, 15, 15], [15.5, 15, 15], \
[15, 15.866, 15]], validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.square_planar = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), ["H", "H", "H", "H", "H"],
[[15, 15, 15], [14.75, 14.75, 15], [14.75, 15.25, 15], \
[15.25, 14.75, 15], [15.25, 15.25, 15]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.square = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), ["H", "H", "H", "H", "H"],
[[15, 15, 15.707], [14.75, 14.75, 15], [14.75, 15.25, 15], \
[15.25, 14.75, 15], [15.25, 15.25, 15]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.T_shape = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), ["H", "H", "H", "H"],
[[15, 15, 15], [15, 15, 15.5], [15, 15.5, 15],
[15, 14.5, 15]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.square_pyramid = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), ["H", "H", "H", "H", "H", "H"],
[[15, 15, 15], [15, 15, 15.3535], [14.75, 14.75, 15],
[14.75, 15.25, 15], [15.25, 14.75, 15], [15.25, 15.25, 15]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.pentagonal_planar = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), ["Xe", "F", "F", "F", "F", "F"],
[[0, -1.6237, 0], [1.17969, 0, 0], [-1.17969, 0, 0], \
[1.90877, -2.24389, 0], [-1.90877, -2.24389, 0], [0, -3.6307, 0]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.pentagonal_pyramid = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), ["Xe", "F", "F", "F", "F", "F", "F"],
[[0, -1.6237, 0], [0, -1.6237, 1.17969], [1.17969, 0, 0], \
[-1.17969, 0, 0], [1.90877, -2.24389, 0], \
[-1.90877, -2.24389, 0], [0, -3.6307, 0]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.pentagonal_bipyramid = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]),
["Xe", "F", "F", "F", "F", "F", "F", "F"],
[[0, -1.6237, 0], [0, -1.6237, -1.17969], \
[0, -1.6237, 1.17969], [1.17969, 0, 0], \
[-1.17969, 0, 0], [1.90877, -2.24389, 0], \
[-1.90877, -2.24389, 0], [0, -3.6307, 0]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.hexagonal_pyramid = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), \
["H", "Li", "C", "C", "C", "C", "C", "C"],
[[0, 0, 0], [0, 0, 1.675], [0.71, 1.2298, 0], \
[-0.71, 1.2298, 0], [0.71, -1.2298, 0], [-0.71, -1.2298, 0], \
[1.4199, 0, 0], [-1.4199, 0, 0]], \
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.hexagonal_bipyramid = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), \
["H", "Li", "Li", "C", "C", "C", "C", "C", "C"],
[[0, 0, 0], [0, 0, 1.675], [0, 0, -1.675], \
[0.71, 1.2298, 0], [-0.71, 1.2298, 0], \
[0.71, -1.2298, 0], [-0.71, -1.2298, 0], \
[1.4199, 0, 0], [-1.4199, 0, 0]], \
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.trigonal_pyramid = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), ["P", "Cl", "Cl", "Cl", "Cl"],
[[0, 0, 0], [0, 0, 2.14], [0, 2.02, 0],
[1.74937, -1.01, 0], [-1.74937, -1.01, 0]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.trigonal_bipyramidal = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]), ["P", "Cl", "Cl", "Cl", "Cl", "Cl"],
[[0, 0, 0], [0, 0, 2.14], [0, 2.02, 0],
[1.74937, -1.01, 0], [-1.74937, -1.01, 0], [0, 0, -2.14]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.cuboctahedron = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]),
["H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H", "H"],
[[15, 15, 15], [15, 14.5, 14.5], [15, 14.5, 15.5],
[15, 15.5, 14.5], [15, 15.5, 15.5],
[14.5, 15, 14.5], [14.5, 15, 15.5], [15.5, 15, 14.5], [15.5, 15, 15.5],
[14.5, 14.5, 15], [14.5, 15.5, 15], [15.5, 14.5, 15], [15.5, 15.5, 15]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
self.see_saw = Structure(
Lattice.from_lengths_and_angles(
[30, 30, 30], [90, 90, 90]),
["H", "H", "H", "H", "H"],
[[15, 15, 15], [15, 15, 14], [15, 15, 16], [15, 14, 15], [14, 15, 15]],
validate_proximity=False, to_unit_cell=False,
coords_are_cartesian=True, site_properties=None)
def test_init(self):
self.assertIsNotNone(
OrderParameters(["cn"], parameters=None, cutoff=0.99))
def test_get_order_parameters(self):
# Set up everything.
op_types = ["cn", "bent", "bent", "tet", "oct", "bcc", "q2", "q4", \
"q6", "reg_tri", "sq", "sq_pyr_legacy", "tri_bipyr", "sgl_bd", \
"tri_plan", "sq_plan", "pent_plan", "sq_pyr", "tri_pyr", \
"pent_pyr", "hex_pyr", "pent_bipyr", "hex_bipyr", "T", "cuboct", \
"see_saw"]
op_paras = [None, {'TA': 1, 'IGW_TA': 1./0.0667}, \
{'TA': 45./180, 'IGW_TA': 1./0.0667}, None, \
None, None, None, None, None, None, None, None, None, \
None, None, None, None, None, None, None, None, None, \
None, None, None, None]
ops_044 = OrderParameters(op_types, parameters=op_paras, cutoff=0.44)
ops_071 = OrderParameters(op_types, parameters=op_paras, cutoff=0.71)
ops_087 = OrderParameters(op_types, parameters=op_paras, cutoff=0.87)
ops_099 = OrderParameters(op_types, parameters=op_paras, cutoff=0.99)
ops_101 = OrderParameters(op_types, parameters=op_paras, cutoff=1.01)
ops_501 = OrderParameters(op_types, parameters=op_paras, cutoff=5.01)
ops_voro = OrderParameters(op_types, parameters=op_paras)
# Single bond.
op_vals = ops_101.get_order_parameters(self.single_bond, 0)
self.assertAlmostEqual(int(op_vals[13] * 1000), 1000)
op_vals = ops_501.get_order_parameters(self.single_bond, 0)
self.assertAlmostEqual(int(op_vals[13] * 1000), 799)
op_vals = ops_101.get_order_parameters(self.linear, 0)
self.assertAlmostEqual(int(op_vals[13] * 1000), 0)
# Linear motif.
op_vals = ops_101.get_order_parameters(self.linear, 0)
self.assertAlmostEqual(int(op_vals[1] * 1000), 1000)
# 45 degrees-bent motif.
op_vals = ops_101.get_order_parameters(self.bent45, 0)
self.assertAlmostEqual(int(op_vals[2] * 1000), 1000)
# T-shape motif.
op_vals = ops_101.get_order_parameters(
self.T_shape, 0, indices_neighs=[1,2,3])
self.assertAlmostEqual(int(op_vals[23] * 1000), 1000)
# Cubic structure.
op_vals = ops_099.get_order_parameters(self.cubic, 0)
self.assertAlmostEqual(op_vals[0], 0.0)
self.assertIsNone(op_vals[3])
self.assertIsNone(op_vals[4])
self.assertIsNone(op_vals[5])
self.assertIsNone(op_vals[6])
self.assertIsNone(op_vals[7])
self.assertIsNone(op_vals[8])
op_vals = ops_101.get_order_parameters(self.cubic, 0)
self.assertAlmostEqual(op_vals[0], 6.0)
self.assertAlmostEqual(int(op_vals[3] * 1000), 23)
self.assertAlmostEqual(int(op_vals[4] * 1000), 1000)
self.assertAlmostEqual(int(op_vals[5] * 1000), 333)
self.assertAlmostEqual(int(op_vals[6] * 1000), 0)
self.assertAlmostEqual(int(op_vals[7] * 1000), 763)
self.assertAlmostEqual(int(op_vals[8] * 1000), 353)
# Bcc structure.
op_vals = ops_087.get_order_parameters(self.bcc, 0)
self.assertAlmostEqual(op_vals[0], 8.0)
self.assertAlmostEqual(int(op_vals[3] * 1000), 200)
self.assertAlmostEqual(int(op_vals[4] * 1000), 145)
self.assertAlmostEqual(int(op_vals[5] * 1000), 975)
self.assertAlmostEqual(int(op_vals[6] * 1000), 0)
self.assertAlmostEqual(int(op_vals[7] * 1000), 509)
self.assertAlmostEqual(int(op_vals[8] * 1000), 628)
# Fcc structure.
op_vals = ops_071.get_order_parameters(self.fcc, 0)
self.assertAlmostEqual(op_vals[0], 12.0)
self.assertAlmostEqual(int(op_vals[3] * 1000), 36)
self.assertAlmostEqual(int(op_vals[4] * 1000), 78)
self.assertAlmostEqual(int(op_vals[5] * 1000), 0)
self.assertAlmostEqual(int(op_vals[6] * 1000), 0)
self.assertAlmostEqual(int(op_vals[7] * 1000), 190)
self.assertAlmostEqual(int(op_vals[8] * 1000), 574)
# Hcp structure.
op_vals = ops_101.get_order_parameters(self.hcp, 0)
self.assertAlmostEqual(op_vals[0], 12.0)
self.assertAlmostEqual(int(op_vals[3] * 1000), 33)
self.assertAlmostEqual(int(op_vals[4] * 1000), 82)
self.assertAlmostEqual(int(op_vals[5] * 1000), -38)
self.assertAlmostEqual(int(op_vals[6] * 1000), 0)
self.assertAlmostEqual(int(op_vals[7] * 1000), 97)
self.assertAlmostEqual(int(op_vals[8] * 1000), 484)
# Diamond structure.
op_vals = ops_044.get_order_parameters(self.diamond, 0)
self.assertAlmostEqual(op_vals[0], 4.0)
self.assertAlmostEqual(int(op_vals[3] * 1000), 1000)
self.assertAlmostEqual(int(op_vals[4] * 1000), 37)
self.assertAlmostEqual(int(op_vals[5] * 1000), 727)
self.assertAlmostEqual(int(op_vals[6] * 1000), 0)
self.assertAlmostEqual(int(op_vals[7] * 1000), 509)
self.assertAlmostEqual(int(op_vals[8] * 1000), 628)
# Trigonal off-plane molecule.
op_vals = ops_044.get_order_parameters(self.trigonal_off_plane, 0)
self.assertAlmostEqual(op_vals[0], 3.0)
self.assertAlmostEqual(int(op_vals[3] * 1000), 1000)
# Trigonal-planar motif.
op_vals = ops_101.get_order_parameters(self.trigonal_planar, 0)
self.assertEqual(int(op_vals[0] + 0.5), 3)
self.assertAlmostEqual(int(op_vals[14] * 1000 + 0.5), 1000)
# Regular triangle motif.
op_vals = ops_101.get_order_parameters(self.regular_triangle, 0)
self.assertAlmostEqual(int(op_vals[9] * 1000), 999)
# Square-planar motif.
op_vals = ops_101.get_order_parameters(self.square_planar, 0)
self.assertAlmostEqual(int(op_vals[15] * 1000 + 0.5), 1000)
# Square motif.
op_vals = ops_101.get_order_parameters(self.square, 0)
self.assertAlmostEqual(int(op_vals[10] * 1000), 1000)
# Pentagonal planar.
op_vals = ops_101.get_order_parameters(
self.pentagonal_planar.sites, 0, indices_neighs=[1,2,3,4,5])
self.assertAlmostEqual(int(op_vals[12] * 1000 + 0.5), 33)
self.assertAlmostEqual(int(op_vals[16] * 1000 + 0.5), 1000)
# Trigonal pyramid motif.
op_vals = ops_101.get_order_parameters(
self.trigonal_pyramid, 0, indices_neighs=[1,2,3,4])
self.assertAlmostEqual(int(op_vals[18] * 1000 + 0.5), 1000)
# Square pyramid motif.
op_vals = ops_101.get_order_parameters(self.square_pyramid, 0)
self.assertAlmostEqual(int(op_vals[11] * 1000 + 0.5), 1000)
self.assertAlmostEqual(int(op_vals[12] * 1000 + 0.5), 375)
self.assertAlmostEqual(int(op_vals[17] * 1000 + 0.5), 1000)
# Pentagonal pyramid motif.
op_vals = ops_101.get_order_parameters(
self.pentagonal_pyramid, 0, indices_neighs=[1,2,3,4,5,6])
self.assertAlmostEqual(int(op_vals[19] * 1000 + 0.5), 1000)
# Hexagonal pyramid motif.
op_vals = ops_101.get_order_parameters(
self.hexagonal_pyramid, 0, indices_neighs=[1,2,3,4,5,6,7])
self.assertAlmostEqual(int(op_vals[20] * 1000 + 0.5), 1000)
# Trigonal bipyramidal.
op_vals = ops_101.get_order_parameters(
self.trigonal_bipyramidal.sites, 0, indices_neighs=[1,2,3,4,5])
self.assertAlmostEqual(int(op_vals[12] * 1000 + 0.5), 1000)
# Pentagonal bipyramidal.
op_vals = ops_101.get_order_parameters(
self.pentagonal_bipyramid.sites, 0,
indices_neighs=[1,2,3,4,5,6,7])
self.assertAlmostEqual(int(op_vals[21] * 1000 + 0.5), 1000)
# Hexagonal bipyramid motif.
op_vals = ops_101.get_order_parameters(
self.hexagonal_bipyramid, 0, indices_neighs=[1,2,3,4,5,6,7,8])
self.assertAlmostEqual(int(op_vals[22] * 1000 + 0.5), 1000)
# Cuboctahedral motif.
op_vals = ops_101.get_order_parameters(
self.cuboctahedron, 0, indices_neighs=[i for i in range(1, 13)])
self.assertAlmostEqual(int(op_vals[24] * 1000 + 0.5), 1000)
# See-saw motif.
op_vals = ops_101.get_order_parameters(
self.see_saw, 0, indices_neighs=[i for i in range(1, 5)])
self.assertAlmostEqual(int(op_vals[25] * 1000 + 0.5), 1000)
# Test providing explicit neighbor lists.
op_vals = ops_101.get_order_parameters(self.bcc, 0, indices_neighs=[1])
self.assertIsNotNone(op_vals[0])
self.assertIsNone(op_vals[3])
with self.assertRaises(ValueError):
ops_101.get_order_parameters(self.bcc, 0, indices_neighs=[2])
def tearDown(self):
del self.single_bond
del self.linear
del self.bent45
del self.cubic
del self.fcc
del self.bcc
del self.hcp
del self.diamond
del self.regular_triangle
del self.square
del self.square_pyramid
del self.trigonal_off_plane
del self.trigonal_pyramid
del self.trigonal_planar
del self.square_planar
del self.pentagonal_pyramid
del self.hexagonal_pyramid
del self.pentagonal_bipyramid
del self.T_shape
del self.cuboctahedron
del self.see_saw
if __name__ == '__main__':
unittest.main()
|
setten/pymatgen
|
pymatgen/analysis/tests/test_structure_analyzer.py
|
Python
|
mit
| 32,291
|
[
"VASP",
"pymatgen"
] |
f9ec59037fbecebbd88e8a2ab84d36a2dd6e44baf7f607c2646eb48230d76aa7
|
import json
from algorithm.PassFilter import low_filter, high_filter
def loadRouteFromMap(mapName, routeName, direction = 1):
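    # Look up the named route in the map JSON and smooth each fingerprint
    # trace with a low-pass then a high-pass filter; returns None if the
    # route is not found.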
mapData = json.load(open(mapName, 'rb'))
for route in mapData['graphEdge']:
if route['name'] == routeName:
routes = route['fingerdata']
for n in range(len(routes)):
routes[n] = high_filter(low_filter(routes[n]))
return route
return None
def main():
mapDirectory = '/home/moe/PycharmProjects/dingge/app/map/'
print loadRouteFromMap(mapDirectory + 'map_1.json', 'A-B')
if __name__ == '__main__':
main()
|
icymorn/magnetic-info-process
|
data/MapLoader.py
|
Python
|
mit
| 614
|
[
"MOE"
] |
9d6552b13a818ac90351a44de5ddf660d9b51adb39bae85bb4ece041e6deb56e
|
# Copyright 2007-2009 by Peter Cock. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
import os
import unittest
from Bio import SeqIO
from Bio.Alphabet import single_letter_alphabet
from Bio.Seq import Seq, MutableSeq
from Bio.SeqRecord import SeqRecord
from Bio.SeqUtils import GC, seq1, seq3
from Bio.SeqUtils.lcc import lcc_simp, lcc_mult
from Bio.SeqUtils.CheckSum import crc32, crc64, gcg, seguid
from Bio.SeqUtils.CodonUsage import CodonAdaptationIndex
def u_crc32(seq):
# NOTE - On Python 2 crc32 could return a signed int, but on Python 3 it is
# always unsigned
    # Docs suggest using crc32(x) & 0xffffffff for consistency.
return crc32(seq) & 0xffffffff
def simple_LCC(s):
    # Avoid cross-platform differences when printing floats by doing the conversion explicitly
return "%0.2f" % lcc_simp(s)
def windowed_LCC(s):
return ", ".join("%0.2f" % v for v in lcc_mult(s, 20))
class SeqUtilsTests(unittest.TestCase):
def setUp(self):
# Example of crc64 collision from Sebastian Bassi using the
# immunoglobulin lambda light chain variable region from Homo sapiens
# Both sequences share the same CRC64 checksum: 44CAAD88706CC153
self.str_light_chain_one = "QSALTQPASVSGSPGQSITISCTGTSSDVGSYNLVSWYQQHPGK" \
+ "APKLMIYEGSKRPSGVSNRFSGSKSGNTASLTISGLQAEDEADY" \
+ "YCSSYAGSSTLVFGGGTKLTVL"
self.str_light_chain_two = "QSALTQPASVSGSPGQSITISCTGTSSDVGSYNLVSWYQQHPGK" \
+ "APKLMIYEGSKRPSGVSNRFSGSKSGNTASLTISGLQAEDEADY" \
+ "YCCSYAGSSTWVFGGGTKLTVL"
def test_codon_usage_ecoli(self):
"""Test Codon Adaptation Index (CAI) using default E. coli data."""
CAI = CodonAdaptationIndex()
self.assertEqual("%0.5f" % CAI.cai_for_gene("ATGCGTATCGATCGCGATACGATTAGGCGGATG"),
"0.09978")
def test_codon_usage_custom(self):
"""Test Codon Adaptation Index (CAI) using FASTA file for background."""
# We need a FASTA file of CDS sequences to count the codon usage...
dna_fasta_filename = "fasta.tmp"
dna_genbank_filename = "GenBank/NC_005816.gb"
record = SeqIO.read(dna_genbank_filename, "genbank")
records = []
for feature in record.features:
if feature.type == "CDS" and not feature.sub_features:
start = feature.location.start.position
end = feature.location.end.position
table = int(feature.qualifiers["transl_table"][0])
if feature.strand == -1:
seq = record.seq[start:end].reverse_complement()
else:
seq = record.seq[start:end]
# Double check we have the CDS sequence expected
# TODO - Use any cds_start option if/when added to deal with the met
a = "M" + str(seq[3:].translate(table))
b = feature.qualifiers["translation"][0] + "*"
self.assertEqual(a, b, "%r vs %r" % (a, b))
records.append(SeqRecord(seq, id=feature.qualifiers["protein_id"][0],
description=feature.qualifiers["product"][0]))
with open(dna_fasta_filename, "w") as handle:
SeqIO.write(records, handle, "fasta")
CAI = CodonAdaptationIndex()
# Note - this needs a FASTA file which containing non-ambiguous DNA coding
# sequences - which should each be a whole number of codons.
CAI.generate_index(dna_fasta_filename)
# Now check codon usage index (CAI) using this species
self.assertEqual(record.annotations["source"],
"Yersinia pestis biovar Microtus str. 91001")
self.assertEqual("%0.5f" % CAI.cai_for_gene("ATGCGTATCGATCGCGATACGATTAGGCGGATG"),
"0.67213")
os.remove(dna_fasta_filename)
def test_crc_checksum_collision(self):
# Explicit testing of crc64 collision:
self.assertNotEqual(self.str_light_chain_one, self.str_light_chain_two)
self.assertNotEqual(crc32(self.str_light_chain_one), crc32(self.str_light_chain_two))
self.assertEqual(crc64(self.str_light_chain_one), crc64(self.str_light_chain_two))
self.assertNotEqual(gcg(self.str_light_chain_one), gcg(self.str_light_chain_two))
self.assertNotEqual(seguid(self.str_light_chain_one), seguid(self.str_light_chain_two))
def seq_checksums(self, seq_str, exp_crc32, exp_crc64, exp_gcg, exp_seguid,
exp_simple_LCC, exp_window_LCC):
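        # The same checksum values are expected whether the sequence is a
        # plain string, a Seq or a MutableSeq.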
for s in [seq_str,
Seq(seq_str, single_letter_alphabet),
MutableSeq(seq_str, single_letter_alphabet)]:
self.assertEqual(exp_crc32, u_crc32(s))
self.assertEqual(exp_crc64, crc64(s))
self.assertEqual(exp_gcg, gcg(s))
self.assertEqual(exp_seguid, seguid(s))
self.assertEqual(exp_simple_LCC, simple_LCC(s))
self.assertEqual(exp_window_LCC, windowed_LCC(s))
def test_checksum1(self):
self.seq_checksums(self.str_light_chain_one,
2994980265,
"CRC-44CAAD88706CC153",
9729,
"BpBeDdcNUYNsdk46JoJdw7Pd3BI",
"1.03",
"0.00, 1.00, 0.96, 0.96, 0.96, 0.65, 0.43, 0.35, 0.35, 0.35, 0.35, 0.53, 0.59, 0.26")
def test_checksum2(self):
self.seq_checksums(self.str_light_chain_two,
802105214,
"CRC-44CAAD88706CC153",
9647,
"X5XEaayob1nZLOc7eVT9qyczarY",
"1.07",
"0.00, 1.00, 0.96, 0.96, 0.96, 0.65, 0.43, 0.35, 0.35, 0.35, 0.35, 0.53, 0.59, 0.26")
def test_checksum3(self):
self.seq_checksums("ATGCGTATCGATCGCGATACGATTAGGCGGAT",
817679856,
"CRC-6234FF451DC6DFC6",
7959,
"8WCUbVjBgiRmM10gfR7XJNjbwnE",
"1.98",
"0.00, 2.00, 1.99, 1.99, 2.00, 1.99, 1.97, 1.99, 1.99, 1.99, 1.96, 1.96, 1.96, 1.96")
def test_GC(self):
seq = "ACGGGCTACCGTATAGGCAAGAGATGATGCCC"
self.assertEqual(GC(seq), 56.25)
def test_seq1_seq3(self):
s3 = "MetAlaTyrtrpcysthrLYSLEUILEGlYPrOGlNaSnaLapRoTyRLySSeRHisTrpLysThr"
s1 = "MAYWCTKLIGPQNAPYKSHWKT"
self.assertEqual(seq1(s3), s1)
self.assertEqual(seq3(s1).upper(), s3.upper())
self.assertEqual(seq1(seq3(s1)), s1)
self.assertEqual(seq3(seq1(s3)).upper(), s3.upper())
if __name__ == "__main__":
runner = unittest.TextTestRunner(verbosity=2)
unittest.main(testRunner=runner)
|
updownlife/multipleK
|
dependencies/biopython-1.65/Tests/test_SeqUtils.py
|
Python
|
gpl-2.0
| 7,033
|
[
"Biopython"
] |
9ff288fee7d3b026e8de85157e14e2f9b58776e644b355030afae3f13be011c2
|
import numpy as np
#<Localized>
def gaussianType(x,u,v):
"""A gaussian type basis function
This takes parameters:
    'mean', the location (centre) of the curve, and
    'variance', the spread of the curve.
Note that this is not a normal distribution, and is thus not normalized.
"""
#x.shape == (s,d)
#u.shape == (1,d)
#v.shape == (1,)
#return shape == (s,)
return (np.exp(-(np.linalg.norm(x-u, axis=1)**2)/(v**2)))
#</Localized>
#<Periodic>
#</Periodic>
#<Standard>
#</Standard>
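# A minimal usage sketch (hypothetical, for illustration only): evaluate the
# basis for five 2-dimensional samples around a centre at the origin.
if __name__ == '__main__':
    x = np.random.rand(5, 2)   # (s, d) sample points
    u = np.zeros((1, 2))       # (1, d) basis centre
    v = 2.0                    # spread
    print(gaussianType(x, u, v))  # shape (5,), values in (0, 1]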
|
azane/for-learnings-sake
|
bayes/regression/linearparametric/bases.py
|
Python
|
mit
| 541
|
[
"Gaussian"
] |
0099cbf078da213a2843958ac46085d16eaf97de33fe1f46300b397f55fd2e46
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""EMSL Api.
Usage:
EMSL_api.py list_basis [--basis=<basis_name>...]
[--atom=<atom_name>...]
[--db_path=<db_path> |--db_dump_path=<db_dump_path>]
[--average_mo_number]
EMSL_api.py list_atoms --basis=<basis_name>
[--db_path=<db_path> |--db_dump_path=<db_dump_path>]
EMSL_api.py get_basis_data --basis=<basis_name>
[--atom=<atom_name>...]
[--db_path=<db_path> |--db_dump_path=<db_dump_path>]
[(--save [--path=<path>])]
[--check=<program_name>]
[--treat_l]
EMSL_api.py list_formats
EMSL_api.py create_db --format=<format>
[--db_path=<db_path> |--db_dump_path=<db_dump_path>]
[--no-contraction]
EMSL_api.py (-h | --help)
EMSL_api.py --version
Options:
-h --help Show this screen.
--version Show version.
--no-contraction Basis functions are not contracted
<db_path> is the path to the SQLite3 file containing the Basis sets.
By default the GAMESS-US dump at $EMSL_API_ROOT/db/GAMESS-US.dump is used.
Example of use:
./EMSL_api.py list_basis --atom Al --atom U
./EMSL_api.py list_basis --atom S --basis 'cc-pV*' --average_mo_number
./EMSL_api.py list_atoms --basis ANO-RCC
./EMSL_api.py get_basis_data --basis 3-21++G*
"""
version = "0.8.1"
import os
from src.misc.docopt import docopt
from src.EMSL_dump import EMSL_dump
from src.EMSL_local import EMSL_local
if __name__ == '__main__':
arguments = docopt(__doc__, version='EMSL Api ' + version)
# ___
# | ._ o _|_
# _|_ | | | |_
#
if arguments["--db_path"]:
db_path = arguments["--db_path"]
db_dump_path = None
elif arguments["--db_dump_path"]:
db_path = None
db_dump_path = arguments["--db_dump_path"]
else:
db_dump_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
"db/GAMESS-US.dump")
db_path = None
# db_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
# "db/GAMESS-US.db")
# Check the db
# try:
# if not(arguments['create_db']):
# from src.EMSL_local import checkSQLite3
# db_path, db_path_changed = checkSQLite3(db_path)
# except:
# raise
# _ _ _ ______ _
# | | (_) | | | ___ \ (_)
# | | _ ___| |_ | |_/ / __ _ ___ _ ___
# | | | / __| __| | ___ \/ _` / __| / __|
# | |___| \__ \ |_ | |_/ / (_| \__ \ \__ \
# \_____/_|___/\__| \____/ \__,_|___/_|___/
if arguments["list_basis"]:
e = EMSL_local(db_path=db_path, db_dump_path=db_dump_path)
l = e.list_basis_available(arguments["--atom"],
arguments["--basis"],
arguments["--average_mo_number"])
if arguments["--average_mo_number"]:
for name, des, avg in l:
des_str = "{0:<50}".format(des)
print "- '{0}' ({1}) || {2}".format(name, avg, des_str)
else:
for name, des in l:
des_str = "{0:<50}".format(des)
print "- '{0}' || {1}".format(name, des_str)
# _ _ _ _____ _ _
# | | (_) | | | ___| | | |
# | | _ ___| |_ | |__ | | ___ _ __ ___ ___ _ __ | |_ ___
# | | | / __| __| | __|| |/ _ \ '_ ` _ \ / _ \ '_ \| __/ __|
# | |___| \__ \ |_ | |___| | __/ | | | | | __/ | | | |_\__ \
# \_____/_|___/\__| \____/|_|\___|_| |_| |_|\___|_| |_|\__|___/
elif arguments["list_atoms"]:
e = EMSL_local(db_path=db_path, db_dump_path=db_dump_path)
basis_name = arguments["--basis"]
l = e.get_list_element_available(basis_name)
print ", ".join(l)
# ______ _ _ _
# | ___ \ (_) | | | |
# | |_/ / __ _ ___ _ ___ __| | __ _| |_ __ _
# | ___ \/ _` / __| / __| / _` |/ _` | __/ _` |
# | |_/ / (_| \__ \ \__ \ | (_| | (_| | || (_| |
# \____/ \__,_|___/_|___/ \__,_|\__,_|\__\__,_|
elif arguments["get_basis_data"]:
e = EMSL_local(db_path=db_path, db_dump_path=db_dump_path)
basis_name = arguments["--basis"][0]
elts = arguments["--atom"]
l_atom_basis = e.get_basis(basis_name, elts,
arguments["--treat_l"],
arguments["--check"])
        # Add separation between atoms, and an empty last line
str_ = "\n\n".join(l_atom_basis) + "\n"
if arguments["--save"]:
if arguments["--path"]:
path = arguments["--path"]
else:
                # The default path is derived from the basis name and elements
path = "_".join([basis_name, ".".join(elts)])
path = "/tmp/" + path + ".bs"
with open(path, 'w') as f:
f.write(str_ + "\n")
print path
else:
print str_
# _ _ _ __ _
# | | (_) | | / _| | |
# | | _ ___| |_ | |_ ___ _ __ _ __ ___ __ _| |_ ___
# | | | / __| __| | _/ _ \| '__| '_ ` _ \ / _` | __/ __|
# | |___| \__ \ |_ | || (_) | | | | | | | | (_| | |_\__ \
# \_____/_|___/\__| |_| \___/|_| |_| |_| |_|\__,_|\__|___/
elif arguments["list_formats"]:
for i in EMSL_dump.get_list_format():
print i
# _____ _ _ _
# / __ \ | | | | |
# | / \/_ __ ___ __ _| |_ ___ __| | |__
# | | | '__/ _ \/ _` | __/ _ \ / _` | '_ \
# | \__/\ | | __/ (_| | || __/ | (_| | |_) |
# \____/_| \___|\__,_|\__\___| \__,_|_.__/
elif arguments["create_db"]:
db_path = arguments["--db_path"]
format = arguments["--format"]
contraction = not arguments["--no-contraction"]
e = EMSL_dump(db_path=db_path,
format=format,
contraction=contraction)
e.new_db()
# _
# / | _ _. ._ o ._ _
# \_ | (/_ (_| | | | | | (_|
# _|
# Clean up on exit
# if not(arguments['create_db']) and db_path_changed:
# os.system("rm -f /dev/shm/%d.db" % (os.getpid()))
|
TApplencourt/EMSL_Basis_Set_Exchange_Local
|
EMSL_api.py
|
Python
|
mit
| 6,539
|
[
"GAMESS"
] |
6aa1af4b2e104fc25bff64bf19130896dceff742f039aee1c502cc6da149439b
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Generate a Markdown document listing all supported sites"""
import os
import sys
import collections
import util
from gallery_dl import extractor
CATEGORY_MAP = {
"2chan" : "Futaba Channel",
"35photo" : "35PHOTO",
"adultempire" : "Adult Empire",
"allgirlbooru" : "All girl",
"archivedmoe" : "Archived.Moe",
"archiveofsins" : "Archive of Sins",
"artstation" : "ArtStation",
"aryion" : "Eka's Portal",
"atfbooru" : "ATFBooru",
"b4k" : "arch.b4k.co",
"baraag" : "baraag",
"bbc" : "BBC",
"bcy" : "半次元",
"comicvine" : "Comic Vine",
"deviantart" : "DeviantArt",
"drawfriends" : "Draw Friends",
"dynastyscans" : "Dynasty Reader",
"e621" : "e621",
"erome" : "EroMe",
"e-hentai" : "E-Hentai",
"exhentai" : "ExHentai",
"fallenangels" : "Fallen Angels Scans",
"fanbox" : "pixivFANBOX",
"fashionnova" : "Fashion Nova",
"furaffinity" : "Fur Affinity",
"hbrowse" : "HBrowse",
"hentai2read" : "Hentai2Read",
"hentaicosplays" : "Hentai Cosplay",
"hentaifoundry" : "Hentai Foundry",
"hentaifox" : "HentaiFox",
"hentaihand" : "HentaiHand",
"hentaihere" : "HentaiHere",
"hentaiimg" : "Hentai Image",
"hitomi" : "Hitomi.la",
"idolcomplex" : "Idol Complex",
"illusioncardsbooru": "Illusion Game Cards",
"imagebam" : "ImageBam",
"imagefap" : "ImageFap",
"imgbb" : "ImgBB",
"imgbox" : "imgbox",
"imagechest" : "ImageChest",
"imgth" : "imgth",
"imgur" : "imgur",
"joyreactor" : "JoyReactor",
"kabeuchi" : "かべうち",
"kireicake" : "Kirei Cake",
"kissgoddess" : "Kiss Goddess",
"lineblog" : "LINE BLOG",
"livedoor" : "livedoor Blog",
"omgmiamiswimwear": "Omg Miami Swimwear",
"mangadex" : "MangaDex",
"mangafox" : "Manga Fox",
"mangahere" : "Manga Here",
"mangakakalot" : "MangaKakalot",
"manganelo" : "Manganato",
"mangapark" : "MangaPark",
"mangasee" : "MangaSee",
"mastodon.social": "mastodon.social",
"myhentaigallery": "My Hentai Gallery",
"myportfolio" : "Adobe Portfolio",
"naverwebtoon" : "NaverWebtoon",
"nhentai" : "nhentai",
"nijie" : "nijie",
"nozomi" : "Nozomi.la",
"nsfwalbum" : "NSFWalbum.com",
"nyafuu" : "Nyafuu Archive",
"paheal" : "rule #34",
"photovogue" : "PhotoVogue",
"pornimagesxxx" : "Porn Image",
"pornreactor" : "PornReactor",
"powermanga" : "PowerManga",
"readcomiconline": "Read Comic Online",
"rbt" : "RebeccaBlackTech",
"redgifs" : "RedGIFs",
"rule34" : "Rule 34",
"rule34us" : "Rule 34",
"sankaku" : "Sankaku Channel",
"sankakucomplex" : "Sankaku Complex",
"seiga" : "Niconico Seiga",
"seisoparty" : "Seiso",
"senmanga" : "Sen Manga",
"sensescans" : "Sense-Scans",
"sexcom" : "Sex.com",
"simplyhentai" : "Simply Hentai",
"slickpic" : "SlickPic",
"slideshare" : "SlideShare",
"smugmug" : "SmugMug",
"speakerdeck" : "Speaker Deck",
"subscribestar" : "SubscribeStar",
"tbib" : "The Big ImageBoard",
"thatpervert" : "ThatPervert",
"thebarchive" : "The /b/ Archive",
"thecollection" : "The /co/llection",
"theloudbooru" : "The Loud Booru",
"tumblrgallery" : "TumblrGallery",
"vanillarock" : "もえぴりあ",
"vidyart" : "/v/idyart",
"vk" : "VK",
"vsco" : "VSCO",
"wakarimasen" : "Wakarimasen Archive",
"wallpapercave" : "Wallpaper Cave",
"webtoons" : "Webtoon",
"wikiart" : "WikiArt.org",
"xhamster" : "xHamster",
"xvideos" : "XVideos",
"yandere" : "yande.re",
"zzzz" : "ZzZz",
}
SUBCATEGORY_MAP = {
"art" : "Art",
"audio" : "Audio",
"doujin" : "Doujin",
"gallery": "Galleries",
"image" : "individual Images",
"index" : "Site Index",
"issue" : "Comic Issues",
"manga" : "Manga",
"media" : "Media Files",
"popular": "Popular Images",
"recent" : "Recent Images",
"search" : "Search Results",
"status" : "Images from Statuses",
"tag" : "Tag Searches",
"user" : "User Profiles",
"watch" : "Watches",
"following" : "",
"related-pin" : "related Pins",
"related-board": "",
"artstation": {
"artwork": "Artwork Listings",
},
"atfbooru": {
"favorite": "",
},
"danbooru": {
"favorite": "",
},
"desktopography": {
"site": "",
},
"deviantart": {
"stash": "Sta.sh",
"watch-posts": "",
},
"fanbox": {
"redirect": "",
},
"hentaifoundry": {
"story": "",
},
"instagram": {
"posts": "",
"saved": "Saved Posts",
"tagged": "Tagged Posts",
},
"kemonoparty": {
"discord": "Discord Servers",
"discord-server": "",
},
"mangadex": {
"feed" : "Followed Feed",
},
"pinterest": {
"board": "",
"pinit": "pin.it Links",
},
"pixiv": {
"me" : "pixiv.me Links",
"pixivision": "pixivision",
"sketch": "Sketch",
"work": "individual Images",
},
"sankaku": {
"books": "Book Searches",
},
"sexcom": {
"pins": "User Pins",
},
"smugmug": {
"path": "Images from Users and Folders",
},
"twitter": {
"media": "Media Timelines",
"replies": "",
"list-members": "List Members",
},
"wallhaven": {
"collections": "",
},
"wallpapercave": {
"image": "individual Images, Search Results",
},
"weasyl": {
"journals" : "",
"submissions": "",
},
"wikiart": {
"artists": "Artist Listings",
},
}
BASE_MAP = {
"foolfuuka" : "FoolFuuka 4chan Archives",
"foolslide" : "FoOlSlide Instances",
"gelbooru_v01": "Gelbooru Beta 0.1.11",
"gelbooru_v02": "Gelbooru Beta 0.2",
"lolisafe" : "lolisafe and chibisafe",
"moebooru" : "Moebooru and MyImouto",
}
_OAUTH = '<a href="https://github.com/mikf/gallery-dl#oauth">OAuth</a>'
_COOKIES = '<a href="https://github.com/mikf/gallery-dl#cookies">Cookies</a>'
_APIKEY_DB = \
'<a href="configuration.rst#extractorderpibooruapi-key">API Key</a>'
_APIKEY_WH = \
'<a href="configuration.rst#extractorwallhavenapi-key">API Key</a>'
_APIKEY_WY = \
'<a href="configuration.rst#extractorweasylapi-key">API Key</a>'
AUTH_MAP = {
"aryion" : "Supported",
"atfbooru" : "Supported",
"baraag" : _OAUTH,
"danbooru" : "Supported",
"derpibooru" : _APIKEY_DB,
"deviantart" : _OAUTH,
"e621" : "Supported",
"e-hentai" : "Supported",
"exhentai" : "Supported",
"fanbox" : _COOKIES,
"fantia" : _COOKIES,
"flickr" : _OAUTH,
"furaffinity" : _COOKIES,
"idolcomplex" : "Supported",
"imgbb" : "Supported",
"inkbunny" : "Supported",
"instagram" : "Supported",
"kemonoparty" : "Supported",
"mangadex" : "Supported",
"mangoxo" : "Supported",
"mastodon.social": _OAUTH,
"newgrounds" : "Supported",
"nijie" : "Required",
"patreon" : _COOKIES,
"pawoo" : _OAUTH,
"pillowfort" : "Supported",
"pinterest" : _COOKIES,
"pixiv" : _OAUTH,
"ponybooru" : "API Key",
"reddit" : _OAUTH,
"sankaku" : "Supported",
"seiga" : "Required",
"seisoparty" : "Supported",
"smugmug" : _OAUTH,
"subscribestar" : "Supported",
"tapas" : "Supported",
"tsumino" : "Supported",
"tumblr" : _OAUTH,
"twitter" : "Supported",
"wallhaven" : _APIKEY_WH,
"weasyl" : _APIKEY_WY,
}
IGNORE_LIST = (
"directlink",
"oauth",
"recursive",
"test",
"ytdl",
"generic",
)
def domain(cls):
"""Return the web-domain related to an extractor class"""
try:
url = sys.modules[cls.__module__].__doc__.split()[-1]
if url.startswith("http"):
return url
except Exception:
pass
if hasattr(cls, "root") and cls.root:
return cls.root + "/"
if hasattr(cls, "https"):
scheme = "https" if cls.https else "http"
netloc = cls.__doc__.split()[-1]
return "{}://{}/".format(scheme, netloc)
test = next(cls._get_tests(), None)
if test:
url = test[0]
return url[:url.find("/", 8)+1]
return ""
def category_text(c):
"""Return a human-readable representation of a category"""
return CATEGORY_MAP.get(c) or c.capitalize()
def subcategory_text(c, sc):
"""Return a human-readable representation of a subcategory"""
if c in SUBCATEGORY_MAP:
scm = SUBCATEGORY_MAP[c]
if sc in scm:
return scm[sc]
if sc in SUBCATEGORY_MAP:
return SUBCATEGORY_MAP[sc]
sc = sc.capitalize()
return sc if sc.endswith("s") else sc + "s"
def category_key(c):
"""Generate sorting keys by category"""
return category_text(c[0]).lower()
def subcategory_key(sc):
"""Generate sorting keys by subcategory"""
return "A" if sc == "issue" else sc
def build_extractor_list():
"""Generate a sorted list of lists of extractor classes"""
categories = collections.defaultdict(lambda: collections.defaultdict(list))
default = categories[""]
domains = {}
for extr in extractor._list_classes():
category = extr.category
if category in IGNORE_LIST:
continue
if category:
default[category].append(extr.subcategory)
if category not in domains:
domains[category] = domain(extr)
else:
base = categories[extr.basecategory]
for category, root in extr.instances:
base[category].append(extr.subcategory)
if category not in domains:
if not root:
# use domain from first matching test
for url, _ in extr._get_tests():
if extr.from_url(url).category == category:
root = url[:url.index("/", 8)]
break
else:
continue
domains[category] = root + "/"
# sort subcategory lists
for base in categories.values():
for subcategories in base.values():
subcategories.sort(key=subcategory_key)
# add e-hentai.org
default["e-hentai"] = default["exhentai"]
domains["e-hentai"] = domains["exhentai"].replace("x", "-")
# add hentai-cosplays sister sites (hentai-img, porn-images-xxx)
default["hentaiimg"] = default["hentaicosplays"]
domains["hentaiimg"] = "https://hentai-img.com/"
default["pornimagesxxx"] = default["hentaicosplays"]
domains["pornimagesxxx"] = "https://porn-images-xxx.com/"
return categories, domains
# define table columns
COLUMNS = (
("Site", 20,
lambda c, scs, d: category_text(c)),
("URL" , 35,
lambda c, scs, d: d),
("Capabilities", 50,
lambda c, scs, d: ", ".join(subcategory_text(c, sc) for sc in scs
if subcategory_text(c, sc))),
("Authentication", 16,
lambda c, scs, d: AUTH_MAP.get(c, "")),
)
def generate_output(columns, categories, domains):
thead = []
append = thead.append
append("<tr>")
for column in columns:
append(" <th>" + column[0] + "</th>")
append("</tr>")
tbody = []
append = tbody.append
for name, base in categories.items():
if name and base:
name = BASE_MAP.get(name) or (name.capitalize() + " Instances")
append('\n<tr>\n <td colspan="4"><strong>' +
name + '</strong></td>\n</tr>')
clist = base.items()
else:
clist = sorted(base.items(), key=category_key)
for category, subcategories in clist:
append("<tr>")
for column in columns:
domain = domains[category]
content = column[2](category, subcategories, domain)
append(" <td>" + content + "</td>")
append("</tr>")
TEMPLATE = """# Supported Sites
<!-- auto-generated by {} -->
Consider all sites to be NSFW unless otherwise known.
<table>
<thead valign="bottom">
{}
</thead>
<tbody valign="top">
{}
</tbody>
</table>
"""
return TEMPLATE.format(
"/".join(os.path.normpath(__file__).split(os.sep)[-2:]),
"\n".join(thead),
"\n".join(tbody),
)
categories, domains = build_extractor_list()
outfile = sys.argv[1] if len(sys.argv) > 1 else "supportedsites.md"
with open(util.path("docs", outfile), "w") as fp:
fp.write(generate_output(COLUMNS, categories, domains))
|
mikf/gallery-dl
|
scripts/supportedsites.py
|
Python
|
gpl-2.0
| 13,530
|
[
"MOE"
] |
d6a7ab4a64f38b2591eda38e815fb8298798145431ff6b068fba6dfa19e5f723
|
# -*- coding: utf-8 -*-
#
# This file is part of cclib (http://cclib.github.io), a library for parsing
# and interpreting the results of computational chemistry packages.
#
# Copyright (C) 2006-2014, the cclib development team
#
# The library is free software, distributed under the terms of
# the GNU Lesser General Public version 2.1 or later. You should have
# received a copy of the license along with cclib. You can also access
# the full license online at http://www.gnu.org/copyleft/lgpl.html.
"""Example analyses and calculations based on data parsed by cclib."""
from .cda import CDA
from .cspa import CSPA
from .density import Density
from .fragments import FragmentAnalysis
from .lpa import LPA
from .mbo import MBO
from .mpa import MPA
from .nuclear import Nuclear
from .opa import OPA
from .volume import Volume
|
ghutchis/cclib
|
src/cclib/method/__init__.py
|
Python
|
lgpl-2.1
| 826
|
[
"cclib"
] |
15a5cc92adac53eb691aab2491744e3524b079664ed7232483e778689342a611
|
import random, math
class Connection:
weight = None
deltaWeight = None
def __init__(self, weight = None, deltaWeight = None):
self.weight = random.random() if weight == None else weight
self.deltaWeight = 0.0 if deltaWeight == None else deltaWeight
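    # Serialise to / restore from plain dicts so a whole trained Net can be
    # round-tripped through JSON (see Net.toJSON / Net.fromJSON below).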
def toJSON(self):
return {'__class__': 'Connection', '__weight__': self.weight, '__deltaWeight__': self.deltaWeight}
@classmethod
def fromJSON(cls, JSON):
if '__class__' in JSON:
if JSON['__class__'] == 'Connection':
return Connection(JSON['__weight__'], JSON['__deltaWeight__'])
class Neuron:
eta = 0.10 # [0.0..1.0] overall net training rate
alpha = 0.5 # [0.0..n] multiplier of last weight change (momentum)
output = None
outputWeights = None
index = None
layer = None
gradient = None
def __init__(self, numOutputs, index, layer, eta = None, alpha = None, output = None, gradient = None, outputWeights = None):
self.index = index
self.layer = layer
self.eta = 0.10 if eta == None else eta
self.alpha = 0.5 if alpha == None else alpha
self.output = 0.0 if output == None else output
self.gradient = 0.0 if gradient == None else gradient
if outputWeights == None:
self.outputWeights = list()
for c in range(numOutputs):
self.outputWeights.append(Connection())
else:
self.outputWeights = outputWeights
@classmethod
def transferFunction(cls, x):
return math.tanh(x)
@classmethod
def transferFunctionDerivative(cls, x):
return 1.0 - x * x
def feedForward(self, prevLayer):
sum = 0.0
for n in range(len(prevLayer)):
sum += prevLayer[n].output * prevLayer[n].outputWeights[self.index].weight
self.output = Neuron.transferFunction(sum)
def calcOutputGradients(self, targetVal):
self.gradient = (targetVal - self.output) * Neuron.transferFunctionDerivative(self.output)
def calcHiddenGradients(self, nextLayer):
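        # Chain rule: sum this neuron's weighted contributions to the next
        # layer's gradients, then scale by the transfer-function derivative.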
sum = 0.0
for n in range(len(nextLayer) - 1):
sum += self.outputWeights[n].weight * nextLayer[n].gradient
self.gradient = sum * Neuron.transferFunctionDerivative(self.output)
def updateInputWeights(self, prevLayer):
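        # Delta = learning rate (eta) * upstream output * gradient, plus a
        # momentum term: alpha times the previous delta weight.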
for n in range(len(prevLayer)):
neuron = prevLayer[n]
            oldDeltaWeight = neuron.outputWeights[self.index].deltaWeight
            newDeltaWeight = self.eta * neuron.output * self.gradient + self.alpha * oldDeltaWeight
neuron.outputWeights[self.index].deltaWeight = newDeltaWeight
neuron.outputWeights[self.index].weight += newDeltaWeight
def toJSON(self):
return {'__class__': 'Neuron', '__eta__': self.eta, '__alpha__': self.alpha, '__gradient__': self.gradient, '__index__': self.index, '__layer__': self.layer, '__output__': self.output, '__outputWeights__': [connection.toJSON() for connection in self.outputWeights]}
@classmethod
def fromJSON(cls, JSON):
if '__class__' in JSON:
if JSON['__class__'] == 'Neuron':
index = JSON['__index__']
layer = JSON['__layer__']
eta = JSON['__eta__']
alpha = JSON['__alpha__']
output = JSON['__output__']
gradient = JSON['__gradient__']
outputWeights = [Connection.fromJSON(connection) for connection in JSON['__outputWeights__']]
return Neuron(None, index, layer, eta, alpha, output, gradient, outputWeights)
class Net:
layers = None
netError = None
averageError = None
smoothingFactor = None
def __init__(self, topology, netError = None, averageError = None, smoothingFactor = None, layers = None, eta = None, alpha = None):
self.netError = 0.0 if netError == None else netError
self.averageError = 0.0 if averageError == None else averageError
self.smoothingFactor = 100.0 if smoothingFactor == None else smoothingFactor
if layers == None:
numLayers = len(topology)
self.layers = list()
for layerNum in range(numLayers):
self.layers.append(list())
numOutputs = 0 if layerNum + 1 == len(topology) else int(topology[layerNum + 1])
for neuronNum in range(int(topology[layerNum]) + 1):
self.layers[-1].append(Neuron(numOutputs, neuronNum, layerNum, eta, alpha))
self.layers[-1][-1].output = 1
else:
self.layers = layers
def feedForward(self, inputVals):
assert len(inputVals) + 1 == len(self.layers[0])
for i in range(len(inputVals)):
self.layers[0][i].output = inputVals[i]
for layerNum in range(1, len(self.layers)):
prevLayer = self.layers[layerNum - 1]
for n in range(len(self.layers[layerNum]) - 1):
self.layers[layerNum][n].feedForward(prevLayer)
def backPropagate(self, targetVals):
outputLayer = self.layers[-1]
self.netError = 0.0
for n in range(len(outputLayer) - 1):
self.netError += (targetVals[n] - outputLayer[n].output) * (targetVals[n] - outputLayer[n].output)
self.netError = math.sqrt(self.netError / (len(outputLayer) - 1))
        self.averageError = (self.averageError * self.smoothingFactor + self.netError) / (self.smoothingFactor + 1.0)
for n in range(len(outputLayer) - 1):
outputLayer[n].calcOutputGradients(targetVals[n])
for layerNum in reversed(range(1, len(self.layers) - 1)):
hiddenLayer = self.layers[layerNum]
nextLayer = self.layers[layerNum + 1]
for n in range(len(hiddenLayer)):
hiddenLayer[n].calcHiddenGradients(nextLayer)
for layerNum in reversed(range(1, len(self.layers))):
layer = self.layers[layerNum]
prevLayer = self.layers[layerNum - 1]
for n in range(len(layer) - 1):
layer[n].updateInputWeights(prevLayer)
def toJSON(self):
return {'__class__': 'Net', '__netError__': self.netError, '__averageError__': self.averageError, '__smoothingFactor__': self.smoothingFactor, '__layers__': [[neuron.toJSON() for neuron in layer] for layer in self.layers]}
@classmethod
def fromJSON(cls, JSON):
if '__class__' in JSON:
if JSON['__class__'] == 'Net':
netError = JSON['__netError__']
averageError = JSON['__averageError__']
smoothingFactor = JSON['__smoothingFactor__']
layers = [[Neuron.fromJSON(neuron) for neuron in layer] for layer in JSON['__layers__']]
return Net(None, netError, averageError, smoothingFactor, layers)
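# A minimal usage sketch (hypothetical, for illustration only): train the
# net on XOR and watch the smoothed error fall.
if __name__ == '__main__':
    xorNet = Net([2, 4, 1])
    for epoch in range(2000):
        for a, b in ((0, 0), (0, 1), (1, 0), (1, 1)):
            xorNet.feedForward([a, b])
            xorNet.backPropagate([1.0 if a != b else 0.0])
    print(xorNet.averageError)  # approaches 0 as training converges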
|
aaiijmrtt/LEARNING
|
v2/NN.py
|
Python
|
mit
| 5,972
|
[
"NEURON"
] |
57ae1996fbb66264263e8118a4c567ef70b0900e98bb0da429cd8fd00378f6c0
|
"""
=============================================
Whitening evoked data with a noise covariance
=============================================
Evoked data are loaded and then whitened using a given noise covariance
matrix. It's an excellent quality check to see if baseline signals match
the assumption of Gaussian white noise during the baseline period.
Covariance estimation and diagnostic plots are based on [1]_.
References
----------
.. [1] Engemann D. and Gramfort A. (2015) Automated model selection in
covariance estimation and spatial whitening of MEG and EEG signals, vol.
108, 328-342, NeuroImage.
"""
# Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Denis A. Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import mne
from mne import io
from mne.datasets import sample
from mne.cov import compute_covariance
print(__doc__)
###############################################################################
# Set parameters
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'
raw = io.read_raw_fif(raw_fname, preload=True)
raw.filter(1, 40, n_jobs=1, fir_design='firwin')
raw.info['bads'] += ['MEG 2443'] # bads + 1 more
events = mne.read_events(event_fname)
# let's look at rare events, button presses
event_id, tmin, tmax = 2, -0.2, 0.5
reject = dict(mag=4e-12, grad=4000e-13, eeg=80e-6)
epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=('meg', 'eeg'),
baseline=None, reject=reject, preload=True)
# Uncomment next line to use fewer samples and study regularization effects
# epochs = epochs[:20] # For your data, use as many samples as you can!
###############################################################################
# Compute covariance using automated regularization
method_params = dict(diagonal_fixed=dict(mag=0.01, grad=0.01, eeg=0.01))
noise_covs = compute_covariance(epochs, tmin=None, tmax=0, method='auto',
return_estimators=True, verbose=True, n_jobs=1,
projs=None, rank=None,
method_params=method_params)
# With "return_estimator=True" all estimated covariances sorted
# by log-likelihood are returned.
print('Covariance estimates sorted from best to worst')
for c in noise_covs:
print("%s : %s" % (c['method'], c['loglik']))
###############################################################################
# Show the evoked data:
evoked = epochs.average()
evoked.plot(time_unit='s') # plot evoked response
###############################################################################
# We can then show whitening for our various noise covariance estimates.
#
# Here we should look to see if baseline signals match the
# assumption of Gaussian white noise. We expect values centered at
# 0 within 2 standard deviations for 95% of the time points.
#
# For the global field power (GFP) we expect a value of 1.
evoked.plot_white(noise_covs, time_unit='s')
|
adykstra/mne-python
|
examples/visualization/plot_evoked_whitening.py
|
Python
|
bsd-3-clause
| 3,115
|
[
"Gaussian"
] |
5e0d13a1fdde492620d4168d500e5cf9ceb9ba0090ea2611167686a382c8411c
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.