repository_name stringclasses 316 values | func_path_in_repository stringlengths 6 223 | func_name stringlengths 1 134 | language stringclasses 1 value | func_code_string stringlengths 57 65.5k | func_documentation_string stringlengths 1 46.3k | split_name stringclasses 1 value | func_code_url stringlengths 91 315 | called_functions listlengths 1 156 ⌀ | enclosing_scope stringlengths 2 1.48M |
|---|---|---|---|---|---|---|---|---|---|
mugurbil/gnm | gnm/utils.py | update_params | python | def update_params(state, t):
mu = (1.-t)*state['x'] + t*state['mu']
L = state['L'] / np.sqrt(2.*t - t**2)
return mu, L | Update parameters
updates mean and precision to the step size
Inputs:
state :
mu :
mean
L :
cholesky factor of the precision matrix
t :
step size
Outputs:
mu :
updated mean
L :
updated cholesky factor of the precision matrix | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/utils.py#L22-L42 | null | # -*- coding: utf-8 -*-
"""
Functions needed by the gnm sampler
1. test
2. update_params
3. log_K
4. multi_normal
5. det
6. optimize
7. function
"""
__all__ = ['test','update_params','log_K','multi_normal', 'det','optimize','function']
import numpy as np
la = np.linalg
def test():
import quickstart
def log_K(Z, X, t):
"""
Log K
Log of the proposal probability density function for gnm
Inputs :
Z :
proposed to
x :
proposed from
Outputs :
log of the probability density function
"""
m, L = update_params(X, t)
z = Z['x']
return np.log(det(L))-la.norm(np.dot(L.T,z-m))**2/2.
def multi_normal(X, t):
"""
Multivariate normal sampler:
Generates normal samples with mean m, precision matrix LL'
Inputs:
x :
propose from
Outputs:
normal with mean m and precision LL'
"""
m, L = update_params(X, t)
z = np.random.standard_normal(np.shape(m)) # generate i.i.d N(0,1)
return la.solve(L.T,z)+m
def det(L):
"""
Determinant
Compute the determinant given a lower triangular matrix
Inputs:
L :
lower triangular matrix
Outputs:
det_L :
determinant of L
"""
size_L = L.shape
if np.size(L) == 1:
return np.array(L)
else:
try:
assert np.all(np.tril(L)==L)
except AssertionError:
print 'Error: Input is not a lower triangular matrix.'
return 0
try:
assert size_L[0] == size_L[1]
except AssertionError:
print 'Error: Not a square matrix.'
return 0
det_L = 1.
for i in xrange(size_L[1]):
det_L = det_L*L[i,i]
return det_L
def optimize(t, f_0, d_f_0, f_t, d_f_t, t1=0.05, t2=0.5):
"""
Third order approximation to find the minimum of the function
f : function to be optimized over
Inputs :
t :
previous step size
f_0 : f(0),
function value at 0
d_f_0: f'(0),
the derivative of the function at 0
f_t : f(t),
function value at t
d_f_t : f'(t),
the derivative of the function at t
t1 :
step size reduction if minimum is at 0 or it can't be found
t2 :
step size reduction if minimum is at 1
Outputs :
alpha_new :
the new step size that minimizes the function
"""
if t <= 0 :
print("Error: please enter non-negative t")
return t
a = (t*d_f_t-2*f_t+2*f_0+t*d_f_0)/(t**3)
b = (f_t-f_0-t*d_f_0)/(t**2)
c = d_f_0
A = 3*a
B = b-t*a
C = B**2-A*c
if C == 0. :
if c>0 or d_f_t>0 :
t_new = t1*t
else :
t_new = t2*t
elif A == 0 :
t_new = -c/2./B
elif C > 0 :
t_new = (-B+np.sqrt(C))/A
else :
t_new = t1*t
# check the bounds on new step size
if t_new < t1*t :
t_new = t1*t
elif t_new > t2*t :
t_new = t2*t
return t_new
class function(object):
def __init__(self, f, args):
"""
Init
Initialize the developer function class
Inputs :
f : user defined function
---
Inputs of f :
x :
input value
args :
the arguments that the function takes
Outputs of f :
chi_x :
boolean flag indicating whether the function is
defined at x or not
f_x : f(x),
function value at x
J_x : f'(x),
the jacobian of the function evaluated at x
Demo :
chi_x, f_x, J_x = f(x,args)
---
args :
the arguments that the user defined function takes
"""
self._f = f
self._args = args
self._count = 0
def __call__(self, x):
"""
Call
Calls the user defined function
Inputs:
x :
input value
Outputs:
chi_x, f_x, J_x = f(x,args)
"""
self._count += 1
x = np.reshape(np.array(x), (-1))
chi_x, f_x, J_x = self._f(x, self.args)
f_x = np.reshape(np.array(f_x), (-1,1))
return chi_x, f_x, np.array(J_x)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
Inputs :
x_min :
lower bound on the domain
x_max :
upper bound on the domain
** Warning **
x_min and x_max must be arrays of the same dimension
Optional inputs :
dx : (2*10^-4)
the ratio of dx to the size of the domain
N : (1000)
number of test points
eps_max : (10^-4)
the maximum value error is allowed to be to confirm convergence
p : (2)
to specify the norm of the error (p-norm)
l_max : (40)
maximum number of tries to reduce dx
r : (0.5)
dx will be multiplied by this constant each step the error
exceeds error_bound until l_max is reached
** Warning **
keep in mind machine precision when changing l_max and r
Outputs :
error :
* 1 if did it did not pass the checks,
* 0 if converged,
* eps, the error of the numerical gradient point if no convergence
"""
x_min = np.reshape(np.array(x_min), (-1))
x_max = np.reshape(np.array(x_max), (-1))
# begin checks
try:
for i in xrange(np.size(x_min)):
assert x_min[i] < x_max[i]
except:
print("Error: All values of x_min should be less than the "
+"corresponding values for x_max.")
exit(0)
# end checks
# begin test
k = 0
while k < N :
# reset dx each time
D_x = (x_max - x_min) * dx
# Sample random points in the state space
x = np.random.uniform(x_min + D_x, x_max - D_x)
# Compute jacobian at x
l = 0
test_convergence = 1
while test_convergence:
chi_x, f_x, J_x = self.__call__(x)
D_f_x = J_x * 0.
for j in xrange(np.size(x)):
# calculate the derivative of each component of f
d_x = D_x * 0.
d_x[j] = D_x[j]
chi_x_r, f_x_r, J_x_r = self.__call__(x + d_x)
chi_x_l, f_x_l, J_x_l = self.__call__(x - d_x)
# check if the function is defined on these points
if( not(chi_x and chi_x_r and chi_x_l)):
# discard this trial if one of the values is not defined
test_convergence = 0 # break outer loop
break
d_f = (f_x_r - f_x_l) / (2. * d_x[j])
D_f_x[:,j] = d_f[:,0]
eps = la.norm(D_f_x - J_x, p) / np.size(J_x)
if (eps < eps_max):
test_convergence = 0 # break outer loop
k += 1
else:
D_x = D_x * r
if (l > l_max): # numerical gradient did not converge
return eps
l += 1
return 0
# end test
"""
Properties:
1. f
2. args
3. count
"""
@property
def f(self):
return self._f
@property
def args(self):
return self._args
@property
def count(self):
return self._count
## end function ## |
mugurbil/gnm | gnm/utils.py | log_K | python | def log_K(Z, X, t):
m, L = update_params(X, t)
z = Z['x']
return np.log(det(L))-la.norm(np.dot(L.T,z-m))**2/2. | Log K
Log of the proposal probability density function for gnm
Inputs :
Z :
proposed to
x :
proposed from
Outputs :
log of the probability density function | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/utils.py#L44-L58 | [
"def det(L):\n \"\"\"\n Determinant\n Compute the determinant given a lower triangular matrix\n Inputs: \n L :\n lower triangular matrix\n Outputs: \n det_L : \n determinant of L\n \"\"\"\n size_L = L.shape\n if np.size(L) == 1:\n return np.array(L)\n else: \n try: \n assert np.all(np.tril(L)==L)\n except AssertionError:\n print 'Error: Input is not a lower triangular matrix.'\n return 0\n try:\n assert size_L[0] == size_L[1]\n except AssertionError:\n print 'Error: Not a square matrix.'\n return 0\n det_L = 1.\n for i in xrange(size_L[1]):\n det_L = det_L*L[i,i]\n return det_L\n",
"def update_params(state, t):\n \"\"\"\n Update parameters\n updates mean and precision to the step size\n Inputs:\n state :\n mu :\n mean\n L :\n cholesky factor of the precision matrix\n t : \n step size\n Outputs:\n mu :\n updated mean\n L :\n updated cholesky factor of the precision matrix \n \"\"\"\n mu = (1.-t)*state['x'] + t*state['mu']\n L = state['L'] / np.sqrt(2.*t - t**2)\n return mu, L\n"
] | # -*- coding: utf-8 -*-
"""
Functions needed by the gnm sampler
1. test
2. update_params
3. log_K
4. multi_normal
5. det
6. optimize
7. function
"""
__all__ = ['test','update_params','log_K','multi_normal', 'det','optimize','function']
import numpy as np
la = np.linalg
def test():
import quickstart
def update_params(state, t):
"""
Update parameters
updates mean and precision to the step size
Inputs:
state :
mu :
mean
L :
cholesky factor of the precision matrix
t :
step size
Outputs:
mu :
updated mean
L :
updated cholesky factor of the precision matrix
"""
mu = (1.-t)*state['x'] + t*state['mu']
L = state['L'] / np.sqrt(2.*t - t**2)
return mu, L
def multi_normal(X, t):
"""
Multivariate normal sampler:
Generates normal samples with mean m, precision matrix LL'
Inputs:
x :
propose from
Outputs:
normal with mean m and precision LL'
"""
m, L = update_params(X, t)
z = np.random.standard_normal(np.shape(m)) # generate i.i.d N(0,1)
return la.solve(L.T,z)+m
def det(L):
"""
Determinant
Compute the determinant given a lower triangular matrix
Inputs:
L :
lower triangular matrix
Outputs:
det_L :
determinant of L
"""
size_L = L.shape
if np.size(L) == 1:
return np.array(L)
else:
try:
assert np.all(np.tril(L)==L)
except AssertionError:
print 'Error: Input is not a lower triangular matrix.'
return 0
try:
assert size_L[0] == size_L[1]
except AssertionError:
print 'Error: Not a square matrix.'
return 0
det_L = 1.
for i in xrange(size_L[1]):
det_L = det_L*L[i,i]
return det_L
def optimize(t, f_0, d_f_0, f_t, d_f_t, t1=0.05, t2=0.5):
"""
Third order approximation to find the minimum of the function
f : function to be optimized over
Inputs :
t :
previous step size
f_0 : f(0),
function value at 0
d_f_0: f'(0),
the derivative of the function at 0
f_t : f(t),
function value at t
d_f_t : f'(t),
the derivative of the function at t
t1 :
step size reduction if minimum is at 0 or it can't be found
t2 :
step size reduction if minimum is at 1
Outputs :
alpha_new :
the new step size that minimizes the function
"""
if t <= 0 :
print("Error: please enter non-negative t")
return t
a = (t*d_f_t-2*f_t+2*f_0+t*d_f_0)/(t**3)
b = (f_t-f_0-t*d_f_0)/(t**2)
c = d_f_0
A = 3*a
B = b-t*a
C = B**2-A*c
if C == 0. :
if c>0 or d_f_t>0 :
t_new = t1*t
else :
t_new = t2*t
elif A == 0 :
t_new = -c/2./B
elif C > 0 :
t_new = (-B+np.sqrt(C))/A
else :
t_new = t1*t
# check the bounds on new step size
if t_new < t1*t :
t_new = t1*t
elif t_new > t2*t :
t_new = t2*t
return t_new
class function(object):
def __init__(self, f, args):
"""
Init
Initialize the developer function class
Inputs :
f : user defined function
---
Inputs of f :
x :
input value
args :
the arguments that the function takes
Outputs of f :
chi_x :
boolean flag indicating whether the function is
defined at x or not
f_x : f(x),
function value at x
J_x : f'(x),
the jacobian of the function evaluated at x
Demo :
chi_x, f_x, J_x = f(x,args)
---
args :
the arguments that the user defined function takes
"""
self._f = f
self._args = args
self._count = 0
def __call__(self, x):
"""
Call
Calls the user defined function
Inputs:
x :
input value
Outputs:
chi_x, f_x, J_x = f(x,args)
"""
self._count += 1
x = np.reshape(np.array(x), (-1))
chi_x, f_x, J_x = self._f(x, self.args)
f_x = np.reshape(np.array(f_x), (-1,1))
return chi_x, f_x, np.array(J_x)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
Inputs :
x_min :
lower bound on the domain
x_max :
upper bound on the domain
** Warning **
x_min and x_max must be arrays of the same dimension
Optional inputs :
dx : (2*10^-4)
the ratio of dx to the size of the domain
N : (1000)
number of test points
eps_max : (10^-4)
the maximum value error is allowed to be to confirm convergence
p : (2)
to specify the norm of the error (p-norm)
l_max : (40)
maximum number of tries to reduce dx
r : (0.5)
dx will be multiplied by this constant each step the error
exceeds error_bound until l_max is reached
** Warning **
keep in mind machine precision when changing l_max and r
Outputs :
error :
* 1 if did it did not pass the checks,
* 0 if converged,
* eps, the error of the numerical gradient point if no convergence
"""
x_min = np.reshape(np.array(x_min), (-1))
x_max = np.reshape(np.array(x_max), (-1))
# begin checks
try:
for i in xrange(np.size(x_min)):
assert x_min[i] < x_max[i]
except:
print("Error: All values of x_min should be less than the "
+"corresponding values for x_max.")
exit(0)
# end checks
# begin test
k = 0
while k < N :
# reset dx each time
D_x = (x_max - x_min) * dx
# Sample random points in the state space
x = np.random.uniform(x_min + D_x, x_max - D_x)
# Compute jacobian at x
l = 0
test_convergence = 1
while test_convergence:
chi_x, f_x, J_x = self.__call__(x)
D_f_x = J_x * 0.
for j in xrange(np.size(x)):
# calculate the derivative of each component of f
d_x = D_x * 0.
d_x[j] = D_x[j]
chi_x_r, f_x_r, J_x_r = self.__call__(x + d_x)
chi_x_l, f_x_l, J_x_l = self.__call__(x - d_x)
# check if the function is defined on these points
if( not(chi_x and chi_x_r and chi_x_l)):
# discard this trial if one of the values is not defined
test_convergence = 0 # break outer loop
break
d_f = (f_x_r - f_x_l) / (2. * d_x[j])
D_f_x[:,j] = d_f[:,0]
eps = la.norm(D_f_x - J_x, p) / np.size(J_x)
if (eps < eps_max):
test_convergence = 0 # break outer loop
k += 1
else:
D_x = D_x * r
if (l > l_max): # numerical gradient did not converge
return eps
l += 1
return 0
# end test
"""
Properties:
1. f
2. args
3. count
"""
@property
def f(self):
return self._f
@property
def args(self):
return self._args
@property
def count(self):
return self._count
## end function ## |
mugurbil/gnm | gnm/utils.py | multi_normal | python | def multi_normal(X, t):
m, L = update_params(X, t)
z = np.random.standard_normal(np.shape(m)) # generate i.i.d N(0,1)
return la.solve(L.T,z)+m | Multivariate normal sampler:
Generates normal samples with mean m, precision matrix LL'
Inputs:
x :
propose from
Outputs:
normal with mean m and precision LL' | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/utils.py#L60-L72 | [
"def update_params(state, t):\n \"\"\"\n Update parameters\n updates mean and precision to the step size\n Inputs:\n state :\n mu :\n mean\n L :\n cholesky factor of the precision matrix\n t : \n step size\n Outputs:\n mu :\n updated mean\n L :\n updated cholesky factor of the precision matrix \n \"\"\"\n mu = (1.-t)*state['x'] + t*state['mu']\n L = state['L'] / np.sqrt(2.*t - t**2)\n return mu, L\n"
] | # -*- coding: utf-8 -*-
"""
Functions needed by the gnm sampler
1. test
2. update_params
3. log_K
4. multi_normal
5. det
6. optimize
7. function
"""
__all__ = ['test','update_params','log_K','multi_normal', 'det','optimize','function']
import numpy as np
la = np.linalg
def test():
import quickstart
def update_params(state, t):
"""
Update parameters
updates mean and precision to the step size
Inputs:
state :
mu :
mean
L :
cholesky factor of the precision matrix
t :
step size
Outputs:
mu :
updated mean
L :
updated cholesky factor of the precision matrix
"""
mu = (1.-t)*state['x'] + t*state['mu']
L = state['L'] / np.sqrt(2.*t - t**2)
return mu, L
def log_K(Z, X, t):
"""
Log K
Log of the proposal probability density function for gnm
Inputs :
Z :
proposed to
x :
proposed from
Outputs :
log of the probability density function
"""
m, L = update_params(X, t)
z = Z['x']
return np.log(det(L))-la.norm(np.dot(L.T,z-m))**2/2.
def det(L):
"""
Determinant
Compute the determinant given a lower triangular matrix
Inputs:
L :
lower triangular matrix
Outputs:
det_L :
determinant of L
"""
size_L = L.shape
if np.size(L) == 1:
return np.array(L)
else:
try:
assert np.all(np.tril(L)==L)
except AssertionError:
print 'Error: Input is not a lower triangular matrix.'
return 0
try:
assert size_L[0] == size_L[1]
except AssertionError:
print 'Error: Not a square matrix.'
return 0
det_L = 1.
for i in xrange(size_L[1]):
det_L = det_L*L[i,i]
return det_L
def optimize(t, f_0, d_f_0, f_t, d_f_t, t1=0.05, t2=0.5):
"""
Third order approximation to find the minimum of the function
f : function to be optimized over
Inputs :
t :
previous step size
f_0 : f(0),
function value at 0
d_f_0: f'(0),
the derivative of the function at 0
f_t : f(t),
function value at t
d_f_t : f'(t),
the derivative of the function at t
t1 :
step size reduction if minimum is at 0 or it can't be found
t2 :
step size reduction if minimum is at 1
Outputs :
alpha_new :
the new step size that minimizes the function
"""
if t <= 0 :
print("Error: please enter non-negative t")
return t
a = (t*d_f_t-2*f_t+2*f_0+t*d_f_0)/(t**3)
b = (f_t-f_0-t*d_f_0)/(t**2)
c = d_f_0
A = 3*a
B = b-t*a
C = B**2-A*c
if C == 0. :
if c>0 or d_f_t>0 :
t_new = t1*t
else :
t_new = t2*t
elif A == 0 :
t_new = -c/2./B
elif C > 0 :
t_new = (-B+np.sqrt(C))/A
else :
t_new = t1*t
# check the bounds on new step size
if t_new < t1*t :
t_new = t1*t
elif t_new > t2*t :
t_new = t2*t
return t_new
class function(object):
def __init__(self, f, args):
"""
Init
Initialize the developer function class
Inputs :
f : user defined function
---
Inputs of f :
x :
input value
args :
the arguments that the function takes
Outputs of f :
chi_x :
boolean flag indicating whether the function is
defined at x or not
f_x : f(x),
function value at x
J_x : f'(x),
the jacobian of the function evaluated at x
Demo :
chi_x, f_x, J_x = f(x,args)
---
args :
the arguments that the user defined function takes
"""
self._f = f
self._args = args
self._count = 0
def __call__(self, x):
"""
Call
Calls the user defined function
Inputs:
x :
input value
Outputs:
chi_x, f_x, J_x = f(x,args)
"""
self._count += 1
x = np.reshape(np.array(x), (-1))
chi_x, f_x, J_x = self._f(x, self.args)
f_x = np.reshape(np.array(f_x), (-1,1))
return chi_x, f_x, np.array(J_x)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
Inputs :
x_min :
lower bound on the domain
x_max :
upper bound on the domain
** Warning **
x_min and x_max must be arrays of the same dimension
Optional inputs :
dx : (2*10^-4)
the ratio of dx to the size of the domain
N : (1000)
number of test points
eps_max : (10^-4)
the maximum value error is allowed to be to confirm convergence
p : (2)
to specify the norm of the error (p-norm)
l_max : (40)
maximum number of tries to reduce dx
r : (0.5)
dx will be multiplied by this constant each step the error
exceeds error_bound until l_max is reached
** Warning **
keep in mind machine precision when changing l_max and r
Outputs :
error :
* 1 if did it did not pass the checks,
* 0 if converged,
* eps, the error of the numerical gradient point if no convergence
"""
x_min = np.reshape(np.array(x_min), (-1))
x_max = np.reshape(np.array(x_max), (-1))
# begin checks
try:
for i in xrange(np.size(x_min)):
assert x_min[i] < x_max[i]
except:
print("Error: All values of x_min should be less than the "
+"corresponding values for x_max.")
exit(0)
# end checks
# begin test
k = 0
while k < N :
# reset dx each time
D_x = (x_max - x_min) * dx
# Sample random points in the state space
x = np.random.uniform(x_min + D_x, x_max - D_x)
# Compute jacobian at x
l = 0
test_convergence = 1
while test_convergence:
chi_x, f_x, J_x = self.__call__(x)
D_f_x = J_x * 0.
for j in xrange(np.size(x)):
# calculate the derivative of each component of f
d_x = D_x * 0.
d_x[j] = D_x[j]
chi_x_r, f_x_r, J_x_r = self.__call__(x + d_x)
chi_x_l, f_x_l, J_x_l = self.__call__(x - d_x)
# check if the function is defined on these points
if( not(chi_x and chi_x_r and chi_x_l)):
# discard this trial if one of the values is not defined
test_convergence = 0 # break outer loop
break
d_f = (f_x_r - f_x_l) / (2. * d_x[j])
D_f_x[:,j] = d_f[:,0]
eps = la.norm(D_f_x - J_x, p) / np.size(J_x)
if (eps < eps_max):
test_convergence = 0 # break outer loop
k += 1
else:
D_x = D_x * r
if (l > l_max): # numerical gradient did not converge
return eps
l += 1
return 0
# end test
"""
Properties:
1. f
2. args
3. count
"""
@property
def f(self):
return self._f
@property
def args(self):
return self._args
@property
def count(self):
return self._count
## end function ## |
mugurbil/gnm | gnm/utils.py | det | python | def det(L):
size_L = L.shape
if np.size(L) == 1:
return np.array(L)
else:
try:
assert np.all(np.tril(L)==L)
except AssertionError:
print 'Error: Input is not a lower triangular matrix.'
return 0
try:
assert size_L[0] == size_L[1]
except AssertionError:
print 'Error: Not a square matrix.'
return 0
det_L = 1.
for i in xrange(size_L[1]):
det_L = det_L*L[i,i]
return det_L | Determinant
Compute the determinant given a lower triangular matrix
Inputs:
L :
lower triangular matrix
Outputs:
det_L :
determinant of L | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/utils.py#L74-L102 | null | # -*- coding: utf-8 -*-
"""
Functions needed by the gnm sampler
1. test
2. update_params
3. log_K
4. multi_normal
5. det
6. optimize
7. function
"""
__all__ = ['test','update_params','log_K','multi_normal', 'det','optimize','function']
import numpy as np
la = np.linalg
def test():
import quickstart
def update_params(state, t):
"""
Update parameters
updates mean and precision to the step size
Inputs:
state :
mu :
mean
L :
cholesky factor of the precision matrix
t :
step size
Outputs:
mu :
updated mean
L :
updated cholesky factor of the precision matrix
"""
mu = (1.-t)*state['x'] + t*state['mu']
L = state['L'] / np.sqrt(2.*t - t**2)
return mu, L
def log_K(Z, X, t):
"""
Log K
Log of the proposal probability density function for gnm
Inputs :
Z :
proposed to
x :
proposed from
Outputs :
log of the probability density function
"""
m, L = update_params(X, t)
z = Z['x']
return np.log(det(L))-la.norm(np.dot(L.T,z-m))**2/2.
def multi_normal(X, t):
"""
Multivariate normal sampler:
Generates normal samples with mean m, precision matrix LL'
Inputs:
x :
propose from
Outputs:
normal with mean m and precision LL'
"""
m, L = update_params(X, t)
z = np.random.standard_normal(np.shape(m)) # generate i.i.d N(0,1)
return la.solve(L.T,z)+m
def optimize(t, f_0, d_f_0, f_t, d_f_t, t1=0.05, t2=0.5):
"""
Third order approximation to find the minimum of the function
f : function to be optimized over
Inputs :
t :
previous step size
f_0 : f(0),
function value at 0
d_f_0: f'(0),
the derivative of the function at 0
f_t : f(t),
function value at t
d_f_t : f'(t),
the derivative of the function at t
t1 :
step size reduction if minimum is at 0 or it can't be found
t2 :
step size reduction if minimum is at 1
Outputs :
alpha_new :
the new step size that minimizes the function
"""
if t <= 0 :
print("Error: please enter non-negative t")
return t
a = (t*d_f_t-2*f_t+2*f_0+t*d_f_0)/(t**3)
b = (f_t-f_0-t*d_f_0)/(t**2)
c = d_f_0
A = 3*a
B = b-t*a
C = B**2-A*c
if C == 0. :
if c>0 or d_f_t>0 :
t_new = t1*t
else :
t_new = t2*t
elif A == 0 :
t_new = -c/2./B
elif C > 0 :
t_new = (-B+np.sqrt(C))/A
else :
t_new = t1*t
# check the bounds on new step size
if t_new < t1*t :
t_new = t1*t
elif t_new > t2*t :
t_new = t2*t
return t_new
class function(object):
def __init__(self, f, args):
"""
Init
Initialize the developer function class
Inputs :
f : user defined function
---
Inputs of f :
x :
input value
args :
the arguments that the function takes
Outputs of f :
chi_x :
boolean flag indicating whether the function is
defined at x or not
f_x : f(x),
function value at x
J_x : f'(x),
the jacobian of the function evaluated at x
Demo :
chi_x, f_x, J_x = f(x,args)
---
args :
the arguments that the user defined function takes
"""
self._f = f
self._args = args
self._count = 0
def __call__(self, x):
"""
Call
Calls the user defined function
Inputs:
x :
input value
Outputs:
chi_x, f_x, J_x = f(x,args)
"""
self._count += 1
x = np.reshape(np.array(x), (-1))
chi_x, f_x, J_x = self._f(x, self.args)
f_x = np.reshape(np.array(f_x), (-1,1))
return chi_x, f_x, np.array(J_x)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
Inputs :
x_min :
lower bound on the domain
x_max :
upper bound on the domain
** Warning **
x_min and x_max must be arrays of the same dimension
Optional inputs :
dx : (2*10^-4)
the ratio of dx to the size of the domain
N : (1000)
number of test points
eps_max : (10^-4)
the maximum value error is allowed to be to confirm convergence
p : (2)
to specify the norm of the error (p-norm)
l_max : (40)
maximum number of tries to reduce dx
r : (0.5)
dx will be multiplied by this constant each step the error
exceeds error_bound until l_max is reached
** Warning **
keep in mind machine precision when changing l_max and r
Outputs :
error :
* 1 if did it did not pass the checks,
* 0 if converged,
* eps, the error of the numerical gradient point if no convergence
"""
x_min = np.reshape(np.array(x_min), (-1))
x_max = np.reshape(np.array(x_max), (-1))
# begin checks
try:
for i in xrange(np.size(x_min)):
assert x_min[i] < x_max[i]
except:
print("Error: All values of x_min should be less than the "
+"corresponding values for x_max.")
exit(0)
# end checks
# begin test
k = 0
while k < N :
# reset dx each time
D_x = (x_max - x_min) * dx
# Sample random points in the state space
x = np.random.uniform(x_min + D_x, x_max - D_x)
# Compute jacobian at x
l = 0
test_convergence = 1
while test_convergence:
chi_x, f_x, J_x = self.__call__(x)
D_f_x = J_x * 0.
for j in xrange(np.size(x)):
# calculate the derivative of each component of f
d_x = D_x * 0.
d_x[j] = D_x[j]
chi_x_r, f_x_r, J_x_r = self.__call__(x + d_x)
chi_x_l, f_x_l, J_x_l = self.__call__(x - d_x)
# check if the function is defined on these points
if( not(chi_x and chi_x_r and chi_x_l)):
# discard this trial if one of the values is not defined
test_convergence = 0 # break outer loop
break
d_f = (f_x_r - f_x_l) / (2. * d_x[j])
D_f_x[:,j] = d_f[:,0]
eps = la.norm(D_f_x - J_x, p) / np.size(J_x)
if (eps < eps_max):
test_convergence = 0 # break outer loop
k += 1
else:
D_x = D_x * r
if (l > l_max): # numerical gradient did not converge
return eps
l += 1
return 0
# end test
"""
Properties:
1. f
2. args
3. count
"""
@property
def f(self):
return self._f
@property
def args(self):
return self._args
@property
def count(self):
return self._count
## end function ## |
mugurbil/gnm | gnm/utils.py | optimize | python | def optimize(t, f_0, d_f_0, f_t, d_f_t, t1=0.05, t2=0.5):
if t <= 0 :
print("Error: please enter non-negative t")
return t
a = (t*d_f_t-2*f_t+2*f_0+t*d_f_0)/(t**3)
b = (f_t-f_0-t*d_f_0)/(t**2)
c = d_f_0
A = 3*a
B = b-t*a
C = B**2-A*c
if C == 0. :
if c>0 or d_f_t>0 :
t_new = t1*t
else :
t_new = t2*t
elif A == 0 :
t_new = -c/2./B
elif C > 0 :
t_new = (-B+np.sqrt(C))/A
else :
t_new = t1*t
# check the bounds on new step size
if t_new < t1*t :
t_new = t1*t
elif t_new > t2*t :
t_new = t2*t
return t_new | Third order approximation to find the minimum of the function
f : function to be optimized over
Inputs :
t :
previous step size
f_0 : f(0),
function value at 0
d_f_0: f'(0),
the derivative of the function at 0
f_t : f(t),
function value at t
d_f_t : f'(t),
the derivative of the function at t
t1 :
step size reduction if minimum is at 0 or it can't be found
t2 :
step size reduction if minimum is at 1
Outputs :
alpha_new :
the new step size that minimizes the function | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/utils.py#L104-L156 | null | # -*- coding: utf-8 -*-
"""
Functions needed by the gnm sampler
1. test
2. update_params
3. log_K
4. multi_normal
5. det
6. optimize
7. function
"""
__all__ = ['test','update_params','log_K','multi_normal', 'det','optimize','function']
import numpy as np
la = np.linalg
def test():
import quickstart
def update_params(state, t):
"""
Update parameters
updates mean and precision to the step size
Inputs:
state :
mu :
mean
L :
cholesky factor of the precision matrix
t :
step size
Outputs:
mu :
updated mean
L :
updated cholesky factor of the precision matrix
"""
mu = (1.-t)*state['x'] + t*state['mu']
L = state['L'] / np.sqrt(2.*t - t**2)
return mu, L
def log_K(Z, X, t):
"""
Log K
Log of the proposal probability density function for gnm
Inputs :
Z :
proposed to
x :
proposed from
Outputs :
log of the probability density function
"""
m, L = update_params(X, t)
z = Z['x']
return np.log(det(L))-la.norm(np.dot(L.T,z-m))**2/2.
def multi_normal(X, t):
"""
Multivariate normal sampler:
Generates normal samples with mean m, precision matrix LL'
Inputs:
x :
propose from
Outputs:
normal with mean m and precision LL'
"""
m, L = update_params(X, t)
z = np.random.standard_normal(np.shape(m)) # generate i.i.d N(0,1)
return la.solve(L.T,z)+m
def det(L):
"""
Determinant
Compute the determinant given a lower triangular matrix
Inputs:
L :
lower triangular matrix
Outputs:
det_L :
determinant of L
"""
size_L = L.shape
if np.size(L) == 1:
return np.array(L)
else:
try:
assert np.all(np.tril(L)==L)
except AssertionError:
print 'Error: Input is not a lower triangular matrix.'
return 0
try:
assert size_L[0] == size_L[1]
except AssertionError:
print 'Error: Not a square matrix.'
return 0
det_L = 1.
for i in xrange(size_L[1]):
det_L = det_L*L[i,i]
return det_L
class function(object):
def __init__(self, f, args):
"""
Init
Initialize the developer function class
Inputs :
f : user defined function
---
Inputs of f :
x :
input value
args :
the arguments that the function takes
Outputs of f :
chi_x :
boolean flag indicating whether the function is
defined at x or not
f_x : f(x),
function value at x
J_x : f'(x),
the jacobian of the function evaluated at x
Demo :
chi_x, f_x, J_x = f(x,args)
---
args :
the arguments that the user defined function takes
"""
self._f = f
self._args = args
self._count = 0
def __call__(self, x):
"""
Call
Calls the user defined function
Inputs:
x :
input value
Outputs:
chi_x, f_x, J_x = f(x,args)
"""
self._count += 1
x = np.reshape(np.array(x), (-1))
chi_x, f_x, J_x = self._f(x, self.args)
f_x = np.reshape(np.array(f_x), (-1,1))
return chi_x, f_x, np.array(J_x)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
Inputs :
x_min :
lower bound on the domain
x_max :
upper bound on the domain
** Warning **
x_min and x_max must be arrays of the same dimension
Optional inputs :
dx : (2*10^-4)
the ratio of dx to the size of the domain
N : (1000)
number of test points
eps_max : (10^-4)
the maximum value error is allowed to be to confirm convergence
p : (2)
to specify the norm of the error (p-norm)
l_max : (40)
maximum number of tries to reduce dx
r : (0.5)
dx will be multiplied by this constant each step the error
exceeds error_bound until l_max is reached
** Warning **
keep in mind machine precision when changing l_max and r
Outputs :
error :
* 1 if did it did not pass the checks,
* 0 if converged,
* eps, the error of the numerical gradient point if no convergence
"""
x_min = np.reshape(np.array(x_min), (-1))
x_max = np.reshape(np.array(x_max), (-1))
# begin checks
try:
for i in xrange(np.size(x_min)):
assert x_min[i] < x_max[i]
except:
print("Error: All values of x_min should be less than the "
+"corresponding values for x_max.")
exit(0)
# end checks
# begin test
k = 0
while k < N :
# reset dx each time
D_x = (x_max - x_min) * dx
# Sample random points in the state space
x = np.random.uniform(x_min + D_x, x_max - D_x)
# Compute jacobian at x
l = 0
test_convergence = 1
while test_convergence:
chi_x, f_x, J_x = self.__call__(x)
D_f_x = J_x * 0.
for j in xrange(np.size(x)):
# calculate the derivative of each component of f
d_x = D_x * 0.
d_x[j] = D_x[j]
chi_x_r, f_x_r, J_x_r = self.__call__(x + d_x)
chi_x_l, f_x_l, J_x_l = self.__call__(x - d_x)
# check if the function is defined on these points
if( not(chi_x and chi_x_r and chi_x_l)):
# discard this trial if one of the values is not defined
test_convergence = 0 # break outer loop
break
d_f = (f_x_r - f_x_l) / (2. * d_x[j])
D_f_x[:,j] = d_f[:,0]
eps = la.norm(D_f_x - J_x, p) / np.size(J_x)
if (eps < eps_max):
test_convergence = 0 # break outer loop
k += 1
else:
D_x = D_x * r
if (l > l_max): # numerical gradient did not converge
return eps
l += 1
return 0
# end test
"""
Properties:
1. f
2. args
3. count
"""
@property
def f(self):
    """The wrapped user model callable (read-only)."""
    return self._f
@property
def args(self):
    """Extra arguments passed to the model on every call (read-only)."""
    return self._args
@property
def count(self):
    """Number of times the model function has been evaluated (read-only)."""
    return self._count
## end function ## |
mugurbil/gnm | gnm/gnm.py | sampler.prior | python | def prior(self, m, H):
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m) | Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L93-L129 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts={}):
"""
Dynamic Switch
Set the sampler parameters for dynamic back off
Inputs :
max_steps :
maximum back-off steps to be taken
Optional Inputs:
opts : ({})
dictionary containing fancy options
"""
self._dynamic = True
# begin checks
try :
self._max_steps = int(max_steps)
except :
raise TypeError("input 1 (max_steps) has to be an integer")
return 0
try :
assert self._max_steps >= 0
except :
raise Warning("input 1 (max_steps) has to be non-negative. Setting (max_steps) to 0.")
self._max_steps = 0
self._opts = opts
# end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
"""
Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division
"""
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the initial samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the first n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursive case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _back_off(self):
"""
Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs :
"""
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new)
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculate acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler.static | python | def static(self, max_steps, step_size):
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.") | Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L152-L194 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
# end checks
def dynamic(self, max_steps, opts={}):
"""
Dynamic Switch
Set the sampler parameters for dynamic back off
Inputs :
max_steps :
maximum back-off steps to be taken
Optional Inputs:
opts : ({})
dictionary containing fancy options
"""
self._dynamic = True
# begin checks
try :
self._max_steps = int(max_steps)
except :
raise TypeError("input 1 (max_steps) has to be an integer")
return 0
try :
assert self._max_steps >= 0
except :
raise Warning("input 1 (max_steps) has to be non-negative. Setting (max_steps) to 0.")
self._max_steps = 0
self._opts = opts
# end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
"""
Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division
"""
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the initial samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the first n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursive case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _back_off(self):
"""
Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs :
"""
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new)
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculate acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler.dynamic | python | def dynamic(self, max_steps, opts={}):
self._dynamic = True
# begin checks
try :
self._max_steps = int(max_steps)
except :
raise TypeError("input 1 (max_steps) has to be an integer")
return 0
try :
assert self._max_steps >= 0
except :
raise Warning("input 1 (max_steps) has to be non-negative. Setting (max_steps) to 0.")
self._max_steps = 0
self._opts = opts | Dynamic Switch
Set the sampler parameters for dynamic back off
Inputs :
max_steps :
maximum back-off steps to be taken
Optional Inputs:
opts : ({})
dictionary containing fancy options | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L197-L222 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
# end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
"""
Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division
"""
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the inital samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the firt n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursice case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _back_off(self):
"""
Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs :
"""
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new)
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculete acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler.sample | python | def sample(self, n_samples, divs=1, visual=False, safe=False):
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs)) | Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L225-L252 | [
"def save(self, path=\"chain.dat\"):\n \"\"\"\nSave\n Save data to file\nInputs : \n path :\n specifies the path name of the file to be loaded to\n \"\"\"\n # create dictionary for data\n dic = {}\n dic['chain'] = self._chain.tolist()\n dic['step_count'] = self._step_count.tolist()\n dic['n_samples'] = self._n_samples\n dic['n_accepted'] = self._n_accepted\n dic['x'] = self._X['x'].tolist()\n dic['f'] = self._X['f'].tolist()\n dic['J'] = self._X['J'].tolist()\n\n # write data to file\n file = open(path, 'w')\n json.dump(dic, file)\n file.close()\n",
"def _sample(self, n_samples):\n \"\"\"\nSample\n Generate samples for posterior distribution using Gauss-Newton \n proposal parameters\nInputs : \n n_samples :\n number of samples to generate\nHidden Outputs :\n chain :\n chain of samples\n n_samples :\n length of chain\n n_accepted :\n number of proposals accepted\n step_count :\n count of the steps accepted\n \"\"\"\n try : \n n_samples = int(n_samples)\n except :\n raise TypeError(\"number of samples has to be an integer\")\n exit()\n\n # fetch info\n X = self._proposal_params(self._X)\n k_max = self._max_steps\n\n # initialize \n chain = np.zeros((n_samples, self._n)) \n n_accepted = 0\n step_count = np.zeros(k_max+2)\n\n # begin outer loop\n for i in xrange(n_samples):\n accepted = False # check if sample is accepted\n r_ = [1] # list of step sizes\n Z_ = [X] # initialize list of Z s\n self._r_ = r_ \n log_P_z_x = 0. + X['log_p'] \n\n k = 0 # back-off steps taken so far\n while k <= k_max:\n # get proposal\n chi_z = False\n while not chi_z:\n z = multi_normal(X, r_[-1])\n chi_z, f_z, J_z = self._f(z)\n Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})\n Z_.append(Z)\n self._Z_ = Z_\n\n log_P_z_x += log_K(Z, X, r_[-1])\n\n # N is the Numerator of the acceptance, N = P_x_z\n self._N_is_0 = False # check to see if N = 0, to use in _log_P\n log_N = self._log_P(X, Z, k)\n\n # calculating acceptance probability\n if self._N_is_0 == True :\n A_z_x = 0.\n elif log_N >= log_P_z_x :\n A_z_x = 1.\n else :\n A_z_x = np.exp(log_N - log_P_z_x)\n\n # acceptance rejection\n if np.random.rand() <= A_z_x:\n accepted = True\n break\n else : \n log_P_z_x += np.log(1. 
- A_z_x)\n self._back_off()\n k += 1 \n # end of steps for loop\n if accepted == True :\n chain[i,:] = z[:,0] \n X = Z\n # for statistics\n n_accepted += 1 \n step_count[k+1] += 1\n else :\n chain[i,:] = X['x'][:,0]\n # for statistics\n step_count[0] += 1\n # end outer loop\n\n # update stored info\n self._X = X\n\n # outputs\n if self._n_samples == 0 :\n self._chain = chain\n self._step_count = step_count\n else :\n self._chain = np.append(self._chain, chain, axis=0)\n self._step_count = np.add(self._step_count, step_count)\n self._n_samples += n_samples\n self._n_accepted += n_accepted\n"
] | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts={}):
"""
Dynamic Switch
Set the sampler parameters for dynamic back off
Inputs :
max_steps :
maximum back-off steps to be taken
Optional Inputs:
opts : ({})
dictionary containing fancy options
"""
self._dynamic = True
# begin checks
try :
self._max_steps = int(max_steps)
except :
raise TypeError("input 1 (max_steps) has to be an integer")
return 0
try :
assert self._max_steps >= 0
except :
raise Warning("input 1 (max_steps) has to be non-negative. Setting (max_steps) to 0.")
self._max_steps = 0
self._opts = opts
# end checks
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the inital samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the firt n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursice case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _back_off(self):
"""
Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs :
"""
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new)
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculete acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler.save | python | def save(self, path="chain.dat"):
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close() | Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L254-L275 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts=None):
    """
    Dynamic Switch
    Set the sampler parameters for dynamic back off.
    Inputs :
        max_steps :
            maximum back-off steps to be taken (non-negative integer)
    Optional Inputs:
        opts : ({})
            dictionary containing fancy options
    Raises :
        TypeError :
            when max_steps cannot be converted to an integer
    """
    self._dynamic = True
    # begin checks
    try :
        self._max_steps = int(max_steps)
    except :
        # NOTE: the original had an unreachable `return 0` after this raise
        raise TypeError("input 1 (max_steps) has to be an integer")
    if self._max_steps < 0 :
        # warn and clamp (the original raised Warning, which made the
        # clamping statement unreachable); this matches static()'s
        # print-and-continue behavior for out-of-range values
        print("Warning: input 1 (max_steps) has to be non-negative. "
              "Setting (max_steps) to 0.")
        self._max_steps = 0
    # avoid the shared mutable-default-argument pitfall of `opts={}`
    self._opts = {} if opts is None else opts
    # end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
    """
    Sample
    Generate n_samples posterior samples, optionally split into
    divisions so progress can be reported and the chain checkpointed.
    Inputs :
        n_samples :
            number of samples to generate
    Optional Inputs :
        divs : (1)
            number of divisions to split the work into
        visual : (False)
            print progress percentage to stdout
        safe : (False)
            save the chain to "chain_<i>.dat" after each division
    """
    if visual:
        print("Sampling: 0%")
    # range (not xrange) works under both Python 2 and 3
    for i in range(divs):
        self._sample(int(n_samples / divs))
        if visual:
            sys.stdout.write("\033[F")  # cursor up one line
            # division i+1 of divs is now complete; the original
            # printed int(i*100./divs)+1, which showed 1% on the
            # last division when divs == 1 instead of 100%
            print("Sampling: " + str(int((i + 1) * 100. / divs)) + '%')
        if safe:
            self.save(path="chain_{:}.dat".format(i))
    # handle the remainder when divs does not divide n_samples evenly
    if n_samples % divs != 0:
        self._sample(n_samples % divs)
        if safe:
            self.save(path="chain_{:}.dat".format(divs))
def load(self, path="chain.dat"):
    """
    Load
    Load sampler state previously written by save().
    Inputs :
        path :
            specifies the path name of the file to be loaded from
    """
    # read data from file; `with` guarantees the handle is closed
    with open(path, 'r') as file:
        dic = json.load(file)
    # get data from dictionary, converting lists back to numpy arrays.
    # save() serializes arrays via tolist(); the original restored
    # x/f/J as plain lists, which breaks _sample's X['x'][:,0]
    # indexing after a save/load round trip.
    self._chain = np.array(dic['chain'])
    self._step_count = np.array(dic['step_count'])
    self._n_samples = dic['n_samples']
    self._n_accepted = dic['n_accepted']
    self._X = {}
    self._X['x'] = np.array(dic['x'])
    self._X['f'] = np.array(dic['f'])
    self._X['J'] = np.array(dic['J'])
def burn(self, n_burned):
    """
    Burn
    Discard the first n_burned samples of the stored chain so the
    remainder better reflects the converged distribution.
    Inputs :
        n_burned :
            number of leading (burn-in) samples to drop
    Hidden Outputs :
        chain :
            chain with the first n_burned samples removed
    """
    remaining = self._chain[n_burned:]
    self._chain = remaining
def acor(self, k = 5):
    """
    Autocorrelation time of the chain
    return the autocorrelation time for each parameter
    Inputs :
        k :
            parameter in self-consistent window
    Outputs :
        t :
            per-component autocorrelation times of the chain,
            or 0 when the third-party acor package is unavailable
    """
    try:
        # third-party dependency, imported lazily so the sampler
        # remains usable without it
        import acor
    except ImportError:
        print("Can't import acor, please download it.")
        return 0
    # one autocorrelation time per chain component
    n = np.shape(self._chain)[1]
    t = np.zeros(n)
    for i in xrange(n):
        t[i] = acor.acor(self._chain[:,i],k)[0]
    return t
def posterior(self, x):
    """
    Posterior density (not normalized).
    Used to plot the theoretical curve for tests.
    Inputs :
        x :
            input value
    Outputs :
        p : p(x) = pi(x) * exp{-||f(x)||^2 / 2}
            posterior probability of x, or 0 when x falls outside
            the model's valid range
    """
    point = np.reshape(np.array(x), (-1, 1))
    inside, f_val, jac = self._f(point)
    if not inside:
        return 0
    # least-squares likelihood factor
    density = np.exp(-la.norm(f_val)**2/2.)
    if self._prior:
        # multiply in the (unnormalized) Gaussian prior factor
        diff = point - self._m
        density = density * np.exp(-np.dot(diff.T, np.dot(self._H, diff))/2.)
    return density
def error_bars(self, n_bins, d_min, d_max):
    """
    Error Bars
    Histogram the chain along the diagonal line from d_min to d_max
    and attach binomial error estimates to each bin, for plotting.
    Inputs :
        n_bins :
            number of bins
        d_min, d_max : (size = number of dimensions)
            vectors holding the min and max of the plotted domain
    Outputs :
        x :
            centers of the bins, shape (n_dims, n_bins)
        p_x :
            estimated posterior density over the bins
        error :
            estimated standard error for each entry of p_x
    """
    # fetch data
    chain = self._chain
    len_chain = len(chain)
    try:
        n_dims = np.shape(chain)[1]
    except:
        n_dims = 1
    # begin checks
    try:
        assert n_bins == int(n_bins)
    except:
        raise TypeError("number of bins has to be an integer")
    d_min = np.reshape(np.array(d_min), (-1,1))
    d_max = np.reshape(np.array(d_max), (-1,1))
    try:
        assert np.size(d_min) == n_dims
    except:
        raise TypeError("domain minimum has wrong size")
    try:
        assert np.size(d_max) == n_dims
    except:
        raise TypeError("domain maximum has wrong size")
    # end checks
    # initialize outputs
    p_x = np.zeros(n_bins) # estimate of posterior
    error = np.zeros(n_bins) # error bars
    x = np.zeros((n_dims, n_bins)) # centers of bins
    # set dx: v spans the domain diagonal, v_2 is its squared length
    v = d_max-d_min
    v_2 = np.dot(v.T, v)[0][0]
    # bin count: project each sample onto the diagonal direction v and
    # count how many fall in each bin (out-of-range samples are dropped)
    for i in xrange(len_chain):
        bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
        if n_bins > bin_no > -1:
            p_x[bin_no] += 1.
    # end count
    dx = np.sqrt(v_2)/n_bins
    # normalize counts into a density estimate
    p_x = p_x/(len_chain*dx)
    # find error: binomial standard error per bin, plus bin centers
    for i in xrange(n_bins):
        p = p_x[i]
        error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
        x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
    # end find
    return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
    """
    Sample
    Generate samples from the posterior distribution using the
    Gauss-Newton proposal and append them to the stored chain.
    Inputs :
        n_samples :
            number of samples to generate
    Hidden Outputs :
        chain :
            chain of samples
        n_samples :
            length of chain
        n_accepted :
            number of proposals accepted
        step_count :
            histogram of back-off depths of accepted proposals
            (index 0 counts rejected proposals)
    Raises :
        TypeError :
            when n_samples cannot be converted to an integer
    """
    try :
        n_samples = int(n_samples)
    except :
        # NOTE: the original had an unreachable `exit()` after this raise
        raise TypeError("number of samples has to be an integer")
    # fetch info
    X = self._proposal_params(self._X)
    k_max = self._max_steps
    # initialize
    chain = np.zeros((n_samples, self._n))
    n_accepted = 0
    step_count = np.zeros(k_max+2)
    # begin outer loop
    for i in xrange(n_samples):
        accepted = False # check if sample is accepted
        r_ = [1] # list of step sizes
        Z_ = [X] # initialize list of Z s
        self._r_ = r_
        log_P_z_x = 0. + X['log_p']
        k = 0 # back-off steps taken so far
        while k <= k_max:
            # get proposal: redraw until a point inside the model's
            # valid range is produced
            chi_z = False
            while not chi_z:
                z = multi_normal(X, r_[-1])
                chi_z, f_z, J_z = self._f(z)
            Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
            Z_.append(Z)
            self._Z_ = Z_
            log_P_z_x += log_K(Z, X, r_[-1])
            # N is the Numerator of the acceptance, N = P_x_z
            self._N_is_0 = False # check to see if N = 0, to use in _log_P
            log_N = self._log_P(X, Z, k)
            # calculating acceptance probability
            if self._N_is_0 == True :
                A_z_x = 0.
            elif log_N >= log_P_z_x :
                A_z_x = 1.
            else :
                A_z_x = np.exp(log_N - log_P_z_x)
            # acceptance rejection
            if np.random.rand() <= A_z_x:
                accepted = True
                break
            else :
                # fold the rejection probability into the denominator
                # and shrink the step before trying again
                log_P_z_x += np.log(1. - A_z_x)
                self._back_off()
                k += 1
        # end of steps loop
        if accepted == True :
            chain[i,:] = z[:,0]
            X = Z
            # for statistics
            n_accepted += 1
            step_count[k+1] += 1
        else :
            # keep the current point when every back-off level failed
            chain[i,:] = X['x'][:,0]
            # for statistics
            step_count[0] += 1
    # end outer loop
    # update stored info
    self._X = X
    # outputs: start a fresh chain or append to the existing one
    if self._n_samples == 0 :
        self._chain = chain
        self._step_count = step_count
    else :
        self._chain = np.append(self._chain, chain, axis=0)
        self._step_count = np.add(self._step_count, step_count)
    self._n_samples += n_samples
    self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
    """
    Proposal parameters
    Fill in the Gauss-Newton proposal quantities for a state.
    Inputs :
        state :
            dict with 'x' (the sample to linearize around),
            'f' (model value at x) and 'J' (model jacobian at x)
    Outputs :
        state :
            the same dict, augmented with
            mu    : proposal mean vector
            L     : lower cholesky factor of the proposal precision P
            log_p : log of the (unnormalized) posterior density at x
    """
    point = state['x']
    value = state['f']
    jac = state['J']
    gram = np.dot(jac.T, jac)
    if not self._prior:
        # P = J'J  and  mu = x - (P^-1) J'f
        chol = la.cholesky(gram)
        mean = point - la.solve(chol.T, la.solve(chol, np.dot(jac.T, value)))
    else:
        # LL' = P = H + J'J
        chol = la.cholesky(self._H + gram)
        # mu = (P^-1)(Hm - J'f + J'Jx)
        rhs = self._Hm - np.dot(jac.T, value) + np.dot(gram, point)
        mean = la.solve(chol.T, la.solve(chol, rhs))
    state['L'] = chol
    state['mu'] = mean
    state['log_p'] = self._log_post(point, value)
    return state
def _log_P(self, X , Z, k):
    """
    Log of the probability of transition from z to x with k steps
    log ( P_k (x, z) )
    Inputs :
        X :
            state to be proposed to
        Z :
            state to be proposed from
        k :
            number of recursions, depth
    Outputs :
        log_P :
            log of the k-step transition probability; -inf (with
            self._N_is_0 set) when the recursion shows the
            acceptance numerator vanishes
    """
    r_ = self._r_
    Z_ = self._Z_
    # zero case: direct proposal density times posterior at Z
    if k == 0 :
        log_P = Z['log_p'] + log_K(X, Z, r_[k])
    # recursive case
    else :
        P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
        P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
        # flag: the difference of transition probabilities is
        # non-positive, so the acceptance numerator N is zero
        if P_zk_z <= P_z_zk :
            self._N_is_0 = True
            log_P = -np.inf
        else :
            log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
    return log_P
def _back_off(self):
    """
    Back off
    Append the next (smaller) proposal step size to self._r_.
    Static mode multiplies the current step by the fixed factor q;
    dynamic mode asks optimize() for a new step from the misfit and
    its slope magnitude at the start point (step 0) and at the
    current step r.
    """
    q = self._step_size
    r = self._r_[-1]
    Z_ = self._Z_
    if self._dynamic:
        p_0 = la.norm(Z_[0]['f'])
        dp_0 = p_0*2*la.norm(Z_[0]['J'])
        p_r = la.norm(Z_[-1]['f'])
        # bug fix: the slope at step r must be built from p_r; the
        # original reused p_0 here (copy-paste from the dp_0 line)
        dp_r = p_r*2*la.norm(Z_[-1]['J'])
        r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
    else :
        r_new = r * q
    self._r_.append(r_new)
def _log_post(self,x,f_x):
    """
    Log of the (unnormalized) posterior density.
    Used to calculate the acceptance probability for sampling.
    Inputs :
        x :
            input value
        f_x : f(x),
            function value at x
    Outputs :
        log(p_x) : log[pi(x)] - ||f(x)||^2 / 2
            log of the posterior probability
    """
    # least squares part -||f(x)||^2/2
    log_likelihood = (-la.norm(f_x)**2)/(2.)
    if not self._prior:
        return log_likelihood
    # prior part: log(det(H))/2 - (x-m)'H(x-m)/2
    m = self._m
    H = self._H
    log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
    return log_prior+log_likelihood
"""
Properties:
    1. chain
    2. n_samples
    3. n_accepted
    4. accept_rate
    5. step_count
    6. call_count
    7. max_steps
    8. step_size
"""
@property
def chain(self):
    # the Markov chain generated so far
    return self._chain
@property
def n_samples(self):
    # total number of samples drawn
    return self._n_samples
@property
def n_accepted(self):
    # number of accepted proposals
    return self._n_accepted
@property
def accept_rate(self):
    # fraction of proposals accepted so far
    return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
    # histogram of back-off depths (index 0 counts rejections)
    return self._step_count
@property
def call_count(self):
    # number of model function evaluations made through the wrapper
    return self._f.count
@property
def max_steps(self):
    # maximum number of back-off steps per proposal
    return self._max_steps
@property
def step_size(self):
    # static back-off step reduction factor
    return self._step_size
mugurbil/gnm | gnm/gnm.py | sampler.load | python | def load(self, path="chain.dat"):
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J'] | Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L277-L298 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
    """
    Set prior
    Store Gaussian prior information pi(x) ~ exp(-(x-m)'H(x-m)/2).
    Inputs :
        m :
            mean of the prior, size n
        H :
            precision matrix of the prior, shape (n, n)
    Hiddens :
        ln_H_ : log(det(H))/2
            calculate this once to use everytime log prior is called
        Hm : < H, m >
            calculate this once to use everytime proposal is called
    Raises :
        Warning :
            when a prior has already been set
        TypeError :
            when m or H has the wrong shape
    """
    if self._prior == True:
        raise Warning("prior information is already set")
    else:
        self._prior = True
    # mean
    self._m = np.reshape(np.array(m), (-1,1))
    try :
        assert np.size(self._m) == self._n
    except :
        raise TypeError("mean has to be an array of size n")
    # precision
    self._H = np.array(H)
    try :
        assert np.shape(self._H) == (self._n, self._n)
    except :
        raise TypeError("precision has to be a matrix of shape n by n")
    # precalculations
    self._ln_H_ = np.log(la.det(self._H))/2.
    self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts=None):
    """
    Dynamic Switch
    Set the sampler parameters for dynamic back off.
    Inputs :
        max_steps :
            maximum back-off steps to be taken (non-negative integer)
    Optional Inputs:
        opts : ({})
            dictionary containing fancy options
    Raises :
        TypeError :
            when max_steps cannot be converted to an integer
    """
    self._dynamic = True
    # begin checks
    try :
        self._max_steps = int(max_steps)
    except :
        # NOTE: the original had an unreachable `return 0` after this raise
        raise TypeError("input 1 (max_steps) has to be an integer")
    if self._max_steps < 0 :
        # warn and clamp (the original raised Warning, which made the
        # clamping statement unreachable); this matches static()'s
        # print-and-continue behavior for out-of-range values
        print("Warning: input 1 (max_steps) has to be non-negative. "
              "Setting (max_steps) to 0.")
        self._max_steps = 0
    # avoid the shared mutable-default-argument pitfall of `opts={}`
    self._opts = {} if opts is None else opts
    # end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
    """
    Sample
    Generate n_samples posterior samples, optionally split into
    divisions so progress can be reported and the chain checkpointed.
    Inputs :
        n_samples :
            number of samples to generate
    Optional Inputs :
        divs : (1)
            number of divisions to split the work into
        visual : (False)
            print progress percentage to stdout
        safe : (False)
            save the chain to "chain_<i>.dat" after each division
    """
    if visual:
        print("Sampling: 0%")
    # range (not xrange) works under both Python 2 and 3
    for i in range(divs):
        self._sample(int(n_samples / divs))
        if visual:
            sys.stdout.write("\033[F")  # cursor up one line
            # division i+1 of divs is now complete; the original
            # printed int(i*100./divs)+1, which showed 1% on the
            # last division when divs == 1 instead of 100%
            print("Sampling: " + str(int((i + 1) * 100. / divs)) + '%')
        if safe:
            self.save(path="chain_{:}.dat".format(i))
    # handle the remainder when divs does not divide n_samples evenly
    if n_samples % divs != 0:
        self._sample(n_samples % divs)
        if safe:
            self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
    """
    Save
    Serialize the sampler state to a JSON file, for later load().
    Inputs :
        path :
            specifies the path name of the file to be written to
    """
    # collect state into a JSON-serializable dictionary
    dic = {
        'chain': self._chain.tolist(),
        'step_count': self._step_count.tolist(),
        'n_samples': self._n_samples,
        'n_accepted': self._n_accepted,
        'x': self._X['x'].tolist(),
        'f': self._X['f'].tolist(),
        'J': self._X['J'].tolist(),
    }
    # write data to file
    with open(path, 'w') as out:
        json.dump(dic, out)
def burn(self, n_burned):
"""
Burn
Burn the inital samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the firt n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursice case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _back_off(self):
    """
    Back off
    Append the next (smaller) proposal step size to self._r_.
    Static mode multiplies the current step by the fixed factor q;
    dynamic mode asks optimize() for a new step from the misfit and
    its slope magnitude at the start point (step 0) and at the
    current step r.
    """
    q = self._step_size
    r = self._r_[-1]
    Z_ = self._Z_
    if self._dynamic:
        p_0 = la.norm(Z_[0]['f'])
        dp_0 = p_0*2*la.norm(Z_[0]['J'])
        p_r = la.norm(Z_[-1]['f'])
        # bug fix: the slope at step r must be built from p_r; the
        # original reused p_0 here (copy-paste from the dp_0 line)
        dp_r = p_r*2*la.norm(Z_[-1]['J'])
        r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
    else :
        r_new = r * q
    self._r_.append(r_new)
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculete acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler.acor | python | def acor(self, k = 5):
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t | Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L316-L336 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts=None):
    """
    Dynamic Switch
    Set the sampler parameters for dynamic back off.
    Inputs :
        max_steps :
            maximum back-off steps to be taken (non-negative integer)
    Optional Inputs:
        opts : ({})
            dictionary containing fancy options
    Raises :
        TypeError :
            when max_steps cannot be converted to an integer
    """
    self._dynamic = True
    # begin checks
    try :
        self._max_steps = int(max_steps)
    except :
        # NOTE: the original had an unreachable `return 0` after this raise
        raise TypeError("input 1 (max_steps) has to be an integer")
    if self._max_steps < 0 :
        # warn and clamp (the original raised Warning, which made the
        # clamping statement unreachable); this matches static()'s
        # print-and-continue behavior for out-of-range values
        print("Warning: input 1 (max_steps) has to be non-negative. "
              "Setting (max_steps) to 0.")
        self._max_steps = 0
    # avoid the shared mutable-default-argument pitfall of `opts={}`
    self._opts = {} if opts is None else opts
    # end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
"""
Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division
"""
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the inital samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the firt n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursice case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _back_off(self):
"""
Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs :
"""
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new)
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculete acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler.posterior | python | def posterior(self, x):
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0 | Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L338-L361 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts={}):
"""
Dynamic Switch
Set the sampler parameters for dynamic back off
Inputs :
max_steps :
maximum back-off steps to be taken
Optional Inputs:
opts : ({})
dictionary containing fancy options
"""
self._dynamic = True
# begin checks
try :
self._max_steps = int(max_steps)
except :
raise TypeError("input 1 (max_steps) has to be an integer")
return 0
try :
assert self._max_steps >= 0
except :
raise Warning("input 1 (max_steps) has to be non-negative. Setting (max_steps) to 0.")
self._max_steps = 0
self._opts = opts
# end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
"""
Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division
"""
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the inital samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the firt n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursice case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _back_off(self):
"""
Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs :
"""
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new)
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculete acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler.error_bars | python | def error_bars(self, n_bins, d_min, d_max):
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error | Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L363-L428 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts={}):
"""
Dynamic Switch
Set the sampler parameters for dynamic back off
Inputs :
max_steps :
maximum back-off steps to be taken
Optional Inputs:
opts : ({})
dictionary containing fancy options
"""
self._dynamic = True
# begin checks
try :
self._max_steps = int(max_steps)
except :
raise TypeError("input 1 (max_steps) has to be an integer")
return 0
try :
assert self._max_steps >= 0
except :
raise Warning("input 1 (max_steps) has to be non-negative. Setting (max_steps) to 0.")
self._max_steps = 0
self._opts = opts
# end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
"""
Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division
"""
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the inital samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the firt n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursice case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _back_off(self):
"""
Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs :
"""
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new)
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculete acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler._sample | python | def _sample(self, n_samples):
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted | Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L432-L530 | [
"def multi_normal(X, t):\n \"\"\"\n Multivariate normal sampler:\n Generates normal samples with mean m, precision matrix LL' \n Inputs:\n x : \n propose from \n Outputs:\n normal with mean m and precision LL'\n \"\"\"\n m, L = update_params(X, t)\n z = np.random.standard_normal(np.shape(m)) # generate i.i.d N(0,1)\n return la.solve(L.T,z)+m\n",
"def log_K(Z, X, t):\n \"\"\"\n Log K\n Log of the proposal probability density function for gnm\n Inputs :\n Z :\n proposed to\n x : \n proposed from \n Outputs : \n log of the probability density function\n \"\"\"\n m, L = update_params(X, t)\n z = Z['x']\n return np.log(det(L))-la.norm(np.dot(L.T,z-m))**2/2. \n",
"def _proposal_params(self, state):\n \"\"\"\nProposal parameters\n Calculate parameters needed for the proposal. \nInputs :\n state : \n x : \n the present sample, the place to linearize around\n f : f(x), \n function value at x\n J : f'(x), \n the jacobian of the function evaluated at x\nOutputs :\n state :\n mu : \n the mean vector\n L :\n the lower triangular cholesky factor of P \n log_p : log(p(x))\n log of the posterior density\n \"\"\"\n x = state['x']\n f = state['f']\n J = state['J']\n JJ = np.dot(J.T,J) \n\n if self._prior: \n m = self._m\n H = self._H\n Hm = self._Hm\n # LL' = P = H+J'J \n L = la.cholesky(H+JJ) \n # mu = (P^-1)(Hm-J'f+J'Jx)\n mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x))) \n else: \n # P = J'J\n L = la.cholesky(JJ)\n # mu = x-(P^-1)J'f\n mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))\n\n state['L'] = L \n state['mu'] = mu\n state['log_p'] = self._log_post(x,f)\n return state\n",
"def _log_P(self, X , Z, k):\n \"\"\"\nLog of the probability of transition from z to x with k steps\n log ( P_k (x, z) )\nInputs : \n X :\n state to be proposed to\n Z : \n state to be proposed from\n k : \n number of recursions, depth\n \"\"\"\n r_ = self._r_\n Z_ = self._Z_\n # zero case\n if k == 0 :\n log_P = Z['log_p'] + log_K(X, Z, r_[k])\n # recursice case\n else :\n P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )\n P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) ) \n # flag\n if P_zk_z <= P_z_zk :\n self._N_is_0 = True\n log_P = -np.inf\n else : \n log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])\n return log_P\n"
] | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts={}):
"""
Dynamic Switch
Set the sampler parameters for dynamic back off
Inputs :
max_steps :
maximum back-off steps to be taken
Optional Inputs:
opts : ({})
dictionary containing fancy options
"""
self._dynamic = True
# begin checks
try :
self._max_steps = int(max_steps)
except :
raise TypeError("input 1 (max_steps) has to be an integer")
return 0
try :
assert self._max_steps >= 0
except :
raise Warning("input 1 (max_steps) has to be non-negative. Setting (max_steps) to 0.")
self._max_steps = 0
self._opts = opts
# end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
"""
Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division
"""
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the inital samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the firt n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursice case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _back_off(self):
"""
Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs :
"""
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new)
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculete acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler._proposal_params | python | def _proposal_params(self, state):
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state | Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L533-L576 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts={}):
"""
Dynamic Switch
Set the sampler parameters for dynamic back off
Inputs :
max_steps :
maximum back-off steps to be taken
Optional Inputs:
opts : ({})
dictionary containing fancy options
"""
self._dynamic = True
# begin checks
try :
self._max_steps = int(max_steps)
except :
raise TypeError("input 1 (max_steps) has to be an integer")
return 0
try :
assert self._max_steps >= 0
except :
raise Warning("input 1 (max_steps) has to be non-negative. Setting (max_steps) to 0.")
self._max_steps = 0
self._opts = opts
# end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
"""
Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division
"""
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the inital samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the firt n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursice case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _back_off(self):
"""
Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs :
"""
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new)
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculete acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler._log_P | python | def _log_P(self, X , Z, k):
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursice case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P | Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L578-L605 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts={}):
"""
Dynamic Switch
Set the sampler parameters for dynamic back off
Inputs :
max_steps :
maximum back-off steps to be taken
Optional Inputs:
opts : ({})
dictionary containing fancy options
"""
self._dynamic = True
# begin checks
try :
self._max_steps = int(max_steps)
except :
raise TypeError("input 1 (max_steps) has to be an integer")
return 0
try :
assert self._max_steps >= 0
except :
raise Warning("input 1 (max_steps) has to be non-negative. Setting (max_steps) to 0.")
self._max_steps = 0
self._opts = opts
# end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
"""
Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division
"""
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the inital samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the firt n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _back_off(self):
"""
Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs :
"""
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new)
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculete acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler._back_off | python | def _back_off(self):
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new) | Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs : | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L607-L633 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts={}):
"""
Dynamic Switch
Set the sampler parameters for dynamic back off
Inputs :
max_steps :
maximum back-off steps to be taken
Optional Inputs:
opts : ({})
dictionary containing fancy options
"""
self._dynamic = True
# begin checks
try :
self._max_steps = int(max_steps)
except :
raise TypeError("input 1 (max_steps) has to be an integer")
return 0
try :
assert self._max_steps >= 0
except :
raise Warning("input 1 (max_steps) has to be non-negative. Setting (max_steps) to 0.")
self._max_steps = 0
self._opts = opts
# end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
"""
Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division
"""
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the inital samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the firt n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursice case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _log_post(self,x,f_x):
"""
Log of the posterior density
This is used to calculete acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability
"""
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
mugurbil/gnm | gnm/gnm.py | sampler._log_post | python | def _log_post(self,x,f_x):
# least squares part -||f(x)||^2/2
log_likelihood = (-la.norm(f_x)**2)/(2.)
# prior part -(x-m)'H(x-m)/2
if self._prior:
m = self._m
H = self._H
log_prior = self._ln_H_-np.dot((x-m).T,np.dot(H,x-m))/2.
return log_prior+log_likelihood
else:
return log_likelihood | Log of the posterior density
This is used to calculete acceptance probability for sampling.
Inputs :
x :
input value
f_x : f(x),
function value at x
Outputs :
log(p_x) : log[pi(x)]-||f(x)||^2/(2)
log of the posterior probability | train | https://github.com/mugurbil/gnm/blob/4f9711fb9d78cc02820c25234bc3ab9615014f11/gnm/gnm.py#L635-L658 | null | class sampler(object):
def __init__(self, x, model, args):
"""
Init
Initialize the GNM sampler class
Inputs :
x :
initial guess
model :
user defined data model function
args :
arguments for the model
"""
self._args = args
self._f = function(model, args)
x = np.reshape(np.array(x), (-1,1))
self._n = np.size(x) # size of input space
try:
x = np.reshape(np.array(x), (-1))
out_x = model(x, self._args)
except TypeError as e:
raise TypeError(str(e)[:-7]+" needed)")
"""
except IndexError as e:
raise IndexError("initial guess size does not fit model()\n "
+str(e))
except Exception as e:
print("Error: Model function could not be evaluated.")
print(" - Check size of intial guess.")
print(" - Check definition of the model.")
print(str(e))
print(type(e))
raise RuntimeError("model() could not be evaluated\n ")
"""
try:
chi_x, f_x, J_x = out_x
f_x = np.reshape(np.array(f_x), (-1,1))
except:
raise TypeError("model() needs to have 3 outputs: chi_x, f_x, J_x")
try:
assert chi_x == True
except AssertionError:
raise ValueError("initial guess out of range")
try:
self._mn = np.size(f_x)
J_x = np.array(J_x)
assert np.shape(J_x) == (self._mn, self._n)
except:
raise TypeError("Shape of Jacobian, " + str(np.shape(J_x)) +
", is not correct, (%d, %d)." % (self._mn, self._n))
x = np.reshape(np.array(x), (-1,1))
self._X = {'x':x,'f':f_x,'J':J_x} # state of x
# prior parameters
self._prior = False
# back-off parameters
self._max_steps = 1
self._step_size = 0.1
self._dynamic = False
self._opts = {}
# sampler outputs
self._chain = None
self._n_samples = 0
self._n_accepted = 0
def prior(self, m, H):
"""
Set prior
Set prior values
Inputs :
m :
mean of the prior
H :
precision matrix of the prior
Hiddens :
ln_H_ : log(det(H))/2
calculate this once to use everytime log prior is called
Hm : < H, m >
calculate this once to use everytime proposal is called
"""
if self._prior == True:
raise Warning("prior information is already set")
else:
self._prior = True
# mean
self._m = np.reshape(np.array(m), (-1,1))
try :
assert np.size(self._m) == self._n
except :
raise TypeError("mean has to be an array of size n")
# precision
self._H = np.array(H)
try :
assert np.shape(self._H) == (self._n, self._n)
except :
raise TypeError("precision has to be a matrix of shape n by n")
# precalculations
self._ln_H_ = np.log(la.det(self._H))/2.
self._Hm = np.dot(self._H, self._m)
def Jtest(self, x_min, x_max, dx=0.0002, N=1000, eps_max=0.0001,
p=2, l_max=50, r=0.5):
"""
Gradient Checker
Test the function's jacobian against the numerical jacobian
"""
# check inputs x_min and x_max
try :
assert np.size(x_min) == self._n
except :
raise TypeError("dimension of x_min, %d, does not match the "
"dimension of input, %d" % (np.size(x_min), self._n))
try :
assert np.size(x_max) == self._n
except :
raise TypeError("dimension of x_max, %d, does not match the "
"dimension of input, %d." % (np.size(x_max), self._n))
# end checks and call developer function
return self._f.Jtest(x_min, x_max, dx=dx, N=N, eps_max=eps_max, p=p,
l_max=l_max, r=r)
def static(self, max_steps, step_size):
"""
Set Back-off to Static
Set the sampler parameters for static back off
Inputs :
max_steps :
maximum optimization steps to be taken
step_size :
the step size of the back-off
"""
self._dynamic = False
# begin checks
try :
self._max_steps = int(max_steps)
except :
print("Error: Input 1 (max_steps) has to be an int.")
return 0
try :
assert self._max_steps >= 0
except :
print("Warning: Input 1 (max_steps) has to be non-negative.")
print("Setting max_steps to 0.")
self._max_steps = 0
if max_steps > 0 :
try :
assert step_size == float(step_size)
except AssertionError :
print("Warning: Input 2 (step_size) is not a float. Converted.")
step_size = float(step_size)
except :
print("Error: Input 2 (step_size) has to be a float.")
return 0
try :
assert 0. < step_size < 1.
except :
print("Warning: Input 2 (step_size) has to be between 0 and 1.")
print("Setting step_size to 0.2.")
step_size = 0.2
self._step_size = step_size
if step_size**max_steps < 10**(-15):
print("Warning: Back-off gets dangerously small.")
# end checks
def dynamic(self, max_steps, opts={}):
"""
Dynamic Switch
Set the sampler parameters for dynamic back off
Inputs :
max_steps :
maximum back-off steps to be taken
Optional Inputs:
opts : ({})
dictionary containing fancy options
"""
self._dynamic = True
# begin checks
try :
self._max_steps = int(max_steps)
except :
raise TypeError("input 1 (max_steps) has to be an integer")
return 0
try :
assert self._max_steps >= 0
except :
raise Warning("input 1 (max_steps) has to be non-negative. Setting (max_steps) to 0.")
self._max_steps = 0
self._opts = opts
# end checks
def sample(self, n_samples, divs=1, visual=False, safe=False):
"""
Sample
Sampling
Inputs :
n_samples :
number of samples to generate
Optional Inputs :
divs : (1)
number of divisions
visual :
show progress
safe :
save the chain at every division
"""
if visual:
print("Sampling: 0%")
for i in xrange(divs):
self._sample(int(n_samples/divs))
if visual:
sys.stdout.write("\033[F") # curser up
print("Sampling: "+str(int(i*100./divs)+1)+'%')
if safe:
self.save(path="chain_{:}.dat".format(i))
if n_samples % divs != 0:
self._sample(n_samples % divs)
if safe:
self.save(path="chain_{:}.dat".format(divs))
def save(self, path="chain.dat"):
"""
Save
Save data to file
Inputs :
path :
specifies the path name of the file to be loaded to
"""
# create dictionary for data
dic = {}
dic['chain'] = self._chain.tolist()
dic['step_count'] = self._step_count.tolist()
dic['n_samples'] = self._n_samples
dic['n_accepted'] = self._n_accepted
dic['x'] = self._X['x'].tolist()
dic['f'] = self._X['f'].tolist()
dic['J'] = self._X['J'].tolist()
# write data to file
file = open(path, 'w')
json.dump(dic, file)
file.close()
def load(self, path="chain.dat"):
"""
Load
Load data from file
Inputs :
path :
specifies the path name of the file to be loaded from
"""
# read data from file
file = open(path, 'r')
dic = json.load(file)
file.close()
# get data from dictionary
self._chain = np.array(dic['chain'])
self._step_count = np.array(dic['step_count'])
self._n_samples = dic['n_samples']
self._n_accepted = dic['n_accepted']
self._X = {}
self._X['x'] = dic['x']
self._X['f'] = dic['f']
self._X['J'] = dic['J']
def burn(self, n_burned):
"""
Burn
Burn the inital samples to adjust for convergence of the chain
cut the first (n_burned) burn-in samples
Inputs :
chain :
the full Markov chain
n_burned :
number of samples to cut
Hidden Outputs :
chain :
chain with the firt n_burned samples cut
"""
self._chain = self._chain[n_burned:]
def acor(self, k = 5):
"""
Autocorrelation time of the chain
return the autocorrelation time for each parameters
Inputs :
k :
parameter in self-consistent window
Outputs :
t :
autocorrelation time of the chain
"""
try:
import acor
except ImportError:
print("Can't import acor, please download it.")
return 0
n = np.shape(self._chain)[1]
t = np.zeros(n)
for i in xrange(n):
t[i] = acor.acor(self._chain[:,i],k)[0]
return t
def posterior(self, x):
"""
Posterior density
** not normalized **
This is used to plot the theoretical curve for tests.
Inputs :
x :
input value
Outputs :
p : p(x)=pi(x)*exp{-||f(x)||^2/(2)}
posterior probability of x
"""
x = np.reshape(np.array(x), (-1,1))
chi_x, f_x, J_x = self._f(x)
if chi_x :
p = np.exp(-la.norm(f_x)**2/2.)
if self._prior:
m = self._m
H = self._H
p = p * np.exp(-np.dot((x-m).T,np.dot(H,x-m))/2.)
return p
else :
return 0
def error_bars(self, n_bins, d_min, d_max):
"""
Error Bars
create bars and error bars to plot
Inputs :
n_bins :
number of bins
plot_range : (shape) = (number of dimensions, 2)
matrix which contain the min and max for each dimension as rows
Outputs :
x :
domain
p_x :
estimated posterior using the chain on the domain
error :
estimated error for p_x
"""
# fetch data
chain = self._chain
len_chain = len(chain)
try:
n_dims = np.shape(chain)[1]
except:
n_dims = 1
# begin checks
try:
assert n_bins == int(n_bins)
except:
raise TypeError("number of bins has to be an integer")
d_min = np.reshape(np.array(d_min), (-1,1))
d_max = np.reshape(np.array(d_max), (-1,1))
try:
assert np.size(d_min) == n_dims
except:
raise TypeError("domain minimum has wrong size")
try:
assert np.size(d_max) == n_dims
except:
raise TypeError("domain maximum has wrong size")
# end checks
# initialize outputs
p_x = np.zeros(n_bins) # esitmate of posterior
error = np.zeros(n_bins) # error bars
x = np.zeros((n_dims, n_bins)) # centers of bins
# set dx
v = d_max-d_min
v_2 = np.dot(v.T, v)[0][0]
# bin count
for i in xrange(len_chain):
bin_no = int(np.floor(np.dot(chain[i].T-d_min,v)/v_2*n_bins)[0])
if n_bins > bin_no > -1:
p_x[bin_no] += 1.
# end count
dx = np.sqrt(v_2)/n_bins
p_x = p_x/(len_chain*dx)
# find error
for i in xrange(n_bins):
p = p_x[i]
error[i] = np.sqrt(p*(1./dx-p)/(len_chain))
x[:,i] = (d_min+v*(0.5+i)/n_bins)[0]
# end find
return x, p_x, error
# end error_bars
# internal methods
def _sample(self, n_samples):
"""
Sample
Generate samples for posterior distribution using Gauss-Newton
proposal parameters
Inputs :
n_samples :
number of samples to generate
Hidden Outputs :
chain :
chain of samples
n_samples :
length of chain
n_accepted :
number of proposals accepted
step_count :
count of the steps accepted
"""
try :
n_samples = int(n_samples)
except :
raise TypeError("number of samples has to be an integer")
exit()
# fetch info
X = self._proposal_params(self._X)
k_max = self._max_steps
# initialize
chain = np.zeros((n_samples, self._n))
n_accepted = 0
step_count = np.zeros(k_max+2)
# begin outer loop
for i in xrange(n_samples):
accepted = False # check if sample is accepted
r_ = [1] # list of step sizes
Z_ = [X] # initialize list of Z s
self._r_ = r_
log_P_z_x = 0. + X['log_p']
k = 0 # back-off steps taken so far
while k <= k_max:
# get proposal
chi_z = False
while not chi_z:
z = multi_normal(X, r_[-1])
chi_z, f_z, J_z = self._f(z)
Z = self._proposal_params({'x':z,'f':f_z,'J':J_z})
Z_.append(Z)
self._Z_ = Z_
log_P_z_x += log_K(Z, X, r_[-1])
# N is the Numerator of the acceptance, N = P_x_z
self._N_is_0 = False # check to see if N = 0, to use in _log_P
log_N = self._log_P(X, Z, k)
# calculating acceptance probability
if self._N_is_0 == True :
A_z_x = 0.
elif log_N >= log_P_z_x :
A_z_x = 1.
else :
A_z_x = np.exp(log_N - log_P_z_x)
# acceptance rejection
if np.random.rand() <= A_z_x:
accepted = True
break
else :
log_P_z_x += np.log(1. - A_z_x)
self._back_off()
k += 1
# end of steps for loop
if accepted == True :
chain[i,:] = z[:,0]
X = Z
# for statistics
n_accepted += 1
step_count[k+1] += 1
else :
chain[i,:] = X['x'][:,0]
# for statistics
step_count[0] += 1
# end outer loop
# update stored info
self._X = X
# outputs
if self._n_samples == 0 :
self._chain = chain
self._step_count = step_count
else :
self._chain = np.append(self._chain, chain, axis=0)
self._step_count = np.add(self._step_count, step_count)
self._n_samples += n_samples
self._n_accepted += n_accepted
# end sample
def _proposal_params(self, state):
"""
Proposal parameters
Calculate parameters needed for the proposal.
Inputs :
state :
x :
the present sample, the place to linearize around
f : f(x),
function value at x
J : f'(x),
the jacobian of the function evaluated at x
Outputs :
state :
mu :
the mean vector
L :
the lower triangular cholesky factor of P
log_p : log(p(x))
log of the posterior density
"""
x = state['x']
f = state['f']
J = state['J']
JJ = np.dot(J.T,J)
if self._prior:
m = self._m
H = self._H
Hm = self._Hm
# LL' = P = H+J'J
L = la.cholesky(H+JJ)
# mu = (P^-1)(Hm-J'f+J'Jx)
mu = la.solve(L.T,la.solve(L,Hm-np.dot(J.T,f)+np.dot(JJ,x)))
else:
# P = J'J
L = la.cholesky(JJ)
# mu = x-(P^-1)J'f
mu = x-la.solve(L.T,la.solve(L,np.dot(J.T,f)))
state['L'] = L
state['mu'] = mu
state['log_p'] = self._log_post(x,f)
return state
def _log_P(self, X , Z, k):
"""
Log of the probability of transition from z to x with k steps
log ( P_k (x, z) )
Inputs :
X :
state to be proposed to
Z :
state to be proposed from
k :
number of recursions, depth
"""
r_ = self._r_
Z_ = self._Z_
# zero case
if k == 0 :
log_P = Z['log_p'] + log_K(X, Z, r_[k])
# recursice case
else :
P_zk_z = np.exp( self._log_P(Z_[k], Z, k-1) )
P_z_zk = np.exp( self._log_P(Z, Z_[k], k-1) )
# flag
if P_zk_z <= P_z_zk :
self._N_is_0 = True
log_P = -np.inf
else :
log_P = np.log( P_zk_z - P_z_zk ) + log_K(X, Z, r_[k])
return log_P
def _back_off(self):
"""
Back off
Calculate the back off step size
Inputs :
Z_ :
list of states in current proposal
r_ :
list of back offs in current proposal
q :
step size reduction
dynamic :
set to True if you want to use the dynamic back-off
Outputs :
"""
q = self._step_size
r = self._r_[-1]
Z_ = self._Z_
if self._dynamic:
p_0 = la.norm(Z_[0]['f'])
dp_0 = p_0*2*la.norm(Z_[0]['J'])
p_r = la.norm(Z_[-1]['f'])
dp_r = p_0*2*la.norm(Z_[-1]['J'])
r_new = optimize(r, p_0**2, dp_0, p_r**2, dp_r)
else :
r_new = r * q
self._r_.append(r_new)
"""
Properties:
1. chain
2. n_samples
3. n_accepted
4. accept_rate
5. step_count
6. call_count
7. max_steps
8. step_size
"""
@property
def chain(self):
return self._chain
@property
def n_samples(self):
return self._n_samples
@property
def n_accepted(self):
return self._n_accepted
@property
def accept_rate(self):
return float(self._n_accepted)/self._n_samples
@property
def step_count(self):
return self._step_count
@property
def call_count(self):
return self._f.count
@property
def max_steps(self):
return self._max_steps
@property
def step_size(self):
return self._step_size |
akissa/sachannelupdate | sachannelupdate/base.py | getfiles | python | def getfiles(qfiles, dirname, names):
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname) | Get rule files in a directory | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L49-L56 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def deploy_file(source, dest):
"""Deploy a file"""
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close()
def package(dest, tardir, p_version):
"""Package files"""
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def hash_file(tar_filename):
"""hash the file"""
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data)
def upload(config, remote_loc, u_filename):
"""Upload the files"""
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode
def queue_files(dirpath, queue):
"""Add files in a directory to a queue"""
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename))
def get_cf_files(path, queue):
"""Get rule files in a directory and put them in a queue"""
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname)
def cleanup(dest, tardir, counterfile):
"""Remove existing rules"""
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile)
def check_required(config):
"""Validate the input"""
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
"""Main function"""
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | deploy_file | python | def deploy_file(source, dest):
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close() | Deploy a file | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L59-L71 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def getfiles(qfiles, dirname, names):
"""Get rule files in a directory"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname)
def package(dest, tardir, p_version):
"""Package files"""
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def hash_file(tar_filename):
"""hash the file"""
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data)
def upload(config, remote_loc, u_filename):
"""Upload the files"""
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode
def queue_files(dirpath, queue):
"""Add files in a directory to a queue"""
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename))
def get_cf_files(path, queue):
"""Get rule files in a directory and put them in a queue"""
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname)
def cleanup(dest, tardir, counterfile):
"""Remove existing rules"""
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile)
def check_required(config):
"""Validate the input"""
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
"""Main function"""
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | package | python | def package(dest, tardir, p_version):
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close() | Package files | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L74-L83 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def getfiles(qfiles, dirname, names):
"""Get rule files in a directory"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname)
def deploy_file(source, dest):
"""Deploy a file"""
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def hash_file(tar_filename):
"""hash the file"""
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data)
def upload(config, remote_loc, u_filename):
"""Upload the files"""
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode
def queue_files(dirpath, queue):
"""Add files in a directory to a queue"""
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename))
def get_cf_files(path, queue):
"""Get rule files in a directory and put them in a queue"""
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname)
def cleanup(dest, tardir, counterfile):
"""Remove existing rules"""
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile)
def check_required(config):
"""Validate the input"""
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
"""Main function"""
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | process | python | def process(dest, rulefiles):
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy | process rules | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L86-L103 | [
"def deploy_file(source, dest):\n \"\"\"Deploy a file\"\"\"\n date = datetime.utcnow().strftime('%Y-%m-%d')\n shandle = open(source)\n with open(dest, 'w') as handle:\n for line in shandle:\n if line == '# Updated: %date%\\n':\n newline = '# Updated: %s\\n' % date\n else:\n newline = line\n handle.write(newline)\n handle.flush()\n shandle.close()\n"
] | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
    """Write *content* to the file at *name*, truncating any existing file."""
    with open(name, 'w') as target:
        target.write(content)
def getfiles(qfiles, dirname, names):
    """Queue rule files found in a directory.

    Only regular files whose name ends in ``.cf`` or ``.post`` are
    added to *qfiles*.

    :param qfiles: queue-like object with a ``put`` method
    :param dirname: directory containing the entries in *names*
    :param names: iterable of entry names relative to *dirname*
    """
    for name in names:
        fullname = os.path.join(dirname, name)
        # Parenthesize the suffix test: the original relied on
        # "A and B or C", which queued ANY entry ending in '.post'
        # (including directories) because 'and' binds tighter than 'or'.
        if os.path.isfile(fullname) and \
                (fullname.endswith('.cf') or fullname.endswith('.post')):
            qfiles.put(fullname)
def deploy_file(source, dest):
    """Copy a rule file to *dest*, stamping the ``%date%`` placeholder.

    Lines reading exactly ``# Updated: %date%`` are rewritten with the
    current UTC date; every other line is copied verbatim.

    :param source: path of the rule file to deploy
    :param dest: path the file is deployed to
    """
    date = datetime.utcnow().strftime('%Y-%m-%d')
    # 'with' on both handles guarantees closure even if an I/O error
    # occurs mid-copy (the original leaked the source handle on error).
    with open(source) as shandle:
        with open(dest, 'w') as handle:
            for line in shandle:
                if line == '# Updated: %date%\n':
                    newline = '# Updated: %s\n' % date
                else:
                    newline = line
                handle.write(newline)
            handle.flush()
def package(dest, tardir, p_version):
    """Create ``<tardir>/<p_version>.tar.gz`` from the files in *dest*.

    Only regular files directly inside *dest* are archived, stored under
    their bare names.  NOTE: the working directory is changed to *dest*
    and deliberately not restored (matches the original behaviour that
    later steps may rely on).

    :param dest: directory whose files are packaged
    :param tardir: directory receiving the tarball
    :param p_version: version number used as the tarball base name
    """
    os.chdir(dest)
    p_filename = '%s.tar.gz' % p_version
    p_path = os.path.join(tardir, p_filename)
    tar = tarfile.open(p_path, mode='w:gz')
    try:
        for cf_file in os.listdir('.'):
            if os.path.isfile(cf_file):
                tar.add(cf_file)
    finally:
        # ensure a partially written archive is closed even on error
        tar.close()
def get_counter(counterfile):
    """Return the next release counter value.

    Reads the integer stored in *counterfile* and returns it plus one.
    When the file is missing or does not hold an integer, the counter
    restarts at 1 and the file is (re)created.  Any other failure is
    wrapped in SaChannelUpdateError.

    :param counterfile: path of the file holding the current counter
    :returns: int -- the next counter value
    """
    try:
        # close the handle deterministically (the original leaked it)
        with open(counterfile) as handle:
            version_num = int(handle.read()) + 1
    except (ValueError, IOError):
        version_num = 1
        create_file(counterfile, "%d" % version_num)
    except BaseException as msg:
        raise SaChannelUpdateError(msg)
    return version_num
def update_dns(config, record, sa_version):
    """Publish *record* as the TXT record ``<sa_version>.<domain>``.

    Performs a TSIG-signed (HMAC-SHA512) dynamic DNS update over TCP,
    replacing the version TXT record with a 120 second TTL.

    :param config: mapping with domain_name / domain_key / domain_ip
    :param record: TXT record payload (the new channel version)
    :param sa_version: SpamAssassin version label used in the record name
    :returns: True on success
    :raises SaChannelUpdateDNSError: when the DNS update fails
    """
    try:
        domain = config.get('domain_name', 'sa.baruwa.com.')
        dns_key = config.get('domain_key')
        dns_ip = config.get('domain_ip', '127.0.0.1')
        keyring = tsigkeyring.from_text({domain: dns_key})
        transaction = update.Update(
            domain,
            keyring=keyring,
            keyalgorithm=tsig.HMAC_SHA512)
        txtrecord = '%s.%s' % (sa_version, domain)
        transaction.replace(txtrecord, 120, 'txt', record)
        query.tcp(transaction, dns_ip)
        return True
    except DNSException as msg:
        # 'except X as msg' replaces the Python-2-only 'except X, msg'
        raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
    """Create a detached GPG signature ``<s_filename>.asc``.

    :param config: mapping with gpg_dir / gpg_passphrase / gpg_keyid
    :param s_filename: path of the file to sign
    """
    gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
    gpg_pass = config.get('gpg_passphrase')
    gpg_keyid = config.get('gpg_keyid')
    gpg = GPG(gnupghome=gpg_home)
    # context managers replace the try/finally + "'plaintext' in locals()"
    # dance and guarantee both handles are closed on any error
    with open(s_filename, 'rb') as plaintext:
        signature = gpg.sign_file(
            plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
    with open('%s.asc' % s_filename, 'wb') as handle:
        handle.write(str(signature))
def hash_file(tar_filename):
    """Write the SHA1 checksum of *tar_filename* to ``<tar_filename>.sha1``."""
    hasher = sha1()
    # feed the digest in fixed-size chunks so large tarballs never have
    # to be loaded into memory at once
    with open(tar_filename, 'rb') as afile:
        for chunk in iter(lambda: afile.read(BLOCKSIZE), b''):
            hasher.update(chunk)
    digest_line = HASHTMPL % (hasher.hexdigest(),
                              os.path.basename(tar_filename))
    create_file('%s.sha1' % tar_filename, digest_line)
def upload(config, remote_loc, u_filename):
    """Upload the archive plus its ``.sha1`` and ``.asc`` companions via SFTP.

    :param config: transport configuration mapping
    :param remote_loc: remote location specification
    :param u_filename: local path of the tarball to upload
    :returns: True when every file transferred, False otherwise
    """
    rcode = False
    try:
        sftp, transport = get_sftp_conn(config)
        remote_dir = get_remote_path(remote_loc)
        # Remote paths must be built from the bare file name: the
        # original joined the absolute local path onto remote_dir,
        # which makes os.path.join discard remote_dir entirely.
        basename = os.path.basename(u_filename)
        for part in ['sha1', 'asc']:
            local_file = '%s.%s' % (u_filename, part)
            remote_file = os.path.join(remote_dir, '%s.%s' % (basename, part))
            sftp.put(local_file, remote_file)
        # The original passed remote_dir as the *local* path here, so
        # the tarball itself was never uploaded.
        sftp.put(u_filename, os.path.join(remote_dir, basename))
        rcode = True
    except BaseException:
        # deliberate best-effort: any failure simply reports False
        pass
    finally:
        if 'transport' in locals():
            transport.close()
    return rcode
def queue_files(dirpath, queue):
    """Put the full path of every file under *dirpath* onto *queue*."""
    for root, _, filenames in os.walk(os.path.abspath(dirpath)):
        for entry in filenames:
            queue.put(os.path.join(root, entry))
def get_cf_files(path, queue):
    """Queue rule files (``*.cf``, ``*.post``) found anywhere under *path*.

    :param path: directory tree to scan
    :param queue: queue-like object with a ``put`` method
    """
    for root, _, files in os.walk(os.path.abspath(path)):
        if not files:
            continue
        for filename in files:
            fullname = os.path.join(root, filename)
            # parentheses fix the 'A and B or C' precedence bug that let
            # any '.post' path bypass the isfile() check
            if os.path.isfile(fullname) and \
                    (fullname.endswith('.cf') or
                     fullname.endswith('.post')):
                queue.put(fullname)
def cleanup(dest, tardir, counterfile):
    """Delete all deployed rule files, built archives and the counter file."""
    pending = Queue()
    # collect every file from the deploy directory, then the archive dir
    for directory in (dest, tardir):
        queue_files(directory, pending)
    while not pending.empty():
        victim = pending.get()
        info("Deleting file: %s" % victim)
        os.unlink(victim)
    if os.path.exists(counterfile):
        info("Deleting the counter file %s" % counterfile)
        os.unlink(counterfile)
def check_required(config):
    """Validate that the mandatory configuration options are present.

    :param config: configuration mapping
    :raises SaChannelUpdateConfigError: when a required option is missing
    """
    if config.get('domain_key') is None:
        raise CfgError("The domain_key option is required")
    # The original tested the nonexistent 'remote_loc' key, so every
    # valid configuration failed validation here; 'remote_location' is
    # the key actually read by entry() (and named in the message below).
    if config.get('remote_location') is None:
        raise CfgError("The remote_location option is required")
    if config.get('gpg_keyid') is None:
        raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
    """Entry point: build, sign, upload and publish a channel update.

    Workflow: collect rule files, deploy any changed ones, package the
    deploy directory into a versioned tarball, sign and checksum it,
    upload it over SFTP and finally publish the new version in DNS.
    The counter file is only rewritten after the DNS update succeeds.

    :param config: configuration mapping
    :param delete_files: when truthy, wipe the deployed files, archives
        and counter file instead of building anything
    """
    home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
    dns_ver = config.get('spamassassin_version', '1.4.3')
    remote_loc = config.get('remote_location')
    rule_dir = os.path.join(home_dir, 'rules')
    dest = os.path.join(home_dir, 'deploy')
    tardir = os.path.join(home_dir, 'archives')
    counterfile = os.path.join(home_dir, 'db', 'counters')
    check_required(config)
    if delete_files:
        # maintenance mode: remove everything previously generated
        cleanup(dest, tardir, counterfile)
        return
    cffiles = Queue()
    get_cf_files(rule_dir, cffiles)
    if process(dest, cffiles):
        # at least one rule changed -> build and release a new version
        version = get_counter(counterfile)
        filename = '%s.tar.gz' % version
        path = os.path.join(tardir, filename)
        package(dest, tardir, version)
        sign(config, path)
        hash_file(path)
        if upload(config, remote_loc, path):
            if update_dns(config, str(version), dns_ver):
                # persist the counter only once the release is live
                create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | get_counter | python | def get_counter(counterfile):
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num | Get the counter value | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L106-L116 | [
"def create_file(name, content):\n \"Generic to write file\"\n with open(name, 'w') as writefile:\n writefile.write(content)\n"
] | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def getfiles(qfiles, dirname, names):
"""Get rule files in a directory"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname)
def deploy_file(source, dest):
"""Deploy a file"""
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close()
def package(dest, tardir, p_version):
"""Package files"""
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close()
def process(dest, rulefiles):
    """Deploy changed rule files into the *dest* directory.

    Each queued rule file is copied to *dest* when it is not deployed
    yet or is newer than the already-deployed copy.

    :param dest: deployment directory
    :param rulefiles: queue of candidate rule file paths
    :returns: True when at least one file was (re)deployed
    """
    deploy = False
    while not rulefiles.empty():
        rulefile = rulefiles.get()
        base = os.path.basename(rulefile)
        # Use a per-file target path: the original rebound 'dest' itself,
        # so every file after the first was joined onto the previous
        # file's path instead of the deployment directory.
        target = os.path.join(dest, base)
        if os.path.exists(target):
            # redeploy only when the source is newer than the deployed copy
            srctime = os.stat(rulefile).st_mtime
            dsttime = os.stat(target).st_mtime
            if srctime > dsttime:
                deploy = True
                deploy_file(rulefile, target)
        else:
            deploy = True
            deploy_file(rulefile, target)
    return deploy
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def hash_file(tar_filename):
"""hash the file"""
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data)
def upload(config, remote_loc, u_filename):
"""Upload the files"""
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode
def queue_files(dirpath, queue):
"""Add files in a directory to a queue"""
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename))
def get_cf_files(path, queue):
"""Get rule files in a directory and put them in a queue"""
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname)
def cleanup(dest, tardir, counterfile):
"""Remove existing rules"""
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile)
def check_required(config):
"""Validate the input"""
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
"""Main function"""
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | update_dns | python | def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg) | Update the DNS record | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L119-L135 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def getfiles(qfiles, dirname, names):
"""Get rule files in a directory"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname)
def deploy_file(source, dest):
"""Deploy a file"""
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close()
def package(dest, tardir, p_version):
"""Package files"""
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def hash_file(tar_filename):
"""hash the file"""
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data)
def upload(config, remote_loc, u_filename):
"""Upload the files"""
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode
def queue_files(dirpath, queue):
"""Add files in a directory to a queue"""
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename))
def get_cf_files(path, queue):
"""Get rule files in a directory and put them in a queue"""
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname)
def cleanup(dest, tardir, counterfile):
"""Remove existing rules"""
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile)
def check_required(config):
"""Validate the input"""
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
"""Main function"""
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | sign | python | def sign(config, s_filename):
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close() | sign the package | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L138-L152 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def getfiles(qfiles, dirname, names):
"""Get rule files in a directory"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname)
def deploy_file(source, dest):
"""Deploy a file"""
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close()
def package(dest, tardir, p_version):
"""Package files"""
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def hash_file(tar_filename):
"""hash the file"""
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data)
def upload(config, remote_loc, u_filename):
"""Upload the files"""
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode
def queue_files(dirpath, queue):
"""Add files in a directory to a queue"""
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename))
def get_cf_files(path, queue):
"""Get rule files in a directory and put them in a queue"""
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname)
def cleanup(dest, tardir, counterfile):
"""Remove existing rules"""
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile)
def check_required(config):
"""Validate the input"""
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
"""Main function"""
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | hash_file | python | def hash_file(tar_filename):
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data) | hash the file | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L155-L164 | [
"def create_file(name, content):\n \"Generic to write file\"\n with open(name, 'w') as writefile:\n writefile.write(content)\n"
] | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def getfiles(qfiles, dirname, names):
"""Get rule files in a directory"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname)
def deploy_file(source, dest):
"""Deploy a file"""
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close()
def package(dest, tardir, p_version):
"""Package files"""
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def upload(config, remote_loc, u_filename):
"""Upload the files"""
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode
def queue_files(dirpath, queue):
"""Add files in a directory to a queue"""
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename))
def get_cf_files(path, queue):
"""Get rule files in a directory and put them in a queue"""
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname)
def cleanup(dest, tardir, counterfile):
"""Remove existing rules"""
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile)
def check_required(config):
"""Validate the input"""
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
"""Main function"""
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | upload | python | def upload(config, remote_loc, u_filename):
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode | Upload the files | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L167-L184 | [
"def get_sftp_conn(config):\n \"\"\"Make a SFTP connection, returns sftp client and connection objects\"\"\"\n remote = config.get('remote_location')\n parts = urlparse(remote)\n\n if ':' in parts.netloc:\n hostname, port = parts.netloc.split(':')\n else:\n hostname = parts.netloc\n port = 22\n port = int(port)\n\n username = config.get('remote_username') or getuser()\n luser = get_local_user(username)\n sshdir = get_ssh_dir(config, luser)\n hostkey = get_host_keys(hostname, sshdir)\n\n try:\n sftp = None\n keys = get_ssh_keys(sshdir)\n transport = Transport((hostname, port))\n while not keys.empty():\n try:\n key = PKey.from_private_key_file(keys.get())\n transport.connect(\n hostkey=hostkey,\n username=username,\n password=None,\n pkey=key)\n sftp = SFTPClient.from_transport(transport)\n break\n except (PasswordRequiredException, SSHException):\n pass\n if sftp is None:\n raise SaChannelUpdateTransportError(\"SFTP connection failed\")\n return sftp, transport\n except BaseException as msg:\n raise SaChannelUpdateTransportError(msg)\n",
"def get_remote_path(remote_location):\n \"\"\"Get the remote path from the remote location\"\"\"\n parts = urlparse(remote_location)\n return parts.path\n"
] | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def getfiles(qfiles, dirname, names):
"""Get rule files in a directory"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname)
def deploy_file(source, dest):
"""Deploy a file"""
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close()
def package(dest, tardir, p_version):
"""Package files"""
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def hash_file(tar_filename):
"""hash the file"""
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data)
def queue_files(dirpath, queue):
"""Add files in a directory to a queue"""
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename))
def get_cf_files(path, queue):
"""Get rule files in a directory and put them in a queue"""
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname)
def cleanup(dest, tardir, counterfile):
"""Remove existing rules"""
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile)
def check_required(config):
"""Validate the input"""
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
"""Main function"""
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | queue_files | python | def queue_files(dirpath, queue):
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename)) | Add files in a directory to a queue | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L187-L193 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def getfiles(qfiles, dirname, names):
"""Get rule files in a directory"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname)
def deploy_file(source, dest):
"""Deploy a file"""
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close()
def package(dest, tardir, p_version):
"""Package files"""
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def hash_file(tar_filename):
"""hash the file"""
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data)
def upload(config, remote_loc, u_filename):
"""Upload the files"""
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode
def get_cf_files(path, queue):
"""Get rule files in a directory and put them in a queue"""
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname)
def cleanup(dest, tardir, counterfile):
"""Remove existing rules"""
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile)
def check_required(config):
"""Validate the input"""
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
"""Main function"""
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | get_cf_files | python | def get_cf_files(path, queue):
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname) | Get rule files in a directory and put them in a queue | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L196-L205 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def getfiles(qfiles, dirname, names):
"""Get rule files in a directory"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname)
def deploy_file(source, dest):
"""Deploy a file"""
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close()
def package(dest, tardir, p_version):
"""Package files"""
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def hash_file(tar_filename):
"""hash the file"""
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data)
def upload(config, remote_loc, u_filename):
"""Upload the files"""
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode
def queue_files(dirpath, queue):
"""Add files in a directory to a queue"""
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename))
def cleanup(dest, tardir, counterfile):
"""Remove existing rules"""
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile)
def check_required(config):
"""Validate the input"""
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
"""Main function"""
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | cleanup | python | def cleanup(dest, tardir, counterfile):
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile) | Remove existing rules | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L208-L221 | [
"def info(msg):\n \"\"\"print to stdout\"\"\"\n print(msg, file=sys.stdout)\n",
"def queue_files(dirpath, queue):\n \"\"\"Add files in a directory to a queue\"\"\"\n for root, _, files in os.walk(os.path.abspath(dirpath)):\n if not files:\n continue\n for filename in files:\n queue.put(os.path.join(root, filename))\n"
] | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def getfiles(qfiles, dirname, names):
"""Get rule files in a directory"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname)
def deploy_file(source, dest):
"""Deploy a file"""
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close()
def package(dest, tardir, p_version):
"""Package files"""
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def hash_file(tar_filename):
    """Write a ``<tar_filename>.sha1`` checksum file for the package."""
    digest = sha1()
    with open(tar_filename, 'rb') as source:
        # Read in fixed-size chunks so large archives never have to
        # fit in memory at once.
        chunk = source.read(BLOCKSIZE)
        while chunk:
            digest.update(chunk)
            chunk = source.read(BLOCKSIZE)
    content = HASHTMPL % (digest.hexdigest(), os.path.basename(tar_filename))
    create_file('%s.sha1' % tar_filename, content)
def upload(config, remote_loc, u_filename):
    """Upload the package, its checksum and its signature via SFTP.

    Args:
        config: settings mapping used to open the SFTP connection.
        remote_loc: remote location URL; its path component is the
            destination directory.
        u_filename: local path of the package tarball.

    Returns:
        bool: True when every file was uploaded.
    """
    rcode = False
    try:
        sftp, transport = get_sftp_conn(config)
        remote_dir = get_remote_path(remote_loc)
        for part in ['sha1', 'asc']:
            local_file = '%s.%s' % (u_filename, part)
            # Join only the basename: ``local_file`` is an absolute
            # path and os.path.join() discards ``remote_dir`` when
            # its second argument is absolute.
            remote_file = os.path.join(
                remote_dir, os.path.basename(local_file))
            sftp.put(local_file, remote_file)
        # BUG FIX: the original passed ``remote_dir`` as the local
        # path here, so the package itself was never uploaded.
        sftp.put(
            u_filename,
            os.path.join(remote_dir, os.path.basename(u_filename)))
        rcode = True
    except BaseException:
        # Deliberate best effort: any failure just reports False.
        pass
    finally:
        if 'transport' in locals():
            transport.close()
    return rcode
def queue_files(dirpath, queue):
    """Walk *dirpath* and enqueue the full path of every file found."""
    for dirname, _, filenames in os.walk(os.path.abspath(dirpath)):
        for entry in filenames:
            queue.put(os.path.join(dirname, entry))
def get_cf_files(path, queue):
    """Queue every ``.cf`` and ``.post`` rule file found under *path*.

    Args:
        path: directory tree to search.
        queue: Queue that receives the absolute file paths.
    """
    for root, _, files in os.walk(os.path.abspath(path)):
        if not files:
            continue
        for filename in files:
            fullname = os.path.join(root, filename)
            # Parenthesize the suffix test: the original bound
            # ``and`` tighter than ``or``, so a ``.post`` entry was
            # queued even when the isfile() check failed.
            if os.path.isfile(fullname) and (
                    fullname.endswith('.cf') or fullname.endswith('.post')):
                queue.put(fullname)
def check_required(config):
    """Validate that all mandatory settings are present.

    Raises:
        CfgError: when a required option is missing.
    """
    if config.get('domain_key') is None:
        raise CfgError("The domain_key option is required")
    # BUG FIX: the setting is named ``remote_location`` (as used by
    # entry() and by this very error message); the original looked
    # up ``remote_loc``, which is never set, so the check always
    # failed.
    if config.get('remote_location') is None:
        raise CfgError("The remote_location option is required")
    if config.get('gpg_keyid') is None:
        raise CfgError("The gpg_keyid option is required")
def entry(config, delete_files=None):
    """Build, sign and publish a rule update channel release.

    Args:
        config: mapping of settings (from sachannelupdate.ini).
        delete_files: when truthy, wipe deployed rules, archives and
            the version counter instead of building a release.
    """
    home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
    dns_ver = config.get('spamassassin_version', '1.4.3')
    remote_loc = config.get('remote_location')
    # Fixed directory layout under home_dir.
    rule_dir = os.path.join(home_dir, 'rules')
    dest = os.path.join(home_dir, 'deploy')
    tardir = os.path.join(home_dir, 'archives')
    counterfile = os.path.join(home_dir, 'db', 'counters')
    check_required(config)
    if delete_files:
        cleanup(dest, tardir, counterfile)
        return
    cffiles = Queue()
    get_cf_files(rule_dir, cffiles)
    # Only cut a new release when at least one rule file changed.
    if process(dest, cffiles):
        version = get_counter(counterfile)
        filename = '%s.tar.gz' % version
        path = os.path.join(tardir, filename)
        package(dest, tardir, version)
        sign(config, path)
        hash_file(path)
        # Persist the counter only after both the upload and the DNS
        # announcement succeed, so a failed release can be retried
        # under the same version number.
        if upload(config, remote_loc, path):
            if update_dns(config, str(version), dns_ver):
                create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | check_required | python | def check_required(config):
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required") | Validate the input | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L224-L231 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
    """Write *content* to the file *name*, replacing any old data."""
    with open(name, 'w') as handle:
        handle.write(content)
def getfiles(qfiles, dirname, names):
    """Queue rule files (``.cf``/``.post``) found in *dirname*.

    Args:
        qfiles: Queue receiving matching absolute paths.
        dirname: directory that contains the *names* entries.
        names: iterable of entry names within *dirname*.
    """
    for name in names:
        fullname = os.path.join(dirname, name)
        # Parenthesized so isfile() guards BOTH suffixes; the
        # original precedence let a non-file ``.post`` entry
        # slip through.
        if os.path.isfile(fullname) and (
                fullname.endswith('.cf') or fullname.endswith('.post')):
            qfiles.put(fullname)
def deploy_file(source, dest):
    """Copy *source* to *dest*, stamping the ``%date%`` placeholder.

    Lines reading exactly ``# Updated: %date%`` are rewritten with
    the current UTC date; every other line is copied verbatim.
    """
    date = datetime.utcnow().strftime('%Y-%m-%d')
    # Both handles are context-managed so neither leaks if the copy
    # fails part-way (the original left ``source`` open on error).
    with open(source) as shandle:
        with open(dest, 'w') as handle:
            for line in shandle:
                if line == '# Updated: %date%\n':
                    handle.write('# Updated: %s\n' % date)
                else:
                    handle.write(line)
            handle.flush()
def package(dest, tardir, p_version):
    """Build ``<tardir>/<p_version>.tar.gz`` from the files in *dest*.

    Note: changes the process working directory to *dest* so the
    archive members carry bare file names.
    """
    os.chdir(dest)
    p_path = os.path.join(tardir, '%s.tar.gz' % p_version)
    tar = tarfile.open(p_path, mode='w:gz')
    try:
        for cf_file in os.listdir('.'):
            # Only plain files are packaged; sub-directories skipped.
            if os.path.isfile(cf_file):
                tar.add(cf_file)
    finally:
        # Always close the archive, even if adding a member fails
        # (the original leaked the handle on error).
        tar.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def hash_file(tar_filename):
"""hash the file"""
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data)
def upload(config, remote_loc, u_filename):
"""Upload the files"""
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode
def queue_files(dirpath, queue):
"""Add files in a directory to a queue"""
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename))
def get_cf_files(path, queue):
"""Get rule files in a directory and put them in a queue"""
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname)
def cleanup(dest, tardir, counterfile):
    """Delete deployed rules, built archives and the counter file."""
    pending = Queue()
    # Collect everything under the deploy and archive directories.
    queue_files(dest, pending)
    queue_files(tardir, pending)
    while not pending.empty():
        target = pending.get()
        info("Deleting file: %s" % target)
        os.unlink(target)
    if os.path.exists(counterfile):
        info("Deleting the counter file %s" % counterfile)
        os.unlink(counterfile)
def entry(config, delete_files=None):
"""Main function"""
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version)
|
akissa/sachannelupdate | sachannelupdate/base.py | entry | python | def entry(config, delete_files=None):
home_dir = config.get('home_dir', '/var/lib/sachannelupdate')
dns_ver = config.get('spamassassin_version', '1.4.3')
remote_loc = config.get('remote_location')
rule_dir = os.path.join(home_dir, 'rules')
dest = os.path.join(home_dir, 'deploy')
tardir = os.path.join(home_dir, 'archives')
counterfile = os.path.join(home_dir, 'db', 'counters')
check_required(config)
if delete_files:
cleanup(dest, tardir, counterfile)
return
cffiles = Queue()
get_cf_files(rule_dir, cffiles)
if process(dest, cffiles):
version = get_counter(counterfile)
filename = '%s.tar.gz' % version
path = os.path.join(tardir, filename)
package(dest, tardir, version)
sign(config, path)
hash_file(path)
if upload(config, remote_loc, path):
if update_dns(config, str(version), dns_ver):
create_file(counterfile, "%d" % version) | Main function | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/base.py#L234-L262 | [
"def process(dest, rulefiles):\n \"\"\"process rules\"\"\"\n deploy = False\n while not rulefiles.empty():\n rulefile = rulefiles.get()\n base = os.path.basename(rulefile)\n dest = os.path.join(dest, base)\n if os.path.exists(dest):\n # check if older\n oldtime = os.stat(rulefile).st_mtime\n newtime = os.stat(dest).st_mtime\n if oldtime > newtime:\n deploy = True\n deploy_file(rulefile, dest)\n else:\n deploy = True\n deploy_file(rulefile, dest)\n return deploy\n",
"def cleanup(dest, tardir, counterfile):\n \"\"\"Remove existing rules\"\"\"\n thefiles = Queue()\n # dest directory files\n queue_files(dest, thefiles)\n # tar directory files\n queue_files(tardir, thefiles)\n while not thefiles.empty():\n d_file = thefiles.get()\n info(\"Deleting file: %s\" % d_file)\n os.unlink(d_file)\n if os.path.exists(counterfile):\n info(\"Deleting the counter file %s\" % counterfile)\n os.unlink(counterfile)\n",
"def upload(config, remote_loc, u_filename):\n \"\"\"Upload the files\"\"\"\n rcode = False\n try:\n sftp, transport = get_sftp_conn(config)\n remote_dir = get_remote_path(remote_loc)\n for part in ['sha1', 'asc']:\n local_file = '%s.%s' % (u_filename, part)\n remote_file = os.path.join(remote_dir, local_file)\n sftp.put(local_file, remote_file)\n sftp.put(remote_dir, os.path.join(remote_dir, u_filename))\n rcode = True\n except BaseException:\n pass\n finally:\n if 'transport' in locals():\n transport.close()\n return rcode\n",
"def package(dest, tardir, p_version):\n \"\"\"Package files\"\"\"\n os.chdir(dest)\n p_filename = '%s.tar.gz' % p_version\n p_path = os.path.join(tardir, p_filename)\n tar = tarfile.open(p_path, mode='w:gz')\n for cf_file in os.listdir('.'):\n if os.path.isfile(cf_file):\n tar.add(cf_file)\n tar.close()\n",
"def sign(config, s_filename):\n \"\"\"sign the package\"\"\"\n gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')\n gpg_pass = config.get('gpg_passphrase')\n gpg_keyid = config.get('gpg_keyid')\n gpg = GPG(gnupghome=gpg_home)\n try:\n plaintext = open(s_filename, 'rb')\n signature = gpg.sign_file(\n plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)\n with open('%s.asc' % s_filename, 'wb') as handle:\n handle.write(str(signature))\n finally:\n if 'plaintext' in locals():\n plaintext.close()\n",
"def create_file(name, content):\n \"Generic to write file\"\n with open(name, 'w') as writefile:\n writefile.write(content)\n",
"def get_counter(counterfile):\n \"\"\"Get the counter value\"\"\"\n try:\n version_num = open(counterfile).read()\n version_num = int(version_num) + 1\n except (ValueError, IOError):\n version_num = 1\n create_file(counterfile, \"%d\" % version_num)\n except BaseException as msg:\n raise SaChannelUpdateError(msg)\n return version_num\n",
"def update_dns(config, record, sa_version):\n \"Update the DNS record\"\n try:\n domain = config.get('domain_name', 'sa.baruwa.com.')\n dns_key = config.get('domain_key')\n dns_ip = config.get('domain_ip', '127.0.0.1')\n keyring = tsigkeyring.from_text({domain: dns_key})\n transaction = update.Update(\n domain,\n keyring=keyring,\n keyalgorithm=tsig.HMAC_SHA512)\n txtrecord = '%s.%s' % (sa_version, domain)\n transaction.replace(txtrecord, 120, 'txt', record)\n query.tcp(transaction, dns_ip)\n return True\n except DNSException, msg:\n raise SaChannelUpdateDNSError(msg)\n",
"def hash_file(tar_filename):\n \"\"\"hash the file\"\"\"\n hasher = sha1()\n with open(tar_filename, 'rb') as afile:\n buf = afile.read(BLOCKSIZE)\n while len(buf) > 0:\n hasher.update(buf)\n buf = afile.read(BLOCKSIZE)\n data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))\n create_file('%s.sha1' % tar_filename, data)\n",
"def get_cf_files(path, queue):\n \"\"\"Get rule files in a directory and put them in a queue\"\"\"\n for root, _, files in os.walk(os.path.abspath(path)):\n if not files:\n continue\n for filename in files:\n fullname = os.path.join(root, filename)\n if os.path.isfile(fullname) and fullname.endswith('.cf') or \\\n fullname.endswith('.post'):\n queue.put(fullname)\n",
"def check_required(config):\n \"\"\"Validate the input\"\"\"\n if config.get('domain_key') is None:\n raise CfgError(\"The domain_key option is required\")\n if config.get('remote_loc') is None:\n raise CfgError(\"The remote_location option is required\")\n if config.get('gpg_keyid') is None:\n raise CfgError(\"The gpg_keyid option is required\")\n"
] | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Utility for pushing updates to Spamassassin update channels
Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
"""
import os
import tarfile
import datetime
from Queue import Queue
from hashlib import sha1
from datetime import datetime
from gnupg import GPG
from dns.exception import DNSException
from dns import tsig, query, tsigkeyring, update
from sachannelupdate.utils import info
from sachannelupdate.exceptions import SaChannelUpdateConfigError \
as CfgError, SaChannelUpdateDNSError, SaChannelUpdateError
from sachannelupdate.transports import get_sftp_conn, get_remote_path
BLOCKSIZE = 65536
HASHTMPL = """%s %s\n"""
def create_file(name, content):
"Generic to write file"
with open(name, 'w') as writefile:
writefile.write(content)
def getfiles(qfiles, dirname, names):
"""Get rule files in a directory"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('.cf') or \
fullname.endswith('.post'):
qfiles.put(fullname)
def deploy_file(source, dest):
"""Deploy a file"""
date = datetime.utcnow().strftime('%Y-%m-%d')
shandle = open(source)
with open(dest, 'w') as handle:
for line in shandle:
if line == '# Updated: %date%\n':
newline = '# Updated: %s\n' % date
else:
newline = line
handle.write(newline)
handle.flush()
shandle.close()
def package(dest, tardir, p_version):
"""Package files"""
os.chdir(dest)
p_filename = '%s.tar.gz' % p_version
p_path = os.path.join(tardir, p_filename)
tar = tarfile.open(p_path, mode='w:gz')
for cf_file in os.listdir('.'):
if os.path.isfile(cf_file):
tar.add(cf_file)
tar.close()
def process(dest, rulefiles):
"""process rules"""
deploy = False
while not rulefiles.empty():
rulefile = rulefiles.get()
base = os.path.basename(rulefile)
dest = os.path.join(dest, base)
if os.path.exists(dest):
# check if older
oldtime = os.stat(rulefile).st_mtime
newtime = os.stat(dest).st_mtime
if oldtime > newtime:
deploy = True
deploy_file(rulefile, dest)
else:
deploy = True
deploy_file(rulefile, dest)
return deploy
def get_counter(counterfile):
"""Get the counter value"""
try:
version_num = open(counterfile).read()
version_num = int(version_num) + 1
except (ValueError, IOError):
version_num = 1
create_file(counterfile, "%d" % version_num)
except BaseException as msg:
raise SaChannelUpdateError(msg)
return version_num
def update_dns(config, record, sa_version):
"Update the DNS record"
try:
domain = config.get('domain_name', 'sa.baruwa.com.')
dns_key = config.get('domain_key')
dns_ip = config.get('domain_ip', '127.0.0.1')
keyring = tsigkeyring.from_text({domain: dns_key})
transaction = update.Update(
domain,
keyring=keyring,
keyalgorithm=tsig.HMAC_SHA512)
txtrecord = '%s.%s' % (sa_version, domain)
transaction.replace(txtrecord, 120, 'txt', record)
query.tcp(transaction, dns_ip)
return True
except DNSException, msg:
raise SaChannelUpdateDNSError(msg)
def sign(config, s_filename):
"""sign the package"""
gpg_home = config.get('gpg_dir', '/var/lib/sachannelupdate/gnupg')
gpg_pass = config.get('gpg_passphrase')
gpg_keyid = config.get('gpg_keyid')
gpg = GPG(gnupghome=gpg_home)
try:
plaintext = open(s_filename, 'rb')
signature = gpg.sign_file(
plaintext, keyid=gpg_keyid, passphrase=gpg_pass, detach=True)
with open('%s.asc' % s_filename, 'wb') as handle:
handle.write(str(signature))
finally:
if 'plaintext' in locals():
plaintext.close()
def hash_file(tar_filename):
"""hash the file"""
hasher = sha1()
with open(tar_filename, 'rb') as afile:
buf = afile.read(BLOCKSIZE)
while len(buf) > 0:
hasher.update(buf)
buf = afile.read(BLOCKSIZE)
data = HASHTMPL % (hasher.hexdigest(), os.path.basename(tar_filename))
create_file('%s.sha1' % tar_filename, data)
def upload(config, remote_loc, u_filename):
"""Upload the files"""
rcode = False
try:
sftp, transport = get_sftp_conn(config)
remote_dir = get_remote_path(remote_loc)
for part in ['sha1', 'asc']:
local_file = '%s.%s' % (u_filename, part)
remote_file = os.path.join(remote_dir, local_file)
sftp.put(local_file, remote_file)
sftp.put(remote_dir, os.path.join(remote_dir, u_filename))
rcode = True
except BaseException:
pass
finally:
if 'transport' in locals():
transport.close()
return rcode
def queue_files(dirpath, queue):
"""Add files in a directory to a queue"""
for root, _, files in os.walk(os.path.abspath(dirpath)):
if not files:
continue
for filename in files:
queue.put(os.path.join(root, filename))
def get_cf_files(path, queue):
"""Get rule files in a directory and put them in a queue"""
for root, _, files in os.walk(os.path.abspath(path)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if os.path.isfile(fullname) and fullname.endswith('.cf') or \
fullname.endswith('.post'):
queue.put(fullname)
def cleanup(dest, tardir, counterfile):
"""Remove existing rules"""
thefiles = Queue()
# dest directory files
queue_files(dest, thefiles)
# tar directory files
queue_files(tardir, thefiles)
while not thefiles.empty():
d_file = thefiles.get()
info("Deleting file: %s" % d_file)
os.unlink(d_file)
if os.path.exists(counterfile):
info("Deleting the counter file %s" % counterfile)
os.unlink(counterfile)
def check_required(config):
"""Validate the input"""
if config.get('domain_key') is None:
raise CfgError("The domain_key option is required")
if config.get('remote_loc') is None:
raise CfgError("The remote_location option is required")
if config.get('gpg_keyid') is None:
raise CfgError("The gpg_keyid option is required")
|
akissa/sachannelupdate | sachannelupdate/cli.py | main | python | def main():
parser = OptionParser()
parser.add_option(
'-c', '--config',
help='configuration file',
dest='filename',
type='str',
default='/etc/sachannelupdate/sachannelupdate.ini')
parser.add_option(
'-d', '--delete',
help='Deletes existing rules',
dest='cleanup',
action="store_true",
default=False,)
options, _ = parser.parse_args()
if not os.path.isfile(options.filename):
raise SaChannelUpdateConfigError(
"The configuration file: %s does not exist" % options.filename)
config = ConfigParser()
config.read(options.filename)
try:
# pylint: disable=protected-access
entry(config._sections['settings'], options.cleanup)
except BaseException as msg:
error(msg) | Main function | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/cli.py#L29-L55 | [
"def error(msg):\n \"\"\"print to stderr\"\"\"\n print(msg, file=sys.stderr)\n",
"def entry(config, delete_files=None):\n \"\"\"Main function\"\"\"\n home_dir = config.get('home_dir', '/var/lib/sachannelupdate')\n dns_ver = config.get('spamassassin_version', '1.4.3')\n remote_loc = config.get('remote_location')\n rule_dir = os.path.join(home_dir, 'rules')\n dest = os.path.join(home_dir, 'deploy')\n tardir = os.path.join(home_dir, 'archives')\n counterfile = os.path.join(home_dir, 'db', 'counters')\n\n check_required(config)\n\n if delete_files:\n cleanup(dest, tardir, counterfile)\n return\n\n cffiles = Queue()\n get_cf_files(rule_dir, cffiles)\n\n if process(dest, cffiles):\n version = get_counter(counterfile)\n filename = '%s.tar.gz' % version\n path = os.path.join(tardir, filename)\n package(dest, tardir, version)\n sign(config, path)\n hash_file(path)\n if upload(config, remote_loc, path):\n if update_dns(config, str(version), dns_ver):\n create_file(counterfile, \"%d\" % version)\n"
] | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: CLI functions
"""
import os
from optparse import OptionParser
from ConfigParser import ConfigParser
from sachannelupdate import entry, error, SaChannelUpdateConfigError
|
akissa/sachannelupdate | sachannelupdate/transports.py | get_key_files | python | def get_key_files(kfiles, dirname, names):
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('_rsa') or \
fullname.endswith('_dsa'):
kfiles.put(fullname) | Return key files | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/transports.py#L35-L42 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Transports
"""
import os
from Queue import Queue
from pwd import getpwnam
from getpass import getuser
from urlparse import urlparse
from paramiko.util import load_host_keys
from paramiko import Transport, SFTPClient, PKey, PasswordRequiredException, \
SSHException
from sachannelupdate.exceptions import SaChannelUpdateTransportError
def get_ssh_keys(sshdir):
    """Return a Queue of private key files found under *sshdir*.

    Key files are recognised by the conventional ``_rsa``/``_dsa``
    file name suffixes.
    """
    keys = Queue()
    for root, _, files in os.walk(os.path.abspath(sshdir)):
        if not files:
            continue
        for filename in files:
            fullname = os.path.join(root, filename)
            # Parenthesize the suffix test: with the original
            # precedence any ``_dsa`` path was queued even when
            # isfile() was False.
            if os.path.isfile(fullname) and (
                    fullname.endswith('_rsa') or
                    fullname.endswith('_dsa')):
                keys.put(fullname)
    return keys
def get_remote_path(remote_location):
"""Get the remote path from the remote location"""
parts = urlparse(remote_location)
return parts.path
def get_ssh_dir(config, username):
"""Get the users ssh dir"""
sshdir = config.get('ssh_config_dir')
if not sshdir:
sshdir = os.path.expanduser('~/.ssh')
if not os.path.isdir(sshdir):
pwentry = getpwnam(username)
sshdir = os.path.join(pwentry.pw_dir, '.ssh')
if not os.path.isdir(sshdir):
sshdir = None
return sshdir
def get_local_user(username):
"""Get the local username"""
try:
_ = getpwnam(username)
luser = username
except KeyError:
luser = getuser()
return luser
def get_host_keys(hostname, sshdir):
"""get host key"""
hostkey = None
try:
host_keys = load_host_keys(os.path.join(sshdir, 'known_hosts'))
except IOError:
host_keys = {}
if hostname in host_keys:
hostkeytype = host_keys[hostname].keys()[0]
hostkey = host_keys[hostname][hostkeytype]
return hostkey
def get_sftp_conn(config):
"""Make a SFTP connection, returns sftp client and connection objects"""
remote = config.get('remote_location')
parts = urlparse(remote)
if ':' in parts.netloc:
hostname, port = parts.netloc.split(':')
else:
hostname = parts.netloc
port = 22
port = int(port)
username = config.get('remote_username') or getuser()
luser = get_local_user(username)
sshdir = get_ssh_dir(config, luser)
hostkey = get_host_keys(hostname, sshdir)
try:
sftp = None
keys = get_ssh_keys(sshdir)
transport = Transport((hostname, port))
while not keys.empty():
try:
key = PKey.from_private_key_file(keys.get())
transport.connect(
hostkey=hostkey,
username=username,
password=None,
pkey=key)
sftp = SFTPClient.from_transport(transport)
break
except (PasswordRequiredException, SSHException):
pass
if sftp is None:
raise SaChannelUpdateTransportError("SFTP connection failed")
return sftp, transport
except BaseException as msg:
raise SaChannelUpdateTransportError(msg)
|
akissa/sachannelupdate | sachannelupdate/transports.py | get_ssh_keys | python | def get_ssh_keys(sshdir):
keys = Queue()
for root, _, files in os.walk(os.path.abspath(sshdir)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if (os.path.isfile(fullname) and fullname.endswith('_rsa') or
fullname.endswith('_dsa')):
keys.put(fullname)
return keys | Get SSH keys | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/transports.py#L45-L56 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Transports
"""
import os
from Queue import Queue
from pwd import getpwnam
from getpass import getuser
from urlparse import urlparse
from paramiko.util import load_host_keys
from paramiko import Transport, SFTPClient, PKey, PasswordRequiredException, \
SSHException
from sachannelupdate.exceptions import SaChannelUpdateTransportError
def get_key_files(kfiles, dirname, names):
    """Queue SSH private key files (``_rsa``/``_dsa``) in *dirname*.

    Args:
        kfiles: Queue receiving matching absolute paths.
        dirname: directory that contains the *names* entries.
        names: iterable of entry names within *dirname*.
    """
    for name in names:
        fullname = os.path.join(dirname, name)
        # Parenthesized so isfile() guards both suffixes; the
        # original precedence queued any ``_dsa`` entry regardless.
        if os.path.isfile(fullname) and (
                fullname.endswith('_rsa') or fullname.endswith('_dsa')):
            kfiles.put(fullname)
def get_remote_path(remote_location):
    """Return the path component of the *remote_location* URL."""
    return urlparse(remote_location).path
def get_ssh_dir(config, username):
    """Locate the SSH configuration directory for *username*.

    Tries, in order: the ``ssh_config_dir`` setting, the current
    user's ``~/.ssh``, then ``.ssh`` under *username*'s home
    directory.  Returns None when none of them exists.
    """
    candidate = config.get('ssh_config_dir') or os.path.expanduser('~/.ssh')
    if os.path.isdir(candidate):
        return candidate
    # Fall back to the named user's home directory.
    candidate = os.path.join(getpwnam(username).pw_dir, '.ssh')
    if os.path.isdir(candidate):
        return candidate
    return None
def get_local_user(username):
    """Return *username* if it exists locally, else the current user."""
    try:
        getpwnam(username)
    except KeyError:
        # Unknown account: fall back to whoever is running us.
        return getuser()
    return username
def get_host_keys(hostname, sshdir):
    """Return the stored host key for *hostname*, or None.

    Reads ``known_hosts`` from *sshdir* via paramiko's
    load_host_keys(); a missing file simply yields no key.
    """
    hostkey = None
    try:
        host_keys = load_host_keys(os.path.join(sshdir, 'known_hosts'))
    except IOError:
        # No known_hosts file: behave as if it were empty.
        host_keys = {}
    if hostname in host_keys:
        # Pick the first advertised key type for this host.
        # NOTE(review): ``.keys()[0]`` is python-2 only; python 3
        # dict views are not indexable — confirm target interpreter.
        hostkeytype = host_keys[hostname].keys()[0]
        hostkey = host_keys[hostname][hostkeytype]
    return hostkey
def get_sftp_conn(config):
"""Make a SFTP connection, returns sftp client and connection objects"""
remote = config.get('remote_location')
parts = urlparse(remote)
if ':' in parts.netloc:
hostname, port = parts.netloc.split(':')
else:
hostname = parts.netloc
port = 22
port = int(port)
username = config.get('remote_username') or getuser()
luser = get_local_user(username)
sshdir = get_ssh_dir(config, luser)
hostkey = get_host_keys(hostname, sshdir)
try:
sftp = None
keys = get_ssh_keys(sshdir)
transport = Transport((hostname, port))
while not keys.empty():
try:
key = PKey.from_private_key_file(keys.get())
transport.connect(
hostkey=hostkey,
username=username,
password=None,
pkey=key)
sftp = SFTPClient.from_transport(transport)
break
except (PasswordRequiredException, SSHException):
pass
if sftp is None:
raise SaChannelUpdateTransportError("SFTP connection failed")
return sftp, transport
except BaseException as msg:
raise SaChannelUpdateTransportError(msg)
|
akissa/sachannelupdate | sachannelupdate/transports.py | get_ssh_dir | python | def get_ssh_dir(config, username):
sshdir = config.get('ssh_config_dir')
if not sshdir:
sshdir = os.path.expanduser('~/.ssh')
if not os.path.isdir(sshdir):
pwentry = getpwnam(username)
sshdir = os.path.join(pwentry.pw_dir, '.ssh')
if not os.path.isdir(sshdir):
sshdir = None
return sshdir | Get the users ssh dir | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/transports.py#L65-L75 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Transports
"""
import os
from Queue import Queue
from pwd import getpwnam
from getpass import getuser
from urlparse import urlparse
from paramiko.util import load_host_keys
from paramiko import Transport, SFTPClient, PKey, PasswordRequiredException, \
SSHException
from sachannelupdate.exceptions import SaChannelUpdateTransportError
def get_key_files(kfiles, dirname, names):
"""Return key files"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('_rsa') or \
fullname.endswith('_dsa'):
kfiles.put(fullname)
def get_ssh_keys(sshdir):
"""Get SSH keys"""
keys = Queue()
for root, _, files in os.walk(os.path.abspath(sshdir)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if (os.path.isfile(fullname) and fullname.endswith('_rsa') or
fullname.endswith('_dsa')):
keys.put(fullname)
return keys
def get_remote_path(remote_location):
"""Get the remote path from the remote location"""
parts = urlparse(remote_location)
return parts.path
def get_local_user(username):
"""Get the local username"""
try:
_ = getpwnam(username)
luser = username
except KeyError:
luser = getuser()
return luser
def get_host_keys(hostname, sshdir):
"""get host key"""
hostkey = None
try:
host_keys = load_host_keys(os.path.join(sshdir, 'known_hosts'))
except IOError:
host_keys = {}
if hostname in host_keys:
hostkeytype = host_keys[hostname].keys()[0]
hostkey = host_keys[hostname][hostkeytype]
return hostkey
def get_sftp_conn(config):
"""Make a SFTP connection, returns sftp client and connection objects"""
remote = config.get('remote_location')
parts = urlparse(remote)
if ':' in parts.netloc:
hostname, port = parts.netloc.split(':')
else:
hostname = parts.netloc
port = 22
port = int(port)
username = config.get('remote_username') or getuser()
luser = get_local_user(username)
sshdir = get_ssh_dir(config, luser)
hostkey = get_host_keys(hostname, sshdir)
try:
sftp = None
keys = get_ssh_keys(sshdir)
transport = Transport((hostname, port))
while not keys.empty():
try:
key = PKey.from_private_key_file(keys.get())
transport.connect(
hostkey=hostkey,
username=username,
password=None,
pkey=key)
sftp = SFTPClient.from_transport(transport)
break
except (PasswordRequiredException, SSHException):
pass
if sftp is None:
raise SaChannelUpdateTransportError("SFTP connection failed")
return sftp, transport
except BaseException as msg:
raise SaChannelUpdateTransportError(msg)
|
akissa/sachannelupdate | sachannelupdate/transports.py | get_local_user | python | def get_local_user(username):
try:
_ = getpwnam(username)
luser = username
except KeyError:
luser = getuser()
return luser | Get the local username | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/transports.py#L78-L85 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Transports
"""
import os
from Queue import Queue
from pwd import getpwnam
from getpass import getuser
from urlparse import urlparse
from paramiko.util import load_host_keys
from paramiko import Transport, SFTPClient, PKey, PasswordRequiredException, \
SSHException
from sachannelupdate.exceptions import SaChannelUpdateTransportError
def get_key_files(kfiles, dirname, names):
"""Return key files"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('_rsa') or \
fullname.endswith('_dsa'):
kfiles.put(fullname)
def get_ssh_keys(sshdir):
"""Get SSH keys"""
keys = Queue()
for root, _, files in os.walk(os.path.abspath(sshdir)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if (os.path.isfile(fullname) and fullname.endswith('_rsa') or
fullname.endswith('_dsa')):
keys.put(fullname)
return keys
def get_remote_path(remote_location):
"""Get the remote path from the remote location"""
parts = urlparse(remote_location)
return parts.path
def get_ssh_dir(config, username):
"""Get the users ssh dir"""
sshdir = config.get('ssh_config_dir')
if not sshdir:
sshdir = os.path.expanduser('~/.ssh')
if not os.path.isdir(sshdir):
pwentry = getpwnam(username)
sshdir = os.path.join(pwentry.pw_dir, '.ssh')
if not os.path.isdir(sshdir):
sshdir = None
return sshdir
def get_host_keys(hostname, sshdir):
"""get host key"""
hostkey = None
try:
host_keys = load_host_keys(os.path.join(sshdir, 'known_hosts'))
except IOError:
host_keys = {}
if hostname in host_keys:
hostkeytype = host_keys[hostname].keys()[0]
hostkey = host_keys[hostname][hostkeytype]
return hostkey
def get_sftp_conn(config):
"""Make a SFTP connection, returns sftp client and connection objects"""
remote = config.get('remote_location')
parts = urlparse(remote)
if ':' in parts.netloc:
hostname, port = parts.netloc.split(':')
else:
hostname = parts.netloc
port = 22
port = int(port)
username = config.get('remote_username') or getuser()
luser = get_local_user(username)
sshdir = get_ssh_dir(config, luser)
hostkey = get_host_keys(hostname, sshdir)
try:
sftp = None
keys = get_ssh_keys(sshdir)
transport = Transport((hostname, port))
while not keys.empty():
try:
key = PKey.from_private_key_file(keys.get())
transport.connect(
hostkey=hostkey,
username=username,
password=None,
pkey=key)
sftp = SFTPClient.from_transport(transport)
break
except (PasswordRequiredException, SSHException):
pass
if sftp is None:
raise SaChannelUpdateTransportError("SFTP connection failed")
return sftp, transport
except BaseException as msg:
raise SaChannelUpdateTransportError(msg)
|
akissa/sachannelupdate | sachannelupdate/transports.py | get_host_keys | python | def get_host_keys(hostname, sshdir):
hostkey = None
try:
host_keys = load_host_keys(os.path.join(sshdir, 'known_hosts'))
except IOError:
host_keys = {}
if hostname in host_keys:
hostkeytype = host_keys[hostname].keys()[0]
hostkey = host_keys[hostname][hostkeytype]
return hostkey | get host key | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/transports.py#L88-L101 | null | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Transports
"""
import os
from Queue import Queue
from pwd import getpwnam
from getpass import getuser
from urlparse import urlparse
from paramiko.util import load_host_keys
from paramiko import Transport, SFTPClient, PKey, PasswordRequiredException, \
SSHException
from sachannelupdate.exceptions import SaChannelUpdateTransportError
def get_key_files(kfiles, dirname, names):
"""Return key files"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('_rsa') or \
fullname.endswith('_dsa'):
kfiles.put(fullname)
def get_ssh_keys(sshdir):
"""Get SSH keys"""
keys = Queue()
for root, _, files in os.walk(os.path.abspath(sshdir)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if (os.path.isfile(fullname) and fullname.endswith('_rsa') or
fullname.endswith('_dsa')):
keys.put(fullname)
return keys
def get_remote_path(remote_location):
"""Get the remote path from the remote location"""
parts = urlparse(remote_location)
return parts.path
def get_ssh_dir(config, username):
"""Get the users ssh dir"""
sshdir = config.get('ssh_config_dir')
if not sshdir:
sshdir = os.path.expanduser('~/.ssh')
if not os.path.isdir(sshdir):
pwentry = getpwnam(username)
sshdir = os.path.join(pwentry.pw_dir, '.ssh')
if not os.path.isdir(sshdir):
sshdir = None
return sshdir
def get_local_user(username):
"""Get the local username"""
try:
_ = getpwnam(username)
luser = username
except KeyError:
luser = getuser()
return luser
def get_sftp_conn(config):
"""Make a SFTP connection, returns sftp client and connection objects"""
remote = config.get('remote_location')
parts = urlparse(remote)
if ':' in parts.netloc:
hostname, port = parts.netloc.split(':')
else:
hostname = parts.netloc
port = 22
port = int(port)
username = config.get('remote_username') or getuser()
luser = get_local_user(username)
sshdir = get_ssh_dir(config, luser)
hostkey = get_host_keys(hostname, sshdir)
try:
sftp = None
keys = get_ssh_keys(sshdir)
transport = Transport((hostname, port))
while not keys.empty():
try:
key = PKey.from_private_key_file(keys.get())
transport.connect(
hostkey=hostkey,
username=username,
password=None,
pkey=key)
sftp = SFTPClient.from_transport(transport)
break
except (PasswordRequiredException, SSHException):
pass
if sftp is None:
raise SaChannelUpdateTransportError("SFTP connection failed")
return sftp, transport
except BaseException as msg:
raise SaChannelUpdateTransportError(msg)
|
akissa/sachannelupdate | sachannelupdate/transports.py | get_sftp_conn | python | def get_sftp_conn(config):
remote = config.get('remote_location')
parts = urlparse(remote)
if ':' in parts.netloc:
hostname, port = parts.netloc.split(':')
else:
hostname = parts.netloc
port = 22
port = int(port)
username = config.get('remote_username') or getuser()
luser = get_local_user(username)
sshdir = get_ssh_dir(config, luser)
hostkey = get_host_keys(hostname, sshdir)
try:
sftp = None
keys = get_ssh_keys(sshdir)
transport = Transport((hostname, port))
while not keys.empty():
try:
key = PKey.from_private_key_file(keys.get())
transport.connect(
hostkey=hostkey,
username=username,
password=None,
pkey=key)
sftp = SFTPClient.from_transport(transport)
break
except (PasswordRequiredException, SSHException):
pass
if sftp is None:
raise SaChannelUpdateTransportError("SFTP connection failed")
return sftp, transport
except BaseException as msg:
raise SaChannelUpdateTransportError(msg) | Make a SFTP connection, returns sftp client and connection objects | train | https://github.com/akissa/sachannelupdate/blob/a1c3c3d86b874f9c92c2407e2608963165d3ae98/sachannelupdate/transports.py#L104-L141 | [
"def get_ssh_keys(sshdir):\n \"\"\"Get SSH keys\"\"\"\n keys = Queue()\n for root, _, files in os.walk(os.path.abspath(sshdir)):\n if not files:\n continue\n for filename in files:\n fullname = os.path.join(root, filename)\n if (os.path.isfile(fullname) and fullname.endswith('_rsa') or\n fullname.endswith('_dsa')):\n keys.put(fullname)\n return keys\n",
"def get_ssh_dir(config, username):\n \"\"\"Get the users ssh dir\"\"\"\n sshdir = config.get('ssh_config_dir')\n if not sshdir:\n sshdir = os.path.expanduser('~/.ssh')\n if not os.path.isdir(sshdir):\n pwentry = getpwnam(username)\n sshdir = os.path.join(pwentry.pw_dir, '.ssh')\n if not os.path.isdir(sshdir):\n sshdir = None\n return sshdir\n",
"def get_local_user(username):\n \"\"\"Get the local username\"\"\"\n try:\n _ = getpwnam(username)\n luser = username\n except KeyError:\n luser = getuser()\n return luser\n",
"def get_host_keys(hostname, sshdir):\n \"\"\"get host key\"\"\"\n hostkey = None\n\n try:\n host_keys = load_host_keys(os.path.join(sshdir, 'known_hosts'))\n except IOError:\n host_keys = {}\n\n if hostname in host_keys:\n hostkeytype = host_keys[hostname].keys()[0]\n hostkey = host_keys[hostname][hostkeytype]\n\n return hostkey\n"
] | # -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4
# sachannelupdate - Utility for pushing updates to Spamassassin update channels
# Copyright (C) 2015 Andrew Colin Kissa <andrew@topdog.za.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
sachannelupdate: Transports
"""
import os
from Queue import Queue
from pwd import getpwnam
from getpass import getuser
from urlparse import urlparse
from paramiko.util import load_host_keys
from paramiko import Transport, SFTPClient, PKey, PasswordRequiredException, \
SSHException
from sachannelupdate.exceptions import SaChannelUpdateTransportError
def get_key_files(kfiles, dirname, names):
"""Return key files"""
for name in names:
fullname = os.path.join(dirname, name)
if os.path.isfile(fullname) and \
fullname.endswith('_rsa') or \
fullname.endswith('_dsa'):
kfiles.put(fullname)
def get_ssh_keys(sshdir):
"""Get SSH keys"""
keys = Queue()
for root, _, files in os.walk(os.path.abspath(sshdir)):
if not files:
continue
for filename in files:
fullname = os.path.join(root, filename)
if (os.path.isfile(fullname) and fullname.endswith('_rsa') or
fullname.endswith('_dsa')):
keys.put(fullname)
return keys
def get_remote_path(remote_location):
"""Get the remote path from the remote location"""
parts = urlparse(remote_location)
return parts.path
def get_ssh_dir(config, username):
"""Get the users ssh dir"""
sshdir = config.get('ssh_config_dir')
if not sshdir:
sshdir = os.path.expanduser('~/.ssh')
if not os.path.isdir(sshdir):
pwentry = getpwnam(username)
sshdir = os.path.join(pwentry.pw_dir, '.ssh')
if not os.path.isdir(sshdir):
sshdir = None
return sshdir
def get_local_user(username):
"""Get the local username"""
try:
_ = getpwnam(username)
luser = username
except KeyError:
luser = getuser()
return luser
def get_host_keys(hostname, sshdir):
"""get host key"""
hostkey = None
try:
host_keys = load_host_keys(os.path.join(sshdir, 'known_hosts'))
except IOError:
host_keys = {}
if hostname in host_keys:
hostkeytype = host_keys[hostname].keys()[0]
hostkey = host_keys[hostname][hostkeytype]
return hostkey
|
synw/goerr | goerr/messages.py | Msg.fatal | python | def fatal(self, i: int=None) -> str:
head = "[" + colors.red("\033[1mfatal error") + "]"
if i is not None:
head = str(i) + " " + head
return head | Returns a fatal error message | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/messages.py#L9-L16 | [
"def red(self, *msg):\n color = '\\033[91m'\n return self._msg(color, *msg)\n"
] | class Msg():
"""
Class to handle the messages
"""
def error(self, i: int=None) -> str:
"""
Returns an error message
"""
head = "[" + colors.red("error") + "]"
if i is not None:
head = str(i) + " " + head
return head
def warning(self, i: int=None) -> str:
"""
Returns a warning message
"""
head = "[" + colors.purple("\033[1mwarning") + "]"
if i is not None:
head = str(i) + " " + head
return head
def info(self, i: int=None) -> str:
"""
Returns an info message
"""
head = "[" + colors.blue("info") + "]"
if i is not None:
head = str(i) + " " + head
return head
def via(self, i: int=None) -> str:
"""
Returns an via message
"""
head = "[" + colors.green("via") + "]"
if i is not None:
head = str(i) + " " + head
return head
def debug(self, i: int=None) -> str:
"""
Returns a debug message
"""
head = "[" + colors.yellow("debug") + "]"
if i is not None:
head = str(i) + " " + head
return head
|
synw/goerr | goerr/messages.py | Msg.error | python | def error(self, i: int=None) -> str:
head = "[" + colors.red("error") + "]"
if i is not None:
head = str(i) + " " + head
return head | Returns an error message | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/messages.py#L18-L25 | [
"def red(self, *msg):\n color = '\\033[91m'\n return self._msg(color, *msg)\n"
] | class Msg():
"""
Class to handle the messages
"""
def fatal(self, i: int=None) -> str:
"""
Returns a fatal error message
"""
head = "[" + colors.red("\033[1mfatal error") + "]"
if i is not None:
head = str(i) + " " + head
return head
def warning(self, i: int=None) -> str:
"""
Returns a warning message
"""
head = "[" + colors.purple("\033[1mwarning") + "]"
if i is not None:
head = str(i) + " " + head
return head
def info(self, i: int=None) -> str:
"""
Returns an info message
"""
head = "[" + colors.blue("info") + "]"
if i is not None:
head = str(i) + " " + head
return head
def via(self, i: int=None) -> str:
"""
Returns an via message
"""
head = "[" + colors.green("via") + "]"
if i is not None:
head = str(i) + " " + head
return head
def debug(self, i: int=None) -> str:
"""
Returns a debug message
"""
head = "[" + colors.yellow("debug") + "]"
if i is not None:
head = str(i) + " " + head
return head
|
synw/goerr | goerr/messages.py | Msg.warning | python | def warning(self, i: int=None) -> str:
head = "[" + colors.purple("\033[1mwarning") + "]"
if i is not None:
head = str(i) + " " + head
return head | Returns a warning message | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/messages.py#L27-L34 | [
"def purple(self, *msg):\n color = '\\033[95m'\n return self._msg(color, *msg)\n"
] | class Msg():
"""
Class to handle the messages
"""
def fatal(self, i: int=None) -> str:
"""
Returns a fatal error message
"""
head = "[" + colors.red("\033[1mfatal error") + "]"
if i is not None:
head = str(i) + " " + head
return head
def error(self, i: int=None) -> str:
"""
Returns an error message
"""
head = "[" + colors.red("error") + "]"
if i is not None:
head = str(i) + " " + head
return head
def info(self, i: int=None) -> str:
"""
Returns an info message
"""
head = "[" + colors.blue("info") + "]"
if i is not None:
head = str(i) + " " + head
return head
def via(self, i: int=None) -> str:
"""
Returns an via message
"""
head = "[" + colors.green("via") + "]"
if i is not None:
head = str(i) + " " + head
return head
def debug(self, i: int=None) -> str:
"""
Returns a debug message
"""
head = "[" + colors.yellow("debug") + "]"
if i is not None:
head = str(i) + " " + head
return head
|
synw/goerr | goerr/messages.py | Msg.info | python | def info(self, i: int=None) -> str:
head = "[" + colors.blue("info") + "]"
if i is not None:
head = str(i) + " " + head
return head | Returns an info message | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/messages.py#L36-L43 | [
"def blue(self, *msg):\n color = '\\033[94m'\n return self._msg(color, *msg)\n"
] | class Msg():
"""
Class to handle the messages
"""
def fatal(self, i: int=None) -> str:
"""
Returns a fatal error message
"""
head = "[" + colors.red("\033[1mfatal error") + "]"
if i is not None:
head = str(i) + " " + head
return head
def error(self, i: int=None) -> str:
"""
Returns an error message
"""
head = "[" + colors.red("error") + "]"
if i is not None:
head = str(i) + " " + head
return head
def warning(self, i: int=None) -> str:
"""
Returns a warning message
"""
head = "[" + colors.purple("\033[1mwarning") + "]"
if i is not None:
head = str(i) + " " + head
return head
def via(self, i: int=None) -> str:
"""
Returns an via message
"""
head = "[" + colors.green("via") + "]"
if i is not None:
head = str(i) + " " + head
return head
def debug(self, i: int=None) -> str:
"""
Returns a debug message
"""
head = "[" + colors.yellow("debug") + "]"
if i is not None:
head = str(i) + " " + head
return head
|
synw/goerr | goerr/messages.py | Msg.via | python | def via(self, i: int=None) -> str:
head = "[" + colors.green("via") + "]"
if i is not None:
head = str(i) + " " + head
return head | Returns an via message | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/messages.py#L45-L52 | [
"def green(self, *msg):\n color = '\\033[92m'\n return self._msg(color, *msg)\n"
] | class Msg():
"""
Class to handle the messages
"""
def fatal(self, i: int=None) -> str:
"""
Returns a fatal error message
"""
head = "[" + colors.red("\033[1mfatal error") + "]"
if i is not None:
head = str(i) + " " + head
return head
def error(self, i: int=None) -> str:
"""
Returns an error message
"""
head = "[" + colors.red("error") + "]"
if i is not None:
head = str(i) + " " + head
return head
def warning(self, i: int=None) -> str:
"""
Returns a warning message
"""
head = "[" + colors.purple("\033[1mwarning") + "]"
if i is not None:
head = str(i) + " " + head
return head
def info(self, i: int=None) -> str:
"""
Returns an info message
"""
head = "[" + colors.blue("info") + "]"
if i is not None:
head = str(i) + " " + head
return head
def debug(self, i: int=None) -> str:
"""
Returns a debug message
"""
head = "[" + colors.yellow("debug") + "]"
if i is not None:
head = str(i) + " " + head
return head
|
synw/goerr | goerr/messages.py | Msg.debug | python | def debug(self, i: int=None) -> str:
head = "[" + colors.yellow("debug") + "]"
if i is not None:
head = str(i) + " " + head
return head | Returns a debug message | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/messages.py#L54-L61 | [
"def yellow(self, *msg):\n color = '\\033[93m'\n return self._msg(color, *msg)\n"
] | class Msg():
"""
Class to handle the messages
"""
def fatal(self, i: int=None) -> str:
"""
Returns a fatal error message
"""
head = "[" + colors.red("\033[1mfatal error") + "]"
if i is not None:
head = str(i) + " " + head
return head
def error(self, i: int=None) -> str:
"""
Returns an error message
"""
head = "[" + colors.red("error") + "]"
if i is not None:
head = str(i) + " " + head
return head
def warning(self, i: int=None) -> str:
"""
Returns a warning message
"""
head = "[" + colors.purple("\033[1mwarning") + "]"
if i is not None:
head = str(i) + " " + head
return head
def info(self, i: int=None) -> str:
"""
Returns an info message
"""
head = "[" + colors.blue("info") + "]"
if i is not None:
head = str(i) + " " + head
return head
def via(self, i: int=None) -> str:
"""
Returns an via message
"""
head = "[" + colors.green("via") + "]"
if i is not None:
head = str(i) + " " + head
return head
|
synw/goerr | goerr/__init__.py | Err.panic | python | def panic(self, *args):
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) | Creates a fatal error and exit | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L59-L65 | [
"def _err(self, errclass: str=\"error\", *args) -> \"Err\":\n \"\"\"\n Creates an error\n \"\"\"\n error = self._new_err(errclass, *args)\n if self.log_errs is True:\n sep = \" \"\n if self.log_format == \"csv\":\n sep = \",\"\n msg = str(datetime.now()) + sep + \\\n self._errmsg(error, msgformat=self.log_format)\n self.logger.error(msg)\n print(self._errmsg(error))\n self._add(error)\n return error\n"
] | class Err():
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
# pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
synw/goerr | goerr/__init__.py | Err.warning | python | def warning(self, *args) -> "Err":
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error | Creates a warning message | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L67-L73 | [
"def _create_err(self, errclass: str, *args) -> \"Err\":\n \"\"\"\n Create an error\n \"\"\"\n error = self._new_err(errclass, *args)\n self._add(error)\n return error\n",
"def _errmsg(self, error: \"Err\", tb: bool=False, i: int=None,\n msgformat: str=\"terminal\") -> str:\n \"\"\"\n Get the error message\n \"\"\"\n if msgformat == \"terminal\":\n msg = self._headline(error, i)\n if error.ex is not None:\n msg += \"\\n\" + \"line \" + colors.bold(str(error.line))\n msg += \": \" + colors.yellow(error.code)\n msg += \"\\n\" + str(error.file)\n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg += \"\\n\" + error.tb\n elif msgformat == \"csv\":\n sep = \",\"\n msg = error.msg + sep\n msg += str(error.line) + sep + error.code + sep\n msg += str(error.file)\n elif msgformat == \"text\":\n sep = \",\"\n msg = error.msg\n if error.ex is not None:\n msg += sep + str(error.line) + sep + error.code + sep\n msg += str(error.file) + sep \n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg += sep + error.tb\n elif msgformat == \"dict\":\n msg = {\"date\": datetime.now()}\n if error.ex is not None:\n msg[\"msg\"] = error.msg\n msg[\"line\"] = error.line\n msg[\"code\"] = error.code\n msg[\"file\"] = error.file \n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg[\"traceback\"] = error.tb\n return msg\n"
] | class Err():
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
synw/goerr | goerr/__init__.py | Err.info | python | def info(self, *args) -> "Err":
error = self._create_err("info", *args)
print(self._errmsg(error))
return error | Creates an info message | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L75-L81 | [
"def _create_err(self, errclass: str, *args) -> \"Err\":\n \"\"\"\n Create an error\n \"\"\"\n error = self._new_err(errclass, *args)\n self._add(error)\n return error\n",
"def _errmsg(self, error: \"Err\", tb: bool=False, i: int=None,\n msgformat: str=\"terminal\") -> str:\n \"\"\"\n Get the error message\n \"\"\"\n if msgformat == \"terminal\":\n msg = self._headline(error, i)\n if error.ex is not None:\n msg += \"\\n\" + \"line \" + colors.bold(str(error.line))\n msg += \": \" + colors.yellow(error.code)\n msg += \"\\n\" + str(error.file)\n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg += \"\\n\" + error.tb\n elif msgformat == \"csv\":\n sep = \",\"\n msg = error.msg + sep\n msg += str(error.line) + sep + error.code + sep\n msg += str(error.file)\n elif msgformat == \"text\":\n sep = \",\"\n msg = error.msg\n if error.ex is not None:\n msg += sep + str(error.line) + sep + error.code + sep\n msg += str(error.file) + sep \n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg += sep + error.tb\n elif msgformat == \"dict\":\n msg = {\"date\": datetime.now()}\n if error.ex is not None:\n msg[\"msg\"] = error.msg\n msg[\"line\"] = error.line\n msg[\"code\"] = error.code\n msg[\"file\"] = error.file \n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg[\"traceback\"] = error.tb\n return msg\n"
] | class Err():
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
synw/goerr | goerr/__init__.py | Err.debug | python | def debug(self, *args) -> "Err":
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error | Creates a debug message | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L83-L89 | [
"def _create_err(self, errclass: str, *args) -> \"Err\":\n \"\"\"\n Create an error\n \"\"\"\n error = self._new_err(errclass, *args)\n self._add(error)\n return error\n",
"def _errmsg(self, error: \"Err\", tb: bool=False, i: int=None,\n msgformat: str=\"terminal\") -> str:\n \"\"\"\n Get the error message\n \"\"\"\n if msgformat == \"terminal\":\n msg = self._headline(error, i)\n if error.ex is not None:\n msg += \"\\n\" + \"line \" + colors.bold(str(error.line))\n msg += \": \" + colors.yellow(error.code)\n msg += \"\\n\" + str(error.file)\n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg += \"\\n\" + error.tb\n elif msgformat == \"csv\":\n sep = \",\"\n msg = error.msg + sep\n msg += str(error.line) + sep + error.code + sep\n msg += str(error.file)\n elif msgformat == \"text\":\n sep = \",\"\n msg = error.msg\n if error.ex is not None:\n msg += sep + str(error.line) + sep + error.code + sep\n msg += str(error.file) + sep \n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg += sep + error.tb\n elif msgformat == \"dict\":\n msg = {\"date\": datetime.now()}\n if error.ex is not None:\n msg[\"msg\"] = error.msg\n msg[\"line\"] = error.line\n msg[\"code\"] = error.code\n msg[\"file\"] = error.file \n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg[\"traceback\"] = error.tb\n return msg\n"
] | class Err():
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
synw/goerr | goerr/__init__.py | Err._create_err | python | def _create_err(self, errclass: str, *args) -> "Err":
error = self._new_err(errclass, *args)
self._add(error)
return error | Create an error | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L91-L97 | [
"def _new_err(self, errclass: str, *args) -> 'Err':\n \"\"\"\n Error constructor\n \"\"\"\n # get the message or exception\n ex, msg = self._get_args(*args)\n # construct the error\n # handle exception\n ftb = None # type: str\n function = None # type: str\n errtype = None # type: str\n file = None # type: str\n line = None # type: int\n code = None # type: str\n ex_msg = None # type: str\n caller = None # type: str\n caller_msg = None # type: str\n\n st = inspect.stack()\n\n if ex is not None:\n # get info from exception\n errobj, ex_msg, tb = sys.exc_info()\n tb = traceback.extract_tb(tb)\n file, line, function, code = tb[-1]\n # if called from an external lib\n if len(tb) > 1:\n file, line, caller, code = tb[0]\n else:\n call_stack = []\n for c in st:\n call_stack.append(c[3])\n caller = self._get_caller(call_stack, function)\n\n internals = [\n \"err\",\n \"_new_err\",\n \"fatal\",\n \"warning\",\n \"debug\",\n \"info\",\n \"<module>\"] \n if caller == function or caller in internals:\n caller = None\n # handle messages\n if msg is not None:\n caller_msg = msg\n msg = str(ex_msg)\n else:\n msg = str(ex_msg)\n ftb = traceback.format_exc()\n errtype = errobj.__name__\n if function is None:\n # for el in st:\n # print(el)\n function = st[3][3]\n if function == \"<module>\":\n function = None\n # init error object\n date = datetime.now()\n error = Err(\n function,\n date,\n msg,\n errtype,\n errclass,\n line,\n file,\n code,\n ftb,\n ex,\n caller,\n caller_msg)\n return error\n",
"def _add(self, error: \"Err\"):\n \"\"\"\n Adds an error to the trace if required\n \"\"\"\n if self.trace_errs is True:\n self.errors.append(error)\n"
] | class Err():
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
def _err(self, errclass: str="error", *args) -> "Err":
    """
    Build an error of the given class, optionally log it, print it
    and record it in the trace.
    """
    err_obj = self._new_err(errclass, *args)
    if self.log_errs is True:
        # csv-formatted log lines use a comma separator, others a space
        delim = "," if self.log_format == "csv" else " "
        logline = str(datetime.now()) + delim + self._errmsg(
            err_obj, msgformat=self.log_format)
        self.logger.error(logline)
    print(self._errmsg(err_obj))
    self._add(err_obj)
    return err_obj
"def _new_err(self, errclass: str, *args) -> 'Err':\n \"\"\"\n Error constructor\n \"\"\"\n # get the message or exception\n ex, msg = self._get_args(*args)\n # construct the error\n # handle exception\n ftb = None # type: str\n function = None # type: str\n errtype = None # type: str\n file = None # type: str\n line = None # type: int\n code = None # type: str\n ex_msg = None # type: str\n caller = None # type: str\n caller_msg = None # type: str\n\n st = inspect.stack()\n\n if ex is not None:\n # get info from exception\n errobj, ex_msg, tb = sys.exc_info()\n tb = traceback.extract_tb(tb)\n file, line, function, code = tb[-1]\n # if called from an external lib\n if len(tb) > 1:\n file, line, caller, code = tb[0]\n else:\n call_stack = []\n for c in st:\n call_stack.append(c[3])\n caller = self._get_caller(call_stack, function)\n\n internals = [\n \"err\",\n \"_new_err\",\n \"fatal\",\n \"warning\",\n \"debug\",\n \"info\",\n \"<module>\"] \n if caller == function or caller in internals:\n caller = None\n # handle messages\n if msg is not None:\n caller_msg = msg\n msg = str(ex_msg)\n else:\n msg = str(ex_msg)\n ftb = traceback.format_exc()\n errtype = errobj.__name__\n if function is None:\n # for el in st:\n # print(el)\n function = st[3][3]\n if function == \"<module>\":\n function = None\n # init error object\n date = datetime.now()\n error = Err(\n function,\n date,\n msg,\n errtype,\n errclass,\n line,\n file,\n code,\n ftb,\n ex,\n caller,\n caller_msg)\n return error\n",
"def _errmsg(self, error: \"Err\", tb: bool=False, i: int=None,\n msgformat: str=\"terminal\") -> str:\n \"\"\"\n Get the error message\n \"\"\"\n if msgformat == \"terminal\":\n msg = self._headline(error, i)\n if error.ex is not None:\n msg += \"\\n\" + \"line \" + colors.bold(str(error.line))\n msg += \": \" + colors.yellow(error.code)\n msg += \"\\n\" + str(error.file)\n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg += \"\\n\" + error.tb\n elif msgformat == \"csv\":\n sep = \",\"\n msg = error.msg + sep\n msg += str(error.line) + sep + error.code + sep\n msg += str(error.file)\n elif msgformat == \"text\":\n sep = \",\"\n msg = error.msg\n if error.ex is not None:\n msg += sep + str(error.line) + sep + error.code + sep\n msg += str(error.file) + sep \n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg += sep + error.tb\n elif msgformat == \"dict\":\n msg = {\"date\": datetime.now()}\n if error.ex is not None:\n msg[\"msg\"] = error.msg\n msg[\"line\"] = error.line\n msg[\"code\"] = error.code\n msg[\"file\"] = error.file \n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg[\"traceback\"] = error.tb\n return msg\n",
"def _add(self, error: \"Err\"):\n \"\"\"\n Adds an error to the trace if required\n \"\"\"\n if self.trace_errs is True:\n self.errors.append(error)\n"
] | class Err():
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
def _new_err(self, errclass: str, *args) -> 'Err':
    """
    Error constructor: build an :class:`Err` from the provided arguments.

    :param errclass: error category ("error", "fatal", "warning", ...)
    :param args: an optional message string and/or an Exception instance,
                 in any order (sorted out by ``_get_args``)
    :return: the newly built error object (not yet printed or recorded)
    """
    # get the message or exception
    ex, msg = self._get_args(*args)
    # construct the error
    # handle exception
    # placeholders for the error metadata, filled from the exception
    # info below when an exception is available
    ftb = None  # type: str
    function = None  # type: str
    errtype = None  # type: str
    file = None  # type: str
    line = None  # type: int
    code = None  # type: str
    ex_msg = None  # type: str
    caller = None  # type: str
    caller_msg = None  # type: str
    st = inspect.stack()
    if ex is not None:
        # get info from exception: the last traceback frame is where the
        # exception was actually raised
        errobj, ex_msg, tb = sys.exc_info()
        tb = traceback.extract_tb(tb)
        file, line, function, code = tb[-1]
        # if called from an external lib
        if len(tb) > 1:
            # deep traceback: the first frame is the original call site
            file, line, caller, code = tb[0]
        else:
            # single-frame traceback: look the caller up in the live stack
            call_stack = []
            for c in st:
                call_stack.append(c[3])
            caller = self._get_caller(call_stack, function)
        # never report this library's own entry points as the caller
        internals = [
            "err",
            "_new_err",
            "fatal",
            "warning",
            "debug",
            "info",
            "<module>"]
        if caller == function or caller in internals:
            caller = None
        # handle messages: a user-supplied message given alongside an
        # exception becomes the caller's message; the exception text
        # becomes the main message
        if msg is not None:
            caller_msg = msg
            msg = str(ex_msg)
        else:
            msg = str(ex_msg)
        ftb = traceback.format_exc()
        errtype = errobj.__name__
    if function is None:
        # no exception info: take the function name at a fixed stack depth.
        # NOTE(review): st[3][3] assumes exactly three wrapper frames
        # (_new_err -> _create_err/_err -> public method) above the user
        # code — fragile if the internal call chain changes; confirm
        # before refactoring
        function = st[3][3]
        if function == "<module>":
            function = None
    # init error object
    date = datetime.now()
    error = Err(
        function,
        date,
        msg,
        errtype,
        errclass,
        line,
        file,
        code,
        ftb,
        ex,
        caller,
        caller_msg)
    return error
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
def _headline(self, error, i: int) -> str:
    """
    Build the first line of an error message: colored title, function
    names, optional caller message and the message body.
    """
    msgs = Msg()
    # pick the title builder from the error class, defaulting to "error"
    title_builders = {
        "fatal": msgs.fatal,
        "warning": msgs.warning,
        "info": msgs.info,
        "debug": msgs.debug,
        "via": msgs.via,
    }
    headline = title_builders.get(error.errclass, msgs.error)(i)
    # where the error happened and who called it
    if error.function is not None:
        headline += " from " + colors.bold(error.function)
    if error.caller is not None:
        headline += " called from " + colors.bold(error.caller)
    if error.caller_msg is not None:
        headline += "\n" + error.caller_msg
    # separator between the header and the message body
    if error.function is not None and error.msg is not None:
        headline += ": "
    else:
        headline += " "
    if error.errtype is not None:
        headline += error.errtype + " : "
    if error.msg is not None:
        headline += error.msg
    return headline
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
synw/goerr | goerr/__init__.py | Err._errmsg | python | def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg | Get the error message | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L225-L263 | [
"def _headline(self, error, i: int) -> str:\n \"\"\"\n Format the error message's headline\n \"\"\"\n msgs = Msg()\n # get the error title\n if error.errclass == \"fatal\":\n msg = msgs.fatal(i)\n elif error.errclass == \"warning\":\n msg = msgs.warning(i)\n elif error.errclass == \"info\":\n msg = msgs.info(i)\n elif error.errclass == \"debug\":\n msg = msgs.debug(i)\n elif error.errclass == \"via\":\n msg = msgs.via(i)\n else:\n msg = msgs.error(i)\n # function name\n if error.function is not None:\n msg += \" from \" + colors.bold(error.function)\n if error.caller is not None:\n msg += \" called from \" + colors.bold(error.caller)\n if error.caller_msg is not None:\n msg += \"\\n\" + error.caller_msg\n if error.function is not None and error.msg is not None:\n msg += \": \"\n else:\n msg = msg + \" \"\n if error.errtype is not None:\n msg += error.errtype + \" : \"\n if error.msg is not None:\n msg += error.msg\n return msg\n",
"def yellow(self, *msg):\n color = '\\033[93m'\n return self._msg(color, *msg)\n",
"def bold(self, *msg):\n color = '\\033[1m'\n return self._msg(color, *msg)\n"
] | class Err():
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
synw/goerr | goerr/__init__.py | Err._print_errs | python | def _print_errs(self):
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1 | Prints the errors trace with tracebacks | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L271-L281 | [
"def _errmsg(self, error: \"Err\", tb: bool=False, i: int=None,\n msgformat: str=\"terminal\") -> str:\n \"\"\"\n Get the error message\n \"\"\"\n if msgformat == \"terminal\":\n msg = self._headline(error, i)\n if error.ex is not None:\n msg += \"\\n\" + \"line \" + colors.bold(str(error.line))\n msg += \": \" + colors.yellow(error.code)\n msg += \"\\n\" + str(error.file)\n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg += \"\\n\" + error.tb\n elif msgformat == \"csv\":\n sep = \",\"\n msg = error.msg + sep\n msg += str(error.line) + sep + error.code + sep\n msg += str(error.file)\n elif msgformat == \"text\":\n sep = \",\"\n msg = error.msg\n if error.ex is not None:\n msg += sep + str(error.line) + sep + error.code + sep\n msg += str(error.file) + sep \n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg += sep + error.tb\n elif msgformat == \"dict\":\n msg = {\"date\": datetime.now()}\n if error.ex is not None:\n msg[\"msg\"] = error.msg\n msg[\"line\"] = error.line\n msg[\"code\"] = error.code\n msg[\"file\"] = error.file \n if self.errs_traceback is True or tb is True:\n if error.tb is not None:\n msg[\"traceback\"] = error.tb\n return msg\n"
] | class Err():
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
synw/goerr | goerr/__init__.py | Err._add | python | def _add(self, error: "Err"):
if self.trace_errs is True:
self.errors.append(error) | Adds an error to the trace if required | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L283-L288 | null | class Err():
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
synw/goerr | goerr/__init__.py | Err._get_caller | python | def _get_caller(self, callers: List[str], function: str) -> str:
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True | Get the caller function from the provided function | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L290-L299 | null | class Err():
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_args(self, *args) -> (Exception, str):
"""
Returns exception and message from the provided arguments
"""
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg
|
synw/goerr | goerr/__init__.py | Err._get_args | python | def _get_args(self, *args) -> (Exception, str):
ex = None
msg = None
for arg in args:
if isinstance(arg, str):
msg = arg
elif isinstance(arg, Exception):
ex = arg
return ex, msg | Returns exception and message from the provided arguments | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L301-L312 | null | class Err():
"""
Errors manager
"""
errors = [] # type: List[Err]
trace_errs = False # type: bool
errs_traceback = True # type: bool
logger = None # type: logging.Logger
log_errs = False # type: bool
log_format = "csv" # type: str
log_path = "errors.log" # type: str
test_errs_mode = False # type: bool
def __init__(self, function: str=None, date: datetime=datetime.now(),
msg: str=None, errtype: str=None, errclass: str=None,
line: int=None, file: str=None, code: str=None, tb: str=None,
ex: Exception=None, caller: str=None, caller_msg: str=None):
"""
Datastructure of an error
"""
self.date = date # type: datetime.datetime
self.function = function # type: str
self.msg = msg # type: str
self.errtype = errtype # type: str
self.errclass = errclass # type: str
self.line = line # type: int
self.file = file # type: str
self.code = code # type: str
self.tb = tb # type: str
self.ex = ex # type: Exception
self.caller = caller # type: str
self.caller_msg = caller_msg # type: str
self.new = self.err
def __repr__(self):
msg = "<goerror.Err object: " + str(self.errclass) + " error>"
return msg
def __str__(self):
return self.msg
def err(self, *args):
"""
Creates an error
"""
error = self._err("error", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err("fatal", *args)
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1) # pragma: no cover
def warning(self, *args) -> "Err":
"""
Creates a warning message
"""
error = self._create_err("warning", *args)
print(self._errmsg(error))
return error
def info(self, *args) -> "Err":
"""
Creates an info message
"""
error = self._create_err("info", *args)
print(self._errmsg(error))
return error
def debug(self, *args) -> "Err":
"""
Creates a debug message
"""
error = self._create_err("debug", *args)
print(self._errmsg(error))
return error
def _create_err(self, errclass: str, *args) -> "Err":
"""
Create an error
"""
error = self._new_err(errclass, *args)
self._add(error)
return error
def _err(self, errclass: str="error", *args) -> "Err":
"""
Creates an error
"""
error = self._new_err(errclass, *args)
if self.log_errs is True:
sep = " "
if self.log_format == "csv":
sep = ","
msg = str(datetime.now()) + sep + \
self._errmsg(error, msgformat=self.log_format)
self.logger.error(msg)
print(self._errmsg(error))
self._add(error)
return error
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtype = None # type: str
file = None # type: str
line = None # type: int
code = None # type: str
ex_msg = None # type: str
caller = None # type: str
caller_msg = None # type: str
st = inspect.stack()
if ex is not None:
# get info from exception
errobj, ex_msg, tb = sys.exc_info()
tb = traceback.extract_tb(tb)
file, line, function, code = tb[-1]
# if called from an external lib
if len(tb) > 1:
file, line, caller, code = tb[0]
else:
call_stack = []
for c in st:
call_stack.append(c[3])
caller = self._get_caller(call_stack, function)
internals = [
"err",
"_new_err",
"fatal",
"warning",
"debug",
"info",
"<module>"]
if caller == function or caller in internals:
caller = None
# handle messages
if msg is not None:
caller_msg = msg
msg = str(ex_msg)
else:
msg = str(ex_msg)
ftb = traceback.format_exc()
errtype = errobj.__name__
if function is None:
# for el in st:
# print(el)
function = st[3][3]
if function == "<module>":
function = None
# init error object
date = datetime.now()
error = Err(
function,
date,
msg,
errtype,
errclass,
line,
file,
code,
ftb,
ex,
caller,
caller_msg)
return error
def _headline(self, error, i: int) -> str:
"""
Format the error message's headline
"""
msgs = Msg()
# get the error title
if error.errclass == "fatal":
msg = msgs.fatal(i)
elif error.errclass == "warning":
msg = msgs.warning(i)
elif error.errclass == "info":
msg = msgs.info(i)
elif error.errclass == "debug":
msg = msgs.debug(i)
elif error.errclass == "via":
msg = msgs.via(i)
else:
msg = msgs.error(i)
# function name
if error.function is not None:
msg += " from " + colors.bold(error.function)
if error.caller is not None:
msg += " called from " + colors.bold(error.caller)
if error.caller_msg is not None:
msg += "\n" + error.caller_msg
if error.function is not None and error.msg is not None:
msg += ": "
else:
msg = msg + " "
if error.errtype is not None:
msg += error.errtype + " : "
if error.msg is not None:
msg += error.msg
return msg
def _errmsg(self, error: "Err", tb: bool=False, i: int=None,
msgformat: str="terminal") -> str:
"""
Get the error message
"""
if msgformat == "terminal":
msg = self._headline(error, i)
if error.ex is not None:
msg += "\n" + "line " + colors.bold(str(error.line))
msg += ": " + colors.yellow(error.code)
msg += "\n" + str(error.file)
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += "\n" + error.tb
elif msgformat == "csv":
sep = ","
msg = error.msg + sep
msg += str(error.line) + sep + error.code + sep
msg += str(error.file)
elif msgformat == "text":
sep = ","
msg = error.msg
if error.ex is not None:
msg += sep + str(error.line) + sep + error.code + sep
msg += str(error.file) + sep
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg += sep + error.tb
elif msgformat == "dict":
msg = {"date": datetime.now()}
if error.ex is not None:
msg["msg"] = error.msg
msg["line"] = error.line
msg["code"] = error.code
msg["file"] = error.file
if self.errs_traceback is True or tb is True:
if error.tb is not None:
msg["traceback"] = error.tb
return msg
def to_dict(self):
"""
Returns a dictionnary with the error elements
"""
return self._errmsg(self, msgformat="dict")
def _print_errs(self):
"""
Prints the errors trace with tracebacks
"""
i = 0
for error in self.errors:
print(self._errmsg(error, tb=True, i=i))
# for spacing
if self.errs_traceback is False:
print()
i += 1
def _add(self, error: "Err"):
"""
Adds an error to the trace if required
"""
if self.trace_errs is True:
self.errors.append(error)
def _get_caller(self, callers: List[str], function: str) -> str:
"""
Get the caller function from the provided function
"""
is_next = False
for c in callers:
if is_next is True:
return c
if function == c:
is_next = True
|
synw/goerr | goerr/__init__.py | Trace.trace | python | def trace(self):
if len(self.errors) > 0:
numerrs = len(self.errors)
print("========= Trace (" + str(numerrs) + ") =========")
self._print_errs()
self.errors = [] | Print the errors trace if there are some errors | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L331-L339 | [
"def _print_errs(self):\n \"\"\"\n Prints the errors trace with tracebacks\n \"\"\"\n i = 0\n for error in self.errors:\n print(self._errmsg(error, tb=True, i=i))\n # for spacing\n if self.errs_traceback is False:\n print()\n i += 1\n"
] | class Trace(Err):
"""
Tracess manager
"""
errors = [] # type: List[Err]
trace_errs = True # type: bool
errs_traceback = False # type: bool
def __repr__(self):
s = "s"
numerrs = len(self.errors)
if numerrs == 1:
s = ""
msg = "<goerror.Trace object: " + str(numerrs) + " error" + s + ">"
return msg
def via(self, *args):
"""
Creates an empty error to record in the stack
trace
"""
error = None
if len(self.errors) > 0:
error = self._err("via", *args)
return error
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err(*args)
self.trace()
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1)
|
synw/goerr | goerr/__init__.py | Trace.via | python | def via(self, *args):
error = None
if len(self.errors) > 0:
error = self._err("via", *args)
return error | Creates an empty error to record in the stack
trace | train | https://github.com/synw/goerr/blob/08b3809d6715bffe26899a769d96fa5de8573faf/goerr/__init__.py#L341-L349 | [
"def _err(self, errclass: str=\"error\", *args) -> \"Err\":\n \"\"\"\n Creates an error\n \"\"\"\n error = self._new_err(errclass, *args)\n if self.log_errs is True:\n sep = \" \"\n if self.log_format == \"csv\":\n sep = \",\"\n msg = str(datetime.now()) + sep + \\\n self._errmsg(error, msgformat=self.log_format)\n self.logger.error(msg)\n print(self._errmsg(error))\n self._add(error)\n return error\n"
] | class Trace(Err):
"""
Tracess manager
"""
errors = [] # type: List[Err]
trace_errs = True # type: bool
errs_traceback = False # type: bool
def __repr__(self):
s = "s"
numerrs = len(self.errors)
if numerrs == 1:
s = ""
msg = "<goerror.Trace object: " + str(numerrs) + " error" + s + ">"
return msg
def trace(self):
"""
Print the errors trace if there are some errors
"""
if len(self.errors) > 0:
numerrs = len(self.errors)
print("========= Trace (" + str(numerrs) + ") =========")
self._print_errs()
self.errors = []
def panic(self, *args):
"""
Creates a fatal error and exit
"""
self._err(*args)
self.trace()
if self.test_errs_mode is False: # pragma: no cover
sys.exit(1)
|
jeremylow/pyshk | pyshk/api.py | Api.get_user | python | def get_user(self, user_id=None, user_name=None):
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data | Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L267-L295 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n",
"def NewFromJSON(data):\n \"\"\"\n Create a new User instance from a JSON dict.\n\n Args:\n data (dict): JSON dictionary representing a user.\n\n Returns:\n A User instance.\n \"\"\"\n if data.get('shakes', None):\n shakes = [Shake.NewFromJSON(shk) for shk in data.get('shakes')]\n else:\n shakes = None\n\n return User(\n id=data.get('id', None),\n name=data.get('name', None),\n profile_image_url=data.get('profile_image_url', None),\n about=data.get('about', None),\n website=data.get('website', None),\n shakes=shakes)\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user_shakes(self):
""" Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects.
"""
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes
def get_shared_files_from_shake(self,
shake_id=None,
before=None,
after=None):
"""
Returns a list of SharedFile objects from a particular shake.
Args:
shake_id (int): Shake from which to get a list of SharedFiles
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the given Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the given Shake.
Returns:
List (list) of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/shakes'
if shake_id:
endpoint += '/{0}'.format(shake_id)
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request(verb="GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(f) for f in data['sharedfiles']]
def get_shared_file(self, sharekey=None):
"""
Returns a SharedFile object given by the sharekey.
Args:
sharekey (str): Sharekey of the SharedFile you want to retrieve.
Returns:
SharedFile
"""
if not sharekey:
raise Exception("You must specify a sharekey.")
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('GET', endpoint)
return SharedFile.NewFromJSON(data)
def like_shared_file(self, sharekey=None):
""" 'Like' a SharedFile. mlkshk doesn't allow you to unlike a
sharedfile, so this is ~~permanent~~.
Args:
sharekey (str): Sharekey for the file you want to 'like'.
Returns:
Either a SharedFile on success, or an exception on error.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.liked = True
return sf
except:
raise Exception("{0}".format(data['error']))
def save_shared_file(self, sharekey=None):
"""
Save a SharedFile to your Shake.
Args:
sharekey (str): Sharekey for the file to save.
Returns:
SharedFile saved to your shake.
"""
endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.saved = True
return sf
except:
raise Exception("{0}".format(data['error']))
def get_friends_shake(self, before=None, after=None):
"""
Contrary to the endpoint naming, this resource is for a list of
SharedFiles from your friends on mlkshk.
Returns:
List of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/friends'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['friend_shake']]
def get_incoming_shake(self, before=None, after=None):
"""
Returns a list of the most recent SharedFiles on mlkshk.com
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the Incoming Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the Incoming Shake.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/incoming'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['incoming']]
def get_magic_shake(self, before=None, after=None):
"""
From the API:
Returns the 10 most recent files accepted by the 'magic' file selection
algorithm. Currently any files with 10 or more likes are magic.
Returns:
List of SharedFile objects
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/magicfiles'
if before:
endpoint += '/before/{key}'.format(key=before)
elif after:
endpoint += '/after/{key}'.format(key=after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['magicfiles']]
def get_comments(self, sharekey=None):
"""
Retrieve comments on a SharedFile
Args:
sharekey (str): Sharekey for the file from which you want to return
the set of comments.
Returns:
List of Comment objects.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
data = self._make_request("GET", endpoint=endpoint)
return [Comment.NewFromJSON(c) for c in data['comments']]
def post_comment(self, sharekey=None, comment=None):
"""
Post a comment on behalf of the current user to the
SharedFile with the given sharekey.
Args:
sharekey (str): Sharekey of the SharedFile to which you'd like
to post a comment.
comment (str): Text of the comment to post.
Returns:
Comment object.
"""
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
post_data = {'body': comment}
data = self._make_request("POST", endpoint=endpoint, data=post_data)
return Comment.NewFromJSON(data)
def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
""" Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key.
"""
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data
def update_shared_file(self,
sharekey=None,
title=None,
description=None):
"""
Update the editable details (just the title and description) of a
SharedFile.
Args:
sharekey (str): Sharekey of the SharedFile to update.
title (Optional[str]): Title of the SharedFile.
description (Optional[str]): Description of the SharedFile
Returns:
SharedFile on success, 404 on Sharekey not found, 403 on
unauthorized.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey for the sharedfile"
"you wish to update.")
if not (title or description):
raise Exception("You must specify a title or description.")
post_data = {}
if title:
post_data['title'] = title
if description:
post_data['description'] = description
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('POST', endpoint=endpoint, data=post_data)
return SharedFile.NewFromJSON(data)
|
jeremylow/pyshk | pyshk/api.py | Api.get_user_shakes | python | def get_user_shakes(self):
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes | Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L297-L307 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user(self, user_id=None, user_name=None):
""" Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object.
"""
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data
def get_shared_files_from_shake(self,
shake_id=None,
before=None,
after=None):
"""
Returns a list of SharedFile objects from a particular shake.
Args:
shake_id (int): Shake from which to get a list of SharedFiles
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the given Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the given Shake.
Returns:
List (list) of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/shakes'
if shake_id:
endpoint += '/{0}'.format(shake_id)
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request(verb="GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(f) for f in data['sharedfiles']]
def get_shared_file(self, sharekey=None):
"""
Returns a SharedFile object given by the sharekey.
Args:
sharekey (str): Sharekey of the SharedFile you want to retrieve.
Returns:
SharedFile
"""
if not sharekey:
raise Exception("You must specify a sharekey.")
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('GET', endpoint)
return SharedFile.NewFromJSON(data)
def like_shared_file(self, sharekey=None):
""" 'Like' a SharedFile. mlkshk doesn't allow you to unlike a
sharedfile, so this is ~~permanent~~.
Args:
sharekey (str): Sharekey for the file you want to 'like'.
Returns:
Either a SharedFile on success, or an exception on error.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.liked = True
return sf
except:
raise Exception("{0}".format(data['error']))
def save_shared_file(self, sharekey=None):
"""
Save a SharedFile to your Shake.
Args:
sharekey (str): Sharekey for the file to save.
Returns:
SharedFile saved to your shake.
"""
endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.saved = True
return sf
except:
raise Exception("{0}".format(data['error']))
def get_friends_shake(self, before=None, after=None):
"""
Contrary to the endpoint naming, this resource is for a list of
SharedFiles from your friends on mlkshk.
Returns:
List of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/friends'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['friend_shake']]
def get_incoming_shake(self, before=None, after=None):
"""
Returns a list of the most recent SharedFiles on mlkshk.com
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the Incoming Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the Incoming Shake.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/incoming'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['incoming']]
def get_magic_shake(self, before=None, after=None):
"""
From the API:
Returns the 10 most recent files accepted by the 'magic' file selection
algorithm. Currently any files with 10 or more likes are magic.
Returns:
List of SharedFile objects
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/magicfiles'
if before:
endpoint += '/before/{key}'.format(key=before)
elif after:
endpoint += '/after/{key}'.format(key=after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['magicfiles']]
def get_comments(self, sharekey=None):
"""
Retrieve comments on a SharedFile
Args:
sharekey (str): Sharekey for the file from which you want to return
the set of comments.
Returns:
List of Comment objects.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
data = self._make_request("GET", endpoint=endpoint)
return [Comment.NewFromJSON(c) for c in data['comments']]
def post_comment(self, sharekey=None, comment=None):
"""
Post a comment on behalf of the current user to the
SharedFile with the given sharekey.
Args:
sharekey (str): Sharekey of the SharedFile to which you'd like
to post a comment.
comment (str): Text of the comment to post.
Returns:
Comment object.
"""
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
post_data = {'body': comment}
data = self._make_request("POST", endpoint=endpoint, data=post_data)
return Comment.NewFromJSON(data)
def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
""" Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key.
"""
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data
def update_shared_file(self,
sharekey=None,
title=None,
description=None):
"""
Update the editable details (just the title and description) of a
SharedFile.
Args:
sharekey (str): Sharekey of the SharedFile to update.
title (Optional[str]): Title of the SharedFile.
description (Optional[str]): Description of the SharedFile
Returns:
SharedFile on success, 404 on Sharekey not found, 403 on
unauthorized.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey for the sharedfile"
"you wish to update.")
if not (title or description):
raise Exception("You must specify a title or description.")
post_data = {}
if title:
post_data['title'] = title
if description:
post_data['description'] = description
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('POST', endpoint=endpoint, data=post_data)
return SharedFile.NewFromJSON(data)
|
jeremylow/pyshk | pyshk/api.py | Api.get_shared_files_from_shake | python | def get_shared_files_from_shake(self,
shake_id=None,
before=None,
after=None):
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/shakes'
if shake_id:
endpoint += '/{0}'.format(shake_id)
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request(verb="GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(f) for f in data['sharedfiles']] | Returns a list of SharedFile objects from a particular shake.
Args:
shake_id (int): Shake from which to get a list of SharedFiles
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the given Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the given Shake.
Returns:
List (list) of SharedFiles. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L309-L340 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user(self, user_id=None, user_name=None):
""" Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object.
"""
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data
def get_user_shakes(self):
""" Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects.
"""
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes
def get_shared_file(self, sharekey=None):
"""
Returns a SharedFile object given by the sharekey.
Args:
sharekey (str): Sharekey of the SharedFile you want to retrieve.
Returns:
SharedFile
"""
if not sharekey:
raise Exception("You must specify a sharekey.")
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('GET', endpoint)
return SharedFile.NewFromJSON(data)
def like_shared_file(self, sharekey=None):
""" 'Like' a SharedFile. mlkshk doesn't allow you to unlike a
sharedfile, so this is ~~permanent~~.
Args:
sharekey (str): Sharekey for the file you want to 'like'.
Returns:
Either a SharedFile on success, or an exception on error.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.liked = True
return sf
except:
raise Exception("{0}".format(data['error']))
def save_shared_file(self, sharekey=None):
"""
Save a SharedFile to your Shake.
Args:
sharekey (str): Sharekey for the file to save.
Returns:
SharedFile saved to your shake.
"""
endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.saved = True
return sf
except:
raise Exception("{0}".format(data['error']))
def get_friends_shake(self, before=None, after=None):
"""
Contrary to the endpoint naming, this resource is for a list of
SharedFiles from your friends on mlkshk.
Returns:
List of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/friends'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['friend_shake']]
def get_incoming_shake(self, before=None, after=None):
"""
Returns a list of the most recent SharedFiles on mlkshk.com
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the Incoming Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the Incoming Shake.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/incoming'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['incoming']]
def get_magic_shake(self, before=None, after=None):
"""
From the API:
Returns the 10 most recent files accepted by the 'magic' file selection
algorithm. Currently any files with 10 or more likes are magic.
Returns:
List of SharedFile objects
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/magicfiles'
if before:
endpoint += '/before/{key}'.format(key=before)
elif after:
endpoint += '/after/{key}'.format(key=after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['magicfiles']]
def get_comments(self, sharekey=None):
"""
Retrieve comments on a SharedFile
Args:
sharekey (str): Sharekey for the file from which you want to return
the set of comments.
Returns:
List of Comment objects.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
data = self._make_request("GET", endpoint=endpoint)
return [Comment.NewFromJSON(c) for c in data['comments']]
def post_comment(self, sharekey=None, comment=None):
"""
Post a comment on behalf of the current user to the
SharedFile with the given sharekey.
Args:
sharekey (str): Sharekey of the SharedFile to which you'd like
to post a comment.
comment (str): Text of the comment to post.
Returns:
Comment object.
"""
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
post_data = {'body': comment}
data = self._make_request("POST", endpoint=endpoint, data=post_data)
return Comment.NewFromJSON(data)
def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
""" Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key.
"""
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data
def update_shared_file(self,
sharekey=None,
title=None,
description=None):
"""
Update the editable details (just the title and description) of a
SharedFile.
Args:
sharekey (str): Sharekey of the SharedFile to update.
title (Optional[str]): Title of the SharedFile.
description (Optional[str]): Description of the SharedFile
Returns:
SharedFile on success, 404 on Sharekey not found, 403 on
unauthorized.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey for the sharedfile"
"you wish to update.")
if not (title or description):
raise Exception("You must specify a title or description.")
post_data = {}
if title:
post_data['title'] = title
if description:
post_data['description'] = description
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('POST', endpoint=endpoint, data=post_data)
return SharedFile.NewFromJSON(data)
|
jeremylow/pyshk | pyshk/api.py | Api.get_shared_file | python | def get_shared_file(self, sharekey=None):
if not sharekey:
raise Exception("You must specify a sharekey.")
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('GET', endpoint)
return SharedFile.NewFromJSON(data) | Returns a SharedFile object given by the sharekey.
Args:
sharekey (str): Sharekey of the SharedFile you want to retrieve.
Returns:
SharedFile | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L342-L356 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n",
"def NewFromJSON(data):\n \"\"\"\n Create a new SharedFile instance from a JSON dict.\n\n Args:\n data (dict): JSON dictionary representing a SharedFile.\n\n Returns:\n A SharedFile instance.\n \"\"\"\n return SharedFile(\n sharekey=data.get('sharekey', None),\n name=data.get('name', None),\n user=User.NewFromJSON(data.get('user', None)),\n title=data.get('title', None),\n description=data.get('description', None),\n posted_at=data.get('posted_at', None),\n permalink=data.get('permalink', None),\n width=data.get('width', None),\n height=data.get('height', None),\n views=data.get('views', 0),\n likes=data.get('likes', 0),\n saves=data.get('saves', 0),\n comments=data.get('comments', None),\n nsfw=data.get('nsfw', False),\n image_url=data.get('image_url', None),\n source_url=data.get('source_url', None),\n saved=data.get('saved', False),\n liked=data.get('liked', False),\n )\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user(self, user_id=None, user_name=None):
""" Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object.
"""
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data
def get_user_shakes(self):
""" Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects.
"""
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes
def get_shared_files_from_shake(self,
shake_id=None,
before=None,
after=None):
"""
Returns a list of SharedFile objects from a particular shake.
Args:
shake_id (int): Shake from which to get a list of SharedFiles
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the given Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the given Shake.
Returns:
List (list) of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/shakes'
if shake_id:
endpoint += '/{0}'.format(shake_id)
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request(verb="GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(f) for f in data['sharedfiles']]
def like_shared_file(self, sharekey=None):
""" 'Like' a SharedFile. mlkshk doesn't allow you to unlike a
sharedfile, so this is ~~permanent~~.
Args:
sharekey (str): Sharekey for the file you want to 'like'.
Returns:
Either a SharedFile on success, or an exception on error.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.liked = True
return sf
except:
raise Exception("{0}".format(data['error']))
def save_shared_file(self, sharekey=None):
"""
Save a SharedFile to your Shake.
Args:
sharekey (str): Sharekey for the file to save.
Returns:
SharedFile saved to your shake.
"""
endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.saved = True
return sf
except:
raise Exception("{0}".format(data['error']))
def get_friends_shake(self, before=None, after=None):
"""
Contrary to the endpoint naming, this resource is for a list of
SharedFiles from your friends on mlkshk.
Returns:
List of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/friends'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['friend_shake']]
def get_incoming_shake(self, before=None, after=None):
"""
Returns a list of the most recent SharedFiles on mlkshk.com
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the Incoming Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the Incoming Shake.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/incoming'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['incoming']]
def get_magic_shake(self, before=None, after=None):
"""
From the API:
Returns the 10 most recent files accepted by the 'magic' file selection
algorithm. Currently any files with 10 or more likes are magic.
Returns:
List of SharedFile objects
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/magicfiles'
if before:
endpoint += '/before/{key}'.format(key=before)
elif after:
endpoint += '/after/{key}'.format(key=after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['magicfiles']]
def get_comments(self, sharekey=None):
"""
Retrieve comments on a SharedFile
Args:
sharekey (str): Sharekey for the file from which you want to return
the set of comments.
Returns:
List of Comment objects.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
data = self._make_request("GET", endpoint=endpoint)
return [Comment.NewFromJSON(c) for c in data['comments']]
def post_comment(self, sharekey=None, comment=None):
"""
Post a comment on behalf of the current user to the
SharedFile with the given sharekey.
Args:
sharekey (str): Sharekey of the SharedFile to which you'd like
to post a comment.
comment (str): Text of the comment to post.
Returns:
Comment object.
"""
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
post_data = {'body': comment}
data = self._make_request("POST", endpoint=endpoint, data=post_data)
return Comment.NewFromJSON(data)
def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
""" Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key.
"""
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data
def update_shared_file(self,
sharekey=None,
title=None,
description=None):
"""
Update the editable details (just the title and description) of a
SharedFile.
Args:
sharekey (str): Sharekey of the SharedFile to update.
title (Optional[str]): Title of the SharedFile.
description (Optional[str]): Description of the SharedFile
Returns:
SharedFile on success, 404 on Sharekey not found, 403 on
unauthorized.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey for the sharedfile"
"you wish to update.")
if not (title or description):
raise Exception("You must specify a title or description.")
post_data = {}
if title:
post_data['title'] = title
if description:
post_data['description'] = description
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('POST', endpoint=endpoint, data=post_data)
return SharedFile.NewFromJSON(data)
|
jeremylow/pyshk | pyshk/api.py | Api.like_shared_file | python | def like_shared_file(self, sharekey=None):
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.liked = True
return sf
except:
raise Exception("{0}".format(data['error'])) | 'Like' a SharedFile. mlkshk doesn't allow you to unlike a
sharedfile, so this is ~~permanent~~.
Args:
sharekey (str): Sharekey for the file you want to 'like'.
Returns:
Either a SharedFile on success, or an exception on error. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L358-L382 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n",
"def NewFromJSON(data):\n \"\"\"\n Create a new SharedFile instance from a JSON dict.\n\n Args:\n data (dict): JSON dictionary representing a SharedFile.\n\n Returns:\n A SharedFile instance.\n \"\"\"\n return SharedFile(\n sharekey=data.get('sharekey', None),\n name=data.get('name', None),\n user=User.NewFromJSON(data.get('user', None)),\n title=data.get('title', None),\n description=data.get('description', None),\n posted_at=data.get('posted_at', None),\n permalink=data.get('permalink', None),\n width=data.get('width', None),\n height=data.get('height', None),\n views=data.get('views', 0),\n likes=data.get('likes', 0),\n saves=data.get('saves', 0),\n comments=data.get('comments', None),\n nsfw=data.get('nsfw', False),\n image_url=data.get('image_url', None),\n source_url=data.get('source_url', None),\n saved=data.get('saved', False),\n liked=data.get('liked', False),\n )\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user(self, user_id=None, user_name=None):
""" Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object.
"""
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data
def get_user_shakes(self):
""" Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects.
"""
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes
def get_shared_files_from_shake(self,
shake_id=None,
before=None,
after=None):
"""
Returns a list of SharedFile objects from a particular shake.
Args:
shake_id (int): Shake from which to get a list of SharedFiles
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the given Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the given Shake.
Returns:
List (list) of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/shakes'
if shake_id:
endpoint += '/{0}'.format(shake_id)
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request(verb="GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(f) for f in data['sharedfiles']]
def get_shared_file(self, sharekey=None):
"""
Returns a SharedFile object given by the sharekey.
Args:
sharekey (str): Sharekey of the SharedFile you want to retrieve.
Returns:
SharedFile
"""
if not sharekey:
raise Exception("You must specify a sharekey.")
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('GET', endpoint)
return SharedFile.NewFromJSON(data)
def save_shared_file(self, sharekey=None):
"""
Save a SharedFile to your Shake.
Args:
sharekey (str): Sharekey for the file to save.
Returns:
SharedFile saved to your shake.
"""
endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.saved = True
return sf
except:
raise Exception("{0}".format(data['error']))
def get_friends_shake(self, before=None, after=None):
"""
Contrary to the endpoint naming, this resource is for a list of
SharedFiles from your friends on mlkshk.
Returns:
List of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/friends'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['friend_shake']]
def get_incoming_shake(self, before=None, after=None):
"""
Returns a list of the most recent SharedFiles on mlkshk.com
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the Incoming Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the Incoming Shake.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/incoming'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['incoming']]
def get_magic_shake(self, before=None, after=None):
"""
From the API:
Returns the 10 most recent files accepted by the 'magic' file selection
algorithm. Currently any files with 10 or more likes are magic.
Returns:
List of SharedFile objects
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/magicfiles'
if before:
endpoint += '/before/{key}'.format(key=before)
elif after:
endpoint += '/after/{key}'.format(key=after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['magicfiles']]
def get_comments(self, sharekey=None):
"""
Retrieve comments on a SharedFile
Args:
sharekey (str): Sharekey for the file from which you want to return
the set of comments.
Returns:
List of Comment objects.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
data = self._make_request("GET", endpoint=endpoint)
return [Comment.NewFromJSON(c) for c in data['comments']]
def post_comment(self, sharekey=None, comment=None):
"""
Post a comment on behalf of the current user to the
SharedFile with the given sharekey.
Args:
sharekey (str): Sharekey of the SharedFile to which you'd like
to post a comment.
comment (str): Text of the comment to post.
Returns:
Comment object.
"""
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
post_data = {'body': comment}
data = self._make_request("POST", endpoint=endpoint, data=post_data)
return Comment.NewFromJSON(data)
def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
""" Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key.
"""
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data
def update_shared_file(self,
sharekey=None,
title=None,
description=None):
"""
Update the editable details (just the title and description) of a
SharedFile.
Args:
sharekey (str): Sharekey of the SharedFile to update.
title (Optional[str]): Title of the SharedFile.
description (Optional[str]): Description of the SharedFile
Returns:
SharedFile on success, 404 on Sharekey not found, 403 on
unauthorized.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey for the sharedfile"
"you wish to update.")
if not (title or description):
raise Exception("You must specify a title or description.")
post_data = {}
if title:
post_data['title'] = title
if description:
post_data['description'] = description
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('POST', endpoint=endpoint, data=post_data)
return SharedFile.NewFromJSON(data)
|
jeremylow/pyshk | pyshk/api.py | Api.save_shared_file | python | def save_shared_file(self, sharekey=None):
endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.saved = True
return sf
except:
raise Exception("{0}".format(data['error'])) | Save a SharedFile to your Shake.
Args:
sharekey (str): Sharekey for the file to save.
Returns:
SharedFile saved to your shake. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L384-L402 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n",
"def NewFromJSON(data):\n \"\"\"\n Create a new SharedFile instance from a JSON dict.\n\n Args:\n data (dict): JSON dictionary representing a SharedFile.\n\n Returns:\n A SharedFile instance.\n \"\"\"\n return SharedFile(\n sharekey=data.get('sharekey', None),\n name=data.get('name', None),\n user=User.NewFromJSON(data.get('user', None)),\n title=data.get('title', None),\n description=data.get('description', None),\n posted_at=data.get('posted_at', None),\n permalink=data.get('permalink', None),\n width=data.get('width', None),\n height=data.get('height', None),\n views=data.get('views', 0),\n likes=data.get('likes', 0),\n saves=data.get('saves', 0),\n comments=data.get('comments', None),\n nsfw=data.get('nsfw', False),\n image_url=data.get('image_url', None),\n source_url=data.get('source_url', None),\n saved=data.get('saved', False),\n liked=data.get('liked', False),\n )\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user(self, user_id=None, user_name=None):
""" Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object.
"""
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data
def get_user_shakes(self):
""" Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects.
"""
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes
def get_shared_files_from_shake(self,
shake_id=None,
before=None,
after=None):
"""
Returns a list of SharedFile objects from a particular shake.
Args:
shake_id (int): Shake from which to get a list of SharedFiles
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the given Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the given Shake.
Returns:
List (list) of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/shakes'
if shake_id:
endpoint += '/{0}'.format(shake_id)
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request(verb="GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(f) for f in data['sharedfiles']]
def get_shared_file(self, sharekey=None):
"""
Returns a SharedFile object given by the sharekey.
Args:
sharekey (str): Sharekey of the SharedFile you want to retrieve.
Returns:
SharedFile
"""
if not sharekey:
raise Exception("You must specify a sharekey.")
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('GET', endpoint)
return SharedFile.NewFromJSON(data)
def like_shared_file(self, sharekey=None):
""" 'Like' a SharedFile. mlkshk doesn't allow you to unlike a
sharedfile, so this is ~~permanent~~.
Args:
sharekey (str): Sharekey for the file you want to 'like'.
Returns:
Either a SharedFile on success, or an exception on error.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.liked = True
return sf
except:
raise Exception("{0}".format(data['error']))
def get_friends_shake(self, before=None, after=None):
"""
Contrary to the endpoint naming, this resource is for a list of
SharedFiles from your friends on mlkshk.
Returns:
List of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/friends'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['friend_shake']]
def get_incoming_shake(self, before=None, after=None):
"""
Returns a list of the most recent SharedFiles on mlkshk.com
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the Incoming Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the Incoming Shake.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/incoming'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['incoming']]
def get_magic_shake(self, before=None, after=None):
"""
From the API:
Returns the 10 most recent files accepted by the 'magic' file selection
algorithm. Currently any files with 10 or more likes are magic.
Returns:
List of SharedFile objects
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/magicfiles'
if before:
endpoint += '/before/{key}'.format(key=before)
elif after:
endpoint += '/after/{key}'.format(key=after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['magicfiles']]
def get_comments(self, sharekey=None):
"""
Retrieve comments on a SharedFile
Args:
sharekey (str): Sharekey for the file from which you want to return
the set of comments.
Returns:
List of Comment objects.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
data = self._make_request("GET", endpoint=endpoint)
return [Comment.NewFromJSON(c) for c in data['comments']]
def post_comment(self, sharekey=None, comment=None):
"""
Post a comment on behalf of the current user to the
SharedFile with the given sharekey.
Args:
sharekey (str): Sharekey of the SharedFile to which you'd like
to post a comment.
comment (str): Text of the comment to post.
Returns:
Comment object.
"""
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
post_data = {'body': comment}
data = self._make_request("POST", endpoint=endpoint, data=post_data)
return Comment.NewFromJSON(data)
def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
""" Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key.
"""
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data
def update_shared_file(self,
sharekey=None,
title=None,
description=None):
"""
Update the editable details (just the title and description) of a
SharedFile.
Args:
sharekey (str): Sharekey of the SharedFile to update.
title (Optional[str]): Title of the SharedFile.
description (Optional[str]): Description of the SharedFile
Returns:
SharedFile on success, 404 on Sharekey not found, 403 on
unauthorized.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey for the sharedfile"
"you wish to update.")
if not (title or description):
raise Exception("You must specify a title or description.")
post_data = {}
if title:
post_data['title'] = title
if description:
post_data['description'] = description
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('POST', endpoint=endpoint, data=post_data)
return SharedFile.NewFromJSON(data)
|
jeremylow/pyshk | pyshk/api.py | Api.get_friends_shake | python | def get_friends_shake(self, before=None, after=None):
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/friends'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['friend_shake']] | Contrary to the endpoint naming, this resource is for a list of
SharedFiles from your friends on mlkshk.
Returns:
List of SharedFiles. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L404-L423 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user(self, user_id=None, user_name=None):
""" Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object.
"""
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data
def get_user_shakes(self):
""" Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects.
"""
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes
def get_shared_files_from_shake(self,
shake_id=None,
before=None,
after=None):
"""
Returns a list of SharedFile objects from a particular shake.
Args:
shake_id (int): Shake from which to get a list of SharedFiles
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the given Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the given Shake.
Returns:
List (list) of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/shakes'
if shake_id:
endpoint += '/{0}'.format(shake_id)
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request(verb="GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(f) for f in data['sharedfiles']]
def get_shared_file(self, sharekey=None):
"""
Returns a SharedFile object given by the sharekey.
Args:
sharekey (str): Sharekey of the SharedFile you want to retrieve.
Returns:
SharedFile
"""
if not sharekey:
raise Exception("You must specify a sharekey.")
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('GET', endpoint)
return SharedFile.NewFromJSON(data)
def like_shared_file(self, sharekey=None):
""" 'Like' a SharedFile. mlkshk doesn't allow you to unlike a
sharedfile, so this is ~~permanent~~.
Args:
sharekey (str): Sharekey for the file you want to 'like'.
Returns:
Either a SharedFile on success, or an exception on error.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.liked = True
return sf
except:
raise Exception("{0}".format(data['error']))
def save_shared_file(self, sharekey=None):
"""
Save a SharedFile to your Shake.
Args:
sharekey (str): Sharekey for the file to save.
Returns:
SharedFile saved to your shake.
"""
endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.saved = True
return sf
except:
raise Exception("{0}".format(data['error']))
def get_incoming_shake(self, before=None, after=None):
"""
Returns a list of the most recent SharedFiles on mlkshk.com
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the Incoming Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the Incoming Shake.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/incoming'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['incoming']]
def get_magic_shake(self, before=None, after=None):
"""
From the API:
Returns the 10 most recent files accepted by the 'magic' file selection
algorithm. Currently any files with 10 or more likes are magic.
Returns:
List of SharedFile objects
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/magicfiles'
if before:
endpoint += '/before/{key}'.format(key=before)
elif after:
endpoint += '/after/{key}'.format(key=after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['magicfiles']]
def get_comments(self, sharekey=None):
"""
Retrieve comments on a SharedFile
Args:
sharekey (str): Sharekey for the file from which you want to return
the set of comments.
Returns:
List of Comment objects.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
data = self._make_request("GET", endpoint=endpoint)
return [Comment.NewFromJSON(c) for c in data['comments']]
def post_comment(self, sharekey=None, comment=None):
"""
Post a comment on behalf of the current user to the
SharedFile with the given sharekey.
Args:
sharekey (str): Sharekey of the SharedFile to which you'd like
to post a comment.
comment (str): Text of the comment to post.
Returns:
Comment object.
"""
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
post_data = {'body': comment}
data = self._make_request("POST", endpoint=endpoint, data=post_data)
return Comment.NewFromJSON(data)
def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
""" Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key.
"""
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data
def update_shared_file(self,
sharekey=None,
title=None,
description=None):
"""
Update the editable details (just the title and description) of a
SharedFile.
Args:
sharekey (str): Sharekey of the SharedFile to update.
title (Optional[str]): Title of the SharedFile.
description (Optional[str]): Description of the SharedFile
Returns:
SharedFile on success, 404 on Sharekey not found, 403 on
unauthorized.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey for the sharedfile"
"you wish to update.")
if not (title or description):
raise Exception("You must specify a title or description.")
post_data = {}
if title:
post_data['title'] = title
if description:
post_data['description'] = description
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('POST', endpoint=endpoint, data=post_data)
return SharedFile.NewFromJSON(data)
|
jeremylow/pyshk | pyshk/api.py | Api.get_magic_shake | python | def get_magic_shake(self, before=None, after=None):
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/magicfiles'
if before:
endpoint += '/before/{key}'.format(key=before)
elif after:
endpoint += '/after/{key}'.format(key=after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['magicfiles']] | From the API:
Returns the 10 most recent files accepted by the 'magic' file selection
algorithm. Currently any files with 10 or more likes are magic.
Returns:
List of SharedFile objects | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L451-L472 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user(self, user_id=None, user_name=None):
""" Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object.
"""
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data
def get_user_shakes(self):
""" Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects.
"""
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes
def get_shared_files_from_shake(self,
shake_id=None,
before=None,
after=None):
"""
Returns a list of SharedFile objects from a particular shake.
Args:
shake_id (int): Shake from which to get a list of SharedFiles
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the given Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the given Shake.
Returns:
List (list) of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/shakes'
if shake_id:
endpoint += '/{0}'.format(shake_id)
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request(verb="GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(f) for f in data['sharedfiles']]
def get_shared_file(self, sharekey=None):
"""
Returns a SharedFile object given by the sharekey.
Args:
sharekey (str): Sharekey of the SharedFile you want to retrieve.
Returns:
SharedFile
"""
if not sharekey:
raise Exception("You must specify a sharekey.")
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('GET', endpoint)
return SharedFile.NewFromJSON(data)
def like_shared_file(self, sharekey=None):
""" 'Like' a SharedFile. mlkshk doesn't allow you to unlike a
sharedfile, so this is ~~permanent~~.
Args:
sharekey (str): Sharekey for the file you want to 'like'.
Returns:
Either a SharedFile on success, or an exception on error.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.liked = True
return sf
except:
raise Exception("{0}".format(data['error']))
def save_shared_file(self, sharekey=None):
"""
Save a SharedFile to your Shake.
Args:
sharekey (str): Sharekey for the file to save.
Returns:
SharedFile saved to your shake.
"""
endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.saved = True
return sf
except:
raise Exception("{0}".format(data['error']))
def get_friends_shake(self, before=None, after=None):
"""
Contrary to the endpoint naming, this resource is for a list of
SharedFiles from your friends on mlkshk.
Returns:
List of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/friends'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['friend_shake']]
def get_incoming_shake(self, before=None, after=None):
"""
Returns a list of the most recent SharedFiles on mlkshk.com
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the Incoming Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the Incoming Shake.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/incoming'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['incoming']]
def get_comments(self, sharekey=None):
"""
Retrieve comments on a SharedFile
Args:
sharekey (str): Sharekey for the file from which you want to return
the set of comments.
Returns:
List of Comment objects.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
data = self._make_request("GET", endpoint=endpoint)
return [Comment.NewFromJSON(c) for c in data['comments']]
def post_comment(self, sharekey=None, comment=None):
"""
Post a comment on behalf of the current user to the
SharedFile with the given sharekey.
Args:
sharekey (str): Sharekey of the SharedFile to which you'd like
to post a comment.
comment (str): Text of the comment to post.
Returns:
Comment object.
"""
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
post_data = {'body': comment}
data = self._make_request("POST", endpoint=endpoint, data=post_data)
return Comment.NewFromJSON(data)
def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
""" Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key.
"""
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data
def update_shared_file(self,
sharekey=None,
title=None,
description=None):
"""
Update the editable details (just the title and description) of a
SharedFile.
Args:
sharekey (str): Sharekey of the SharedFile to update.
title (Optional[str]): Title of the SharedFile.
description (Optional[str]): Description of the SharedFile
Returns:
SharedFile on success, 404 on Sharekey not found, 403 on
unauthorized.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey for the sharedfile"
"you wish to update.")
if not (title or description):
raise Exception("You must specify a title or description.")
post_data = {}
if title:
post_data['title'] = title
if description:
post_data['description'] = description
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('POST', endpoint=endpoint, data=post_data)
return SharedFile.NewFromJSON(data)
|
jeremylow/pyshk | pyshk/api.py | Api.get_comments | python | def get_comments(self, sharekey=None):
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
data = self._make_request("GET", endpoint=endpoint)
return [Comment.NewFromJSON(c) for c in data['comments']] | Retrieve comments on a SharedFile
Args:
sharekey (str): Sharekey for the file from which you want to return
the set of comments.
Returns:
List of Comment objects. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L474-L494 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user(self, user_id=None, user_name=None):
""" Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object.
"""
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data
def get_user_shakes(self):
""" Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects.
"""
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes
def get_shared_files_from_shake(self,
shake_id=None,
before=None,
after=None):
"""
Returns a list of SharedFile objects from a particular shake.
Args:
shake_id (int): Shake from which to get a list of SharedFiles
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the given Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the given Shake.
Returns:
List (list) of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/shakes'
if shake_id:
endpoint += '/{0}'.format(shake_id)
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request(verb="GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(f) for f in data['sharedfiles']]
def get_shared_file(self, sharekey=None):
"""
Returns a SharedFile object given by the sharekey.
Args:
sharekey (str): Sharekey of the SharedFile you want to retrieve.
Returns:
SharedFile
"""
if not sharekey:
raise Exception("You must specify a sharekey.")
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('GET', endpoint)
return SharedFile.NewFromJSON(data)
def like_shared_file(self, sharekey=None):
""" 'Like' a SharedFile. mlkshk doesn't allow you to unlike a
sharedfile, so this is ~~permanent~~.
Args:
sharekey (str): Sharekey for the file you want to 'like'.
Returns:
Either a SharedFile on success, or an exception on error.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.liked = True
return sf
except:
raise Exception("{0}".format(data['error']))
def save_shared_file(self, sharekey=None):
"""
Save a SharedFile to your Shake.
Args:
sharekey (str): Sharekey for the file to save.
Returns:
SharedFile saved to your shake.
"""
endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.saved = True
return sf
except:
raise Exception("{0}".format(data['error']))
def get_friends_shake(self, before=None, after=None):
"""
Contrary to the endpoint naming, this resource is for a list of
SharedFiles from your friends on mlkshk.
Returns:
List of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/friends'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['friend_shake']]
def get_incoming_shake(self, before=None, after=None):
"""
Returns a list of the most recent SharedFiles on mlkshk.com
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the Incoming Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the Incoming Shake.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/incoming'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['incoming']]
def get_magic_shake(self, before=None, after=None):
"""
From the API:
Returns the 10 most recent files accepted by the 'magic' file selection
algorithm. Currently any files with 10 or more likes are magic.
Returns:
List of SharedFile objects
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/magicfiles'
if before:
endpoint += '/before/{key}'.format(key=before)
elif after:
endpoint += '/after/{key}'.format(key=after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['magicfiles']]
def post_comment(self, sharekey=None, comment=None):
"""
Post a comment on behalf of the current user to the
SharedFile with the given sharekey.
Args:
sharekey (str): Sharekey of the SharedFile to which you'd like
to post a comment.
comment (str): Text of the comment to post.
Returns:
Comment object.
"""
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
post_data = {'body': comment}
data = self._make_request("POST", endpoint=endpoint, data=post_data)
return Comment.NewFromJSON(data)
def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
""" Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key.
"""
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data
def update_shared_file(self,
sharekey=None,
title=None,
description=None):
"""
Update the editable details (just the title and description) of a
SharedFile.
Args:
sharekey (str): Sharekey of the SharedFile to update.
title (Optional[str]): Title of the SharedFile.
description (Optional[str]): Description of the SharedFile
Returns:
SharedFile on success, 404 on Sharekey not found, 403 on
unauthorized.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey for the sharedfile"
"you wish to update.")
if not (title or description):
raise Exception("You must specify a title or description.")
post_data = {}
if title:
post_data['title'] = title
if description:
post_data['description'] = description
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('POST', endpoint=endpoint, data=post_data)
return SharedFile.NewFromJSON(data)
|
jeremylow/pyshk | pyshk/api.py | Api.post_comment | python | def post_comment(self, sharekey=None, comment=None):
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
post_data = {'body': comment}
data = self._make_request("POST", endpoint=endpoint, data=post_data)
return Comment.NewFromJSON(data) | Post a comment on behalf of the current user to the
SharedFile with the given sharekey.
Args:
sharekey (str): Sharekey of the SharedFile to which you'd like
to post a comment.
comment (str): Text of the comment to post.
Returns:
Comment object. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L496-L514 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n",
"def NewFromJSON(data):\n \"\"\"\n Create a new Comment instance from a JSON dict.\n\n Args:\n data (dict): JSON dictionary representing a Comment.\n\n Returns:\n A Comment instance.\n \"\"\"\n return Comment(\n body=data.get('body', None),\n posted_at=data.get('posted_at', None),\n user=User.NewFromJSON(data.get('user', None))\n )\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user(self, user_id=None, user_name=None):
""" Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object.
"""
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data
def get_user_shakes(self):
""" Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects.
"""
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes
def get_shared_files_from_shake(self,
shake_id=None,
before=None,
after=None):
"""
Returns a list of SharedFile objects from a particular shake.
Args:
shake_id (int): Shake from which to get a list of SharedFiles
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the given Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the given Shake.
Returns:
List (list) of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/shakes'
if shake_id:
endpoint += '/{0}'.format(shake_id)
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request(verb="GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(f) for f in data['sharedfiles']]
def get_shared_file(self, sharekey=None):
"""
Returns a SharedFile object given by the sharekey.
Args:
sharekey (str): Sharekey of the SharedFile you want to retrieve.
Returns:
SharedFile
"""
if not sharekey:
raise Exception("You must specify a sharekey.")
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('GET', endpoint)
return SharedFile.NewFromJSON(data)
def like_shared_file(self, sharekey=None):
""" 'Like' a SharedFile. mlkshk doesn't allow you to unlike a
sharedfile, so this is ~~permanent~~.
Args:
sharekey (str): Sharekey for the file you want to 'like'.
Returns:
Either a SharedFile on success, or an exception on error.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.liked = True
return sf
except:
raise Exception("{0}".format(data['error']))
def save_shared_file(self, sharekey=None):
"""
Save a SharedFile to your Shake.
Args:
sharekey (str): Sharekey for the file to save.
Returns:
SharedFile saved to your shake.
"""
endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.saved = True
return sf
except:
raise Exception("{0}".format(data['error']))
def get_friends_shake(self, before=None, after=None):
"""
Contrary to the endpoint naming, this resource is for a list of
SharedFiles from your friends on mlkshk.
Returns:
List of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/friends'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['friend_shake']]
def get_incoming_shake(self, before=None, after=None):
"""
Returns a list of the most recent SharedFiles on mlkshk.com
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the Incoming Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the Incoming Shake.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/incoming'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['incoming']]
def get_magic_shake(self, before=None, after=None):
"""
From the API:
Returns the 10 most recent files accepted by the 'magic' file selection
algorithm. Currently any files with 10 or more likes are magic.
Returns:
List of SharedFile objects
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/magicfiles'
if before:
endpoint += '/before/{key}'.format(key=before)
elif after:
endpoint += '/after/{key}'.format(key=after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['magicfiles']]
def get_comments(self, sharekey=None):
"""
Retrieve comments on a SharedFile
Args:
sharekey (str): Sharekey for the file from which you want to return
the set of comments.
Returns:
List of Comment objects.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
data = self._make_request("GET", endpoint=endpoint)
return [Comment.NewFromJSON(c) for c in data['comments']]
def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
""" Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key.
"""
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data
def update_shared_file(self,
sharekey=None,
title=None,
description=None):
"""
Update the editable details (just the title and description) of a
SharedFile.
Args:
sharekey (str): Sharekey of the SharedFile to update.
title (Optional[str]): Title of the SharedFile.
description (Optional[str]): Description of the SharedFile
Returns:
SharedFile on success, 404 on Sharekey not found, 403 on
unauthorized.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey for the sharedfile"
"you wish to update.")
if not (title or description):
raise Exception("You must specify a title or description.")
post_data = {}
if title:
post_data['title'] = title
if description:
post_data['description'] = description
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('POST', endpoint=endpoint, data=post_data)
return SharedFile.NewFromJSON(data)
|
jeremylow/pyshk | pyshk/api.py | Api.post_shared_file | python | def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data | Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L516-L557 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n",
"def _get_image_type(image):\n if imghdr.what(image) == 'jpeg':\n return 'image/jpeg'\n elif imghdr.what(image) == 'gif':\n return 'image/gif'\n elif imghdr.what(image) == 'png':\n return 'image/png'\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user(self, user_id=None, user_name=None):
""" Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object.
"""
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data
def get_user_shakes(self):
""" Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects.
"""
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes
def get_shared_files_from_shake(self,
                                shake_id=None,
                                before=None,
                                after=None):
    """Return SharedFile objects from a particular shake.

    Args:
        shake_id (int): shake to read from; when omitted the bare
            '/api/shakes' endpoint is used.
        before (str): return the 10 files preceding (and not
            including) this sharekey in the shake.
        after (str): return the 10 files following (and not
            including) this sharekey in the shake.

    Returns:
        A list of SharedFile objects.

    Raises:
        Exception: if both ``before`` and ``after`` are given.
    """
    if before and after:
        raise Exception("You cannot specify both before and after keys")
    # Assemble the endpoint from its optional path segments.
    parts = ['/api/shakes']
    if shake_id:
        parts.append('/{0}'.format(shake_id))
    if before:
        parts.append('/before/{0}'.format(before))
    elif after:
        parts.append('/after/{0}'.format(after))
    response = self._make_request(verb="GET", endpoint=''.join(parts))
    return [SharedFile.NewFromJSON(item) for item in response['sharedfiles']]
def get_shared_file(self, sharekey=None):
    """Return the SharedFile identified by ``sharekey``.

    Args:
        sharekey (str): sharekey of the SharedFile to retrieve.

    Returns:
        A SharedFile object.

    Raises:
        Exception: if no sharekey is supplied.
    """
    if not sharekey:
        raise Exception("You must specify a sharekey.")
    response = self._make_request(
        'GET', '/api/sharedfile/{0}'.format(sharekey))
    return SharedFile.NewFromJSON(response)
def like_shared_file(self, sharekey=None):
    """'Like' a SharedFile. mlkshk doesn't allow you to unlike a
    sharedfile, so this is ~~permanent~~.

    Args:
        sharekey (str): Sharekey for the file you want to 'like'.

    Returns:
        A SharedFile on success.

    Raises:
        Exception: if no sharekey is given, or the API reports an error.
    """
    if not sharekey:
        # Fix: the original implicit literal concatenation produced
        # "...file youwant to 'like'." (missing space).
        raise Exception(
            "You must specify a sharekey of the file you "
            "want to 'like'.")
    endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
    data = self._make_request("POST", endpoint=endpoint, data=None)
    try:
        sf = SharedFile.NewFromJSON(data)
        sf.liked = True
        return sf
    except:
        # Parsing failed: surface the API's error payload instead.
        raise Exception("{0}".format(data['error']))
def save_shared_file(self, sharekey=None):
    """
    Save a SharedFile to your Shake.

    Args:
        sharekey (str): Sharekey for the file to save.

    Returns:
        SharedFile saved to your shake.

    Raises:
        Exception: if no sharekey is given, or the API reports an error.
    """
    if not sharekey:
        # Guard added for consistency with like_shared_file(); the
        # original would have POSTed to '/api/sharedfile/None/save'.
        raise Exception(
            "You must specify a sharekey for the file you "
            "want to save.")
    endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
    data = self._make_request("POST", endpoint=endpoint, data=None)
    try:
        sf = SharedFile.NewFromJSON(data)
        sf.saved = True
        return sf
    except:
        # Parsing failed: surface the API's error payload instead.
        raise Exception("{0}".format(data['error']))
def get_friends_shake(self, before=None, after=None):
    """Return SharedFiles posted by your friends on mlkshk.

    (Despite the endpoint naming, this is a list of files, not shakes.)

    Args:
        before (str): return the 10 files preceding (and not
            including) this sharekey.
        after (str): return the 10 files following (and not
            including) this sharekey.

    Returns:
        A list of SharedFile objects.

    Raises:
        Exception: if both ``before`` and ``after`` are given.
    """
    if before and after:
        raise Exception("You cannot specify both before and after keys")
    # At most one pagination cursor may be appended to the endpoint.
    suffix = ''
    if before:
        suffix = '/before/{0}'.format(before)
    elif after:
        suffix = '/after/{0}'.format(after)
    response = self._make_request("GET", endpoint='/api/friends' + suffix)
    return [SharedFile.NewFromJSON(item) for item in response['friend_shake']]
def get_incoming_shake(self, before=None, after=None):
    """Return the most recent SharedFiles on mlkshk.com (the
    "incoming" shake).

    Args:
        before (str): return the 10 files preceding (and not
            including) this sharekey in the incoming shake.
        after (str): return the 10 files following (and not
            including) this sharekey in the incoming shake.

    Returns:
        A list of SharedFile objects.

    Raises:
        Exception: if both ``before`` and ``after`` are given.
    """
    if before and after:
        raise Exception("You cannot specify both before and after keys")
    # At most one pagination cursor may be appended to the endpoint.
    suffix = ''
    if before:
        suffix = '/before/{0}'.format(before)
    elif after:
        suffix = '/after/{0}'.format(after)
    response = self._make_request("GET", endpoint='/api/incoming' + suffix)
    return [SharedFile.NewFromJSON(item) for item in response['incoming']]
def get_magic_shake(self, before=None, after=None):
    """Return the 10 most recent files accepted by the 'magic' file
    selection algorithm (currently: any file with 10 or more likes).

    Args:
        before (str): return the 10 files preceding (and not
            including) this sharekey.
        after (str): return the 10 files following (and not
            including) this sharekey.

    Returns:
        A list of SharedFile objects.

    Raises:
        Exception: if both ``before`` and ``after`` are given.
    """
    if before and after:
        raise Exception("You cannot specify both before and after keys")
    # At most one pagination cursor may be appended to the endpoint.
    suffix = ''
    if before:
        suffix = '/before/{key}'.format(key=before)
    elif after:
        suffix = '/after/{key}'.format(key=after)
    response = self._make_request("GET", endpoint='/api/magicfiles' + suffix)
    return [SharedFile.NewFromJSON(item) for item in response['magicfiles']]
def get_comments(self, sharekey=None):
    """
    Retrieve comments on a SharedFile.

    Args:
        sharekey (str): Sharekey for the file whose comments should be
            returned.

    Returns:
        List of Comment objects.

    Raises:
        Exception: if no sharekey is given.
    """
    if not sharekey:
        # Fix: the original message was copy-pasted from
        # like_shared_file() ("want to 'like'") and the implicit
        # concatenation also dropped a space ("youwant").
        raise Exception(
            "You must specify a sharekey of the file whose comments "
            "you want to retrieve.")
    endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
    data = self._make_request("GET", endpoint=endpoint)
    return [Comment.NewFromJSON(c) for c in data['comments']]
def post_comment(self, sharekey=None, comment=None):
    """Post a comment, on behalf of the current user, to the
    SharedFile identified by ``sharekey``.

    Args:
        sharekey (str): sharekey of the SharedFile to comment on.
        comment (str): text of the comment to post.

    Returns:
        A Comment object.
    """
    response = self._make_request(
        "POST",
        endpoint='/api/sharedfile/{0}/comments'.format(sharekey),
        data={'body': comment})
    return Comment.NewFromJSON(response)
def update_shared_file(self,
                       sharekey=None,
                       title=None,
                       description=None):
    """
    Update the editable details (just the title and description) of a
    SharedFile.

    Args:
        sharekey (str): Sharekey of the SharedFile to update.
        title (Optional[str]): Title of the SharedFile.
        description (Optional[str]): Description of the SharedFile.

    Returns:
        SharedFile on success, 404 on Sharekey not found, 403 on
        unauthorized.

    Raises:
        Exception: if no sharekey, or neither title nor description,
            is given.
    """
    if not sharekey:
        # Fix: the original implicit literal concatenation produced
        # "...sharedfileyou wish to update." (missing space).
        raise Exception(
            "You must specify a sharekey for the sharedfile "
            "you wish to update.")
    if not (title or description):
        raise Exception("You must specify a title or description.")
    post_data = {}
    if title:
        post_data['title'] = title
    if description:
        post_data['description'] = description
    endpoint = '/api/sharedfile/{0}'.format(sharekey)
    data = self._make_request('POST', endpoint=endpoint, data=post_data)
    return SharedFile.NewFromJSON(data)
|
jeremylow/pyshk | pyshk/api.py | Api.update_shared_file | python | def update_shared_file(self,
sharekey=None,
title=None,
description=None):
if not sharekey:
raise Exception(
"You must specify a sharekey for the sharedfile"
"you wish to update.")
if not (title or description):
raise Exception("You must specify a title or description.")
post_data = {}
if title:
post_data['title'] = title
if description:
post_data['description'] = description
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('POST', endpoint=endpoint, data=post_data)
return SharedFile.NewFromJSON(data) | Update the editable details (just the title and description) of a
SharedFile.
Args:
sharekey (str): Sharekey of the SharedFile to update.
title (Optional[str]): Title of the SharedFile.
description (Optional[str]): Description of the SharedFile
Returns:
SharedFile on success, 404 on Sharekey not found, 403 on
unauthorized. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/api.py#L559-L595 | [
"def _make_request(self, verb, endpoint=None, data=None, files=None):\n if not self.authenticated:\n raise ApiInstanceUnauthorized\n\n resource_url = self._get_url_endpoint(endpoint)\n\n timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))\n nonce = self.get_nonce()\n\n authorization_header = self._make_headers(\n verb=verb,\n endpoint=endpoint,\n nonce=nonce,\n timestamp=timestamp)\n\n if verb == \"GET\":\n req = requests.get(\n resource_url,\n headers={'Authorization': authorization_header},\n verify=False)\n elif verb == \"POST\":\n if data:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n data=data)\n elif files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files)\n elif data and files:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header},\n files=files,\n data=data)\n else:\n req = requests.post(\n resource_url,\n headers={'Authorization': authorization_header})\n\n if req.status_code == 401:\n raise ApiResponseUnauthorized(req)\n elif req.status_code == 404:\n raise NotFound404(req)\n elif req.status_code == 500:\n raise Exception(req)\n\n if self.testing:\n return req\n\n try:\n return req.json()\n except:\n print('returning req', req._content)\n return req\n",
"def NewFromJSON(data):\n \"\"\"\n Create a new SharedFile instance from a JSON dict.\n\n Args:\n data (dict): JSON dictionary representing a SharedFile.\n\n Returns:\n A SharedFile instance.\n \"\"\"\n return SharedFile(\n sharekey=data.get('sharekey', None),\n name=data.get('name', None),\n user=User.NewFromJSON(data.get('user', None)),\n title=data.get('title', None),\n description=data.get('description', None),\n posted_at=data.get('posted_at', None),\n permalink=data.get('permalink', None),\n width=data.get('width', None),\n height=data.get('height', None),\n views=data.get('views', 0),\n likes=data.get('likes', 0),\n saves=data.get('saves', 0),\n comments=data.get('comments', None),\n nsfw=data.get('nsfw', False),\n image_url=data.get('image_url', None),\n source_url=data.get('source_url', None),\n saved=data.get('saved', False),\n liked=data.get('liked', False),\n )\n"
] | class Api(object):
def __init__(self,
consumer_key=None,
consumer_secret=None,
access_token_key=None,
access_token_secret=None,
base_url=None,
testing=False):
if base_url is None:
self.base_url = 'http://mlkshk.com'
else:
self.base_url = base_url
self.port = 80
self.authenticated = False
self.testing = False
if testing:
self.testing = True
self.consumer_key = consumer_key
self.consumer_secret = consumer_secret
self.access_token_key = access_token_key
self.access_token_secret = access_token_secret
auth_list = [consumer_key, consumer_secret,
access_token_key, access_token_secret]
if all(auth_list):
self.authenticated = True
# Set headers, client info for requests.
# default_headers = {'User-Agent': 'PyShk v0.0.1'}
# self.client_args = {}
# self.client_args['headers'] = default_headers
# Set up auth - TODO:
# self.auth = None
# if self.access_token_key:
# token = {
# 'token_type': 'mac',
# 'hash_algorithm': 'hmac-sha-1',
# 'access_token': self.access_token_key
# }
# self.auth = OAuth2(self.consumer_key, token=token)
# self.client = requests.Session()
# self.client.auth = self.auth
def get_auth(self, redirect_uri=None):
if not redirect_uri:
redirect_uri = "http://localhost:8000"
authentication_url = (
"https://mlkshk.com/api/authorize"
"?response_type=code&client_id={key}&redirect_uri={uri}").format(
key=self.consumer_key,
uri=redirect_uri)
access_token_url = 'https://mlkshk.com/api/token'
if not self.testing:
webbrowser.open(authentication_url, new=1)
authorization_code = input("Enter the code from the redirected URL: ")
else:
authorization_code = 123456
message = {
'grant_type': "authorization_code",
'code': authorization_code,
'redirect_uri': redirect_uri,
'client_id': self.consumer_key,
'client_secret': self.consumer_secret}
data = urlencode(message)
req = requests.post(access_token_url, params=data, verify=False)
json_resp = req.json()
print("""
{full_token}
>>> Your access token is: {token}
>>> Your access secret is: {secret}
""".format(full_token=json_resp,
token=json_resp['access_token'],
secret=json_resp['secret']))
self.access_token_key = json_resp['access_token']
self.access_token_secret = json_resp['secret']
def _get_url_endpoint(self, endpoint):
return self.base_url + endpoint
def _make_headers(self,
verb=None,
endpoint=None,
nonce=None,
timestamp=None):
normalized_string = "{0}\n".format(self.access_token_key)
normalized_string += "{0}\n".format(timestamp)
normalized_string += "{0}\n".format(nonce)
normalized_string += "{0}\n".format(verb)
normalized_string += "mlkshk.com\n"
normalized_string += "80\n"
normalized_string += "{0}\n".format(endpoint)
digest = hmac.new(
self.access_token_secret.encode('ascii'),
normalized_string.encode('ascii'),
sha1).digest()
if six.PY2:
signature = base64.encodestring(digest).strip().decode('utf8')
else:
signature = base64.encodebytes(digest).strip().decode('utf8')
auth_str = (
'MAC token="{0}", '
'timestamp="{1}", '
'nonce="{2}", '
'signature="{3}"').format(
self.access_token_key,
str(timestamp),
nonce,
signature)
return auth_str
def _make_request(self, verb, endpoint=None, data=None, files=None):
if not self.authenticated:
raise ApiInstanceUnauthorized
resource_url = self._get_url_endpoint(endpoint)
timestamp = int(time.mktime(datetime.datetime.utcnow().timetuple()))
nonce = self.get_nonce()
authorization_header = self._make_headers(
verb=verb,
endpoint=endpoint,
nonce=nonce,
timestamp=timestamp)
if verb == "GET":
req = requests.get(
resource_url,
headers={'Authorization': authorization_header},
verify=False)
elif verb == "POST":
if data:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
data=data)
elif files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files)
elif data and files:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header},
files=files,
data=data)
else:
req = requests.post(
resource_url,
headers={'Authorization': authorization_header})
if req.status_code == 401:
raise ApiResponseUnauthorized(req)
elif req.status_code == 404:
raise NotFound404(req)
elif req.status_code == 500:
raise Exception(req)
if self.testing:
return req
try:
return req.json()
except:
print('returning req', req._content)
return req
@staticmethod
def get_nonce():
nonce = md5(
str(random.SystemRandom().randint(0, 100000000)).encode('utf8')
).hexdigest()
return nonce
@staticmethod
def _get_image_type(image):
if imghdr.what(image) == 'jpeg':
return 'image/jpeg'
elif imghdr.what(image) == 'gif':
return 'image/gif'
elif imghdr.what(image) == 'png':
return 'image/png'
def get_favorites(self, before=None, after=None):
"""
Get a list of the authenticated user's 10 most recent favorites
(likes).
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the authenticated user's
set of Likes.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the authenticated user's set
of Likes.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/favorites'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['favorites']]
def get_user(self, user_id=None, user_name=None):
""" Get a user object from the API. If no ``user_id`` or ``user_name``
is specified, it will return the User object for the currently
authenticated user.
Args:
user_id (int): User ID of the user for whom you want to get
information. [Optional]
user_name(str): Username for the user for whom you want to get
information. [Optional]
Returns:
A User object.
"""
if user_id:
endpoint = '/api/user_id/{0}'.format(user_id)
elif user_name:
endpoint = '/api/user_name/{0}'.format(user_name)
else:
# Return currently authorized user
endpoint = '/api/user'
data = self._make_request(verb="GET", endpoint=endpoint)
try:
return User.NewFromJSON(data)
except:
return data
def get_user_shakes(self):
""" Get a list of Shake objects for the currently authenticated user.
Returns:
A list of Shake objects.
"""
endpoint = '/api/shakes'
data = self._make_request(verb="GET", endpoint=endpoint)
shakes = [Shake.NewFromJSON(shk) for shk in data['shakes']]
return shakes
def get_shared_files_from_shake(self,
shake_id=None,
before=None,
after=None):
"""
Returns a list of SharedFile objects from a particular shake.
Args:
shake_id (int): Shake from which to get a list of SharedFiles
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the given Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the given Shake.
Returns:
List (list) of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/shakes'
if shake_id:
endpoint += '/{0}'.format(shake_id)
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request(verb="GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(f) for f in data['sharedfiles']]
def get_shared_file(self, sharekey=None):
"""
Returns a SharedFile object given by the sharekey.
Args:
sharekey (str): Sharekey of the SharedFile you want to retrieve.
Returns:
SharedFile
"""
if not sharekey:
raise Exception("You must specify a sharekey.")
endpoint = '/api/sharedfile/{0}'.format(sharekey)
data = self._make_request('GET', endpoint)
return SharedFile.NewFromJSON(data)
def like_shared_file(self, sharekey=None):
""" 'Like' a SharedFile. mlkshk doesn't allow you to unlike a
sharedfile, so this is ~~permanent~~.
Args:
sharekey (str): Sharekey for the file you want to 'like'.
Returns:
Either a SharedFile on success, or an exception on error.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{sharekey}/like'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.liked = True
return sf
except:
raise Exception("{0}".format(data['error']))
def save_shared_file(self, sharekey=None):
"""
Save a SharedFile to your Shake.
Args:
sharekey (str): Sharekey for the file to save.
Returns:
SharedFile saved to your shake.
"""
endpoint = '/api/sharedfile/{sharekey}/save'.format(sharekey=sharekey)
data = self._make_request("POST", endpoint=endpoint, data=None)
try:
sf = SharedFile.NewFromJSON(data)
sf.saved = True
return sf
except:
raise Exception("{0}".format(data['error']))
def get_friends_shake(self, before=None, after=None):
"""
Contrary to the endpoint naming, this resource is for a list of
SharedFiles from your friends on mlkshk.
Returns:
List of SharedFiles.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/friends'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['friend_shake']]
def get_incoming_shake(self, before=None, after=None):
"""
Returns a list of the most recent SharedFiles on mlkshk.com
Args:
before (str): get 10 SharedFile objects before (but not including)
the SharedFile given by `before` for the Incoming Shake.
after (str): get 10 SharedFile objects after (but not including)
the SharedFile give by `after' for the Incoming Shake.
Returns:
List of SharedFile objects.
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/incoming'
if before:
endpoint += '/before/{0}'.format(before)
elif after:
endpoint += '/after/{0}'.format(after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['incoming']]
def get_magic_shake(self, before=None, after=None):
"""
From the API:
Returns the 10 most recent files accepted by the 'magic' file selection
algorithm. Currently any files with 10 or more likes are magic.
Returns:
List of SharedFile objects
"""
if before and after:
raise Exception("You cannot specify both before and after keys")
endpoint = '/api/magicfiles'
if before:
endpoint += '/before/{key}'.format(key=before)
elif after:
endpoint += '/after/{key}'.format(key=after)
data = self._make_request("GET", endpoint=endpoint)
return [SharedFile.NewFromJSON(sf) for sf in data['magicfiles']]
def get_comments(self, sharekey=None):
"""
Retrieve comments on a SharedFile
Args:
sharekey (str): Sharekey for the file from which you want to return
the set of comments.
Returns:
List of Comment objects.
"""
if not sharekey:
raise Exception(
"You must specify a sharekey of the file you"
"want to 'like'.")
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
data = self._make_request("GET", endpoint=endpoint)
return [Comment.NewFromJSON(c) for c in data['comments']]
def post_comment(self, sharekey=None, comment=None):
"""
Post a comment on behalf of the current user to the
SharedFile with the given sharekey.
Args:
sharekey (str): Sharekey of the SharedFile to which you'd like
to post a comment.
comment (str): Text of the comment to post.
Returns:
Comment object.
"""
endpoint = '/api/sharedfile/{0}/comments'.format(sharekey)
post_data = {'body': comment}
data = self._make_request("POST", endpoint=endpoint, data=post_data)
return Comment.NewFromJSON(data)
def post_shared_file(self,
image_file=None,
source_link=None,
shake_id=None,
title=None,
description=None):
""" Upload an image.
TODO:
Don't have a pro account to test (or even write) code to upload a
shared filed to a particular shake.
Args:
image_file (str): path to an image (jpg/gif) on your computer.
source_link (str): URL of a source (youtube/vine/etc.)
shake_id (int): shake to which to upload the file or
source_link [optional]
title (str): title of the SharedFile [optional]
description (str): description of the SharedFile
Returns:
SharedFile key.
"""
if image_file and source_link:
raise Exception('You can only specify an image file or '
'a source link, not both.')
if not image_file and not source_link:
raise Exception('You must specify an image file or a source link')
content_type = self._get_image_type(image_file)
if not title:
title = os.path.basename(image_file)
f = open(image_file, 'rb')
endpoint = '/api/upload'
files = {'file': (title, f, content_type)}
data = self._make_request('POST', endpoint=endpoint, files=files)
f.close()
return data
|
jeremylow/pyshk | pyshk/models.py | User.AsDict | python | def AsDict(self, dt=True):
data = {}
if self.name:
data['name'] = self.name
data['mlkshk_url'] = self.mlkshk_url
if self.profile_image_url:
data['profile_image_url'] = self.profile_image_url
if self.id:
data['id'] = self.id
if self.about:
data['about'] = self.about
if self.website:
data['website'] = self.website
if self.shakes:
data['shakes'] = [shk.AsDict(dt=dt) for shk in self.shakes]
data['shake_count'] = self.shake_count
return data | A dict representation of this User instance.
The return value uses the same key names as the JSON representation.
Args:
dt (bool): If True, return dates as python datetime objects. If
False, return dates as ISO strings.
Return:
A dict representing this User instance | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/models.py#L51-L79 | null | class User(object):
"""
A class representing a MLKSHK user.
Exposes the following properties of a user:
user.id
user.name
user.profile_image_url
user.about
user.website
user.shakes
"""
def __init__(self, **kwargs):
param_defaults = {
'id': None,
'name': None,
'profile_image_url': None,
'about': None,
'website': None,
'shakes': None}
for (param, default) in param_defaults.items():
setattr(self, param, kwargs.get(param, default))
@property
def mlkshk_url(self):
return "https://mlkshk.com/user/{0}".format(self.name)
@property
def shake_count(self):
if self.shakes:
return len(self.shakes)
else:
return 0
def AsJsonString(self):
"""A JSON string representation of this User instance.
Returns:
A JSON string representation of this User instance
"""
return json.dumps(self.AsDict(dt=False), sort_keys=True)
@staticmethod
def NewFromJSON(data):
"""
Create a new User instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a user.
Returns:
A User instance.
"""
if data.get('shakes', None):
shakes = [Shake.NewFromJSON(shk) for shk in data.get('shakes')]
else:
shakes = None
return User(
id=data.get('id', None),
name=data.get('name', None),
profile_image_url=data.get('profile_image_url', None),
about=data.get('about', None),
website=data.get('website', None),
shakes=shakes)
def __eq__(self, other):
"""
Compare two user objects against one another.
Args:
other (User): another User object against which to compare the
current user.
"""
try:
return other and \
self.id == other.id and \
self.name == other.name and \
self.profile_image_url == other.profile_image_url and \
self.about == other.about and \
self.website == other.website and \
self.shakes == other.shakes
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
""" String representation of this User instance. """
return self.AsJsonString()
|
jeremylow/pyshk | pyshk/models.py | User.AsJsonString | python | def AsJsonString(self):
return json.dumps(self.AsDict(dt=False), sort_keys=True) | A JSON string representation of this User instance.
Returns:
A JSON string representation of this User instance | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/models.py#L81-L87 | [
"def AsDict(self, dt=True):\n \"\"\"\n A dict representation of this User instance.\n\n The return value uses the same key names as the JSON representation.\n\n Args:\n dt (bool): If True, return dates as python datetime objects. If\n False, return dates as ISO strings.\n\n Return:\n A dict representing this User instance\n \"\"\"\n data = {}\n if self.name:\n data['name'] = self.name\n data['mlkshk_url'] = self.mlkshk_url\n if self.profile_image_url:\n data['profile_image_url'] = self.profile_image_url\n if self.id:\n data['id'] = self.id\n if self.about:\n data['about'] = self.about\n if self.website:\n data['website'] = self.website\n if self.shakes:\n data['shakes'] = [shk.AsDict(dt=dt) for shk in self.shakes]\n data['shake_count'] = self.shake_count\n return data\n"
] | class User(object):
"""
A class representing a MLKSHK user.
Exposes the following properties of a user:
user.id
user.name
user.profile_image_url
user.about
user.website
user.shakes
"""
def __init__(self, **kwargs):
param_defaults = {
'id': None,
'name': None,
'profile_image_url': None,
'about': None,
'website': None,
'shakes': None}
for (param, default) in param_defaults.items():
setattr(self, param, kwargs.get(param, default))
@property
def mlkshk_url(self):
return "https://mlkshk.com/user/{0}".format(self.name)
@property
def shake_count(self):
if self.shakes:
return len(self.shakes)
else:
return 0
def AsDict(self, dt=True):
"""
A dict representation of this User instance.
The return value uses the same key names as the JSON representation.
Args:
dt (bool): If True, return dates as python datetime objects. If
False, return dates as ISO strings.
Return:
A dict representing this User instance
"""
data = {}
if self.name:
data['name'] = self.name
data['mlkshk_url'] = self.mlkshk_url
if self.profile_image_url:
data['profile_image_url'] = self.profile_image_url
if self.id:
data['id'] = self.id
if self.about:
data['about'] = self.about
if self.website:
data['website'] = self.website
if self.shakes:
data['shakes'] = [shk.AsDict(dt=dt) for shk in self.shakes]
data['shake_count'] = self.shake_count
return data
@staticmethod
def NewFromJSON(data):
"""
Create a new User instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a user.
Returns:
A User instance.
"""
if data.get('shakes', None):
shakes = [Shake.NewFromJSON(shk) for shk in data.get('shakes')]
else:
shakes = None
return User(
id=data.get('id', None),
name=data.get('name', None),
profile_image_url=data.get('profile_image_url', None),
about=data.get('about', None),
website=data.get('website', None),
shakes=shakes)
def __eq__(self, other):
"""
Compare two user objects against one another.
Args:
other (User): another User object against which to compare the
current user.
"""
try:
return other and \
self.id == other.id and \
self.name == other.name and \
self.profile_image_url == other.profile_image_url and \
self.about == other.about and \
self.website == other.website and \
self.shakes == other.shakes
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
""" String representation of this User instance. """
return self.AsJsonString()
|
jeremylow/pyshk | pyshk/models.py | User.NewFromJSON | python | def NewFromJSON(data):
if data.get('shakes', None):
shakes = [Shake.NewFromJSON(shk) for shk in data.get('shakes')]
else:
shakes = None
return User(
id=data.get('id', None),
name=data.get('name', None),
profile_image_url=data.get('profile_image_url', None),
about=data.get('about', None),
website=data.get('website', None),
shakes=shakes) | Create a new User instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a user.
Returns:
A User instance. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/models.py#L90-L111 | null | class User(object):
"""
A class representing a MLKSHK user.
Exposes the following properties of a user:
user.id
user.name
user.profile_image_url
user.about
user.website
user.shakes
"""
def __init__(self, **kwargs):
param_defaults = {
'id': None,
'name': None,
'profile_image_url': None,
'about': None,
'website': None,
'shakes': None}
for (param, default) in param_defaults.items():
setattr(self, param, kwargs.get(param, default))
@property
def mlkshk_url(self):
return "https://mlkshk.com/user/{0}".format(self.name)
@property
def shake_count(self):
if self.shakes:
return len(self.shakes)
else:
return 0
def AsDict(self, dt=True):
"""
A dict representation of this User instance.
The return value uses the same key names as the JSON representation.
Args:
dt (bool): If True, return dates as python datetime objects. If
False, return dates as ISO strings.
Return:
A dict representing this User instance
"""
data = {}
if self.name:
data['name'] = self.name
data['mlkshk_url'] = self.mlkshk_url
if self.profile_image_url:
data['profile_image_url'] = self.profile_image_url
if self.id:
data['id'] = self.id
if self.about:
data['about'] = self.about
if self.website:
data['website'] = self.website
if self.shakes:
data['shakes'] = [shk.AsDict(dt=dt) for shk in self.shakes]
data['shake_count'] = self.shake_count
return data
def AsJsonString(self):
"""A JSON string representation of this User instance.
Returns:
A JSON string representation of this User instance
"""
return json.dumps(self.AsDict(dt=False), sort_keys=True)
@staticmethod
def __eq__(self, other):
"""
Compare two user objects against one another.
Args:
other (User): another User object against which to compare the
current user.
"""
try:
return other and \
self.id == other.id and \
self.name == other.name and \
self.profile_image_url == other.profile_image_url and \
self.about == other.about and \
self.website == other.website and \
self.shakes == other.shakes
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
""" String representation of this User instance. """
return self.AsJsonString()
|
jeremylow/pyshk | pyshk/models.py | Comment.AsDict | python | def AsDict(self, dt=True):
data = {}
if self.body:
data['body'] = self.body
if self.posted_at:
data['posted_at'] = self.posted_at
if self.user:
data['user'] = self.user.AsDict()
return data | A dict representation of this Comment instance.
The return value uses the same key names as the JSON representation.
Args:
dt (bool): If True, return dates as python datetime objects. If
False, return dates as ISO strings.
Return:
A dict representing this Comment instance | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/models.py#L159-L181 | null | class Comment(object):
"""
A class representing a Comment on mlkshk.
Exposes the following properties of a Comment:
comment.body
comment.posted_at
comment.user
"""
def __init__(self, **kwargs):
param_defaults = {
'body': None,
'posted_at': None,
'user': None}
for (param, default) in param_defaults.items():
setattr(self, param, kwargs.get(param, default))
def AsJsonString(self):
"""
A JSON string representation of this Comment instance.
Returns:
A JSON string representation of this Comment instance
"""
return json.dumps(self.AsDict(dt=False), sort_keys=True)
@staticmethod
def NewFromJSON(data):
"""
Create a new Comment instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a Comment.
Returns:
A Comment instance.
"""
return Comment(
body=data.get('body', None),
posted_at=data.get('posted_at', None),
user=User.NewFromJSON(data.get('user', None))
)
def __eq__(self, other):
try:
return other and \
self.body == other.body and \
self.posted_at == other.posted_at and \
self.user == other.user
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
""" String representation of this Comment instance. """
return self.AsJsonString()
|
jeremylow/pyshk | pyshk/models.py | Comment.NewFromJSON | python | def NewFromJSON(data):
return Comment(
body=data.get('body', None),
posted_at=data.get('posted_at', None),
user=User.NewFromJSON(data.get('user', None))
) | Create a new Comment instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a Comment.
Returns:
A Comment instance. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/models.py#L193-L207 | [
"def NewFromJSON(data):\n \"\"\"\n Create a new User instance from a JSON dict.\n\n Args:\n data (dict): JSON dictionary representing a user.\n\n Returns:\n A User instance.\n \"\"\"\n if data.get('shakes', None):\n shakes = [Shake.NewFromJSON(shk) for shk in data.get('shakes')]\n else:\n shakes = None\n\n return User(\n id=data.get('id', None),\n name=data.get('name', None),\n profile_image_url=data.get('profile_image_url', None),\n about=data.get('about', None),\n website=data.get('website', None),\n shakes=shakes)\n"
] | class Comment(object):
"""
A class representing a Comment on mlkshk.
Exposes the following properties of a Comment:
comment.body
comment.posted_at
comment.user
"""
def __init__(self, **kwargs):
param_defaults = {
'body': None,
'posted_at': None,
'user': None}
for (param, default) in param_defaults.items():
setattr(self, param, kwargs.get(param, default))
def AsDict(self, dt=True):
"""
A dict representation of this Comment instance.
The return value uses the same key names as the JSON representation.
Args:
dt (bool): If True, return dates as python datetime objects. If
False, return dates as ISO strings.
Return:
A dict representing this Comment instance
"""
data = {}
if self.body:
data['body'] = self.body
if self.posted_at:
data['posted_at'] = self.posted_at
if self.user:
data['user'] = self.user.AsDict()
return data
def AsJsonString(self):
"""
A JSON string representation of this Comment instance.
Returns:
A JSON string representation of this Comment instance
"""
return json.dumps(self.AsDict(dt=False), sort_keys=True)
@staticmethod
def __eq__(self, other):
try:
return other and \
self.body == other.body and \
self.posted_at == other.posted_at and \
self.user == other.user
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
""" String representation of this Comment instance. """
return self.AsJsonString()
|
jeremylow/pyshk | pyshk/models.py | Shake.AsDict | python | def AsDict(self, dt=True):
data = {}
if self.id:
data['id'] = self.id
if self.name:
data['name'] = self.name
if self.owner:
data['owner'] = self.owner.AsDict()
if self.url:
data['url'] = self.url
if self.thumbnail_url:
data['thumbnail_url'] = self.thumbnail_url
if self.description:
data['description'] = self.description
if self.type:
data['type'] = self.type
if dt:
if self.created_at:
data['created_at'] = self.created_at
if self.updated_at:
data['updated_at'] = self.updated_at
else:
if self.created_at:
data['created_at'] = self.created_at_iso
if self.updated_at:
data['updated_at'] = self.updated_at_iso
return data | A dict representation of this Shake instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this Shake instance | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/models.py#L283-L320 | null | class Shake(object):
"""
A class representing a Shake on mlkshk.
Exposes the following properties of a Shake:
shake.id
shake.name
shake.owner
shake.url
shake.thumbnail_url
shake.description
shake.type
shake.created_at
shake.updated_at
"""
def __init__(self, **kwargs):
param_defaults = {
'id': None,
'name': None,
'owner': None,
'url': None,
'thumbnail_url': None,
'description': None,
'type': None,
'created_at': None,
'updated_at': None}
for (param, default) in param_defaults.items():
setattr(self, param, kwargs.get(param, default))
self.created_at = kwargs.get('created_at', None)
self.updated_at = kwargs.get('updated_at', None)
@property
def created_at(self):
return self._created_at
@created_at.setter
def created_at(self, value):
self._created_at = convert_time(value)
@property
def created_at_iso(self):
return self._created_at.isoformat()
@property
def updated_at(self):
return self._updated_at
@updated_at.setter
def updated_at(self, value):
self._updated_at = convert_time(value)
@property
def updated_at_iso(self):
return self._updated_at.isoformat()
def AsJsonString(self):
"""
A JSON string representation of this Shake instance.
Returns:
A JSON string representation of this Shake instance
"""
return json.dumps(self.AsDict(dt=False), sort_keys=True)
@staticmethod
def NewFromJSON(data):
"""
Create a new Shake instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a Shake.
Returns:
A Shake instance.
"""
s = Shake(
id=data.get('id', None),
name=data.get('name', None),
url=data.get('url', None),
thumbnail_url=data.get('thumbnail_url', None),
description=data.get('description', None),
type=data.get('type', None),
created_at=data.get('created_at', None),
updated_at=data.get('updated_at', None)
)
if data.get('owner', None):
s.owner = User.NewFromJSON(data.get('owner', None))
return s
def __eq__(self, other):
try:
return other and \
self.id == other.id and \
self.name == other.name and \
self.owner == other.owner and \
self.url == other.url and \
self.thumbnail_url == other.thumbnail_url and \
self.description == other.description and \
self.type == other.type and \
self.created_at == other.created_at and \
self.updated_at == other.updated_at
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
""" String representation of this Shake instance. """
return self.AsJsonString()
|
jeremylow/pyshk | pyshk/models.py | Shake.NewFromJSON | python | def NewFromJSON(data):
s = Shake(
id=data.get('id', None),
name=data.get('name', None),
url=data.get('url', None),
thumbnail_url=data.get('thumbnail_url', None),
description=data.get('description', None),
type=data.get('type', None),
created_at=data.get('created_at', None),
updated_at=data.get('updated_at', None)
)
if data.get('owner', None):
s.owner = User.NewFromJSON(data.get('owner', None))
return s | Create a new Shake instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a Shake.
Returns:
A Shake instance. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/models.py#L332-L354 | [
"def NewFromJSON(data):\n \"\"\"\n Create a new User instance from a JSON dict.\n\n Args:\n data (dict): JSON dictionary representing a user.\n\n Returns:\n A User instance.\n \"\"\"\n if data.get('shakes', None):\n shakes = [Shake.NewFromJSON(shk) for shk in data.get('shakes')]\n else:\n shakes = None\n\n return User(\n id=data.get('id', None),\n name=data.get('name', None),\n profile_image_url=data.get('profile_image_url', None),\n about=data.get('about', None),\n website=data.get('website', None),\n shakes=shakes)\n"
] | class Shake(object):
"""
A class representing a Shake on mlkshk.
Exposes the following properties of a Shake:
shake.id
shake.name
shake.owner
shake.url
shake.thumbnail_url
shake.description
shake.type
shake.created_at
shake.updated_at
"""
def __init__(self, **kwargs):
param_defaults = {
'id': None,
'name': None,
'owner': None,
'url': None,
'thumbnail_url': None,
'description': None,
'type': None,
'created_at': None,
'updated_at': None}
for (param, default) in param_defaults.items():
setattr(self, param, kwargs.get(param, default))
self.created_at = kwargs.get('created_at', None)
self.updated_at = kwargs.get('updated_at', None)
@property
def created_at(self):
return self._created_at
@created_at.setter
def created_at(self, value):
self._created_at = convert_time(value)
@property
def created_at_iso(self):
return self._created_at.isoformat()
@property
def updated_at(self):
return self._updated_at
@updated_at.setter
def updated_at(self, value):
self._updated_at = convert_time(value)
@property
def updated_at_iso(self):
return self._updated_at.isoformat()
def AsDict(self, dt=True):
"""
A dict representation of this Shake instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this Shake instance
"""
data = {}
if self.id:
data['id'] = self.id
if self.name:
data['name'] = self.name
if self.owner:
data['owner'] = self.owner.AsDict()
if self.url:
data['url'] = self.url
if self.thumbnail_url:
data['thumbnail_url'] = self.thumbnail_url
if self.description:
data['description'] = self.description
if self.type:
data['type'] = self.type
if dt:
if self.created_at:
data['created_at'] = self.created_at
if self.updated_at:
data['updated_at'] = self.updated_at
else:
if self.created_at:
data['created_at'] = self.created_at_iso
if self.updated_at:
data['updated_at'] = self.updated_at_iso
return data
def AsJsonString(self):
"""
A JSON string representation of this Shake instance.
Returns:
A JSON string representation of this Shake instance
"""
return json.dumps(self.AsDict(dt=False), sort_keys=True)
@staticmethod
def __eq__(self, other):
try:
return other and \
self.id == other.id and \
self.name == other.name and \
self.owner == other.owner and \
self.url == other.url and \
self.thumbnail_url == other.thumbnail_url and \
self.description == other.description and \
self.type == other.type and \
self.created_at == other.created_at and \
self.updated_at == other.updated_at
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
""" String representation of this Shake instance. """
return self.AsJsonString()
|
jeremylow/pyshk | pyshk/models.py | SharedFile.AsDict | python | def AsDict(self, dt=True):
data = {}
if self.sharekey:
data['sharekey'] = self.sharekey
if self.name:
data['name'] = self.name
if self.user:
data['user'] = self.user.AsDict()
if self.title:
data['title'] = self.title
if self.description:
data['description'] = self.description
if self.posted_at:
if dt:
data['posted_at'] = self.posted_at
else:
data['posted_at'] = self.posted_at_iso
if self.permalink:
data['permalink'] = self.permalink
if self.width:
data['width'] = self.width
if self.height:
data['height'] = self.height
if self.image_url:
data['image_url'] = self.image_url
if self.source_url:
data['source_url'] = self.source_url
data['views'] = self.views
data['likes'] = self.likes
data['saves'] = self.saves
data['comments'] = self.comments
data['nsfw'] = self.nsfw
data['saved'] = self.saved
data['liked'] = self.liked
return data | A dict representation of this Shake instance.
The return value uses the same key names as the JSON representation.
Args:
dt (bool): If True, return dates as python datetime objects. If
False, return dates as ISO strings.
Return:
A dict representing this Shake instance | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/models.py#L462-L510 | null | class SharedFile(object):
"""
A class representing a file shared on MLKSHK.
Exposes the following properties of a sharedfile:
sharedfile.sharekey
sharedfile.name
sharedfile.user
sharedfile.title
sharedfile.description
sharedfile.posted_at
sharedfile.permalink
sharedfile.width
sharedfile.height
sharedfile.views
sharedfile.likes
sharedfile.saves
sharedfile.comments
sharedfile.nsfw
sharedfile.image_url
sharedfile.source_url
sharedfile.saved
sharedfile.liked
Args:
sharedfile.sharekey
sharedfile.name
sharedfile.user
sharedfile.title
sharedfile.description
sharedfile.posted_at
sharedfile.permalink
sharedfile.width
sharedfile.height
sharedfile.views
sharedfile.likes
sharedfile.saves
sharedfile.comments
sharedfile.nsfw
sharedfile.image_url
sharedfile.source_url
sharedfile.saved
sharedfile.liked
"""
def __init__(self, *args, **kwargs):
param_defaults = {
'sharekey': None,
'name': None,
'user': None,
'title': None,
'description': None,
'posted_at': None,
'permalink': None,
'width': None,
'height': None,
'views': None,
'likes': None,
'saves': None,
'comments': None,
'nsfw': None,
'image_url': None,
'source_url': None,
'saved': None,
'liked': None,
}
for (param, default) in param_defaults.items():
setattr(self, param, kwargs.get(param, default))
self.posted_at = kwargs.get('posted_at', None)
@property
def posted_at(self):
return self._posted_at
@posted_at.setter
def posted_at(self, value):
self._posted_at = convert_time(value)
@property
def posted_at_iso(self):
return self._posted_at.isoformat()
def AsJsonString(self):
"""
A JSON string representation of this SharedFile instance.
Returns:
A JSON string representation of this SharedFile instance
"""
return json.dumps(self.AsDict(dt=False), sort_keys=True)
@staticmethod
def NewFromJSON(data):
"""
Create a new SharedFile instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a SharedFile.
Returns:
A SharedFile instance.
"""
return SharedFile(
sharekey=data.get('sharekey', None),
name=data.get('name', None),
user=User.NewFromJSON(data.get('user', None)),
title=data.get('title', None),
description=data.get('description', None),
posted_at=data.get('posted_at', None),
permalink=data.get('permalink', None),
width=data.get('width', None),
height=data.get('height', None),
views=data.get('views', 0),
likes=data.get('likes', 0),
saves=data.get('saves', 0),
comments=data.get('comments', None),
nsfw=data.get('nsfw', False),
image_url=data.get('image_url', None),
source_url=data.get('source_url', None),
saved=data.get('saved', False),
liked=data.get('liked', False),
)
def __eq__(self, other):
"""
Compare two SharedFiles on all attributes **except** saved status
and liked status.
"""
try:
return other and \
self.sharekey == other.sharekey and \
self.name == other.name and \
self.user == other.user and \
self.title == other.title and \
self.description == other.description and \
self.posted_at == other.posted_at and \
self.permalink == other.permalink and \
self.width == other.width and \
self.height == other.height and \
self.views == other.views and \
self.likes == other.likes and \
self.saves == other.saves and \
self.comments == other.comments and \
self.nsfw == other.nsfw and \
self.image_url == other.image_url and \
self.source_url == other.source_url
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return self.AsJsonString()
|
jeremylow/pyshk | pyshk/models.py | SharedFile.NewFromJSON | python | def NewFromJSON(data):
return SharedFile(
sharekey=data.get('sharekey', None),
name=data.get('name', None),
user=User.NewFromJSON(data.get('user', None)),
title=data.get('title', None),
description=data.get('description', None),
posted_at=data.get('posted_at', None),
permalink=data.get('permalink', None),
width=data.get('width', None),
height=data.get('height', None),
views=data.get('views', 0),
likes=data.get('likes', 0),
saves=data.get('saves', 0),
comments=data.get('comments', None),
nsfw=data.get('nsfw', False),
image_url=data.get('image_url', None),
source_url=data.get('source_url', None),
saved=data.get('saved', False),
liked=data.get('liked', False),
) | Create a new SharedFile instance from a JSON dict.
Args:
data (dict): JSON dictionary representing a SharedFile.
Returns:
A SharedFile instance. | train | https://github.com/jeremylow/pyshk/blob/3ab92f6706397cde7a18367266eba9e0f1ada868/pyshk/models.py#L522-L551 | [
"def NewFromJSON(data):\n \"\"\"\n Create a new User instance from a JSON dict.\n\n Args:\n data (dict): JSON dictionary representing a user.\n\n Returns:\n A User instance.\n \"\"\"\n if data.get('shakes', None):\n shakes = [Shake.NewFromJSON(shk) for shk in data.get('shakes')]\n else:\n shakes = None\n\n return User(\n id=data.get('id', None),\n name=data.get('name', None),\n profile_image_url=data.get('profile_image_url', None),\n about=data.get('about', None),\n website=data.get('website', None),\n shakes=shakes)\n"
] | class SharedFile(object):
"""
A class representing a file shared on MLKSHK.
Exposes the following properties of a sharedfile:
sharedfile.sharekey
sharedfile.name
sharedfile.user
sharedfile.title
sharedfile.description
sharedfile.posted_at
sharedfile.permalink
sharedfile.width
sharedfile.height
sharedfile.views
sharedfile.likes
sharedfile.saves
sharedfile.comments
sharedfile.nsfw
sharedfile.image_url
sharedfile.source_url
sharedfile.saved
sharedfile.liked
Args:
sharedfile.sharekey
sharedfile.name
sharedfile.user
sharedfile.title
sharedfile.description
sharedfile.posted_at
sharedfile.permalink
sharedfile.width
sharedfile.height
sharedfile.views
sharedfile.likes
sharedfile.saves
sharedfile.comments
sharedfile.nsfw
sharedfile.image_url
sharedfile.source_url
sharedfile.saved
sharedfile.liked
"""
def __init__(self, *args, **kwargs):
param_defaults = {
'sharekey': None,
'name': None,
'user': None,
'title': None,
'description': None,
'posted_at': None,
'permalink': None,
'width': None,
'height': None,
'views': None,
'likes': None,
'saves': None,
'comments': None,
'nsfw': None,
'image_url': None,
'source_url': None,
'saved': None,
'liked': None,
}
for (param, default) in param_defaults.items():
setattr(self, param, kwargs.get(param, default))
self.posted_at = kwargs.get('posted_at', None)
@property
def posted_at(self):
return self._posted_at
@posted_at.setter
def posted_at(self, value):
self._posted_at = convert_time(value)
@property
def posted_at_iso(self):
return self._posted_at.isoformat()
def AsDict(self, dt=True):
"""
A dict representation of this Shake instance.
The return value uses the same key names as the JSON representation.
Args:
dt (bool): If True, return dates as python datetime objects. If
False, return dates as ISO strings.
Return:
A dict representing this Shake instance
"""
data = {}
if self.sharekey:
data['sharekey'] = self.sharekey
if self.name:
data['name'] = self.name
if self.user:
data['user'] = self.user.AsDict()
if self.title:
data['title'] = self.title
if self.description:
data['description'] = self.description
if self.posted_at:
if dt:
data['posted_at'] = self.posted_at
else:
data['posted_at'] = self.posted_at_iso
if self.permalink:
data['permalink'] = self.permalink
if self.width:
data['width'] = self.width
if self.height:
data['height'] = self.height
if self.image_url:
data['image_url'] = self.image_url
if self.source_url:
data['source_url'] = self.source_url
data['views'] = self.views
data['likes'] = self.likes
data['saves'] = self.saves
data['comments'] = self.comments
data['nsfw'] = self.nsfw
data['saved'] = self.saved
data['liked'] = self.liked
return data
def AsJsonString(self):
"""
A JSON string representation of this SharedFile instance.
Returns:
A JSON string representation of this SharedFile instance
"""
return json.dumps(self.AsDict(dt=False), sort_keys=True)
@staticmethod
def __eq__(self, other):
"""
Compare two SharedFiles on all attributes **except** saved status
and liked status.
"""
try:
return other and \
self.sharekey == other.sharekey and \
self.name == other.name and \
self.user == other.user and \
self.title == other.title and \
self.description == other.description and \
self.posted_at == other.posted_at and \
self.permalink == other.permalink and \
self.width == other.width and \
self.height == other.height and \
self.views == other.views and \
self.likes == other.likes and \
self.saves == other.saves and \
self.comments == other.comments and \
self.nsfw == other.nsfw and \
self.image_url == other.image_url and \
self.source_url == other.source_url
except AttributeError:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __repr__(self):
return self.AsJsonString()
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | manage_file_analysis | python | def manage_file_analysis(args: argparse.Namespace, filename: str, data: object) -> None:
key = DataStore.hashfile(filename)
print('Analyzing {} --> {}'.format(filename, key))
if data.check_key(key): # if exists in database, prepopulate
fit = LineFit(filename, data=data.get_data(key))
else:
fit = LineFit(filename)
if args.time:
noise, curvature, rnge, domn = fit.analyze(time=args.time)
newrow = [args.time, noise, curvature,
rnge, domn, fit.accepts[args.time]]
data.update1(key, newrow, len(fit.noises))
else:
fit.analyze_full()
newrows = np.array([range(len(fit.noises)), fit.noises,
fit.curves, fit.ranges, fit.domains, fit.accepts])
data.update(key, newrows)
data.save() | Take care of the analysis of a datafile | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L79-L99 | null | #!/usr/bin/env python3.5
"""
PACE
TODO:
* model training/testing
* more models (technically)
* multithreading
"""
import sys
import os
import argparse
import hashlib
import typing
from enforce import runtime_validation as types
from tqdm import tqdm
import numpy as np
import numpy.linalg as linalg
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
import scipy.integrate as si
import scipy.io as sco
import sklearn as sk
from sklearn import svm
from sklearn import preprocessing
from sklearn import neighbors
DATASTORE = 'linefitdata.mat'
HEADER = (' ____ _ ____ _____\n'
'| _ \ / \ / ___| ____|\n'
'| |_) / _ \| | | _|\n'
'| __/ ___ \ |___| |___\n'
'|_| /_/ \_\____|_____|\n\n'
'PACE: Parameterization & Analysis of Conduit Edges\n'
'William Farmer - 2015\n')
def main():
args = get_args()
data = DataStore(DATASTORE)
data.load()
# Establish directory for img outputs
if not os.path.exists('./img'):
os.makedirs('./img')
if args.plot:
for filename in args.files:
print('Plotting ' + filename)
plot_name = './img/' + filename + '.general_fit.png'
fit = LineFit(filename)
fit.plot_file(name=plot_name, time=args.time)
if args.analyze:
for filename in args.files:
manage_file_analysis(args, filename, data)
if args.plotdata:
data.plot_traindata()
if args.machinetest:
learner = ML(algo=args.model)
if args.printdata:
data.printdata()
if args.printdatashort:
data.printshort()
@types
class DataStore(object):
def __init__(self, name: str):
"""
Uses a .mat as datastore for compatibility.
Eventually may want to switch to SQLite, or some database? Not sure if
ever needed. This class provides that extensible API structure however.
Datafile has the following structure:
learning_data = {filehash:[[trial_index, noise, curvature,
range, domain, accept, viscosity]
,...],...}
Conveniently, you can use the domain field as a check as to whether or
not the row has been touched. If domain=0 (for that row) then that
means that it hasn't been updated.
:param: name of datastore
"""
self.name = name
self.data = {}
def load(self) -> None:
"""
Load datafile
"""
try:
self.data = sco.loadmat(self.name)
except FileNotFoundError:
pass
def save(self) -> None:
"""
Save datafile to disk
"""
sco.savemat(self.name, self.data)
def get_data(self, key: str) -> np.ndarray:
"""
Returns the specified data. Warning, ZERO ERROR HANDLING
:param key: name of file
:return: 2d data array
"""
return self.data[key]
@types
def get_keys(self) -> typing.List[str]:
"""
Return list of SHA512 hash keys that exist in datafile
:return: list of keys
"""
keys = []
for key in self.data.keys():
if key not in ['__header__', '__version__', '__globals__']:
keys.append(key)
return keys
@types
def check_key(self, key: str) -> bool:
"""
Checks if key exists in datastore. True if yes, False if no.
:param: SHA512 hash key
:return: whether or key not exists in datastore
"""
keys = self.get_keys()
return key in keys
def get_traindata(self) -> np.ndarray:
"""
Pulls all available data and concatenates for model training
:return: 2d array of points
"""
traindata = None
for key, value in self.data.items():
if key not in ['__header__', '__version__', '__globals__']:
if traindata is None:
traindata = value[np.where(value[:, 4] != 0)]
else:
traindata = np.concatenate((traindata, value[np.where(value[:, 4] != 0)]))
return traindata
@types
def plot_traindata(self, name: str='dataplot') -> None:
"""
Plots traindata.... choo choo...
"""
traindata = self.get_traindata()
plt.figure(figsize=(16, 16))
plt.scatter(traindata[:, 1], traindata[:, 2],
c=traindata[:, 5], marker='o', label='Datastore Points')
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.legend(loc=2, fontsize='xx-large')
plt.savefig('./img/{}.png'.format(name))
def printdata(self) -> None:
""" Prints data to stdout """
np.set_printoptions(threshold=np.nan)
print(self.data)
np.set_printoptions(threshold=1000)
def printshort(self) -> None:
""" Print shortened version of data to stdout"""
print(self.data)
@types
def update(self, key: str, data: np.ndarray) -> None:
""" Update entry in datastore """
self.data[key] = data
def update1(self, key: str, data: np.ndarray, size: int) -> None:
""" Update one entry in specific record in datastore """
print(data)
if key in self.get_keys():
self.data[key][data[0]] = data
else:
newdata = np.zeros((size, 6))
newdata[data[0]] = data
self.data[key] = newdata
@staticmethod
@types
def hashfile(name: str) -> str:
"""
Gets a hash of a file using block parsing
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
Using SHA512 for long-term support (hehehehe)
"""
hasher = hashlib.sha512()
with open(name, 'rb') as openfile:
for chunk in iter(lambda: openfile.read(4096), b''):
hasher.update(chunk)
return hasher.hexdigest()
class LineFit(object):
def __init__(self, filename: str, data: np.ndarray=None,
function_number: int=16, spread_number: int=22):
"""
Main class for line fitting and parameter determination
:param: filename
:param: data for fitting
:param: number of functions
:param: gaussian spread number
"""
self.filename = filename
(self.averagedata, self.times,
self.accepts, self.ratio, self.viscosity) = self._loadedges()
self.domain = np.arange(len(self.averagedata[:, 0]))
self.function_number = function_number
self.spread_number = spread_number
if data is None:
self.noises = np.zeros(len(self.times))
self.curves = np.zeros(len(self.times))
self.ranges = np.zeros(len(self.times))
self.domains = np.zeros(len(self.times))
else:
self.noises = data[:, 1]
self.curves = data[:, 2]
self.ranges = data[:, 3]
self.domains = data[:, 4]
@types
def _loadedges(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, float, np.ndarray]:
"""
Attempts to intelligently load the .mat file and take average of left and right edges
:return: left and right averages
:return: times for each column
:return: accept/reject for each column
:return: pixel-inch ratio
"""
data = sco.loadmat(self.filename)
datakeys = [k for k in data.keys()
if ('right' in k) or ('left' in k) or ('edge' in k)]
averagedata = ((data[datakeys[0]] + data[datakeys[1]]) / 2)
try:
times = (data['times'] - data['times'].min())[0]
except KeyError:
times = np.arange(len(data[datakeys[0]][0]))
try:
accept = data['accept']
except KeyError:
accept = np.zeros(len(times))
try:
ratio = data['ratio']
except KeyError:
ratio = 1
try:
viscosity = data['viscosity']
except KeyError:
viscosity = np.ones(len(times))
return averagedata, times, accept, ratio, viscosity
def plot_file(self, name: str=None, time: int=None) -> None:
"""
Plot specific time for provided datafile.
If no time provided, will plot middle.
:param: savefile name
:param: time/data column
"""
if not time:
time = int(len(self.times) / 2)
if not name:
name = './img/' + self.filename + '.png'
yhat, residuals, residual_mean, noise = self._get_fit(time)
plt.figure()
plt.scatter(self.domain, self.averagedata[:, time], alpha=0.2)
plt.plot(yhat)
plt.savefig(name)
@staticmethod
@types
def ddiff(arr: np.ndarray) -> np.ndarray:
"""
Helper Function: Divided Differences
input: array
"""
return arr[:-1] - arr[1:]
@types
def _gaussian_function(self, datalength: int, values: np.ndarray,
height: int, index: int) -> np.ndarray:
"""
i'th Regression Model Gaussian
:param: len(x)
:param: x values
:param: height of gaussian
:param: position of gaussian
:return: gaussian bumps over domain
"""
return height * np.exp(-(1 / (self.spread_number * datalength)) *
(values - ((datalength / self.function_number) * index)) ** 2)
@types
def _get_fit(self, time: int) -> typing.Tuple[np.ndarray, np.ndarray, float, float]:
"""
Fit regression model to data
:param: time (column of data)
:return: predicted points
:return: residuals
:return: mean residual
:return: error
"""
rawdata = self.averagedata[:, time]
domain = np.arange(len(rawdata))
datalength = len(domain)
coefficients = np.zeros((datalength, self.function_number + 2))
coefficients[:, 0] = 1
coefficients[:, 1] = domain
for i in range(self.function_number):
coefficients[:, 2 + i] = self._gaussian_function(datalength, domain, 1, i)
betas = linalg.inv(coefficients.transpose().dot(coefficients)).dot(coefficients.transpose().dot(rawdata))
predicted_values = coefficients.dot(betas)
residuals = rawdata - predicted_values
error = np.sqrt(residuals.transpose().dot(residuals) / (datalength - (self.function_number + 2)))
return predicted_values, residuals, residuals.mean(), error
@types
def _get_noise(self, residuals: np.ndarray) -> float:
"""
Determine Noise of Residuals.
:param: residuals
:return: noise
"""
return np.mean(np.abs(residuals))
@types
def analyze(self, time: int=None) -> typing.Tuple[float, float, int, int]:
"""
Determine noise, curvature, range, and domain of specified array.
:param: pixel to inch ratio
:param: time (column) to use.
:return: curvature
:return: noise
:return: range
:return: domain
"""
if not time:
time = int(len(self.times) / 2)
if self.domains[time] == 0:
yhat, residuals, mean_residual, error = self._get_fit(time)
yhat_p = self.ddiff(yhat)
yhat_pp = self.ddiff(yhat_p)
noise = self._get_noise(residuals)
curvature = (1 / self.ratio) * (1 / len(yhat_pp)) * np.sqrt(si.simps(yhat_pp ** 2))
rng = (self.ratio * (np.max(self.averagedata[:, time]) -
np.min(self.averagedata[:, time])))
dmn = self.ratio * len(self.averagedata[:, time])
self.noises[time] = np.log10(noise)
self.curves[time] = np.log10(curvature)
self.ranges[time] = np.log10(rng)
self.domains[time] = np.log10(dmn)
return self.noises[time], self.curves[time], self.ranges[time], self.domains[time]
@types
def analyze_full(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""
Determine noise, curvature, range, and domain of specified data.
Like analyze, except examines the entire file.
:param: float->pixel to inch ratio
:return: array->curvatures
:return: array->noises
:return: array->ranges
:return: array->domains
"""
if self.noises[0] == 0:
timelength = len(self.times)
for i in tqdm(range(timelength)):
self.analyze(time=i)
return self.noises, self.curves, self.ranges, self.domains
class ML(object):
def __init__(self, args: argparse.Namespace, algo: str='nn'):
"""
Machine Learning to determine usability of data....
"""
self.algo = self.get_algo(args, algo)
def get_algo(self, args: argparse.Namespace, algo: str) -> object:
""" Returns machine learning algorithm based on arguments """
if algo == 'nn':
return NearestNeighbor(args.nnk)
def train(self) -> None:
""" Trains specified algorithm """
traindata = self.get_data()
self.algo.train(traindata)
def get_data(self) -> np.ndarray:
"""
Gets data for training
We use the domain column to determine what fields have been filled out
If the domain is zero (i.e. not in error) than we should probably ignore it anyway
"""
traindata = data.get_traindata()
return traindata
def plot_fitspace(self, name: str, X: np.ndarray, y: np.ndarray, clf: object) -> None:
""" Plot 2dplane of fitspace """
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])
# Plot the decision boundary. For that, we will assign a color to each
# point in the mesh [x_min, m_max]x[y_min, y_max].
h = 0.01 # Mesh step size
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure()
plt.pcolormesh(xx, yy, Z, cmap=cmap_light)
# Plot also the training points
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold)
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.savefig(name)
class NearestNeighbor(object):
def __init__(self, k: int):
"""
An example machine learning model. EVERY MODEL NEEDS TO PROVIDE:
1. Train
2. Predict
"""
self.clf = neighbors.KNeighborsClassifier(k, weights='distance',
p=2, algorithm='auto',
n_jobs=8)
def train(self, traindata: np.ndarray) -> None:
""" Trains on dataset """
self.clf.fit(traindata[:, 1:5], traindata[:, 5])
def predict(self, predictdata: np.ndarray) -> np.ndarray:
""" predict given points """
return self.clf.predict(predictdata)
def get_args() -> argparse.Namespace:
"""
Get program arguments.
Just use --help....
"""
parser = argparse.ArgumentParser(prog='python3 linefit.py',
description=('Parameterize and analyze '
'usability of conduit edge data'))
parser.add_argument('files', metavar='F', type=str, nargs='*',
help=('File(s) for processing. '
'Each file has a specific format: '
'See README (or header) for specification.'))
parser.add_argument('-p', '--plot', action='store_true', default=False,
help=('Create Plot of file(s)? Note, unless --time flag used, '
'will plot middle time.'))
parser.add_argument('-pd', '--plotdata', action='store_true', default=False,
help='Create plot of current datastore.')
parser.add_argument('-a', '--analyze', action='store_true', default=False,
help=('Analyze the file and determine Curvature/Noise parameters. '
'If --time not specified, will examine entire file. '
'This will add results to datastore with false flags '
'in accept field if not provided.'))
parser.add_argument('-mt', '--machinetest', action='store_true', default=False,
help=('Determine if the times from the file are usable based on '
'supervised learning model. If --time not specified, '
'will examine entire file.'))
parser.add_argument('-m', '--model', type=str, default='nn',
help=('Learning Model to use. Options are ["nn", "svm", "forest", "sgd"]'))
parser.add_argument('-nnk', '--nnk', type=int, default=10,
help=('k-Parameter for k nearest neighbors. Google it.'))
parser.add_argument('-t', '--time', type=int, default=None,
help=('Time (column) of data to use for analysis OR plotting. '
'Zero-Indexed'))
parser.add_argument('-d', '--datastore', type=str, default=DATASTORE,
help=("Datastore filename override. "
"Don't do this unless you know what you're doing"))
parser.add_argument('-pds', '--printdata', action='store_true', default=False,
help=("Print data"))
parser.add_argument('-pdss', '--printdatashort', action='store_true', default=False,
help=("Print data short"))
args = parser.parse_args()
return args
if __name__ == '__main__':
sys.exit(main())
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | get_args | python | def get_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(prog='python3 linefit.py',
description=('Parameterize and analyze '
'usability of conduit edge data'))
parser.add_argument('files', metavar='F', type=str, nargs='*',
help=('File(s) for processing. '
'Each file has a specific format: '
'See README (or header) for specification.'))
parser.add_argument('-p', '--plot', action='store_true', default=False,
help=('Create Plot of file(s)? Note, unless --time flag used, '
'will plot middle time.'))
parser.add_argument('-pd', '--plotdata', action='store_true', default=False,
help='Create plot of current datastore.')
parser.add_argument('-a', '--analyze', action='store_true', default=False,
help=('Analyze the file and determine Curvature/Noise parameters. '
'If --time not specified, will examine entire file. '
'This will add results to datastore with false flags '
'in accept field if not provided.'))
parser.add_argument('-mt', '--machinetest', action='store_true', default=False,
help=('Determine if the times from the file are usable based on '
'supervised learning model. If --time not specified, '
'will examine entire file.'))
parser.add_argument('-m', '--model', type=str, default='nn',
help=('Learning Model to use. Options are ["nn", "svm", "forest", "sgd"]'))
parser.add_argument('-nnk', '--nnk', type=int, default=10,
help=('k-Parameter for k nearest neighbors. Google it.'))
parser.add_argument('-t', '--time', type=int, default=None,
help=('Time (column) of data to use for analysis OR plotting. '
'Zero-Indexed'))
parser.add_argument('-d', '--datastore', type=str, default=DATASTORE,
help=("Datastore filename override. "
"Don't do this unless you know what you're doing"))
parser.add_argument('-pds', '--printdata', action='store_true', default=False,
help=("Print data"))
parser.add_argument('-pdss', '--printdatashort', action='store_true', default=False,
help=("Print data short"))
args = parser.parse_args()
return args | Get program arguments.
Just use --help.... | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L517-L559 | null | #!/usr/bin/env python3.5
"""
PACE
TODO:
* model training/testing
* more models (technically)
* multithreading
"""
import sys
import os
import argparse
import hashlib
import typing
from enforce import runtime_validation as types
from tqdm import tqdm
import numpy as np
import numpy.linalg as linalg
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
import scipy.integrate as si
import scipy.io as sco
import sklearn as sk
from sklearn import svm
from sklearn import preprocessing
from sklearn import neighbors
DATASTORE = 'linefitdata.mat'
HEADER = (' ____ _ ____ _____\n'
'| _ \ / \ / ___| ____|\n'
'| |_) / _ \| | | _|\n'
'| __/ ___ \ |___| |___\n'
'|_| /_/ \_\____|_____|\n\n'
'PACE: Parameterization & Analysis of Conduit Edges\n'
'William Farmer - 2015\n')
def main():
args = get_args()
data = DataStore(DATASTORE)
data.load()
# Establish directory for img outputs
if not os.path.exists('./img'):
os.makedirs('./img')
if args.plot:
for filename in args.files:
print('Plotting ' + filename)
plot_name = './img/' + filename + '.general_fit.png'
fit = LineFit(filename)
fit.plot_file(name=plot_name, time=args.time)
if args.analyze:
for filename in args.files:
manage_file_analysis(args, filename, data)
if args.plotdata:
data.plot_traindata()
if args.machinetest:
learner = ML(algo=args.model)
if args.printdata:
data.printdata()
if args.printdatashort:
data.printshort()
@types
def manage_file_analysis(args: argparse.Namespace, filename: str, data: object) -> None:
"""
Take care of the analysis of a datafile
"""
key = DataStore.hashfile(filename)
print('Analyzing {} --> {}'.format(filename, key))
if data.check_key(key): # if exists in database, prepopulate
fit = LineFit(filename, data=data.get_data(key))
else:
fit = LineFit(filename)
if args.time:
noise, curvature, rnge, domn = fit.analyze(time=args.time)
newrow = [args.time, noise, curvature,
rnge, domn, fit.accepts[args.time]]
data.update1(key, newrow, len(fit.noises))
else:
fit.analyze_full()
newrows = np.array([range(len(fit.noises)), fit.noises,
fit.curves, fit.ranges, fit.domains, fit.accepts])
data.update(key, newrows)
data.save()
class DataStore(object):
def __init__(self, name: str):
"""
Uses a .mat as datastore for compatibility.
Eventually may want to switch to SQLite, or some database? Not sure if
ever needed. This class provides that extensible API structure however.
Datafile has the following structure:
learning_data = {filehash:[[trial_index, noise, curvature,
range, domain, accept, viscosity]
,...],...}
Conveniently, you can use the domain field as a check as to whether or
not the row has been touched. If domain=0 (for that row) then that
means that it hasn't been updated.
:param: name of datastore
"""
self.name = name
self.data = {}
def load(self) -> None:
"""
Load datafile
"""
try:
self.data = sco.loadmat(self.name)
except FileNotFoundError:
pass
def save(self) -> None:
"""
Save datafile to disk
"""
sco.savemat(self.name, self.data)
def get_data(self, key: str) -> np.ndarray:
"""
Returns the specified data. Warning, ZERO ERROR HANDLING
:param key: name of file
:return: 2d data array
"""
return self.data[key]
@types
def get_keys(self) -> typing.List[str]:
"""
Return list of SHA512 hash keys that exist in datafile
:return: list of keys
"""
keys = []
for key in self.data.keys():
if key not in ['__header__', '__version__', '__globals__']:
keys.append(key)
return keys
@types
def check_key(self, key: str) -> bool:
"""
Checks if key exists in datastore. True if yes, False if no.
:param: SHA512 hash key
:return: whether or key not exists in datastore
"""
keys = self.get_keys()
return key in keys
def get_traindata(self) -> np.ndarray:
"""
Pulls all available data and concatenates for model training
:return: 2d array of points
"""
traindata = None
for key, value in self.data.items():
if key not in ['__header__', '__version__', '__globals__']:
if traindata is None:
traindata = value[np.where(value[:, 4] != 0)]
else:
traindata = np.concatenate((traindata, value[np.where(value[:, 4] != 0)]))
return traindata
@types
def plot_traindata(self, name: str='dataplot') -> None:
"""
Plots traindata.... choo choo...
"""
traindata = self.get_traindata()
plt.figure(figsize=(16, 16))
plt.scatter(traindata[:, 1], traindata[:, 2],
c=traindata[:, 5], marker='o', label='Datastore Points')
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.legend(loc=2, fontsize='xx-large')
plt.savefig('./img/{}.png'.format(name))
def printdata(self) -> None:
""" Prints data to stdout """
np.set_printoptions(threshold=np.nan)
print(self.data)
np.set_printoptions(threshold=1000)
def printshort(self) -> None:
""" Print shortened version of data to stdout"""
print(self.data)
@types
def update(self, key: str, data: np.ndarray) -> None:
""" Update entry in datastore """
self.data[key] = data
def update1(self, key: str, data: np.ndarray, size: int) -> None:
""" Update one entry in specific record in datastore """
print(data)
if key in self.get_keys():
self.data[key][data[0]] = data
else:
newdata = np.zeros((size, 6))
newdata[data[0]] = data
self.data[key] = newdata
@staticmethod
@types
def hashfile(name: str) -> str:
"""
Gets a hash of a file using block parsing
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
Using SHA512 for long-term support (hehehehe)
"""
hasher = hashlib.sha512()
with open(name, 'rb') as openfile:
for chunk in iter(lambda: openfile.read(4096), b''):
hasher.update(chunk)
return hasher.hexdigest()
class LineFit(object):
def __init__(self, filename: str, data: np.ndarray=None,
function_number: int=16, spread_number: int=22):
"""
Main class for line fitting and parameter determination
:param: filename
:param: data for fitting
:param: number of functions
:param: gaussian spread number
"""
self.filename = filename
(self.averagedata, self.times,
self.accepts, self.ratio, self.viscosity) = self._loadedges()
self.domain = np.arange(len(self.averagedata[:, 0]))
self.function_number = function_number
self.spread_number = spread_number
if data is None:
self.noises = np.zeros(len(self.times))
self.curves = np.zeros(len(self.times))
self.ranges = np.zeros(len(self.times))
self.domains = np.zeros(len(self.times))
else:
self.noises = data[:, 1]
self.curves = data[:, 2]
self.ranges = data[:, 3]
self.domains = data[:, 4]
@types
def _loadedges(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, float, np.ndarray]:
"""
Attempts to intelligently load the .mat file and take average of left and right edges
:return: left and right averages
:return: times for each column
:return: accept/reject for each column
:return: pixel-inch ratio
"""
data = sco.loadmat(self.filename)
datakeys = [k for k in data.keys()
if ('right' in k) or ('left' in k) or ('edge' in k)]
averagedata = ((data[datakeys[0]] + data[datakeys[1]]) / 2)
try:
times = (data['times'] - data['times'].min())[0]
except KeyError:
times = np.arange(len(data[datakeys[0]][0]))
try:
accept = data['accept']
except KeyError:
accept = np.zeros(len(times))
try:
ratio = data['ratio']
except KeyError:
ratio = 1
try:
viscosity = data['viscosity']
except KeyError:
viscosity = np.ones(len(times))
return averagedata, times, accept, ratio, viscosity
def plot_file(self, name: str=None, time: int=None) -> None:
"""
Plot specific time for provided datafile.
If no time provided, will plot middle.
:param: savefile name
:param: time/data column
"""
if not time:
time = int(len(self.times) / 2)
if not name:
name = './img/' + self.filename + '.png'
yhat, residuals, residual_mean, noise = self._get_fit(time)
plt.figure()
plt.scatter(self.domain, self.averagedata[:, time], alpha=0.2)
plt.plot(yhat)
plt.savefig(name)
@staticmethod
@types
def ddiff(arr: np.ndarray) -> np.ndarray:
"""
Helper Function: Divided Differences
input: array
"""
return arr[:-1] - arr[1:]
@types
def _gaussian_function(self, datalength: int, values: np.ndarray,
height: int, index: int) -> np.ndarray:
"""
i'th Regression Model Gaussian
:param: len(x)
:param: x values
:param: height of gaussian
:param: position of gaussian
:return: gaussian bumps over domain
"""
return height * np.exp(-(1 / (self.spread_number * datalength)) *
(values - ((datalength / self.function_number) * index)) ** 2)
@types
def _get_fit(self, time: int) -> typing.Tuple[np.ndarray, np.ndarray, float, float]:
"""
Fit regression model to data
:param: time (column of data)
:return: predicted points
:return: residuals
:return: mean residual
:return: error
"""
rawdata = self.averagedata[:, time]
domain = np.arange(len(rawdata))
datalength = len(domain)
coefficients = np.zeros((datalength, self.function_number + 2))
coefficients[:, 0] = 1
coefficients[:, 1] = domain
for i in range(self.function_number):
coefficients[:, 2 + i] = self._gaussian_function(datalength, domain, 1, i)
betas = linalg.inv(coefficients.transpose().dot(coefficients)).dot(coefficients.transpose().dot(rawdata))
predicted_values = coefficients.dot(betas)
residuals = rawdata - predicted_values
error = np.sqrt(residuals.transpose().dot(residuals) / (datalength - (self.function_number + 2)))
return predicted_values, residuals, residuals.mean(), error
@types
def _get_noise(self, residuals: np.ndarray) -> float:
"""
Determine Noise of Residuals.
:param: residuals
:return: noise
"""
return np.mean(np.abs(residuals))
@types
def analyze(self, time: int=None) -> typing.Tuple[float, float, int, int]:
"""
Determine noise, curvature, range, and domain of specified array.
:param: pixel to inch ratio
:param: time (column) to use.
:return: curvature
:return: noise
:return: range
:return: domain
"""
if not time:
time = int(len(self.times) / 2)
if self.domains[time] == 0:
yhat, residuals, mean_residual, error = self._get_fit(time)
yhat_p = self.ddiff(yhat)
yhat_pp = self.ddiff(yhat_p)
noise = self._get_noise(residuals)
curvature = (1 / self.ratio) * (1 / len(yhat_pp)) * np.sqrt(si.simps(yhat_pp ** 2))
rng = (self.ratio * (np.max(self.averagedata[:, time]) -
np.min(self.averagedata[:, time])))
dmn = self.ratio * len(self.averagedata[:, time])
self.noises[time] = np.log10(noise)
self.curves[time] = np.log10(curvature)
self.ranges[time] = np.log10(rng)
self.domains[time] = np.log10(dmn)
return self.noises[time], self.curves[time], self.ranges[time], self.domains[time]
@types
def analyze_full(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""
Determine noise, curvature, range, and domain of specified data.
Like analyze, except examines the entire file.
:param: float->pixel to inch ratio
:return: array->curvatures
:return: array->noises
:return: array->ranges
:return: array->domains
"""
if self.noises[0] == 0:
timelength = len(self.times)
for i in tqdm(range(timelength)):
self.analyze(time=i)
return self.noises, self.curves, self.ranges, self.domains
class ML(object):
def __init__(self, args: argparse.Namespace, algo: str='nn'):
"""
Machine Learning to determine usability of data....
"""
self.algo = self.get_algo(args, algo)
def get_algo(self, args: argparse.Namespace, algo: str) -> object:
""" Returns machine learning algorithm based on arguments """
if algo == 'nn':
return NearestNeighbor(args.nnk)
def train(self) -> None:
""" Trains specified algorithm """
traindata = self.get_data()
self.algo.train(traindata)
def get_data(self) -> np.ndarray:
"""
Gets data for training
We use the domain column to determine what fields have been filled out
If the domain is zero (i.e. not in error) than we should probably ignore it anyway
"""
traindata = data.get_traindata()
return traindata
def plot_fitspace(self, name: str, X: np.ndarray, y: np.ndarray, clf: object) -> None:
""" Plot 2dplane of fitspace """
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])
# Plot the decision boundary. For that, we will assign a color to each
# point in the mesh [x_min, m_max]x[y_min, y_max].
h = 0.01 # Mesh step size
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure()
plt.pcolormesh(xx, yy, Z, cmap=cmap_light)
# Plot also the training points
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold)
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.savefig(name)
class NearestNeighbor(object):
def __init__(self, k: int):
"""
An example machine learning model. EVERY MODEL NEEDS TO PROVIDE:
1. Train
2. Predict
"""
self.clf = neighbors.KNeighborsClassifier(k, weights='distance',
p=2, algorithm='auto',
n_jobs=8)
def train(self, traindata: np.ndarray) -> None:
""" Trains on dataset """
self.clf.fit(traindata[:, 1:5], traindata[:, 5])
def predict(self, predictdata: np.ndarray) -> np.ndarray:
""" predict given points """
return self.clf.predict(predictdata)
if __name__ == '__main__':
sys.exit(main())
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | DataStore.get_keys | python | def get_keys(self) -> typing.List[str]:
keys = []
for key in self.data.keys():
if key not in ['__header__', '__version__', '__globals__']:
keys.append(key)
return keys | Return list of SHA512 hash keys that exist in datafile
:return: list of keys | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L151-L161 | null | class DataStore(object):
def __init__(self, name: str):
"""
Uses a .mat as datastore for compatibility.
Eventually may want to switch to SQLite, or some database? Not sure if
ever needed. This class provides that extensible API structure however.
Datafile has the following structure:
learning_data = {filehash:[[trial_index, noise, curvature,
range, domain, accept, viscosity]
,...],...}
Conveniently, you can use the domain field as a check as to whether or
not the row has been touched. If domain=0 (for that row) then that
means that it hasn't been updated.
:param: name of datastore
"""
self.name = name
self.data = {}
def load(self) -> None:
"""
Load datafile
"""
try:
self.data = sco.loadmat(self.name)
except FileNotFoundError:
pass
def save(self) -> None:
"""
Save datafile to disk
"""
sco.savemat(self.name, self.data)
def get_data(self, key: str) -> np.ndarray:
"""
Returns the specified data. Warning, ZERO ERROR HANDLING
:param key: name of file
:return: 2d data array
"""
return self.data[key]
@types
@types
def check_key(self, key: str) -> bool:
"""
Checks if key exists in datastore. True if yes, False if no.
:param: SHA512 hash key
:return: whether or key not exists in datastore
"""
keys = self.get_keys()
return key in keys
def get_traindata(self) -> np.ndarray:
"""
Pulls all available data and concatenates for model training
:return: 2d array of points
"""
traindata = None
for key, value in self.data.items():
if key not in ['__header__', '__version__', '__globals__']:
if traindata is None:
traindata = value[np.where(value[:, 4] != 0)]
else:
traindata = np.concatenate((traindata, value[np.where(value[:, 4] != 0)]))
return traindata
@types
def plot_traindata(self, name: str='dataplot') -> None:
"""
Plots traindata.... choo choo...
"""
traindata = self.get_traindata()
plt.figure(figsize=(16, 16))
plt.scatter(traindata[:, 1], traindata[:, 2],
c=traindata[:, 5], marker='o', label='Datastore Points')
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.legend(loc=2, fontsize='xx-large')
plt.savefig('./img/{}.png'.format(name))
def printdata(self) -> None:
""" Prints data to stdout """
np.set_printoptions(threshold=np.nan)
print(self.data)
np.set_printoptions(threshold=1000)
def printshort(self) -> None:
""" Print shortened version of data to stdout"""
print(self.data)
@types
def update(self, key: str, data: np.ndarray) -> None:
""" Update entry in datastore """
self.data[key] = data
def update1(self, key: str, data: np.ndarray, size: int) -> None:
""" Update one entry in specific record in datastore """
print(data)
if key in self.get_keys():
self.data[key][data[0]] = data
else:
newdata = np.zeros((size, 6))
newdata[data[0]] = data
self.data[key] = newdata
@staticmethod
@types
def hashfile(name: str) -> str:
"""
Gets a hash of a file using block parsing
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
Using SHA512 for long-term support (hehehehe)
"""
hasher = hashlib.sha512()
with open(name, 'rb') as openfile:
for chunk in iter(lambda: openfile.read(4096), b''):
hasher.update(chunk)
return hasher.hexdigest()
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | DataStore.check_key | python | def check_key(self, key: str) -> bool:
keys = self.get_keys()
return key in keys | Checks if key exists in datastore. True if yes, False if no.
:param: SHA512 hash key
:return: whether or key not exists in datastore | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L164-L173 | null | class DataStore(object):
def __init__(self, name: str):
"""
Uses a .mat as datastore for compatibility.
Eventually may want to switch to SQLite, or some database? Not sure if
ever needed. This class provides that extensible API structure however.
Datafile has the following structure:
learning_data = {filehash:[[trial_index, noise, curvature,
range, domain, accept, viscosity]
,...],...}
Conveniently, you can use the domain field as a check as to whether or
not the row has been touched. If domain=0 (for that row) then that
means that it hasn't been updated.
:param: name of datastore
"""
self.name = name
self.data = {}
def load(self) -> None:
"""
Load datafile
"""
try:
self.data = sco.loadmat(self.name)
except FileNotFoundError:
pass
def save(self) -> None:
"""
Save datafile to disk
"""
sco.savemat(self.name, self.data)
def get_data(self, key: str) -> np.ndarray:
"""
Returns the specified data. Warning, ZERO ERROR HANDLING
:param key: name of file
:return: 2d data array
"""
return self.data[key]
@types
def get_keys(self) -> typing.List[str]:
"""
Return list of SHA512 hash keys that exist in datafile
:return: list of keys
"""
keys = []
for key in self.data.keys():
if key not in ['__header__', '__version__', '__globals__']:
keys.append(key)
return keys
@types
def get_traindata(self) -> np.ndarray:
"""
Pulls all available data and concatenates for model training
:return: 2d array of points
"""
traindata = None
for key, value in self.data.items():
if key not in ['__header__', '__version__', '__globals__']:
if traindata is None:
traindata = value[np.where(value[:, 4] != 0)]
else:
traindata = np.concatenate((traindata, value[np.where(value[:, 4] != 0)]))
return traindata
@types
def plot_traindata(self, name: str='dataplot') -> None:
"""
Plots traindata.... choo choo...
"""
traindata = self.get_traindata()
plt.figure(figsize=(16, 16))
plt.scatter(traindata[:, 1], traindata[:, 2],
c=traindata[:, 5], marker='o', label='Datastore Points')
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.legend(loc=2, fontsize='xx-large')
plt.savefig('./img/{}.png'.format(name))
def printdata(self) -> None:
""" Prints data to stdout """
np.set_printoptions(threshold=np.nan)
print(self.data)
np.set_printoptions(threshold=1000)
def printshort(self) -> None:
""" Print shortened version of data to stdout"""
print(self.data)
@types
def update(self, key: str, data: np.ndarray) -> None:
""" Update entry in datastore """
self.data[key] = data
def update1(self, key: str, data: np.ndarray, size: int) -> None:
""" Update one entry in specific record in datastore """
print(data)
if key in self.get_keys():
self.data[key][data[0]] = data
else:
newdata = np.zeros((size, 6))
newdata[data[0]] = data
self.data[key] = newdata
@staticmethod
@types
def hashfile(name: str) -> str:
"""
Gets a hash of a file using block parsing
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
Using SHA512 for long-term support (hehehehe)
"""
hasher = hashlib.sha512()
with open(name, 'rb') as openfile:
for chunk in iter(lambda: openfile.read(4096), b''):
hasher.update(chunk)
return hasher.hexdigest()
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | DataStore.get_traindata | python | def get_traindata(self) -> np.ndarray:
traindata = None
for key, value in self.data.items():
if key not in ['__header__', '__version__', '__globals__']:
if traindata is None:
traindata = value[np.where(value[:, 4] != 0)]
else:
traindata = np.concatenate((traindata, value[np.where(value[:, 4] != 0)]))
return traindata | Pulls all available data and concatenates for model training
:return: 2d array of points | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L175-L188 | null | class DataStore(object):
def __init__(self, name: str):
"""
Uses a .mat as datastore for compatibility.
Eventually may want to switch to SQLite, or some database? Not sure if
ever needed. This class provides that extensible API structure however.
Datafile has the following structure:
learning_data = {filehash:[[trial_index, noise, curvature,
range, domain, accept, viscosity]
,...],...}
Conveniently, you can use the domain field as a check as to whether or
not the row has been touched. If domain=0 (for that row) then that
means that it hasn't been updated.
:param: name of datastore
"""
self.name = name
self.data = {}
def load(self) -> None:
"""
Load datafile
"""
try:
self.data = sco.loadmat(self.name)
except FileNotFoundError:
pass
def save(self) -> None:
"""
Save datafile to disk
"""
sco.savemat(self.name, self.data)
def get_data(self, key: str) -> np.ndarray:
"""
Returns the specified data. Warning, ZERO ERROR HANDLING
:param key: name of file
:return: 2d data array
"""
return self.data[key]
@types
def get_keys(self) -> typing.List[str]:
"""
Return list of SHA512 hash keys that exist in datafile
:return: list of keys
"""
keys = []
for key in self.data.keys():
if key not in ['__header__', '__version__', '__globals__']:
keys.append(key)
return keys
@types
def check_key(self, key: str) -> bool:
"""
Checks if key exists in datastore. True if yes, False if no.
:param: SHA512 hash key
:return: whether or key not exists in datastore
"""
keys = self.get_keys()
return key in keys
@types
def plot_traindata(self, name: str='dataplot') -> None:
"""
Plots traindata.... choo choo...
"""
traindata = self.get_traindata()
plt.figure(figsize=(16, 16))
plt.scatter(traindata[:, 1], traindata[:, 2],
c=traindata[:, 5], marker='o', label='Datastore Points')
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.legend(loc=2, fontsize='xx-large')
plt.savefig('./img/{}.png'.format(name))
def printdata(self) -> None:
""" Prints data to stdout """
np.set_printoptions(threshold=np.nan)
print(self.data)
np.set_printoptions(threshold=1000)
def printshort(self) -> None:
""" Print shortened version of data to stdout"""
print(self.data)
@types
def update(self, key: str, data: np.ndarray) -> None:
""" Update entry in datastore """
self.data[key] = data
def update1(self, key: str, data: np.ndarray, size: int) -> None:
""" Update one entry in specific record in datastore """
print(data)
if key in self.get_keys():
self.data[key][data[0]] = data
else:
newdata = np.zeros((size, 6))
newdata[data[0]] = data
self.data[key] = newdata
@staticmethod
@types
def hashfile(name: str) -> str:
"""
Gets a hash of a file using block parsing
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
Using SHA512 for long-term support (hehehehe)
"""
hasher = hashlib.sha512()
with open(name, 'rb') as openfile:
for chunk in iter(lambda: openfile.read(4096), b''):
hasher.update(chunk)
return hasher.hexdigest()
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | DataStore.plot_traindata | python | def plot_traindata(self, name: str='dataplot') -> None:
traindata = self.get_traindata()
plt.figure(figsize=(16, 16))
plt.scatter(traindata[:, 1], traindata[:, 2],
c=traindata[:, 5], marker='o', label='Datastore Points')
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.legend(loc=2, fontsize='xx-large')
plt.savefig('./img/{}.png'.format(name)) | Plots traindata.... choo choo... | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L191-L203 | [
"def get_traindata(self) -> np.ndarray:\n \"\"\"\n Pulls all available data and concatenates for model training\n\n :return: 2d array of points\n \"\"\"\n traindata = None\n for key, value in self.data.items():\n if key not in ['__header__', '__version__', '__globals__']:\n if traindata is None:\n traindata = value[np.where(value[:, 4] != 0)]\n else:\n traindata = np.concatenate((traindata, value[np.where(value[:, 4] != 0)]))\n return traindata\n"
] | class DataStore(object):
def __init__(self, name: str):
"""
Uses a .mat as datastore for compatibility.
Eventually may want to switch to SQLite, or some database? Not sure if
ever needed. This class provides that extensible API structure however.
Datafile has the following structure:
learning_data = {filehash:[[trial_index, noise, curvature,
range, domain, accept, viscosity]
,...],...}
Conveniently, you can use the domain field as a check as to whether or
not the row has been touched. If domain=0 (for that row) then that
means that it hasn't been updated.
:param: name of datastore
"""
self.name = name
self.data = {}
def load(self) -> None:
"""
Load datafile
"""
try:
self.data = sco.loadmat(self.name)
except FileNotFoundError:
pass
def save(self) -> None:
"""
Save datafile to disk
"""
sco.savemat(self.name, self.data)
def get_data(self, key: str) -> np.ndarray:
"""
Returns the specified data. Warning, ZERO ERROR HANDLING
:param key: name of file
:return: 2d data array
"""
return self.data[key]
@types
def get_keys(self) -> typing.List[str]:
"""
Return list of SHA512 hash keys that exist in datafile
:return: list of keys
"""
keys = []
for key in self.data.keys():
if key not in ['__header__', '__version__', '__globals__']:
keys.append(key)
return keys
@types
def check_key(self, key: str) -> bool:
"""
Checks if key exists in datastore. True if yes, False if no.
:param: SHA512 hash key
:return: whether or key not exists in datastore
"""
keys = self.get_keys()
return key in keys
def get_traindata(self) -> np.ndarray:
"""
Pulls all available data and concatenates for model training
:return: 2d array of points
"""
traindata = None
for key, value in self.data.items():
if key not in ['__header__', '__version__', '__globals__']:
if traindata is None:
traindata = value[np.where(value[:, 4] != 0)]
else:
traindata = np.concatenate((traindata, value[np.where(value[:, 4] != 0)]))
return traindata
@types
def printdata(self) -> None:
""" Prints data to stdout """
np.set_printoptions(threshold=np.nan)
print(self.data)
np.set_printoptions(threshold=1000)
def printshort(self) -> None:
""" Print shortened version of data to stdout"""
print(self.data)
@types
def update(self, key: str, data: np.ndarray) -> None:
""" Update entry in datastore """
self.data[key] = data
def update1(self, key: str, data: np.ndarray, size: int) -> None:
""" Update one entry in specific record in datastore """
print(data)
if key in self.get_keys():
self.data[key][data[0]] = data
else:
newdata = np.zeros((size, 6))
newdata[data[0]] = data
self.data[key] = newdata
@staticmethod
@types
def hashfile(name: str) -> str:
"""
Gets a hash of a file using block parsing
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
Using SHA512 for long-term support (hehehehe)
"""
hasher = hashlib.sha512()
with open(name, 'rb') as openfile:
for chunk in iter(lambda: openfile.read(4096), b''):
hasher.update(chunk)
return hasher.hexdigest()
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | DataStore.printdata | python | def printdata(self) -> None:
np.set_printoptions(threshold=np.nan)
print(self.data)
np.set_printoptions(threshold=1000) | Prints data to stdout | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L205-L209 | null | class DataStore(object):
def __init__(self, name: str):
"""
Uses a .mat as datastore for compatibility.
Eventually may want to switch to SQLite, or some database? Not sure if
ever needed. This class provides that extensible API structure however.
Datafile has the following structure:
learning_data = {filehash:[[trial_index, noise, curvature,
range, domain, accept, viscosity]
,...],...}
Conveniently, you can use the domain field as a check as to whether or
not the row has been touched. If domain=0 (for that row) then that
means that it hasn't been updated.
:param: name of datastore
"""
self.name = name
self.data = {}
def load(self) -> None:
"""
Load datafile
"""
try:
self.data = sco.loadmat(self.name)
except FileNotFoundError:
pass
def save(self) -> None:
"""
Save datafile to disk
"""
sco.savemat(self.name, self.data)
def get_data(self, key: str) -> np.ndarray:
"""
Returns the specified data. Warning, ZERO ERROR HANDLING
:param key: name of file
:return: 2d data array
"""
return self.data[key]
@types
def get_keys(self) -> typing.List[str]:
"""
Return list of SHA512 hash keys that exist in datafile
:return: list of keys
"""
keys = []
for key in self.data.keys():
if key not in ['__header__', '__version__', '__globals__']:
keys.append(key)
return keys
@types
def check_key(self, key: str) -> bool:
"""
Checks if key exists in datastore. True if yes, False if no.
:param: SHA512 hash key
:return: whether or key not exists in datastore
"""
keys = self.get_keys()
return key in keys
def get_traindata(self) -> np.ndarray:
"""
Pulls all available data and concatenates for model training
:return: 2d array of points
"""
traindata = None
for key, value in self.data.items():
if key not in ['__header__', '__version__', '__globals__']:
if traindata is None:
traindata = value[np.where(value[:, 4] != 0)]
else:
traindata = np.concatenate((traindata, value[np.where(value[:, 4] != 0)]))
return traindata
@types
def plot_traindata(self, name: str='dataplot') -> None:
"""
Plots traindata.... choo choo...
"""
traindata = self.get_traindata()
plt.figure(figsize=(16, 16))
plt.scatter(traindata[:, 1], traindata[:, 2],
c=traindata[:, 5], marker='o', label='Datastore Points')
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.legend(loc=2, fontsize='xx-large')
plt.savefig('./img/{}.png'.format(name))
def printshort(self) -> None:
""" Print shortened version of data to stdout"""
print(self.data)
@types
def update(self, key: str, data: np.ndarray) -> None:
""" Update entry in datastore """
self.data[key] = data
def update1(self, key: str, data: np.ndarray, size: int) -> None:
""" Update one entry in specific record in datastore """
print(data)
if key in self.get_keys():
self.data[key][data[0]] = data
else:
newdata = np.zeros((size, 6))
newdata[data[0]] = data
self.data[key] = newdata
@staticmethod
@types
def hashfile(name: str) -> str:
"""
Gets a hash of a file using block parsing
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
Using SHA512 for long-term support (hehehehe)
"""
hasher = hashlib.sha512()
with open(name, 'rb') as openfile:
for chunk in iter(lambda: openfile.read(4096), b''):
hasher.update(chunk)
return hasher.hexdigest()
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | DataStore.update | python | def update(self, key: str, data: np.ndarray) -> None:
self.data[key] = data | Update entry in datastore | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L216-L218 | null | class DataStore(object):
def __init__(self, name: str):
"""
Uses a .mat as datastore for compatibility.
Eventually may want to switch to SQLite, or some database? Not sure if
ever needed. This class provides that extensible API structure however.
Datafile has the following structure:
learning_data = {filehash:[[trial_index, noise, curvature,
range, domain, accept, viscosity]
,...],...}
Conveniently, you can use the domain field as a check as to whether or
not the row has been touched. If domain=0 (for that row) then that
means that it hasn't been updated.
:param: name of datastore
"""
self.name = name
self.data = {}
def load(self) -> None:
"""
Load datafile
"""
try:
self.data = sco.loadmat(self.name)
except FileNotFoundError:
pass
def save(self) -> None:
"""
Save datafile to disk
"""
sco.savemat(self.name, self.data)
def get_data(self, key: str) -> np.ndarray:
"""
Returns the specified data. Warning, ZERO ERROR HANDLING
:param key: name of file
:return: 2d data array
"""
return self.data[key]
@types
def get_keys(self) -> typing.List[str]:
"""
Return list of SHA512 hash keys that exist in datafile
:return: list of keys
"""
keys = []
for key in self.data.keys():
if key not in ['__header__', '__version__', '__globals__']:
keys.append(key)
return keys
@types
def check_key(self, key: str) -> bool:
"""
Checks if key exists in datastore. True if yes, False if no.
:param: SHA512 hash key
:return: whether or key not exists in datastore
"""
keys = self.get_keys()
return key in keys
def get_traindata(self) -> np.ndarray:
"""
Pulls all available data and concatenates for model training
:return: 2d array of points
"""
traindata = None
for key, value in self.data.items():
if key not in ['__header__', '__version__', '__globals__']:
if traindata is None:
traindata = value[np.where(value[:, 4] != 0)]
else:
traindata = np.concatenate((traindata, value[np.where(value[:, 4] != 0)]))
return traindata
@types
def plot_traindata(self, name: str='dataplot') -> None:
"""
Plots traindata.... choo choo...
"""
traindata = self.get_traindata()
plt.figure(figsize=(16, 16))
plt.scatter(traindata[:, 1], traindata[:, 2],
c=traindata[:, 5], marker='o', label='Datastore Points')
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.legend(loc=2, fontsize='xx-large')
plt.savefig('./img/{}.png'.format(name))
def printdata(self) -> None:
""" Prints data to stdout """
np.set_printoptions(threshold=np.nan)
print(self.data)
np.set_printoptions(threshold=1000)
def printshort(self) -> None:
""" Print shortened version of data to stdout"""
print(self.data)
@types
def update1(self, key: str, data: np.ndarray, size: int) -> None:
""" Update one entry in specific record in datastore """
print(data)
if key in self.get_keys():
self.data[key][data[0]] = data
else:
newdata = np.zeros((size, 6))
newdata[data[0]] = data
self.data[key] = newdata
@staticmethod
@types
def hashfile(name: str) -> str:
"""
Gets a hash of a file using block parsing
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
Using SHA512 for long-term support (hehehehe)
"""
hasher = hashlib.sha512()
with open(name, 'rb') as openfile:
for chunk in iter(lambda: openfile.read(4096), b''):
hasher.update(chunk)
return hasher.hexdigest()
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | DataStore.update1 | python | def update1(self, key: str, data: np.ndarray, size: int) -> None:
print(data)
if key in self.get_keys():
self.data[key][data[0]] = data
else:
newdata = np.zeros((size, 6))
newdata[data[0]] = data
self.data[key] = newdata | Update one entry in specific record in datastore | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L220-L228 | null | class DataStore(object):
def __init__(self, name: str):
"""
Uses a .mat as datastore for compatibility.
Eventually may want to switch to SQLite, or some database? Not sure if
ever needed. This class provides that extensible API structure however.
Datafile has the following structure:
learning_data = {filehash:[[trial_index, noise, curvature,
range, domain, accept, viscosity]
,...],...}
Conveniently, you can use the domain field as a check as to whether or
not the row has been touched. If domain=0 (for that row) then that
means that it hasn't been updated.
:param: name of datastore
"""
self.name = name
self.data = {}
def load(self) -> None:
"""
Load datafile
"""
try:
self.data = sco.loadmat(self.name)
except FileNotFoundError:
pass
def save(self) -> None:
"""
Save datafile to disk
"""
sco.savemat(self.name, self.data)
def get_data(self, key: str) -> np.ndarray:
"""
Returns the specified data. Warning, ZERO ERROR HANDLING
:param key: name of file
:return: 2d data array
"""
return self.data[key]
@types
def get_keys(self) -> typing.List[str]:
"""
Return list of SHA512 hash keys that exist in datafile
:return: list of keys
"""
keys = []
for key in self.data.keys():
if key not in ['__header__', '__version__', '__globals__']:
keys.append(key)
return keys
@types
def check_key(self, key: str) -> bool:
"""
Checks if key exists in datastore. True if yes, False if no.
:param: SHA512 hash key
:return: whether or key not exists in datastore
"""
keys = self.get_keys()
return key in keys
def get_traindata(self) -> np.ndarray:
"""
Pulls all available data and concatenates for model training
:return: 2d array of points
"""
traindata = None
for key, value in self.data.items():
if key not in ['__header__', '__version__', '__globals__']:
if traindata is None:
traindata = value[np.where(value[:, 4] != 0)]
else:
traindata = np.concatenate((traindata, value[np.where(value[:, 4] != 0)]))
return traindata
@types
def plot_traindata(self, name: str='dataplot') -> None:
"""
Plots traindata.... choo choo...
"""
traindata = self.get_traindata()
plt.figure(figsize=(16, 16))
plt.scatter(traindata[:, 1], traindata[:, 2],
c=traindata[:, 5], marker='o', label='Datastore Points')
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.legend(loc=2, fontsize='xx-large')
plt.savefig('./img/{}.png'.format(name))
def printdata(self) -> None:
""" Prints data to stdout """
np.set_printoptions(threshold=np.nan)
print(self.data)
np.set_printoptions(threshold=1000)
def printshort(self) -> None:
""" Print shortened version of data to stdout"""
print(self.data)
@types
def update(self, key: str, data: np.ndarray) -> None:
""" Update entry in datastore """
self.data[key] = data
@staticmethod
@types
def hashfile(name: str) -> str:
"""
Gets a hash of a file using block parsing
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
Using SHA512 for long-term support (hehehehe)
"""
hasher = hashlib.sha512()
with open(name, 'rb') as openfile:
for chunk in iter(lambda: openfile.read(4096), b''):
hasher.update(chunk)
return hasher.hexdigest()
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | DataStore.hashfile | python | def hashfile(name: str) -> str:
hasher = hashlib.sha512()
with open(name, 'rb') as openfile:
for chunk in iter(lambda: openfile.read(4096), b''):
hasher.update(chunk)
return hasher.hexdigest() | Gets a hash of a file using block parsing
http://stackoverflow.com/questions/3431825/generating-a-md5-checksum-of-a-file
Using SHA512 for long-term support (hehehehe) | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L232-L243 | null | class DataStore(object):
def __init__(self, name: str):
"""
Uses a .mat as datastore for compatibility.
Eventually may want to switch to SQLite, or some database? Not sure if
ever needed. This class provides that extensible API structure however.
Datafile has the following structure:
learning_data = {filehash:[[trial_index, noise, curvature,
range, domain, accept, viscosity]
,...],...}
Conveniently, you can use the domain field as a check as to whether or
not the row has been touched. If domain=0 (for that row) then that
means that it hasn't been updated.
:param: name of datastore
"""
self.name = name
self.data = {}
def load(self) -> None:
"""
Load datafile
"""
try:
self.data = sco.loadmat(self.name)
except FileNotFoundError:
pass
def save(self) -> None:
"""
Save datafile to disk
"""
sco.savemat(self.name, self.data)
def get_data(self, key: str) -> np.ndarray:
"""
Returns the specified data. Warning, ZERO ERROR HANDLING
:param key: name of file
:return: 2d data array
"""
return self.data[key]
@types
def get_keys(self) -> typing.List[str]:
"""
Return list of SHA512 hash keys that exist in datafile
:return: list of keys
"""
keys = []
for key in self.data.keys():
if key not in ['__header__', '__version__', '__globals__']:
keys.append(key)
return keys
@types
def check_key(self, key: str) -> bool:
"""
Checks if key exists in datastore. True if yes, False if no.
:param: SHA512 hash key
:return: whether or key not exists in datastore
"""
keys = self.get_keys()
return key in keys
def get_traindata(self) -> np.ndarray:
"""
Pulls all available data and concatenates for model training
:return: 2d array of points
"""
traindata = None
for key, value in self.data.items():
if key not in ['__header__', '__version__', '__globals__']:
if traindata is None:
traindata = value[np.where(value[:, 4] != 0)]
else:
traindata = np.concatenate((traindata, value[np.where(value[:, 4] != 0)]))
return traindata
@types
def plot_traindata(self, name: str='dataplot') -> None:
"""
Plots traindata.... choo choo...
"""
traindata = self.get_traindata()
plt.figure(figsize=(16, 16))
plt.scatter(traindata[:, 1], traindata[:, 2],
c=traindata[:, 5], marker='o', label='Datastore Points')
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.legend(loc=2, fontsize='xx-large')
plt.savefig('./img/{}.png'.format(name))
def printdata(self) -> None:
""" Prints data to stdout """
np.set_printoptions(threshold=np.nan)
print(self.data)
np.set_printoptions(threshold=1000)
def printshort(self) -> None:
""" Print shortened version of data to stdout"""
print(self.data)
@types
def update(self, key: str, data: np.ndarray) -> None:
""" Update entry in datastore """
self.data[key] = data
def update1(self, key: str, data: np.ndarray, size: int) -> None:
""" Update one entry in specific record in datastore """
print(data)
if key in self.get_keys():
self.data[key][data[0]] = data
else:
newdata = np.zeros((size, 6))
newdata[data[0]] = data
self.data[key] = newdata
@staticmethod
@types
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | LineFit._loadedges | python | def _loadedges(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, float, np.ndarray]:
data = sco.loadmat(self.filename)
datakeys = [k for k in data.keys()
if ('right' in k) or ('left' in k) or ('edge' in k)]
averagedata = ((data[datakeys[0]] + data[datakeys[1]]) / 2)
try:
times = (data['times'] - data['times'].min())[0]
except KeyError:
times = np.arange(len(data[datakeys[0]][0]))
try:
accept = data['accept']
except KeyError:
accept = np.zeros(len(times))
try:
ratio = data['ratio']
except KeyError:
ratio = 1
try:
viscosity = data['viscosity']
except KeyError:
viscosity = np.ones(len(times))
return averagedata, times, accept, ratio, viscosity | Attempts to intelligently load the .mat file and take average of left and right edges
:return: left and right averages
:return: times for each column
:return: accept/reject for each column
:return: pixel-inch ratio | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L275-L308 | null | class LineFit(object):
def __init__(self, filename: str, data: np.ndarray=None,
function_number: int=16, spread_number: int=22):
"""
Main class for line fitting and parameter determination
:param: filename
:param: data for fitting
:param: number of functions
:param: gaussian spread number
"""
self.filename = filename
(self.averagedata, self.times,
self.accepts, self.ratio, self.viscosity) = self._loadedges()
self.domain = np.arange(len(self.averagedata[:, 0]))
self.function_number = function_number
self.spread_number = spread_number
if data is None:
self.noises = np.zeros(len(self.times))
self.curves = np.zeros(len(self.times))
self.ranges = np.zeros(len(self.times))
self.domains = np.zeros(len(self.times))
else:
self.noises = data[:, 1]
self.curves = data[:, 2]
self.ranges = data[:, 3]
self.domains = data[:, 4]
@types
def plot_file(self, name: str=None, time: int=None) -> None:
"""
Plot specific time for provided datafile.
If no time provided, will plot middle.
:param: savefile name
:param: time/data column
"""
if not time:
time = int(len(self.times) / 2)
if not name:
name = './img/' + self.filename + '.png'
yhat, residuals, residual_mean, noise = self._get_fit(time)
plt.figure()
plt.scatter(self.domain, self.averagedata[:, time], alpha=0.2)
plt.plot(yhat)
plt.savefig(name)
@staticmethod
@types
def ddiff(arr: np.ndarray) -> np.ndarray:
"""
Helper Function: Divided Differences
input: array
"""
return arr[:-1] - arr[1:]
@types
def _gaussian_function(self, datalength: int, values: np.ndarray,
height: int, index: int) -> np.ndarray:
"""
i'th Regression Model Gaussian
:param: len(x)
:param: x values
:param: height of gaussian
:param: position of gaussian
:return: gaussian bumps over domain
"""
return height * np.exp(-(1 / (self.spread_number * datalength)) *
(values - ((datalength / self.function_number) * index)) ** 2)
@types
def _get_fit(self, time: int) -> typing.Tuple[np.ndarray, np.ndarray, float, float]:
"""
Fit regression model to data
:param: time (column of data)
:return: predicted points
:return: residuals
:return: mean residual
:return: error
"""
rawdata = self.averagedata[:, time]
domain = np.arange(len(rawdata))
datalength = len(domain)
coefficients = np.zeros((datalength, self.function_number + 2))
coefficients[:, 0] = 1
coefficients[:, 1] = domain
for i in range(self.function_number):
coefficients[:, 2 + i] = self._gaussian_function(datalength, domain, 1, i)
betas = linalg.inv(coefficients.transpose().dot(coefficients)).dot(coefficients.transpose().dot(rawdata))
predicted_values = coefficients.dot(betas)
residuals = rawdata - predicted_values
error = np.sqrt(residuals.transpose().dot(residuals) / (datalength - (self.function_number + 2)))
return predicted_values, residuals, residuals.mean(), error
@types
def _get_noise(self, residuals: np.ndarray) -> float:
"""
Determine Noise of Residuals.
:param: residuals
:return: noise
"""
return np.mean(np.abs(residuals))
@types
def analyze(self, time: int=None) -> typing.Tuple[float, float, int, int]:
"""
Determine noise, curvature, range, and domain of specified array.
:param: pixel to inch ratio
:param: time (column) to use.
:return: curvature
:return: noise
:return: range
:return: domain
"""
if not time:
time = int(len(self.times) / 2)
if self.domains[time] == 0:
yhat, residuals, mean_residual, error = self._get_fit(time)
yhat_p = self.ddiff(yhat)
yhat_pp = self.ddiff(yhat_p)
noise = self._get_noise(residuals)
curvature = (1 / self.ratio) * (1 / len(yhat_pp)) * np.sqrt(si.simps(yhat_pp ** 2))
rng = (self.ratio * (np.max(self.averagedata[:, time]) -
np.min(self.averagedata[:, time])))
dmn = self.ratio * len(self.averagedata[:, time])
self.noises[time] = np.log10(noise)
self.curves[time] = np.log10(curvature)
self.ranges[time] = np.log10(rng)
self.domains[time] = np.log10(dmn)
return self.noises[time], self.curves[time], self.ranges[time], self.domains[time]
@types
def analyze_full(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""
Determine noise, curvature, range, and domain of specified data.
Like analyze, except examines the entire file.
:param: float->pixel to inch ratio
:return: array->curvatures
:return: array->noises
:return: array->ranges
:return: array->domains
"""
if self.noises[0] == 0:
timelength = len(self.times)
for i in tqdm(range(timelength)):
self.analyze(time=i)
return self.noises, self.curves, self.ranges, self.domains
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | LineFit.plot_file | python | def plot_file(self, name: str=None, time: int=None) -> None:
if not time:
time = int(len(self.times) / 2)
if not name:
name = './img/' + self.filename + '.png'
yhat, residuals, residual_mean, noise = self._get_fit(time)
plt.figure()
plt.scatter(self.domain, self.averagedata[:, time], alpha=0.2)
plt.plot(yhat)
plt.savefig(name) | Plot specific time for provided datafile.
If no time provided, will plot middle.
:param: savefile name
:param: time/data column | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L310-L326 | null | class LineFit(object):
def __init__(self, filename: str, data: np.ndarray=None,
function_number: int=16, spread_number: int=22):
"""
Main class for line fitting and parameter determination
:param: filename
:param: data for fitting
:param: number of functions
:param: gaussian spread number
"""
self.filename = filename
(self.averagedata, self.times,
self.accepts, self.ratio, self.viscosity) = self._loadedges()
self.domain = np.arange(len(self.averagedata[:, 0]))
self.function_number = function_number
self.spread_number = spread_number
if data is None:
self.noises = np.zeros(len(self.times))
self.curves = np.zeros(len(self.times))
self.ranges = np.zeros(len(self.times))
self.domains = np.zeros(len(self.times))
else:
self.noises = data[:, 1]
self.curves = data[:, 2]
self.ranges = data[:, 3]
self.domains = data[:, 4]
@types
def _loadedges(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, float, np.ndarray]:
"""
Attempts to intelligently load the .mat file and take average of left and right edges
:return: left and right averages
:return: times for each column
:return: accept/reject for each column
:return: pixel-inch ratio
"""
data = sco.loadmat(self.filename)
datakeys = [k for k in data.keys()
if ('right' in k) or ('left' in k) or ('edge' in k)]
averagedata = ((data[datakeys[0]] + data[datakeys[1]]) / 2)
try:
times = (data['times'] - data['times'].min())[0]
except KeyError:
times = np.arange(len(data[datakeys[0]][0]))
try:
accept = data['accept']
except KeyError:
accept = np.zeros(len(times))
try:
ratio = data['ratio']
except KeyError:
ratio = 1
try:
viscosity = data['viscosity']
except KeyError:
viscosity = np.ones(len(times))
return averagedata, times, accept, ratio, viscosity
@staticmethod
@types
def ddiff(arr: np.ndarray) -> np.ndarray:
"""
Helper Function: Divided Differences
input: array
"""
return arr[:-1] - arr[1:]
@types
def _gaussian_function(self, datalength: int, values: np.ndarray,
height: int, index: int) -> np.ndarray:
"""
i'th Regression Model Gaussian
:param: len(x)
:param: x values
:param: height of gaussian
:param: position of gaussian
:return: gaussian bumps over domain
"""
return height * np.exp(-(1 / (self.spread_number * datalength)) *
(values - ((datalength / self.function_number) * index)) ** 2)
@types
def _get_fit(self, time: int) -> typing.Tuple[np.ndarray, np.ndarray, float, float]:
"""
Fit regression model to data
:param: time (column of data)
:return: predicted points
:return: residuals
:return: mean residual
:return: error
"""
rawdata = self.averagedata[:, time]
domain = np.arange(len(rawdata))
datalength = len(domain)
coefficients = np.zeros((datalength, self.function_number + 2))
coefficients[:, 0] = 1
coefficients[:, 1] = domain
for i in range(self.function_number):
coefficients[:, 2 + i] = self._gaussian_function(datalength, domain, 1, i)
betas = linalg.inv(coefficients.transpose().dot(coefficients)).dot(coefficients.transpose().dot(rawdata))
predicted_values = coefficients.dot(betas)
residuals = rawdata - predicted_values
error = np.sqrt(residuals.transpose().dot(residuals) / (datalength - (self.function_number + 2)))
return predicted_values, residuals, residuals.mean(), error
@types
def _get_noise(self, residuals: np.ndarray) -> float:
"""
Determine Noise of Residuals.
:param: residuals
:return: noise
"""
return np.mean(np.abs(residuals))
@types
def analyze(self, time: int=None) -> typing.Tuple[float, float, int, int]:
"""
Determine noise, curvature, range, and domain of specified array.
:param: pixel to inch ratio
:param: time (column) to use.
:return: curvature
:return: noise
:return: range
:return: domain
"""
if not time:
time = int(len(self.times) / 2)
if self.domains[time] == 0:
yhat, residuals, mean_residual, error = self._get_fit(time)
yhat_p = self.ddiff(yhat)
yhat_pp = self.ddiff(yhat_p)
noise = self._get_noise(residuals)
curvature = (1 / self.ratio) * (1 / len(yhat_pp)) * np.sqrt(si.simps(yhat_pp ** 2))
rng = (self.ratio * (np.max(self.averagedata[:, time]) -
np.min(self.averagedata[:, time])))
dmn = self.ratio * len(self.averagedata[:, time])
self.noises[time] = np.log10(noise)
self.curves[time] = np.log10(curvature)
self.ranges[time] = np.log10(rng)
self.domains[time] = np.log10(dmn)
return self.noises[time], self.curves[time], self.ranges[time], self.domains[time]
@types
def analyze_full(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""
Determine noise, curvature, range, and domain of specified data.
Like analyze, except examines the entire file.
:param: float->pixel to inch ratio
:return: array->curvatures
:return: array->noises
:return: array->ranges
:return: array->domains
"""
if self.noises[0] == 0:
timelength = len(self.times)
for i in tqdm(range(timelength)):
self.analyze(time=i)
return self.noises, self.curves, self.ranges, self.domains
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | LineFit._gaussian_function | python | def _gaussian_function(self, datalength: int, values: np.ndarray,
height: int, index: int) -> np.ndarray:
return height * np.exp(-(1 / (self.spread_number * datalength)) *
(values - ((datalength / self.function_number) * index)) ** 2) | i'th Regression Model Gaussian
:param: len(x)
:param: x values
:param: height of gaussian
:param: position of gaussian
:return: gaussian bumps over domain | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L339-L352 | null | class LineFit(object):
def __init__(self, filename: str, data: np.ndarray=None,
function_number: int=16, spread_number: int=22):
"""
Main class for line fitting and parameter determination
:param: filename
:param: data for fitting
:param: number of functions
:param: gaussian spread number
"""
self.filename = filename
(self.averagedata, self.times,
self.accepts, self.ratio, self.viscosity) = self._loadedges()
self.domain = np.arange(len(self.averagedata[:, 0]))
self.function_number = function_number
self.spread_number = spread_number
if data is None:
self.noises = np.zeros(len(self.times))
self.curves = np.zeros(len(self.times))
self.ranges = np.zeros(len(self.times))
self.domains = np.zeros(len(self.times))
else:
self.noises = data[:, 1]
self.curves = data[:, 2]
self.ranges = data[:, 3]
self.domains = data[:, 4]
@types
def _loadedges(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, float, np.ndarray]:
"""
Attempts to intelligently load the .mat file and take average of left and right edges
:return: left and right averages
:return: times for each column
:return: accept/reject for each column
:return: pixel-inch ratio
"""
data = sco.loadmat(self.filename)
datakeys = [k for k in data.keys()
if ('right' in k) or ('left' in k) or ('edge' in k)]
averagedata = ((data[datakeys[0]] + data[datakeys[1]]) / 2)
try:
times = (data['times'] - data['times'].min())[0]
except KeyError:
times = np.arange(len(data[datakeys[0]][0]))
try:
accept = data['accept']
except KeyError:
accept = np.zeros(len(times))
try:
ratio = data['ratio']
except KeyError:
ratio = 1
try:
viscosity = data['viscosity']
except KeyError:
viscosity = np.ones(len(times))
return averagedata, times, accept, ratio, viscosity
def plot_file(self, name: str=None, time: int=None) -> None:
"""
Plot specific time for provided datafile.
If no time provided, will plot middle.
:param: savefile name
:param: time/data column
"""
if not time:
time = int(len(self.times) / 2)
if not name:
name = './img/' + self.filename + '.png'
yhat, residuals, residual_mean, noise = self._get_fit(time)
plt.figure()
plt.scatter(self.domain, self.averagedata[:, time], alpha=0.2)
plt.plot(yhat)
plt.savefig(name)
@staticmethod
@types
def ddiff(arr: np.ndarray) -> np.ndarray:
"""
Helper Function: Divided Differences
input: array
"""
return arr[:-1] - arr[1:]
@types
@types
def _get_fit(self, time: int) -> typing.Tuple[np.ndarray, np.ndarray, float, float]:
"""
Fit regression model to data
:param: time (column of data)
:return: predicted points
:return: residuals
:return: mean residual
:return: error
"""
rawdata = self.averagedata[:, time]
domain = np.arange(len(rawdata))
datalength = len(domain)
coefficients = np.zeros((datalength, self.function_number + 2))
coefficients[:, 0] = 1
coefficients[:, 1] = domain
for i in range(self.function_number):
coefficients[:, 2 + i] = self._gaussian_function(datalength, domain, 1, i)
betas = linalg.inv(coefficients.transpose().dot(coefficients)).dot(coefficients.transpose().dot(rawdata))
predicted_values = coefficients.dot(betas)
residuals = rawdata - predicted_values
error = np.sqrt(residuals.transpose().dot(residuals) / (datalength - (self.function_number + 2)))
return predicted_values, residuals, residuals.mean(), error
@types
def _get_noise(self, residuals: np.ndarray) -> float:
"""
Determine Noise of Residuals.
:param: residuals
:return: noise
"""
return np.mean(np.abs(residuals))
@types
def analyze(self, time: int=None) -> typing.Tuple[float, float, int, int]:
"""
Determine noise, curvature, range, and domain of specified array.
:param: pixel to inch ratio
:param: time (column) to use.
:return: curvature
:return: noise
:return: range
:return: domain
"""
if not time:
time = int(len(self.times) / 2)
if self.domains[time] == 0:
yhat, residuals, mean_residual, error = self._get_fit(time)
yhat_p = self.ddiff(yhat)
yhat_pp = self.ddiff(yhat_p)
noise = self._get_noise(residuals)
curvature = (1 / self.ratio) * (1 / len(yhat_pp)) * np.sqrt(si.simps(yhat_pp ** 2))
rng = (self.ratio * (np.max(self.averagedata[:, time]) -
np.min(self.averagedata[:, time])))
dmn = self.ratio * len(self.averagedata[:, time])
self.noises[time] = np.log10(noise)
self.curves[time] = np.log10(curvature)
self.ranges[time] = np.log10(rng)
self.domains[time] = np.log10(dmn)
return self.noises[time], self.curves[time], self.ranges[time], self.domains[time]
@types
def analyze_full(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""
Determine noise, curvature, range, and domain of specified data.
Like analyze, except examines the entire file.
:param: float->pixel to inch ratio
:return: array->curvatures
:return: array->noises
:return: array->ranges
:return: array->domains
"""
if self.noises[0] == 0:
timelength = len(self.times)
for i in tqdm(range(timelength)):
self.analyze(time=i)
return self.noises, self.curves, self.ranges, self.domains
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | LineFit._get_fit | python | def _get_fit(self, time: int) -> typing.Tuple[np.ndarray, np.ndarray, float, float]:
rawdata = self.averagedata[:, time]
domain = np.arange(len(rawdata))
datalength = len(domain)
coefficients = np.zeros((datalength, self.function_number + 2))
coefficients[:, 0] = 1
coefficients[:, 1] = domain
for i in range(self.function_number):
coefficients[:, 2 + i] = self._gaussian_function(datalength, domain, 1, i)
betas = linalg.inv(coefficients.transpose().dot(coefficients)).dot(coefficients.transpose().dot(rawdata))
predicted_values = coefficients.dot(betas)
residuals = rawdata - predicted_values
error = np.sqrt(residuals.transpose().dot(residuals) / (datalength - (self.function_number + 2)))
return predicted_values, residuals, residuals.mean(), error | Fit regression model to data
:param: time (column of data)
:return: predicted points
:return: residuals
:return: mean residual
:return: error | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L355-L378 | null | class LineFit(object):
def __init__(self, filename: str, data: np.ndarray=None,
function_number: int=16, spread_number: int=22):
"""
Main class for line fitting and parameter determination
:param: filename
:param: data for fitting
:param: number of functions
:param: gaussian spread number
"""
self.filename = filename
(self.averagedata, self.times,
self.accepts, self.ratio, self.viscosity) = self._loadedges()
self.domain = np.arange(len(self.averagedata[:, 0]))
self.function_number = function_number
self.spread_number = spread_number
if data is None:
self.noises = np.zeros(len(self.times))
self.curves = np.zeros(len(self.times))
self.ranges = np.zeros(len(self.times))
self.domains = np.zeros(len(self.times))
else:
self.noises = data[:, 1]
self.curves = data[:, 2]
self.ranges = data[:, 3]
self.domains = data[:, 4]
@types
def _loadedges(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, float, np.ndarray]:
"""
Attempts to intelligently load the .mat file and take average of left and right edges
:return: left and right averages
:return: times for each column
:return: accept/reject for each column
:return: pixel-inch ratio
"""
data = sco.loadmat(self.filename)
datakeys = [k for k in data.keys()
if ('right' in k) or ('left' in k) or ('edge' in k)]
averagedata = ((data[datakeys[0]] + data[datakeys[1]]) / 2)
try:
times = (data['times'] - data['times'].min())[0]
except KeyError:
times = np.arange(len(data[datakeys[0]][0]))
try:
accept = data['accept']
except KeyError:
accept = np.zeros(len(times))
try:
ratio = data['ratio']
except KeyError:
ratio = 1
try:
viscosity = data['viscosity']
except KeyError:
viscosity = np.ones(len(times))
return averagedata, times, accept, ratio, viscosity
def plot_file(self, name: str=None, time: int=None) -> None:
"""
Plot specific time for provided datafile.
If no time provided, will plot middle.
:param: savefile name
:param: time/data column
"""
if not time:
time = int(len(self.times) / 2)
if not name:
name = './img/' + self.filename + '.png'
yhat, residuals, residual_mean, noise = self._get_fit(time)
plt.figure()
plt.scatter(self.domain, self.averagedata[:, time], alpha=0.2)
plt.plot(yhat)
plt.savefig(name)
@staticmethod
@types
def ddiff(arr: np.ndarray) -> np.ndarray:
"""
Helper Function: Divided Differences
input: array
"""
return arr[:-1] - arr[1:]
@types
def _gaussian_function(self, datalength: int, values: np.ndarray,
height: int, index: int) -> np.ndarray:
"""
i'th Regression Model Gaussian
:param: len(x)
:param: x values
:param: height of gaussian
:param: position of gaussian
:return: gaussian bumps over domain
"""
return height * np.exp(-(1 / (self.spread_number * datalength)) *
(values - ((datalength / self.function_number) * index)) ** 2)
@types
@types
def _get_noise(self, residuals: np.ndarray) -> float:
"""
Determine Noise of Residuals.
:param: residuals
:return: noise
"""
return np.mean(np.abs(residuals))
@types
def analyze(self, time: int=None) -> typing.Tuple[float, float, int, int]:
"""
Determine noise, curvature, range, and domain of specified array.
:param: pixel to inch ratio
:param: time (column) to use.
:return: curvature
:return: noise
:return: range
:return: domain
"""
if not time:
time = int(len(self.times) / 2)
if self.domains[time] == 0:
yhat, residuals, mean_residual, error = self._get_fit(time)
yhat_p = self.ddiff(yhat)
yhat_pp = self.ddiff(yhat_p)
noise = self._get_noise(residuals)
curvature = (1 / self.ratio) * (1 / len(yhat_pp)) * np.sqrt(si.simps(yhat_pp ** 2))
rng = (self.ratio * (np.max(self.averagedata[:, time]) -
np.min(self.averagedata[:, time])))
dmn = self.ratio * len(self.averagedata[:, time])
self.noises[time] = np.log10(noise)
self.curves[time] = np.log10(curvature)
self.ranges[time] = np.log10(rng)
self.domains[time] = np.log10(dmn)
return self.noises[time], self.curves[time], self.ranges[time], self.domains[time]
@types
def analyze_full(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""
Determine noise, curvature, range, and domain of specified data.
Like analyze, except examines the entire file.
:param: float->pixel to inch ratio
:return: array->curvatures
:return: array->noises
:return: array->ranges
:return: array->domains
"""
if self.noises[0] == 0:
timelength = len(self.times)
for i in tqdm(range(timelength)):
self.analyze(time=i)
return self.noises, self.curves, self.ranges, self.domains
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | LineFit._get_noise | python | def _get_noise(self, residuals: np.ndarray) -> float:
return np.mean(np.abs(residuals)) | Determine Noise of Residuals.
:param: residuals
:return: noise | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L381-L389 | null | class LineFit(object):
def __init__(self, filename: str, data: np.ndarray=None,
function_number: int=16, spread_number: int=22):
"""
Main class for line fitting and parameter determination
:param: filename
:param: data for fitting
:param: number of functions
:param: gaussian spread number
"""
self.filename = filename
(self.averagedata, self.times,
self.accepts, self.ratio, self.viscosity) = self._loadedges()
self.domain = np.arange(len(self.averagedata[:, 0]))
self.function_number = function_number
self.spread_number = spread_number
if data is None:
self.noises = np.zeros(len(self.times))
self.curves = np.zeros(len(self.times))
self.ranges = np.zeros(len(self.times))
self.domains = np.zeros(len(self.times))
else:
self.noises = data[:, 1]
self.curves = data[:, 2]
self.ranges = data[:, 3]
self.domains = data[:, 4]
@types
def _loadedges(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, float, np.ndarray]:
"""
Attempts to intelligently load the .mat file and take average of left and right edges
:return: left and right averages
:return: times for each column
:return: accept/reject for each column
:return: pixel-inch ratio
"""
data = sco.loadmat(self.filename)
datakeys = [k for k in data.keys()
if ('right' in k) or ('left' in k) or ('edge' in k)]
averagedata = ((data[datakeys[0]] + data[datakeys[1]]) / 2)
try:
times = (data['times'] - data['times'].min())[0]
except KeyError:
times = np.arange(len(data[datakeys[0]][0]))
try:
accept = data['accept']
except KeyError:
accept = np.zeros(len(times))
try:
ratio = data['ratio']
except KeyError:
ratio = 1
try:
viscosity = data['viscosity']
except KeyError:
viscosity = np.ones(len(times))
return averagedata, times, accept, ratio, viscosity
def plot_file(self, name: str=None, time: int=None) -> None:
"""
Plot specific time for provided datafile.
If no time provided, will plot middle.
:param: savefile name
:param: time/data column
"""
if not time:
time = int(len(self.times) / 2)
if not name:
name = './img/' + self.filename + '.png'
yhat, residuals, residual_mean, noise = self._get_fit(time)
plt.figure()
plt.scatter(self.domain, self.averagedata[:, time], alpha=0.2)
plt.plot(yhat)
plt.savefig(name)
@staticmethod
@types
def ddiff(arr: np.ndarray) -> np.ndarray:
"""
Helper Function: Divided Differences
input: array
"""
return arr[:-1] - arr[1:]
@types
def _gaussian_function(self, datalength: int, values: np.ndarray,
height: int, index: int) -> np.ndarray:
"""
i'th Regression Model Gaussian
:param: len(x)
:param: x values
:param: height of gaussian
:param: position of gaussian
:return: gaussian bumps over domain
"""
return height * np.exp(-(1 / (self.spread_number * datalength)) *
(values - ((datalength / self.function_number) * index)) ** 2)
@types
def _get_fit(self, time: int) -> typing.Tuple[np.ndarray, np.ndarray, float, float]:
"""
Fit regression model to data
:param: time (column of data)
:return: predicted points
:return: residuals
:return: mean residual
:return: error
"""
rawdata = self.averagedata[:, time]
domain = np.arange(len(rawdata))
datalength = len(domain)
coefficients = np.zeros((datalength, self.function_number + 2))
coefficients[:, 0] = 1
coefficients[:, 1] = domain
for i in range(self.function_number):
coefficients[:, 2 + i] = self._gaussian_function(datalength, domain, 1, i)
betas = linalg.inv(coefficients.transpose().dot(coefficients)).dot(coefficients.transpose().dot(rawdata))
predicted_values = coefficients.dot(betas)
residuals = rawdata - predicted_values
error = np.sqrt(residuals.transpose().dot(residuals) / (datalength - (self.function_number + 2)))
return predicted_values, residuals, residuals.mean(), error
@types
@types
def analyze(self, time: int=None) -> typing.Tuple[float, float, int, int]:
"""
Determine noise, curvature, range, and domain of specified array.
:param: pixel to inch ratio
:param: time (column) to use.
:return: curvature
:return: noise
:return: range
:return: domain
"""
if not time:
time = int(len(self.times) / 2)
if self.domains[time] == 0:
yhat, residuals, mean_residual, error = self._get_fit(time)
yhat_p = self.ddiff(yhat)
yhat_pp = self.ddiff(yhat_p)
noise = self._get_noise(residuals)
curvature = (1 / self.ratio) * (1 / len(yhat_pp)) * np.sqrt(si.simps(yhat_pp ** 2))
rng = (self.ratio * (np.max(self.averagedata[:, time]) -
np.min(self.averagedata[:, time])))
dmn = self.ratio * len(self.averagedata[:, time])
self.noises[time] = np.log10(noise)
self.curves[time] = np.log10(curvature)
self.ranges[time] = np.log10(rng)
self.domains[time] = np.log10(dmn)
return self.noises[time], self.curves[time], self.ranges[time], self.domains[time]
@types
def analyze_full(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""
Determine noise, curvature, range, and domain of specified data.
Like analyze, except examines the entire file.
:param: float->pixel to inch ratio
:return: array->curvatures
:return: array->noises
:return: array->ranges
:return: array->domains
"""
if self.noises[0] == 0:
timelength = len(self.times)
for i in tqdm(range(timelength)):
self.analyze(time=i)
return self.noises, self.curves, self.ranges, self.domains
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | LineFit.analyze | python | def analyze(self, time: int=None) -> typing.Tuple[float, float, int, int]:
if not time:
time = int(len(self.times) / 2)
if self.domains[time] == 0:
yhat, residuals, mean_residual, error = self._get_fit(time)
yhat_p = self.ddiff(yhat)
yhat_pp = self.ddiff(yhat_p)
noise = self._get_noise(residuals)
curvature = (1 / self.ratio) * (1 / len(yhat_pp)) * np.sqrt(si.simps(yhat_pp ** 2))
rng = (self.ratio * (np.max(self.averagedata[:, time]) -
np.min(self.averagedata[:, time])))
dmn = self.ratio * len(self.averagedata[:, time])
self.noises[time] = np.log10(noise)
self.curves[time] = np.log10(curvature)
self.ranges[time] = np.log10(rng)
self.domains[time] = np.log10(dmn)
return self.noises[time], self.curves[time], self.ranges[time], self.domains[time] | Determine noise, curvature, range, and domain of specified array.
:param: pixel to inch ratio
:param: time (column) to use.
:return: curvature
:return: noise
:return: range
:return: domain | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L392-L420 | null | class LineFit(object):
def __init__(self, filename: str, data: np.ndarray=None,
function_number: int=16, spread_number: int=22):
"""
Main class for line fitting and parameter determination
:param: filename
:param: data for fitting
:param: number of functions
:param: gaussian spread number
"""
self.filename = filename
(self.averagedata, self.times,
self.accepts, self.ratio, self.viscosity) = self._loadedges()
self.domain = np.arange(len(self.averagedata[:, 0]))
self.function_number = function_number
self.spread_number = spread_number
if data is None:
self.noises = np.zeros(len(self.times))
self.curves = np.zeros(len(self.times))
self.ranges = np.zeros(len(self.times))
self.domains = np.zeros(len(self.times))
else:
self.noises = data[:, 1]
self.curves = data[:, 2]
self.ranges = data[:, 3]
self.domains = data[:, 4]
@types
def _loadedges(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, float, np.ndarray]:
"""
Attempts to intelligently load the .mat file and take average of left and right edges
:return: left and right averages
:return: times for each column
:return: accept/reject for each column
:return: pixel-inch ratio
"""
data = sco.loadmat(self.filename)
datakeys = [k for k in data.keys()
if ('right' in k) or ('left' in k) or ('edge' in k)]
averagedata = ((data[datakeys[0]] + data[datakeys[1]]) / 2)
try:
times = (data['times'] - data['times'].min())[0]
except KeyError:
times = np.arange(len(data[datakeys[0]][0]))
try:
accept = data['accept']
except KeyError:
accept = np.zeros(len(times))
try:
ratio = data['ratio']
except KeyError:
ratio = 1
try:
viscosity = data['viscosity']
except KeyError:
viscosity = np.ones(len(times))
return averagedata, times, accept, ratio, viscosity
def plot_file(self, name: str=None, time: int=None) -> None:
"""
Plot specific time for provided datafile.
If no time provided, will plot middle.
:param: savefile name
:param: time/data column
"""
if not time:
time = int(len(self.times) / 2)
if not name:
name = './img/' + self.filename + '.png'
yhat, residuals, residual_mean, noise = self._get_fit(time)
plt.figure()
plt.scatter(self.domain, self.averagedata[:, time], alpha=0.2)
plt.plot(yhat)
plt.savefig(name)
@staticmethod
@types
def ddiff(arr: np.ndarray) -> np.ndarray:
"""
Helper Function: Divided Differences
input: array
"""
return arr[:-1] - arr[1:]
@types
def _gaussian_function(self, datalength: int, values: np.ndarray,
height: int, index: int) -> np.ndarray:
"""
i'th Regression Model Gaussian
:param: len(x)
:param: x values
:param: height of gaussian
:param: position of gaussian
:return: gaussian bumps over domain
"""
return height * np.exp(-(1 / (self.spread_number * datalength)) *
(values - ((datalength / self.function_number) * index)) ** 2)
@types
def _get_fit(self, time: int) -> typing.Tuple[np.ndarray, np.ndarray, float, float]:
"""
Fit regression model to data
:param: time (column of data)
:return: predicted points
:return: residuals
:return: mean residual
:return: error
"""
rawdata = self.averagedata[:, time]
domain = np.arange(len(rawdata))
datalength = len(domain)
coefficients = np.zeros((datalength, self.function_number + 2))
coefficients[:, 0] = 1
coefficients[:, 1] = domain
for i in range(self.function_number):
coefficients[:, 2 + i] = self._gaussian_function(datalength, domain, 1, i)
betas = linalg.inv(coefficients.transpose().dot(coefficients)).dot(coefficients.transpose().dot(rawdata))
predicted_values = coefficients.dot(betas)
residuals = rawdata - predicted_values
error = np.sqrt(residuals.transpose().dot(residuals) / (datalength - (self.function_number + 2)))
return predicted_values, residuals, residuals.mean(), error
@types
def _get_noise(self, residuals: np.ndarray) -> float:
"""
Determine Noise of Residuals.
:param: residuals
:return: noise
"""
return np.mean(np.abs(residuals))
@types
@types
def analyze_full(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
"""
Determine noise, curvature, range, and domain of specified data.
Like analyze, except examines the entire file.
:param: float->pixel to inch ratio
:return: array->curvatures
:return: array->noises
:return: array->ranges
:return: array->domains
"""
if self.noises[0] == 0:
timelength = len(self.times)
for i in tqdm(range(timelength)):
self.analyze(time=i)
return self.noises, self.curves, self.ranges, self.domains
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | LineFit.analyze_full | python | def analyze_full(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
if self.noises[0] == 0:
timelength = len(self.times)
for i in tqdm(range(timelength)):
self.analyze(time=i)
return self.noises, self.curves, self.ranges, self.domains | Determine noise, curvature, range, and domain of specified data.
Like analyze, except examines the entire file.
:param: float->pixel to inch ratio
:return: array->curvatures
:return: array->noises
:return: array->ranges
:return: array->domains | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L423-L439 | null | class LineFit(object):
def __init__(self, filename: str, data: np.ndarray=None,
function_number: int=16, spread_number: int=22):
"""
Main class for line fitting and parameter determination
:param: filename
:param: data for fitting
:param: number of functions
:param: gaussian spread number
"""
self.filename = filename
(self.averagedata, self.times,
self.accepts, self.ratio, self.viscosity) = self._loadedges()
self.domain = np.arange(len(self.averagedata[:, 0]))
self.function_number = function_number
self.spread_number = spread_number
if data is None:
self.noises = np.zeros(len(self.times))
self.curves = np.zeros(len(self.times))
self.ranges = np.zeros(len(self.times))
self.domains = np.zeros(len(self.times))
else:
self.noises = data[:, 1]
self.curves = data[:, 2]
self.ranges = data[:, 3]
self.domains = data[:, 4]
@types
def _loadedges(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, float, np.ndarray]:
"""
Attempts to intelligently load the .mat file and take average of left and right edges
:return: left and right averages
:return: times for each column
:return: accept/reject for each column
:return: pixel-inch ratio
"""
data = sco.loadmat(self.filename)
datakeys = [k for k in data.keys()
if ('right' in k) or ('left' in k) or ('edge' in k)]
averagedata = ((data[datakeys[0]] + data[datakeys[1]]) / 2)
try:
times = (data['times'] - data['times'].min())[0]
except KeyError:
times = np.arange(len(data[datakeys[0]][0]))
try:
accept = data['accept']
except KeyError:
accept = np.zeros(len(times))
try:
ratio = data['ratio']
except KeyError:
ratio = 1
try:
viscosity = data['viscosity']
except KeyError:
viscosity = np.ones(len(times))
return averagedata, times, accept, ratio, viscosity
def plot_file(self, name: str=None, time: int=None) -> None:
"""
Plot specific time for provided datafile.
If no time provided, will plot middle.
:param: savefile name
:param: time/data column
"""
if not time:
time = int(len(self.times) / 2)
if not name:
name = './img/' + self.filename + '.png'
yhat, residuals, residual_mean, noise = self._get_fit(time)
plt.figure()
plt.scatter(self.domain, self.averagedata[:, time], alpha=0.2)
plt.plot(yhat)
plt.savefig(name)
@staticmethod
@types
def ddiff(arr: np.ndarray) -> np.ndarray:
"""
Helper Function: Divided Differences
input: array
"""
return arr[:-1] - arr[1:]
@types
def _gaussian_function(self, datalength: int, values: np.ndarray,
height: int, index: int) -> np.ndarray:
"""
i'th Regression Model Gaussian
:param: len(x)
:param: x values
:param: height of gaussian
:param: position of gaussian
:return: gaussian bumps over domain
"""
return height * np.exp(-(1 / (self.spread_number * datalength)) *
(values - ((datalength / self.function_number) * index)) ** 2)
@types
def _get_fit(self, time: int) -> typing.Tuple[np.ndarray, np.ndarray, float, float]:
"""
Fit regression model to data
:param: time (column of data)
:return: predicted points
:return: residuals
:return: mean residual
:return: error
"""
rawdata = self.averagedata[:, time]
domain = np.arange(len(rawdata))
datalength = len(domain)
coefficients = np.zeros((datalength, self.function_number + 2))
coefficients[:, 0] = 1
coefficients[:, 1] = domain
for i in range(self.function_number):
coefficients[:, 2 + i] = self._gaussian_function(datalength, domain, 1, i)
betas = linalg.inv(coefficients.transpose().dot(coefficients)).dot(coefficients.transpose().dot(rawdata))
predicted_values = coefficients.dot(betas)
residuals = rawdata - predicted_values
error = np.sqrt(residuals.transpose().dot(residuals) / (datalength - (self.function_number + 2)))
return predicted_values, residuals, residuals.mean(), error
@types
def _get_noise(self, residuals: np.ndarray) -> float:
"""
Determine Noise of Residuals.
:param: residuals
:return: noise
"""
return np.mean(np.abs(residuals))
@types
def analyze(self, time: int=None) -> typing.Tuple[float, float, int, int]:
"""
Determine noise, curvature, range, and domain of specified array.
:param: pixel to inch ratio
:param: time (column) to use.
:return: curvature
:return: noise
:return: range
:return: domain
"""
if not time:
time = int(len(self.times) / 2)
if self.domains[time] == 0:
yhat, residuals, mean_residual, error = self._get_fit(time)
yhat_p = self.ddiff(yhat)
yhat_pp = self.ddiff(yhat_p)
noise = self._get_noise(residuals)
curvature = (1 / self.ratio) * (1 / len(yhat_pp)) * np.sqrt(si.simps(yhat_pp ** 2))
rng = (self.ratio * (np.max(self.averagedata[:, time]) -
np.min(self.averagedata[:, time])))
dmn = self.ratio * len(self.averagedata[:, time])
self.noises[time] = np.log10(noise)
self.curves[time] = np.log10(curvature)
self.ranges[time] = np.log10(rng)
self.domains[time] = np.log10(dmn)
return self.noises[time], self.curves[time], self.ranges[time], self.domains[time]
@types
|
Dispersive-Hydrodynamics-Lab/PACE | PACE/PACE.py | ML.get_algo | python | def get_algo(self, args: argparse.Namespace, algo: str) -> object:
if algo == 'nn':
return NearestNeighbor(args.nnk) | Returns machine learning algorithm based on arguments | train | https://github.com/Dispersive-Hydrodynamics-Lab/PACE/blob/4ce27d5fc9b02cc2ce55f6fea7fc8d6015317e1f/PACE/PACE.py#L449-L452 | null | class ML(object):
def __init__(self, args: argparse.Namespace, algo: str='nn'):
"""
Machine Learning to determine usability of data....
"""
self.algo = self.get_algo(args, algo)
def train(self) -> None:
""" Trains specified algorithm """
traindata = self.get_data()
self.algo.train(traindata)
def get_data(self) -> np.ndarray:
"""
Gets data for training
We use the domain column to determine what fields have been filled out
If the domain is zero (i.e. not in error) than we should probably ignore it anyway
"""
traindata = data.get_traindata()
return traindata
def plot_fitspace(self, name: str, X: np.ndarray, y: np.ndarray, clf: object) -> None:
""" Plot 2dplane of fitspace """
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])
# Plot the decision boundary. For that, we will assign a color to each
# point in the mesh [x_min, m_max]x[y_min, y_max].
h = 0.01 # Mesh step size
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure()
plt.pcolormesh(xx, yy, Z, cmap=cmap_light)
# Plot also the training points
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold)
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.xlabel(r'$\log_{10}$ Noise')
plt.ylabel(r'$\log_{10}$ Curvature')
plt.savefig(name)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.