body
stringlengths 26
98.2k
| body_hash
int64 -9,222,864,604,528,158,000
9,221,803,474B
| docstring
stringlengths 1
16.8k
| path
stringlengths 5
230
| name
stringlengths 1
96
| repository_name
stringlengths 7
89
| lang
stringclasses 1
value | body_without_docstring
stringlengths 20
98.2k
|
|---|---|---|---|---|---|---|---|
def _der(self, x):
'\n Returns the first derivative of the function at each value in x. Only\n called internally by HARKinterpolator1D.derivative.\n '
(y, dydx) = self.eval_with_derivative(x)
return dydx
| -4,543,842,455,542,227,500
|
Returns the first derivative of the function at each value in x. Only
called internally by HARKinterpolator1D.derivative.
|
HARK/interpolation.py
|
_der
|
cohenimhuji/HARK
|
python
|
def _der(self, x):
'\n Returns the first derivative of the function at each value in x. Only\n called internally by HARKinterpolator1D.derivative.\n '
(y, dydx) = self.eval_with_derivative(x)
return dydx
|
def _evalAndDer(self, x):
'\n Returns the level and first derivative of the function at each value in\n x. Only called internally by HARKinterpolator1D.eval_and_der.\n '
m = len(x)
fx = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
fx[:, j] = self.functions[j](x)
fx[np.isnan(fx)] = np.inf
i = np.argmin(fx, axis=1)
y = fx[(np.arange(m), i)]
dydx = np.zeros_like(y)
for j in range(self.funcCount):
c = (i == j)
dydx[c] = self.functions[j].derivative(x[c])
return (y, dydx)
| 89,318,326,350,410,140
|
Returns the level and first derivative of the function at each value in
x. Only called internally by HARKinterpolator1D.eval_and_der.
|
HARK/interpolation.py
|
_evalAndDer
|
cohenimhuji/HARK
|
python
|
def _evalAndDer(self, x):
'\n Returns the level and first derivative of the function at each value in\n x. Only called internally by HARKinterpolator1D.eval_and_der.\n '
m = len(x)
fx = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
fx[:, j] = self.functions[j](x)
fx[np.isnan(fx)] = np.inf
i = np.argmin(fx, axis=1)
y = fx[(np.arange(m), i)]
dydx = np.zeros_like(y)
for j in range(self.funcCount):
c = (i == j)
dydx[c] = self.functions[j].derivative(x[c])
return (y, dydx)
|
def __init__(self, *functions):
'\n Constructor to make a new upper envelope iterpolation.\n\n Parameters\n ----------\n *functions : function\n Any number of real functions; often instances of HARKinterpolator1D\n\n Returns\n -------\n new instance of UpperEnvelope\n '
self.functions = []
for function in functions:
self.functions.append(function)
self.funcCount = len(self.functions)
| -3,742,434,272,132,200,000
|
Constructor to make a new upper envelope iterpolation.
Parameters
----------
*functions : function
Any number of real functions; often instances of HARKinterpolator1D
Returns
-------
new instance of UpperEnvelope
|
HARK/interpolation.py
|
__init__
|
cohenimhuji/HARK
|
python
|
def __init__(self, *functions):
'\n Constructor to make a new upper envelope iterpolation.\n\n Parameters\n ----------\n *functions : function\n Any number of real functions; often instances of HARKinterpolator1D\n\n Returns\n -------\n new instance of UpperEnvelope\n '
self.functions = []
for function in functions:
self.functions.append(function)
self.funcCount = len(self.functions)
|
def _evaluate(self, x):
'\n Returns the level of the function at each value in x as the maximum among\n all of the functions. Only called internally by HARKinterpolator1D.__call__.\n '
if _isscalar(x):
y = np.nanmax([f(x) for f in self.functions])
else:
m = len(x)
fx = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
fx[:, j] = self.functions[j](x)
y = np.nanmax(fx, axis=1)
return y
| 7,497,230,056,737,183,000
|
Returns the level of the function at each value in x as the maximum among
all of the functions. Only called internally by HARKinterpolator1D.__call__.
|
HARK/interpolation.py
|
_evaluate
|
cohenimhuji/HARK
|
python
|
def _evaluate(self, x):
'\n Returns the level of the function at each value in x as the maximum among\n all of the functions. Only called internally by HARKinterpolator1D.__call__.\n '
if _isscalar(x):
y = np.nanmax([f(x) for f in self.functions])
else:
m = len(x)
fx = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
fx[:, j] = self.functions[j](x)
y = np.nanmax(fx, axis=1)
return y
|
def _der(self, x):
'\n Returns the first derivative of the function at each value in x. Only\n called internally by HARKinterpolator1D.derivative.\n '
(y, dydx) = self.eval_with_derivative(x)
return dydx
| -4,543,842,455,542,227,500
|
Returns the first derivative of the function at each value in x. Only
called internally by HARKinterpolator1D.derivative.
|
HARK/interpolation.py
|
_der
|
cohenimhuji/HARK
|
python
|
def _der(self, x):
'\n Returns the first derivative of the function at each value in x. Only\n called internally by HARKinterpolator1D.derivative.\n '
(y, dydx) = self.eval_with_derivative(x)
return dydx
|
def _evalAndDer(self, x):
'\n Returns the level and first derivative of the function at each value in\n x. Only called internally by HARKinterpolator1D.eval_and_der.\n '
m = len(x)
fx = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
fx[:, j] = self.functions[j](x)
fx[np.isnan(fx)] = np.inf
i = np.argmax(fx, axis=1)
y = fx[(np.arange(m), i)]
dydx = np.zeros_like(y)
for j in range(self.funcCount):
c = (i == j)
dydx[c] = self.functions[j].derivative(x[c])
return (y, dydx)
| 5,161,202,635,096,723,000
|
Returns the level and first derivative of the function at each value in
x. Only called internally by HARKinterpolator1D.eval_and_der.
|
HARK/interpolation.py
|
_evalAndDer
|
cohenimhuji/HARK
|
python
|
def _evalAndDer(self, x):
'\n Returns the level and first derivative of the function at each value in\n x. Only called internally by HARKinterpolator1D.eval_and_der.\n '
m = len(x)
fx = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
fx[:, j] = self.functions[j](x)
fx[np.isnan(fx)] = np.inf
i = np.argmax(fx, axis=1)
y = fx[(np.arange(m), i)]
dydx = np.zeros_like(y)
for j in range(self.funcCount):
c = (i == j)
dydx[c] = self.functions[j].derivative(x[c])
return (y, dydx)
|
def __init__(self, *functions):
'\n Constructor to make a new lower envelope iterpolation.\n\n Parameters\n ----------\n *functions : function\n Any number of real functions; often instances of HARKinterpolator2D\n\n Returns\n -------\n new instance of LowerEnvelope2D\n '
self.functions = []
for function in functions:
self.functions.append(function)
self.funcCount = len(self.functions)
| 1,982,686,540,821,018,600
|
Constructor to make a new lower envelope iterpolation.
Parameters
----------
*functions : function
Any number of real functions; often instances of HARKinterpolator2D
Returns
-------
new instance of LowerEnvelope2D
|
HARK/interpolation.py
|
__init__
|
cohenimhuji/HARK
|
python
|
def __init__(self, *functions):
'\n Constructor to make a new lower envelope iterpolation.\n\n Parameters\n ----------\n *functions : function\n Any number of real functions; often instances of HARKinterpolator2D\n\n Returns\n -------\n new instance of LowerEnvelope2D\n '
self.functions = []
for function in functions:
self.functions.append(function)
self.funcCount = len(self.functions)
|
def _evaluate(self, x, y):
'\n Returns the level of the function at each value in (x,y) as the minimum\n among all of the functions. Only called internally by\n HARKinterpolator2D.__call__.\n '
if _isscalar(x):
f = np.nanmin([f(x, y) for f in self.functions])
else:
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y)
f = np.nanmin(temp, axis=1)
return f
| 5,249,836,552,260,828,000
|
Returns the level of the function at each value in (x,y) as the minimum
among all of the functions. Only called internally by
HARKinterpolator2D.__call__.
|
HARK/interpolation.py
|
_evaluate
|
cohenimhuji/HARK
|
python
|
def _evaluate(self, x, y):
'\n Returns the level of the function at each value in (x,y) as the minimum\n among all of the functions. Only called internally by\n HARKinterpolator2D.__call__.\n '
if _isscalar(x):
f = np.nanmin([f(x, y) for f in self.functions])
else:
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y)
f = np.nanmin(temp, axis=1)
return f
|
def _derX(self, x, y):
'\n Returns the first derivative of the function with respect to X at each\n value in (x,y). Only called internally by HARKinterpolator2D._derX.\n '
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y)
temp[np.isnan(temp)] = np.inf
i = np.argmin(temp, axis=1)
dfdx = np.zeros_like(x)
for j in range(self.funcCount):
c = (i == j)
dfdx[c] = self.functions[j].derivativeX(x[c], y[c])
return dfdx
| -5,991,292,268,584,979,000
|
Returns the first derivative of the function with respect to X at each
value in (x,y). Only called internally by HARKinterpolator2D._derX.
|
HARK/interpolation.py
|
_derX
|
cohenimhuji/HARK
|
python
|
def _derX(self, x, y):
'\n Returns the first derivative of the function with respect to X at each\n value in (x,y). Only called internally by HARKinterpolator2D._derX.\n '
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y)
temp[np.isnan(temp)] = np.inf
i = np.argmin(temp, axis=1)
dfdx = np.zeros_like(x)
for j in range(self.funcCount):
c = (i == j)
dfdx[c] = self.functions[j].derivativeX(x[c], y[c])
return dfdx
|
def _derY(self, x, y):
'\n Returns the first derivative of the function with respect to Y at each\n value in (x,y). Only called internally by HARKinterpolator2D._derY.\n '
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y)
temp[np.isnan(temp)] = np.inf
i = np.argmin(temp, axis=1)
y = temp[(np.arange(m), i)]
dfdy = np.zeros_like(x)
for j in range(self.funcCount):
c = (i == j)
dfdy[c] = self.functions[j].derivativeY(x[c], y[c])
return dfdy
| -6,446,132,566,151,625,000
|
Returns the first derivative of the function with respect to Y at each
value in (x,y). Only called internally by HARKinterpolator2D._derY.
|
HARK/interpolation.py
|
_derY
|
cohenimhuji/HARK
|
python
|
def _derY(self, x, y):
'\n Returns the first derivative of the function with respect to Y at each\n value in (x,y). Only called internally by HARKinterpolator2D._derY.\n '
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y)
temp[np.isnan(temp)] = np.inf
i = np.argmin(temp, axis=1)
y = temp[(np.arange(m), i)]
dfdy = np.zeros_like(x)
for j in range(self.funcCount):
c = (i == j)
dfdy[c] = self.functions[j].derivativeY(x[c], y[c])
return dfdy
|
def __init__(self, *functions):
'\n Constructor to make a new lower envelope iterpolation.\n\n Parameters\n ----------\n *functions : function\n Any number of real functions; often instances of HARKinterpolator3D\n\n Returns\n -------\n None\n '
self.functions = []
for function in functions:
self.functions.append(function)
self.funcCount = len(self.functions)
| 4,380,085,783,528,576,000
|
Constructor to make a new lower envelope iterpolation.
Parameters
----------
*functions : function
Any number of real functions; often instances of HARKinterpolator3D
Returns
-------
None
|
HARK/interpolation.py
|
__init__
|
cohenimhuji/HARK
|
python
|
def __init__(self, *functions):
'\n Constructor to make a new lower envelope iterpolation.\n\n Parameters\n ----------\n *functions : function\n Any number of real functions; often instances of HARKinterpolator3D\n\n Returns\n -------\n None\n '
self.functions = []
for function in functions:
self.functions.append(function)
self.funcCount = len(self.functions)
|
def _evaluate(self, x, y, z):
'\n Returns the level of the function at each value in (x,y,z) as the minimum\n among all of the functions. Only called internally by\n HARKinterpolator3D.__call__.\n '
if _isscalar(x):
f = np.nanmin([f(x, y, z) for f in self.functions])
else:
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y, z)
f = np.nanmin(temp, axis=1)
return f
| 6,856,665,904,065,716,000
|
Returns the level of the function at each value in (x,y,z) as the minimum
among all of the functions. Only called internally by
HARKinterpolator3D.__call__.
|
HARK/interpolation.py
|
_evaluate
|
cohenimhuji/HARK
|
python
|
def _evaluate(self, x, y, z):
'\n Returns the level of the function at each value in (x,y,z) as the minimum\n among all of the functions. Only called internally by\n HARKinterpolator3D.__call__.\n '
if _isscalar(x):
f = np.nanmin([f(x, y, z) for f in self.functions])
else:
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y, z)
f = np.nanmin(temp, axis=1)
return f
|
def _derX(self, x, y, z):
'\n Returns the first derivative of the function with respect to X at each\n value in (x,y,z). Only called internally by HARKinterpolator3D._derX.\n '
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y, z)
temp[np.isnan(temp)] = np.inf
i = np.argmin(temp, axis=1)
dfdx = np.zeros_like(x)
for j in range(self.funcCount):
c = (i == j)
dfdx[c] = self.functions[j].derivativeX(x[c], y[c], z[c])
return dfdx
| -411,142,791,819,318,200
|
Returns the first derivative of the function with respect to X at each
value in (x,y,z). Only called internally by HARKinterpolator3D._derX.
|
HARK/interpolation.py
|
_derX
|
cohenimhuji/HARK
|
python
|
def _derX(self, x, y, z):
'\n Returns the first derivative of the function with respect to X at each\n value in (x,y,z). Only called internally by HARKinterpolator3D._derX.\n '
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y, z)
temp[np.isnan(temp)] = np.inf
i = np.argmin(temp, axis=1)
dfdx = np.zeros_like(x)
for j in range(self.funcCount):
c = (i == j)
dfdx[c] = self.functions[j].derivativeX(x[c], y[c], z[c])
return dfdx
|
def _derY(self, x, y, z):
'\n Returns the first derivative of the function with respect to Y at each\n value in (x,y,z). Only called internally by HARKinterpolator3D._derY.\n '
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y, z)
temp[np.isnan(temp)] = np.inf
i = np.argmin(temp, axis=1)
y = temp[(np.arange(m), i)]
dfdy = np.zeros_like(x)
for j in range(self.funcCount):
c = (i == j)
dfdy[c] = self.functions[j].derivativeY(x[c], y[c], z[c])
return dfdy
| -4,448,192,538,913,105,000
|
Returns the first derivative of the function with respect to Y at each
value in (x,y,z). Only called internally by HARKinterpolator3D._derY.
|
HARK/interpolation.py
|
_derY
|
cohenimhuji/HARK
|
python
|
def _derY(self, x, y, z):
'\n Returns the first derivative of the function with respect to Y at each\n value in (x,y,z). Only called internally by HARKinterpolator3D._derY.\n '
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y, z)
temp[np.isnan(temp)] = np.inf
i = np.argmin(temp, axis=1)
y = temp[(np.arange(m), i)]
dfdy = np.zeros_like(x)
for j in range(self.funcCount):
c = (i == j)
dfdy[c] = self.functions[j].derivativeY(x[c], y[c], z[c])
return dfdy
|
def _derZ(self, x, y, z):
'\n Returns the first derivative of the function with respect to Z at each\n value in (x,y,z). Only called internally by HARKinterpolator3D._derZ.\n '
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y, z)
temp[np.isnan(temp)] = np.inf
i = np.argmin(temp, axis=1)
y = temp[(np.arange(m), i)]
dfdz = np.zeros_like(x)
for j in range(self.funcCount):
c = (i == j)
dfdz[c] = self.functions[j].derivativeZ(x[c], y[c], z[c])
return dfdz
| -8,711,043,215,877,459,000
|
Returns the first derivative of the function with respect to Z at each
value in (x,y,z). Only called internally by HARKinterpolator3D._derZ.
|
HARK/interpolation.py
|
_derZ
|
cohenimhuji/HARK
|
python
|
def _derZ(self, x, y, z):
'\n Returns the first derivative of the function with respect to Z at each\n value in (x,y,z). Only called internally by HARKinterpolator3D._derZ.\n '
m = len(x)
temp = np.zeros((m, self.funcCount))
for j in range(self.funcCount):
temp[:, j] = self.functions[j](x, y, z)
temp[np.isnan(temp)] = np.inf
i = np.argmin(temp, axis=1)
y = temp[(np.arange(m), i)]
dfdz = np.zeros_like(x)
for j in range(self.funcCount):
c = (i == j)
dfdz[c] = self.functions[j].derivativeZ(x[c], y[c], z[c])
return dfdz
|
def __init__(self, func, lowerBound):
'\n Make a new instance of VariableLowerBoundFunc2D.\n\n Parameters\n ----------\n func : function\n A function f: (R_+ x R) --> R representing the function of interest\n shifted by its lower bound in the first input.\n lowerBound : function\n The lower bound in the first input of the function of interest, as\n a function of the second input.\n\n Returns\n -------\n None\n '
self.func = func
self.lowerBound = lowerBound
| -7,535,546,025,031,904,000
|
Make a new instance of VariableLowerBoundFunc2D.
Parameters
----------
func : function
A function f: (R_+ x R) --> R representing the function of interest
shifted by its lower bound in the first input.
lowerBound : function
The lower bound in the first input of the function of interest, as
a function of the second input.
Returns
-------
None
|
HARK/interpolation.py
|
__init__
|
cohenimhuji/HARK
|
python
|
def __init__(self, func, lowerBound):
'\n Make a new instance of VariableLowerBoundFunc2D.\n\n Parameters\n ----------\n func : function\n A function f: (R_+ x R) --> R representing the function of interest\n shifted by its lower bound in the first input.\n lowerBound : function\n The lower bound in the first input of the function of interest, as\n a function of the second input.\n\n Returns\n -------\n None\n '
self.func = func
self.lowerBound = lowerBound
|
def __call__(self, x, y):
'\n Evaluate the function at given state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n\n Returns\n -------\n f_out : np.array\n Function evaluated at (x,y), of same shape as inputs.\n '
xShift = self.lowerBound(y)
f_out = self.func((x - xShift), y)
return f_out
| 2,563,630,424,569,989,000
|
Evaluate the function at given state space points.
Parameters
----------
x : np.array
First input values.
y : np.array
Second input values; should be of same shape as x.
Returns
-------
f_out : np.array
Function evaluated at (x,y), of same shape as inputs.
|
HARK/interpolation.py
|
__call__
|
cohenimhuji/HARK
|
python
|
def __call__(self, x, y):
'\n Evaluate the function at given state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n\n Returns\n -------\n f_out : np.array\n Function evaluated at (x,y), of same shape as inputs.\n '
xShift = self.lowerBound(y)
f_out = self.func((x - xShift), y)
return f_out
|
def derivativeX(self, x, y):
'\n Evaluate the first derivative with respect to x of the function at given\n state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n\n Returns\n -------\n dfdx_out : np.array\n First derivative of function with respect to the first input,\n evaluated at (x,y), of same shape as inputs.\n '
xShift = self.lowerBound(y)
dfdx_out = self.func.derivativeX((x - xShift), y)
return dfdx_out
| 7,029,267,794,205,042,000
|
Evaluate the first derivative with respect to x of the function at given
state space points.
Parameters
----------
x : np.array
First input values.
y : np.array
Second input values; should be of same shape as x.
Returns
-------
dfdx_out : np.array
First derivative of function with respect to the first input,
evaluated at (x,y), of same shape as inputs.
|
HARK/interpolation.py
|
derivativeX
|
cohenimhuji/HARK
|
python
|
def derivativeX(self, x, y):
'\n Evaluate the first derivative with respect to x of the function at given\n state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n\n Returns\n -------\n dfdx_out : np.array\n First derivative of function with respect to the first input,\n evaluated at (x,y), of same shape as inputs.\n '
xShift = self.lowerBound(y)
dfdx_out = self.func.derivativeX((x - xShift), y)
return dfdx_out
|
def derivativeY(self, x, y):
'\n Evaluate the first derivative with respect to y of the function at given\n state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n\n Returns\n -------\n dfdy_out : np.array\n First derivative of function with respect to the second input,\n evaluated at (x,y), of same shape as inputs.\n '
(xShift, xShiftDer) = self.lowerBound.eval_with_derivative(y)
dfdy_out = (self.func.derivativeY((x - xShift), y) - (xShiftDer * self.func.derivativeX((x - xShift), y)))
return dfdy_out
| 543,617,472,426,047,800
|
Evaluate the first derivative with respect to y of the function at given
state space points.
Parameters
----------
x : np.array
First input values.
y : np.array
Second input values; should be of same shape as x.
Returns
-------
dfdy_out : np.array
First derivative of function with respect to the second input,
evaluated at (x,y), of same shape as inputs.
|
HARK/interpolation.py
|
derivativeY
|
cohenimhuji/HARK
|
python
|
def derivativeY(self, x, y):
'\n Evaluate the first derivative with respect to y of the function at given\n state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n\n Returns\n -------\n dfdy_out : np.array\n First derivative of function with respect to the second input,\n evaluated at (x,y), of same shape as inputs.\n '
(xShift, xShiftDer) = self.lowerBound.eval_with_derivative(y)
dfdy_out = (self.func.derivativeY((x - xShift), y) - (xShiftDer * self.func.derivativeX((x - xShift), y)))
return dfdy_out
|
def __init__(self, func, lowerBound):
'\n Make a new instance of VariableLowerBoundFunc3D.\n\n Parameters\n ----------\n func : function\n A function f: (R_+ x R^2) --> R representing the function of interest\n shifted by its lower bound in the first input.\n lowerBound : function\n The lower bound in the first input of the function of interest, as\n a function of the second input.\n\n Returns\n -------\n None\n '
self.func = func
self.lowerBound = lowerBound
| 7,236,612,615,693,391,000
|
Make a new instance of VariableLowerBoundFunc3D.
Parameters
----------
func : function
A function f: (R_+ x R^2) --> R representing the function of interest
shifted by its lower bound in the first input.
lowerBound : function
The lower bound in the first input of the function of interest, as
a function of the second input.
Returns
-------
None
|
HARK/interpolation.py
|
__init__
|
cohenimhuji/HARK
|
python
|
def __init__(self, func, lowerBound):
'\n Make a new instance of VariableLowerBoundFunc3D.\n\n Parameters\n ----------\n func : function\n A function f: (R_+ x R^2) --> R representing the function of interest\n shifted by its lower bound in the first input.\n lowerBound : function\n The lower bound in the first input of the function of interest, as\n a function of the second input.\n\n Returns\n -------\n None\n '
self.func = func
self.lowerBound = lowerBound
|
def __call__(self, x, y, z):
'\n Evaluate the function at given state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n z : np.array\n Third input values; should be of same shape as x.\n\n Returns\n -------\n f_out : np.array\n Function evaluated at (x,y,z), of same shape as inputs.\n '
xShift = self.lowerBound(y)
f_out = self.func((x - xShift), y, z)
return f_out
| -2,633,069,501,070,394,000
|
Evaluate the function at given state space points.
Parameters
----------
x : np.array
First input values.
y : np.array
Second input values; should be of same shape as x.
z : np.array
Third input values; should be of same shape as x.
Returns
-------
f_out : np.array
Function evaluated at (x,y,z), of same shape as inputs.
|
HARK/interpolation.py
|
__call__
|
cohenimhuji/HARK
|
python
|
def __call__(self, x, y, z):
'\n Evaluate the function at given state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n z : np.array\n Third input values; should be of same shape as x.\n\n Returns\n -------\n f_out : np.array\n Function evaluated at (x,y,z), of same shape as inputs.\n '
xShift = self.lowerBound(y)
f_out = self.func((x - xShift), y, z)
return f_out
|
def derivativeX(self, x, y, z):
'\n Evaluate the first derivative with respect to x of the function at given\n state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n z : np.array\n Third input values; should be of same shape as x.\n\n Returns\n -------\n dfdx_out : np.array\n First derivative of function with respect to the first input,\n evaluated at (x,y,z), of same shape as inputs.\n '
xShift = self.lowerBound(y)
dfdx_out = self.func.derivativeX((x - xShift), y, z)
return dfdx_out
| 2,479,280,836,390,275,600
|
Evaluate the first derivative with respect to x of the function at given
state space points.
Parameters
----------
x : np.array
First input values.
y : np.array
Second input values; should be of same shape as x.
z : np.array
Third input values; should be of same shape as x.
Returns
-------
dfdx_out : np.array
First derivative of function with respect to the first input,
evaluated at (x,y,z), of same shape as inputs.
|
HARK/interpolation.py
|
derivativeX
|
cohenimhuji/HARK
|
python
|
def derivativeX(self, x, y, z):
'\n Evaluate the first derivative with respect to x of the function at given\n state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n z : np.array\n Third input values; should be of same shape as x.\n\n Returns\n -------\n dfdx_out : np.array\n First derivative of function with respect to the first input,\n evaluated at (x,y,z), of same shape as inputs.\n '
xShift = self.lowerBound(y)
dfdx_out = self.func.derivativeX((x - xShift), y, z)
return dfdx_out
|
def derivativeY(self, x, y, z):
'\n Evaluate the first derivative with respect to y of the function at given\n state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n z : np.array\n Third input values; should be of same shape as x.\n\n Returns\n -------\n dfdy_out : np.array\n First derivative of function with respect to the second input,\n evaluated at (x,y,z), of same shape as inputs.\n '
(xShift, xShiftDer) = self.lowerBound.eval_with_derivative(y)
dfdy_out = (self.func.derivativeY((x - xShift), y, z) - (xShiftDer * self.func.derivativeX((x - xShift), y, z)))
return dfdy_out
| -1,435,814,437,818,480,400
|
Evaluate the first derivative with respect to y of the function at given
state space points.
Parameters
----------
x : np.array
First input values.
y : np.array
Second input values; should be of same shape as x.
z : np.array
Third input values; should be of same shape as x.
Returns
-------
dfdy_out : np.array
First derivative of function with respect to the second input,
evaluated at (x,y,z), of same shape as inputs.
|
HARK/interpolation.py
|
derivativeY
|
cohenimhuji/HARK
|
python
|
def derivativeY(self, x, y, z):
'\n Evaluate the first derivative with respect to y of the function at given\n state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n z : np.array\n Third input values; should be of same shape as x.\n\n Returns\n -------\n dfdy_out : np.array\n First derivative of function with respect to the second input,\n evaluated at (x,y,z), of same shape as inputs.\n '
(xShift, xShiftDer) = self.lowerBound.eval_with_derivative(y)
dfdy_out = (self.func.derivativeY((x - xShift), y, z) - (xShiftDer * self.func.derivativeX((x - xShift), y, z)))
return dfdy_out
|
def derivativeZ(self, x, y, z):
'\n Evaluate the first derivative with respect to z of the function at given\n state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n z : np.array\n Third input values; should be of same shape as x.\n\n Returns\n -------\n dfdz_out : np.array\n First derivative of function with respect to the third input,\n evaluated at (x,y,z), of same shape as inputs.\n '
xShift = self.lowerBound(y)
dfdz_out = self.func.derivativeZ((x - xShift), y, z)
return dfdz_out
| -6,892,089,819,161,378,000
|
Evaluate the first derivative with respect to z of the function at given
state space points.
Parameters
----------
x : np.array
First input values.
y : np.array
Second input values; should be of same shape as x.
z : np.array
Third input values; should be of same shape as x.
Returns
-------
dfdz_out : np.array
First derivative of function with respect to the third input,
evaluated at (x,y,z), of same shape as inputs.
|
HARK/interpolation.py
|
derivativeZ
|
cohenimhuji/HARK
|
python
|
def derivativeZ(self, x, y, z):
'\n Evaluate the first derivative with respect to z of the function at given\n state space points.\n\n Parameters\n ----------\n x : np.array\n First input values.\n y : np.array\n Second input values; should be of same shape as x.\n z : np.array\n Third input values; should be of same shape as x.\n\n Returns\n -------\n dfdz_out : np.array\n First derivative of function with respect to the third input,\n evaluated at (x,y,z), of same shape as inputs.\n '
xShift = self.lowerBound(y)
dfdz_out = self.func.derivativeZ((x - xShift), y, z)
return dfdz_out
|
def __init__(self, xInterpolators, y_values):
'\n Constructor for the class, generating an approximation to a function of\n the form f(x,y) using interpolations over f(x,y_0) for a fixed grid of\n y_0 values.\n\n Parameters\n ----------\n xInterpolators : [HARKinterpolator1D]\n A list of 1D interpolations over the x variable. The nth element of\n xInterpolators represents f(x,y_values[n]).\n y_values: numpy.array\n An array of y values equal in length to xInterpolators.\n\n Returns\n -------\n new instance of LinearInterpOnInterp1D\n '
self.xInterpolators = xInterpolators
self.y_list = y_values
self.y_n = y_values.size
| 662,969,043,855,528,400
|
Constructor for the class, generating an approximation to a function of
the form f(x,y) using interpolations over f(x,y_0) for a fixed grid of
y_0 values.
Parameters
----------
xInterpolators : [HARKinterpolator1D]
A list of 1D interpolations over the x variable. The nth element of
xInterpolators represents f(x,y_values[n]).
y_values: numpy.array
An array of y values equal in length to xInterpolators.
Returns
-------
new instance of LinearInterpOnInterp1D
|
HARK/interpolation.py
|
__init__
|
cohenimhuji/HARK
|
python
|
def __init__(self, xInterpolators, y_values):
'\n Constructor for the class, generating an approximation to a function of\n the form f(x,y) using interpolations over f(x,y_0) for a fixed grid of\n y_0 values.\n\n Parameters\n ----------\n xInterpolators : [HARKinterpolator1D]\n A list of 1D interpolations over the x variable. The nth element of\n xInterpolators represents f(x,y_values[n]).\n y_values: numpy.array\n An array of y values equal in length to xInterpolators.\n\n Returns\n -------\n new instance of LinearInterpOnInterp1D\n '
self.xInterpolators = xInterpolators
self.y_list = y_values
self.y_n = y_values.size
|
def _evaluate(self, x, y):
'\n Returns the level of the interpolated function at each value in x,y.\n Only called internally by HARKinterpolator2D.__call__ (etc).\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
f = (((1 - alpha) * self.xInterpolators[(y_pos - 1)](x)) + (alpha * self.xInterpolators[y_pos](x)))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
f = (np.zeros(m) + np.nan)
if (y.size > 0):
for i in range(1, self.y_n):
c = (y_pos == i)
if np.any(c):
alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
f[c] = (((1 - alpha) * self.xInterpolators[(i - 1)](x[c])) + (alpha * self.xInterpolators[i](x[c])))
return f
| 5,593,763,169,825,352,000
|
Returns the level of the interpolated function at each value in x,y.
Only called internally by HARKinterpolator2D.__call__ (etc).
|
HARK/interpolation.py
|
_evaluate
|
cohenimhuji/HARK
|
python
|
def _evaluate(self, x, y):
'\n Returns the level of the interpolated function at each value in x,y.\n Only called internally by HARKinterpolator2D.__call__ (etc).\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
f = (((1 - alpha) * self.xInterpolators[(y_pos - 1)](x)) + (alpha * self.xInterpolators[y_pos](x)))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
f = (np.zeros(m) + np.nan)
if (y.size > 0):
for i in range(1, self.y_n):
c = (y_pos == i)
if np.any(c):
alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
f[c] = (((1 - alpha) * self.xInterpolators[(i - 1)](x[c])) + (alpha * self.xInterpolators[i](x[c])))
return f
|
def _derX(self, x, y):
'\n Returns the derivative with respect to x of the interpolated function\n at each value in x,y. Only called internally by HARKinterpolator2D.derivativeX.\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
dfdx = (((1 - alpha) * self.xInterpolators[(y_pos - 1)]._der(x)) + (alpha * self.xInterpolators[y_pos]._der(x)))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
dfdx = (np.zeros(m) + np.nan)
if (y.size > 0):
for i in range(1, self.y_n):
c = (y_pos == i)
if np.any(c):
alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
dfdx[c] = (((1 - alpha) * self.xInterpolators[(i - 1)]._der(x[c])) + (alpha * self.xInterpolators[i]._der(x[c])))
return dfdx
| -6,343,394,968,401,222,000
|
Returns the derivative with respect to x of the interpolated function
at each value in x,y. Only called internally by HARKinterpolator2D.derivativeX.
|
HARK/interpolation.py
|
_derX
|
cohenimhuji/HARK
|
python
|
def _derX(self, x, y):
'\n Returns the derivative with respect to x of the interpolated function\n at each value in x,y. Only called internally by HARKinterpolator2D.derivativeX.\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
dfdx = (((1 - alpha) * self.xInterpolators[(y_pos - 1)]._der(x)) + (alpha * self.xInterpolators[y_pos]._der(x)))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
dfdx = (np.zeros(m) + np.nan)
if (y.size > 0):
for i in range(1, self.y_n):
c = (y_pos == i)
if np.any(c):
alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
dfdx[c] = (((1 - alpha) * self.xInterpolators[(i - 1)]._der(x[c])) + (alpha * self.xInterpolators[i]._der(x[c])))
return dfdx
|
def _derY(self, x, y):
'\n Returns the derivative with respect to y of the interpolated function\n at each value in x,y. Only called internally by HARKinterpolator2D.derivativeY.\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
dfdy = ((self.xInterpolators[y_pos](x) - self.xInterpolators[(y_pos - 1)](x)) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
dfdy = (np.zeros(m) + np.nan)
if (y.size > 0):
for i in range(1, self.y_n):
c = (y_pos == i)
if np.any(c):
dfdy[c] = ((self.xInterpolators[i](x[c]) - self.xInterpolators[(i - 1)](x[c])) / (self.y_list[i] - self.y_list[(i - 1)]))
return dfdy
| -475,866,422,364,120,100
|
Returns the derivative with respect to y of the interpolated function
at each value in x,y. Only called internally by HARKinterpolator2D.derivativeY.
|
HARK/interpolation.py
|
_derY
|
cohenimhuji/HARK
|
python
|
def _derY(self, x, y):
'\n Returns the derivative with respect to y of the interpolated function\n at each value in x,y. Only called internally by HARKinterpolator2D.derivativeY.\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
dfdy = ((self.xInterpolators[y_pos](x) - self.xInterpolators[(y_pos - 1)](x)) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
dfdy = (np.zeros(m) + np.nan)
if (y.size > 0):
for i in range(1, self.y_n):
c = (y_pos == i)
if np.any(c):
dfdy[c] = ((self.xInterpolators[i](x[c]) - self.xInterpolators[(i - 1)](x[c])) / (self.y_list[i] - self.y_list[(i - 1)]))
return dfdy
|
def __init__(self, xInterpolators, y_values, z_values):
'\n Constructor for the class, generating an approximation to a function of\n the form f(x,y,z) using interpolations over f(x,y_0,z_0) for a fixed grid\n of y_0 and z_0 values.\n\n Parameters\n ----------\n xInterpolators : [[HARKinterpolator1D]]\n A list of lists of 1D interpolations over the x variable. The i,j-th\n element of xInterpolators represents f(x,y_values[i],z_values[j]).\n y_values: numpy.array\n An array of y values equal in length to xInterpolators.\n z_values: numpy.array\n An array of z values equal in length to xInterpolators[0].\n\n Returns\n -------\n new instance of BilinearInterpOnInterp1D\n '
self.xInterpolators = xInterpolators
self.y_list = y_values
self.y_n = y_values.size
self.z_list = z_values
self.z_n = z_values.size
| -3,114,806,296,255,248,400
|
Constructor for the class, generating an approximation to a function of
the form f(x,y,z) using interpolations over f(x,y_0,z_0) for a fixed grid
of y_0 and z_0 values.
Parameters
----------
xInterpolators : [[HARKinterpolator1D]]
A list of lists of 1D interpolations over the x variable. The i,j-th
element of xInterpolators represents f(x,y_values[i],z_values[j]).
y_values: numpy.array
An array of y values equal in length to xInterpolators.
z_values: numpy.array
An array of z values equal in length to xInterpolators[0].
Returns
-------
new instance of BilinearInterpOnInterp1D
|
HARK/interpolation.py
|
__init__
|
cohenimhuji/HARK
|
python
|
def __init__(self, xInterpolators, y_values, z_values):
'\n Constructor for the class, generating an approximation to a function of\n the form f(x,y,z) using interpolations over f(x,y_0,z_0) for a fixed grid\n of y_0 and z_0 values.\n\n Parameters\n ----------\n xInterpolators : [[HARKinterpolator1D]]\n A list of lists of 1D interpolations over the x variable. The i,j-th\n element of xInterpolators represents f(x,y_values[i],z_values[j]).\n y_values: numpy.array\n An array of y values equal in length to xInterpolators.\n z_values: numpy.array\n An array of z values equal in length to xInterpolators[0].\n\n Returns\n -------\n new instance of BilinearInterpOnInterp1D\n '
self.xInterpolators = xInterpolators
self.y_list = y_values
self.y_n = y_values.size
self.z_list = z_values
self.z_n = z_values.size
|
def _evaluate(self, x, y, z):
'\n Returns the level of the interpolated function at each value in x,y,z.\n Only called internally by HARKinterpolator3D.__call__ (etc).\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
f = ((((((1 - alpha) * (1 - beta)) * self.xInterpolators[(y_pos - 1)][(z_pos - 1)](x)) + (((1 - alpha) * beta) * self.xInterpolators[(y_pos - 1)][z_pos](x))) + ((alpha * (1 - beta)) * self.xInterpolators[y_pos][(z_pos - 1)](x))) + ((alpha * beta) * self.xInterpolators[y_pos][z_pos](x)))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
z_pos = np.searchsorted(self.z_list, z)
z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
z_pos[(z_pos < 1)] = 1
f = (np.zeros(m) + np.nan)
for i in range(1, self.y_n):
for j in range(1, self.z_n):
c = np.logical_and((i == y_pos), (j == z_pos))
if np.any(c):
alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
f[c] = ((((((1 - alpha) * (1 - beta)) * self.xInterpolators[(i - 1)][(j - 1)](x[c])) + (((1 - alpha) * beta) * self.xInterpolators[(i - 1)][j](x[c]))) + ((alpha * (1 - beta)) * self.xInterpolators[i][(j - 1)](x[c]))) + ((alpha * beta) * self.xInterpolators[i][j](x[c])))
return f
| 9,073,170,695,666,131,000
|
Returns the level of the interpolated function at each value in x,y,z.
Only called internally by HARKinterpolator3D.__call__ (etc).
|
HARK/interpolation.py
|
_evaluate
|
cohenimhuji/HARK
|
python
|
def _evaluate(self, x, y, z):
'\n Returns the level of the interpolated function at each value in x,y,z.\n Only called internally by HARKinterpolator3D.__call__ (etc).\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
f = ((((((1 - alpha) * (1 - beta)) * self.xInterpolators[(y_pos - 1)][(z_pos - 1)](x)) + (((1 - alpha) * beta) * self.xInterpolators[(y_pos - 1)][z_pos](x))) + ((alpha * (1 - beta)) * self.xInterpolators[y_pos][(z_pos - 1)](x))) + ((alpha * beta) * self.xInterpolators[y_pos][z_pos](x)))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
z_pos = np.searchsorted(self.z_list, z)
z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
z_pos[(z_pos < 1)] = 1
f = (np.zeros(m) + np.nan)
for i in range(1, self.y_n):
for j in range(1, self.z_n):
c = np.logical_and((i == y_pos), (j == z_pos))
if np.any(c):
alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
f[c] = ((((((1 - alpha) * (1 - beta)) * self.xInterpolators[(i - 1)][(j - 1)](x[c])) + (((1 - alpha) * beta) * self.xInterpolators[(i - 1)][j](x[c]))) + ((alpha * (1 - beta)) * self.xInterpolators[i][(j - 1)](x[c]))) + ((alpha * beta) * self.xInterpolators[i][j](x[c])))
return f
|
def _derX(self, x, y, z):
'\n Returns the derivative with respect to x of the interpolated function\n at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeX.\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
dfdx = ((((((1 - alpha) * (1 - beta)) * self.xInterpolators[(y_pos - 1)][(z_pos - 1)]._der(x)) + (((1 - alpha) * beta) * self.xInterpolators[(y_pos - 1)][z_pos]._der(x))) + ((alpha * (1 - beta)) * self.xInterpolators[y_pos][(z_pos - 1)]._der(x))) + ((alpha * beta) * self.xInterpolators[y_pos][z_pos]._der(x)))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
z_pos = np.searchsorted(self.z_list, z)
z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
z_pos[(z_pos < 1)] = 1
dfdx = (np.zeros(m) + np.nan)
for i in range(1, self.y_n):
for j in range(1, self.z_n):
c = np.logical_and((i == y_pos), (j == z_pos))
if np.any(c):
alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
dfdx[c] = ((((((1 - alpha) * (1 - beta)) * self.xInterpolators[(i - 1)][(j - 1)]._der(x[c])) + (((1 - alpha) * beta) * self.xInterpolators[(i - 1)][j]._der(x[c]))) + ((alpha * (1 - beta)) * self.xInterpolators[i][(j - 1)]._der(x[c]))) + ((alpha * beta) * self.xInterpolators[i][j]._der(x[c])))
return dfdx
| -442,082,114,651,853,500
|
Returns the derivative with respect to x of the interpolated function
at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeX.
|
HARK/interpolation.py
|
_derX
|
cohenimhuji/HARK
|
python
|
def _derX(self, x, y, z):
'\n Returns the derivative with respect to x of the interpolated function\n at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeX.\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
dfdx = ((((((1 - alpha) * (1 - beta)) * self.xInterpolators[(y_pos - 1)][(z_pos - 1)]._der(x)) + (((1 - alpha) * beta) * self.xInterpolators[(y_pos - 1)][z_pos]._der(x))) + ((alpha * (1 - beta)) * self.xInterpolators[y_pos][(z_pos - 1)]._der(x))) + ((alpha * beta) * self.xInterpolators[y_pos][z_pos]._der(x)))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
z_pos = np.searchsorted(self.z_list, z)
z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
z_pos[(z_pos < 1)] = 1
dfdx = (np.zeros(m) + np.nan)
for i in range(1, self.y_n):
for j in range(1, self.z_n):
c = np.logical_and((i == y_pos), (j == z_pos))
if np.any(c):
alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
dfdx[c] = ((((((1 - alpha) * (1 - beta)) * self.xInterpolators[(i - 1)][(j - 1)]._der(x[c])) + (((1 - alpha) * beta) * self.xInterpolators[(i - 1)][j]._der(x[c]))) + ((alpha * (1 - beta)) * self.xInterpolators[i][(j - 1)]._der(x[c]))) + ((alpha * beta) * self.xInterpolators[i][j]._der(x[c])))
return dfdx
|
def _derY(self, x, y, z):
'\n Returns the derivative with respect to y of the interpolated function\n at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeY.\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
dfdy = (((((1 - beta) * self.xInterpolators[y_pos][(z_pos - 1)](x)) + (beta * self.xInterpolators[y_pos][z_pos](x))) - (((1 - beta) * self.xInterpolators[(y_pos - 1)][(z_pos - 1)](x)) + (beta * self.xInterpolators[(y_pos - 1)][z_pos](x)))) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
z_pos = np.searchsorted(self.z_list, z)
z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
z_pos[(z_pos < 1)] = 1
dfdy = (np.zeros(m) + np.nan)
for i in range(1, self.y_n):
for j in range(1, self.z_n):
c = np.logical_and((i == y_pos), (j == z_pos))
if np.any(c):
beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
dfdy[c] = (((((1 - beta) * self.xInterpolators[i][(j - 1)](x[c])) + (beta * self.xInterpolators[i][j](x[c]))) - (((1 - beta) * self.xInterpolators[(i - 1)][(j - 1)](x[c])) + (beta * self.xInterpolators[(i - 1)][j](x[c])))) / (self.y_list[i] - self.y_list[(i - 1)]))
return dfdy
| 6,050,288,234,357,051,000
|
Returns the derivative with respect to y of the interpolated function
at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeY.
|
HARK/interpolation.py
|
_derY
|
cohenimhuji/HARK
|
python
|
def _derY(self, x, y, z):
'\n Returns the derivative with respect to y of the interpolated function\n at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeY.\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
dfdy = (((((1 - beta) * self.xInterpolators[y_pos][(z_pos - 1)](x)) + (beta * self.xInterpolators[y_pos][z_pos](x))) - (((1 - beta) * self.xInterpolators[(y_pos - 1)][(z_pos - 1)](x)) + (beta * self.xInterpolators[(y_pos - 1)][z_pos](x)))) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
z_pos = np.searchsorted(self.z_list, z)
z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
z_pos[(z_pos < 1)] = 1
dfdy = (np.zeros(m) + np.nan)
for i in range(1, self.y_n):
for j in range(1, self.z_n):
c = np.logical_and((i == y_pos), (j == z_pos))
if np.any(c):
beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
dfdy[c] = (((((1 - beta) * self.xInterpolators[i][(j - 1)](x[c])) + (beta * self.xInterpolators[i][j](x[c]))) - (((1 - beta) * self.xInterpolators[(i - 1)][(j - 1)](x[c])) + (beta * self.xInterpolators[(i - 1)][j](x[c])))) / (self.y_list[i] - self.y_list[(i - 1)]))
return dfdy
|
def _derZ(self, x, y, z):
'\n Returns the derivative with respect to z of the interpolated function\n at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeZ.\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
dfdz = (((((1 - alpha) * self.xInterpolators[(y_pos - 1)][z_pos](x)) + (alpha * self.xInterpolators[y_pos][z_pos](x))) - (((1 - alpha) * self.xInterpolators[(y_pos - 1)][(z_pos - 1)](x)) + (alpha * self.xInterpolators[y_pos][(z_pos - 1)](x)))) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
z_pos = np.searchsorted(self.z_list, z)
z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
z_pos[(z_pos < 1)] = 1
dfdz = (np.zeros(m) + np.nan)
for i in range(1, self.y_n):
for j in range(1, self.z_n):
c = np.logical_and((i == y_pos), (j == z_pos))
if np.any(c):
alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
dfdz[c] = (((((1 - alpha) * self.xInterpolators[(i - 1)][j](x[c])) + (alpha * self.xInterpolators[i][j](x[c]))) - (((1 - alpha) * self.xInterpolators[(i - 1)][(j - 1)](x[c])) + (alpha * self.xInterpolators[i][(j - 1)](x[c])))) / (self.z_list[j] - self.z_list[(j - 1)]))
return dfdz
| 8,785,880,321,161,172,000
|
Returns the derivative with respect to z of the interpolated function
at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeZ.
|
HARK/interpolation.py
|
_derZ
|
cohenimhuji/HARK
|
python
|
def _derZ(self, x, y, z):
'\n Returns the derivative with respect to z of the interpolated function\n at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeZ.\n '
if _isscalar(x):
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
dfdz = (((((1 - alpha) * self.xInterpolators[(y_pos - 1)][z_pos](x)) + (alpha * self.xInterpolators[y_pos][z_pos](x))) - (((1 - alpha) * self.xInterpolators[(y_pos - 1)][(z_pos - 1)](x)) + (alpha * self.xInterpolators[y_pos][(z_pos - 1)](x)))) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
else:
m = len(x)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
z_pos = np.searchsorted(self.z_list, z)
z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
z_pos[(z_pos < 1)] = 1
dfdz = (np.zeros(m) + np.nan)
for i in range(1, self.y_n):
for j in range(1, self.z_n):
c = np.logical_and((i == y_pos), (j == z_pos))
if np.any(c):
alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
dfdz[c] = (((((1 - alpha) * self.xInterpolators[(i - 1)][j](x[c])) + (alpha * self.xInterpolators[i][j](x[c]))) - (((1 - alpha) * self.xInterpolators[(i - 1)][(j - 1)](x[c])) + (alpha * self.xInterpolators[i][(j - 1)](x[c])))) / (self.z_list[j] - self.z_list[(j - 1)]))
return dfdz
|
def __init__(self, wInterpolators, x_values, y_values, z_values):
'\n Constructor for the class, generating an approximation to a function of\n the form f(w,x,y,z) using interpolations over f(w,x_0,y_0,z_0) for a fixed\n grid of y_0 and z_0 values.\n\n Parameters\n ----------\n wInterpolators : [[[HARKinterpolator1D]]]\n A list of lists of lists of 1D interpolations over the x variable.\n The i,j,k-th element of wInterpolators represents f(w,x_values[i],y_values[j],z_values[k]).\n x_values: numpy.array\n An array of x values equal in length to wInterpolators.\n y_values: numpy.array\n An array of y values equal in length to wInterpolators[0].\n z_values: numpy.array\n An array of z values equal in length to wInterpolators[0][0]\n\n Returns\n -------\n new instance of TrilinearInterpOnInterp1D\n '
self.wInterpolators = wInterpolators
self.x_list = x_values
self.x_n = x_values.size
self.y_list = y_values
self.y_n = y_values.size
self.z_list = z_values
self.z_n = z_values.size
| -4,740,230,801,676,111,000
|
Constructor for the class, generating an approximation to a function of
the form f(w,x,y,z) using interpolations over f(w,x_0,y_0,z_0) for a fixed
grid of y_0 and z_0 values.
Parameters
----------
wInterpolators : [[[HARKinterpolator1D]]]
A list of lists of lists of 1D interpolations over the x variable.
The i,j,k-th element of wInterpolators represents f(w,x_values[i],y_values[j],z_values[k]).
x_values: numpy.array
An array of x values equal in length to wInterpolators.
y_values: numpy.array
An array of y values equal in length to wInterpolators[0].
z_values: numpy.array
An array of z values equal in length to wInterpolators[0][0]
Returns
-------
new instance of TrilinearInterpOnInterp1D
|
HARK/interpolation.py
|
__init__
|
cohenimhuji/HARK
|
python
|
def __init__(self, wInterpolators, x_values, y_values, z_values):
'\n Constructor for the class, generating an approximation to a function of\n the form f(w,x,y,z) using interpolations over f(w,x_0,y_0,z_0) for a fixed\n grid of y_0 and z_0 values.\n\n Parameters\n ----------\n wInterpolators : [[[HARKinterpolator1D]]]\n A list of lists of lists of 1D interpolations over the x variable.\n The i,j,k-th element of wInterpolators represents f(w,x_values[i],y_values[j],z_values[k]).\n x_values: numpy.array\n An array of x values equal in length to wInterpolators.\n y_values: numpy.array\n An array of y values equal in length to wInterpolators[0].\n z_values: numpy.array\n An array of z values equal in length to wInterpolators[0][0]\n\n Returns\n -------\n new instance of TrilinearInterpOnInterp1D\n '
self.wInterpolators = wInterpolators
self.x_list = x_values
self.x_n = x_values.size
self.y_list = y_values
self.y_n = y_values.size
self.z_list = z_values
self.z_n = z_values.size
|
def _evaluate(self, w, x, y, z):
'\n Returns the level of the interpolated function at each value in w,x,y,z.\n Only called internally by HARKinterpolator4D.__call__ (etc).\n '
if _isscalar(w):
x_pos = max(min(np.searchsorted(self.x_list, x), (self.x_n - 1)), 1)
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
alpha = ((x - self.x_list[(x_pos - 1)]) / (self.x_list[x_pos] - self.x_list[(x_pos - 1)]))
beta = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
gamma = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
f = (((((((((((1 - alpha) * (1 - beta)) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][(z_pos - 1)](w)) + ((((1 - alpha) * (1 - beta)) * gamma) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][z_pos](w))) + ((((1 - alpha) * beta) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][y_pos][(z_pos - 1)](w))) + ((((1 - alpha) * beta) * gamma) * self.wInterpolators[(x_pos - 1)][y_pos][z_pos](w))) + (((alpha * (1 - beta)) * (1 - gamma)) * self.wInterpolators[x_pos][(y_pos - 1)][(z_pos - 1)](w))) + (((alpha * (1 - beta)) * gamma) * self.wInterpolators[x_pos][(y_pos - 1)][z_pos](w))) + (((alpha * beta) * (1 - gamma)) * self.wInterpolators[x_pos][y_pos][(z_pos - 1)](w))) + (((alpha * beta) * gamma) * self.wInterpolators[x_pos][y_pos][z_pos](w)))
else:
m = len(x)
x_pos = np.searchsorted(self.x_list, x)
x_pos[(x_pos > (self.x_n - 1))] = (self.x_n - 1)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
z_pos = np.searchsorted(self.z_list, z)
z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
z_pos[(z_pos < 1)] = 1
f = (np.zeros(m) + np.nan)
for i in range(1, self.x_n):
for j in range(1, self.y_n):
for k in range(1, self.z_n):
c = np.logical_and(np.logical_and((i == x_pos), (j == y_pos)), (k == z_pos))
if np.any(c):
alpha = ((x[c] - self.x_list[(i - 1)]) / (self.x_list[i] - self.x_list[(i - 1)]))
beta = ((y[c] - self.y_list[(j - 1)]) / (self.y_list[j] - self.y_list[(j - 1)]))
gamma = ((z[c] - self.z_list[(k - 1)]) / (self.z_list[k] - self.z_list[(k - 1)]))
f[c] = (((((((((((1 - alpha) * (1 - beta)) * (1 - gamma)) * self.wInterpolators[(i - 1)][(j - 1)][(k - 1)](w[c])) + ((((1 - alpha) * (1 - beta)) * gamma) * self.wInterpolators[(i - 1)][(j - 1)][k](w[c]))) + ((((1 - alpha) * beta) * (1 - gamma)) * self.wInterpolators[(i - 1)][j][(k - 1)](w[c]))) + ((((1 - alpha) * beta) * gamma) * self.wInterpolators[(i - 1)][j][k](w[c]))) + (((alpha * (1 - beta)) * (1 - gamma)) * self.wInterpolators[i][(j - 1)][(k - 1)](w[c]))) + (((alpha * (1 - beta)) * gamma) * self.wInterpolators[i][(j - 1)][k](w[c]))) + (((alpha * beta) * (1 - gamma)) * self.wInterpolators[i][j][(k - 1)](w[c]))) + (((alpha * beta) * gamma) * self.wInterpolators[i][j][k](w[c])))
return f
| 7,536,716,941,229,016,000
|
Returns the level of the interpolated function at each value in w,x,y,z.
Only called internally by HARKinterpolator4D.__call__ (etc).
|
HARK/interpolation.py
|
_evaluate
|
cohenimhuji/HARK
|
python
|
def _evaluate(self, w, x, y, z):
'\n Returns the level of the interpolated function at each value in w,x,y,z.\n Only called internally by HARKinterpolator4D.__call__ (etc).\n '
if _isscalar(w):
x_pos = max(min(np.searchsorted(self.x_list, x), (self.x_n - 1)), 1)
y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
alpha = ((x - self.x_list[(x_pos - 1)]) / (self.x_list[x_pos] - self.x_list[(x_pos - 1)]))
beta = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
gamma = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
f = (((((((((((1 - alpha) * (1 - beta)) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][(z_pos - 1)](w)) + ((((1 - alpha) * (1 - beta)) * gamma) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][z_pos](w))) + ((((1 - alpha) * beta) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][y_pos][(z_pos - 1)](w))) + ((((1 - alpha) * beta) * gamma) * self.wInterpolators[(x_pos - 1)][y_pos][z_pos](w))) + (((alpha * (1 - beta)) * (1 - gamma)) * self.wInterpolators[x_pos][(y_pos - 1)][(z_pos - 1)](w))) + (((alpha * (1 - beta)) * gamma) * self.wInterpolators[x_pos][(y_pos - 1)][z_pos](w))) + (((alpha * beta) * (1 - gamma)) * self.wInterpolators[x_pos][y_pos][(z_pos - 1)](w))) + (((alpha * beta) * gamma) * self.wInterpolators[x_pos][y_pos][z_pos](w)))
else:
m = len(x)
x_pos = np.searchsorted(self.x_list, x)
x_pos[(x_pos > (self.x_n - 1))] = (self.x_n - 1)
y_pos = np.searchsorted(self.y_list, y)
y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
y_pos[(y_pos < 1)] = 1
z_pos = np.searchsorted(self.z_list, z)
z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
z_pos[(z_pos < 1)] = 1
f = (np.zeros(m) + np.nan)
for i in range(1, self.x_n):
for j in range(1, self.y_n):
for k in range(1, self.z_n):
c = np.logical_and(np.logical_and((i == x_pos), (j == y_pos)), (k == z_pos))
if np.any(c):
alpha = ((x[c] - self.x_list[(i - 1)]) / (self.x_list[i] - self.x_list[(i - 1)]))
beta = ((y[c] - self.y_list[(j - 1)]) / (self.y_list[j] - self.y_list[(j - 1)]))
gamma = ((z[c] - self.z_list[(k - 1)]) / (self.z_list[k] - self.z_list[(k - 1)]))
f[c] = (((((((((((1 - alpha) * (1 - beta)) * (1 - gamma)) * self.wInterpolators[(i - 1)][(j - 1)][(k - 1)](w[c])) + ((((1 - alpha) * (1 - beta)) * gamma) * self.wInterpolators[(i - 1)][(j - 1)][k](w[c]))) + ((((1 - alpha) * beta) * (1 - gamma)) * self.wInterpolators[(i - 1)][j][(k - 1)](w[c]))) + ((((1 - alpha) * beta) * gamma) * self.wInterpolators[(i - 1)][j][k](w[c]))) + (((alpha * (1 - beta)) * (1 - gamma)) * self.wInterpolators[i][(j - 1)][(k - 1)](w[c]))) + (((alpha * (1 - beta)) * gamma) * self.wInterpolators[i][(j - 1)][k](w[c]))) + (((alpha * beta) * (1 - gamma)) * self.wInterpolators[i][j][(k - 1)](w[c]))) + (((alpha * beta) * gamma) * self.wInterpolators[i][j][k](w[c])))
return f
|
def _derW(self, w, x, y, z):
    """
    Returns the derivative with respect to w of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeW.
    """
    if _isscalar(w):
        # Scalar case: bracket (x,y,z) in the grid; indices are clipped to
        # [1, n-1], so out-of-range points are linearly extrapolated.
        x_pos = max(min(np.searchsorted(self.x_list, x), (self.x_n - 1)), 1)
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # Relative position of the query point within its cell on each axis.
        alpha = ((x - self.x_list[(x_pos - 1)]) / (self.x_list[x_pos] - self.x_list[(x_pos - 1)]))
        beta = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        gamma = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        # Trilinear blend of the w-derivative of the 1D interpolant at each
        # of the cell's 8 (x,y,z) corners.
        dfdw = (((((((((((1 - alpha) * (1 - beta)) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][(z_pos - 1)]._der(w)) + ((((1 - alpha) * (1 - beta)) * gamma) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][z_pos]._der(w))) + ((((1 - alpha) * beta) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][y_pos][(z_pos - 1)]._der(w))) + ((((1 - alpha) * beta) * gamma) * self.wInterpolators[(x_pos - 1)][y_pos][z_pos]._der(w))) + (((alpha * (1 - beta)) * (1 - gamma)) * self.wInterpolators[x_pos][(y_pos - 1)][(z_pos - 1)]._der(w))) + (((alpha * (1 - beta)) * gamma) * self.wInterpolators[x_pos][(y_pos - 1)][z_pos]._der(w))) + (((alpha * beta) * (1 - gamma)) * self.wInterpolators[x_pos][y_pos][(z_pos - 1)]._der(w))) + (((alpha * beta) * gamma) * self.wInterpolators[x_pos][y_pos][z_pos]._der(w)))
    else:
        # Array case: bucket every query point into its grid cell, then loop
        # over cells and evaluate all points sharing a cell at once.
        m = len(x)
        x_pos = np.searchsorted(self.x_list, x)
        x_pos[(x_pos > (self.x_n - 1))] = (self.x_n - 1)
        # NOTE(review): unlike y_pos/z_pos below (and the scalar branch),
        # x_pos is not clipped below at 1, so points with x below the grid
        # never match the loop and stay NaN -- confirm this is intended.
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        # Points not matched by any (i,j,k) cell remain NaN.
        dfdw = (np.zeros(m) + np.nan)
        for i in range(1, self.x_n):
            for j in range(1, self.y_n):
                for k in range(1, self.z_n):
                    c = np.logical_and(np.logical_and((i == x_pos), (j == y_pos)), (k == z_pos))
                    if np.any(c):
                        alpha = ((x[c] - self.x_list[(i - 1)]) / (self.x_list[i] - self.x_list[(i - 1)]))
                        beta = ((y[c] - self.y_list[(j - 1)]) / (self.y_list[j] - self.y_list[(j - 1)]))
                        gamma = ((z[c] - self.z_list[(k - 1)]) / (self.z_list[k] - self.z_list[(k - 1)]))
                        dfdw[c] = (((((((((((1 - alpha) * (1 - beta)) * (1 - gamma)) * self.wInterpolators[(i - 1)][(j - 1)][(k - 1)]._der(w[c])) + ((((1 - alpha) * (1 - beta)) * gamma) * self.wInterpolators[(i - 1)][(j - 1)][k]._der(w[c]))) + ((((1 - alpha) * beta) * (1 - gamma)) * self.wInterpolators[(i - 1)][j][(k - 1)]._der(w[c]))) + ((((1 - alpha) * beta) * gamma) * self.wInterpolators[(i - 1)][j][k]._der(w[c]))) + (((alpha * (1 - beta)) * (1 - gamma)) * self.wInterpolators[i][(j - 1)][(k - 1)]._der(w[c]))) + (((alpha * (1 - beta)) * gamma) * self.wInterpolators[i][(j - 1)][k]._der(w[c]))) + (((alpha * beta) * (1 - gamma)) * self.wInterpolators[i][j][(k - 1)]._der(w[c]))) + (((alpha * beta) * gamma) * self.wInterpolators[i][j][k]._der(w[c])))
    return dfdw
| 5,101,724,768,978,712,000
|
Returns the derivative with respect to w of the interpolated function
at each value in w,x,y,z. Only called internally by HARKinterpolator4D.derivativeW.
|
HARK/interpolation.py
|
_derW
|
cohenimhuji/HARK
|
python
|
def _derW(self, w, x, y, z):
    """
    Returns the derivative with respect to w of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeW.
    """
    if _isscalar(w):
        # Scalar case: bracket (x,y,z) in the grid; indices are clipped to
        # [1, n-1], so out-of-range points are linearly extrapolated.
        x_pos = max(min(np.searchsorted(self.x_list, x), (self.x_n - 1)), 1)
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # Relative position of the query point within its cell on each axis.
        alpha = ((x - self.x_list[(x_pos - 1)]) / (self.x_list[x_pos] - self.x_list[(x_pos - 1)]))
        beta = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        gamma = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        # Trilinear blend of the w-derivative of the 1D interpolant at each
        # of the cell's 8 (x,y,z) corners.
        dfdw = (((((((((((1 - alpha) * (1 - beta)) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][(z_pos - 1)]._der(w)) + ((((1 - alpha) * (1 - beta)) * gamma) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][z_pos]._der(w))) + ((((1 - alpha) * beta) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][y_pos][(z_pos - 1)]._der(w))) + ((((1 - alpha) * beta) * gamma) * self.wInterpolators[(x_pos - 1)][y_pos][z_pos]._der(w))) + (((alpha * (1 - beta)) * (1 - gamma)) * self.wInterpolators[x_pos][(y_pos - 1)][(z_pos - 1)]._der(w))) + (((alpha * (1 - beta)) * gamma) * self.wInterpolators[x_pos][(y_pos - 1)][z_pos]._der(w))) + (((alpha * beta) * (1 - gamma)) * self.wInterpolators[x_pos][y_pos][(z_pos - 1)]._der(w))) + (((alpha * beta) * gamma) * self.wInterpolators[x_pos][y_pos][z_pos]._der(w)))
    else:
        # Array case: bucket every query point into its grid cell, then loop
        # over cells and evaluate all points sharing a cell at once.
        m = len(x)
        x_pos = np.searchsorted(self.x_list, x)
        x_pos[(x_pos > (self.x_n - 1))] = (self.x_n - 1)
        # NOTE(review): unlike y_pos/z_pos below (and the scalar branch),
        # x_pos is not clipped below at 1, so points with x below the grid
        # never match the loop and stay NaN -- confirm this is intended.
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        # Points not matched by any (i,j,k) cell remain NaN.
        dfdw = (np.zeros(m) + np.nan)
        for i in range(1, self.x_n):
            for j in range(1, self.y_n):
                for k in range(1, self.z_n):
                    c = np.logical_and(np.logical_and((i == x_pos), (j == y_pos)), (k == z_pos))
                    if np.any(c):
                        alpha = ((x[c] - self.x_list[(i - 1)]) / (self.x_list[i] - self.x_list[(i - 1)]))
                        beta = ((y[c] - self.y_list[(j - 1)]) / (self.y_list[j] - self.y_list[(j - 1)]))
                        gamma = ((z[c] - self.z_list[(k - 1)]) / (self.z_list[k] - self.z_list[(k - 1)]))
                        dfdw[c] = (((((((((((1 - alpha) * (1 - beta)) * (1 - gamma)) * self.wInterpolators[(i - 1)][(j - 1)][(k - 1)]._der(w[c])) + ((((1 - alpha) * (1 - beta)) * gamma) * self.wInterpolators[(i - 1)][(j - 1)][k]._der(w[c]))) + ((((1 - alpha) * beta) * (1 - gamma)) * self.wInterpolators[(i - 1)][j][(k - 1)]._der(w[c]))) + ((((1 - alpha) * beta) * gamma) * self.wInterpolators[(i - 1)][j][k]._der(w[c]))) + (((alpha * (1 - beta)) * (1 - gamma)) * self.wInterpolators[i][(j - 1)][(k - 1)]._der(w[c]))) + (((alpha * (1 - beta)) * gamma) * self.wInterpolators[i][(j - 1)][k]._der(w[c]))) + (((alpha * beta) * (1 - gamma)) * self.wInterpolators[i][j][(k - 1)]._der(w[c]))) + (((alpha * beta) * gamma) * self.wInterpolators[i][j][k]._der(w[c])))
    return dfdw
|
def _derX(self, w, x, y, z):
    """
    Returns the derivative with respect to x of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeX.
    """
    if _isscalar(w):
        # Scalar case: bracket (x,y,z) in the grid; indices are clipped to
        # [1, n-1], so out-of-range points are linearly extrapolated.
        x_pos = max(min(np.searchsorted(self.x_list, x), (self.x_n - 1)), 1)
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # Only the y and z weights are needed: df/dx is the finite
        # difference across the x cell of the bilinear (y,z) blend.
        beta = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        gamma = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        dfdx = ((((((((1 - beta) * (1 - gamma)) * self.wInterpolators[x_pos][(y_pos - 1)][(z_pos - 1)](w)) + (((1 - beta) * gamma) * self.wInterpolators[x_pos][(y_pos - 1)][z_pos](w))) + ((beta * (1 - gamma)) * self.wInterpolators[x_pos][y_pos][(z_pos - 1)](w))) + ((beta * gamma) * self.wInterpolators[x_pos][y_pos][z_pos](w))) - ((((((1 - beta) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][(z_pos - 1)](w)) + (((1 - beta) * gamma) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][z_pos](w))) + ((beta * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][y_pos][(z_pos - 1)](w))) + ((beta * gamma) * self.wInterpolators[(x_pos - 1)][y_pos][z_pos](w)))) / (self.x_list[x_pos] - self.x_list[(x_pos - 1)]))
    else:
        # Array case: bucket every query point into its grid cell, then loop
        # over cells and evaluate all points sharing a cell at once.
        m = len(x)
        x_pos = np.searchsorted(self.x_list, x)
        x_pos[(x_pos > (self.x_n - 1))] = (self.x_n - 1)
        # NOTE(review): x_pos has no lower clip at 1 (unlike y_pos/z_pos and
        # the scalar branch); points with x below the grid stay NaN -- confirm.
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        # Points not matched by any (i,j,k) cell remain NaN.
        dfdx = (np.zeros(m) + np.nan)
        for i in range(1, self.x_n):
            for j in range(1, self.y_n):
                for k in range(1, self.z_n):
                    c = np.logical_and(np.logical_and((i == x_pos), (j == y_pos)), (k == z_pos))
                    if np.any(c):
                        beta = ((y[c] - self.y_list[(j - 1)]) / (self.y_list[j] - self.y_list[(j - 1)]))
                        gamma = ((z[c] - self.z_list[(k - 1)]) / (self.z_list[k] - self.z_list[(k - 1)]))
                        dfdx[c] = ((((((((1 - beta) * (1 - gamma)) * self.wInterpolators[i][(j - 1)][(k - 1)](w[c])) + (((1 - beta) * gamma) * self.wInterpolators[i][(j - 1)][k](w[c]))) + ((beta * (1 - gamma)) * self.wInterpolators[i][j][(k - 1)](w[c]))) + ((beta * gamma) * self.wInterpolators[i][j][k](w[c]))) - ((((((1 - beta) * (1 - gamma)) * self.wInterpolators[(i - 1)][(j - 1)][(k - 1)](w[c])) + (((1 - beta) * gamma) * self.wInterpolators[(i - 1)][(j - 1)][k](w[c]))) + ((beta * (1 - gamma)) * self.wInterpolators[(i - 1)][j][(k - 1)](w[c]))) + ((beta * gamma) * self.wInterpolators[(i - 1)][j][k](w[c])))) / (self.x_list[i] - self.x_list[(i - 1)]))
    return dfdx
| -5,434,517,824,112,447,000
|
Returns the derivative with respect to x of the interpolated function
at each value in w,x,y,z. Only called internally by HARKinterpolator4D.derivativeX.
|
HARK/interpolation.py
|
_derX
|
cohenimhuji/HARK
|
python
|
def _derX(self, w, x, y, z):
    """
    Returns the derivative with respect to x of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeX.
    """
    if _isscalar(w):
        # Scalar case: bracket (x,y,z) in the grid; indices are clipped to
        # [1, n-1], so out-of-range points are linearly extrapolated.
        x_pos = max(min(np.searchsorted(self.x_list, x), (self.x_n - 1)), 1)
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # Only the y and z weights are needed: df/dx is the finite
        # difference across the x cell of the bilinear (y,z) blend.
        beta = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        gamma = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        dfdx = ((((((((1 - beta) * (1 - gamma)) * self.wInterpolators[x_pos][(y_pos - 1)][(z_pos - 1)](w)) + (((1 - beta) * gamma) * self.wInterpolators[x_pos][(y_pos - 1)][z_pos](w))) + ((beta * (1 - gamma)) * self.wInterpolators[x_pos][y_pos][(z_pos - 1)](w))) + ((beta * gamma) * self.wInterpolators[x_pos][y_pos][z_pos](w))) - ((((((1 - beta) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][(z_pos - 1)](w)) + (((1 - beta) * gamma) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][z_pos](w))) + ((beta * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][y_pos][(z_pos - 1)](w))) + ((beta * gamma) * self.wInterpolators[(x_pos - 1)][y_pos][z_pos](w)))) / (self.x_list[x_pos] - self.x_list[(x_pos - 1)]))
    else:
        # Array case: bucket every query point into its grid cell, then loop
        # over cells and evaluate all points sharing a cell at once.
        m = len(x)
        x_pos = np.searchsorted(self.x_list, x)
        x_pos[(x_pos > (self.x_n - 1))] = (self.x_n - 1)
        # NOTE(review): x_pos has no lower clip at 1 (unlike y_pos/z_pos and
        # the scalar branch); points with x below the grid stay NaN -- confirm.
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        # Points not matched by any (i,j,k) cell remain NaN.
        dfdx = (np.zeros(m) + np.nan)
        for i in range(1, self.x_n):
            for j in range(1, self.y_n):
                for k in range(1, self.z_n):
                    c = np.logical_and(np.logical_and((i == x_pos), (j == y_pos)), (k == z_pos))
                    if np.any(c):
                        beta = ((y[c] - self.y_list[(j - 1)]) / (self.y_list[j] - self.y_list[(j - 1)]))
                        gamma = ((z[c] - self.z_list[(k - 1)]) / (self.z_list[k] - self.z_list[(k - 1)]))
                        dfdx[c] = ((((((((1 - beta) * (1 - gamma)) * self.wInterpolators[i][(j - 1)][(k - 1)](w[c])) + (((1 - beta) * gamma) * self.wInterpolators[i][(j - 1)][k](w[c]))) + ((beta * (1 - gamma)) * self.wInterpolators[i][j][(k - 1)](w[c]))) + ((beta * gamma) * self.wInterpolators[i][j][k](w[c]))) - ((((((1 - beta) * (1 - gamma)) * self.wInterpolators[(i - 1)][(j - 1)][(k - 1)](w[c])) + (((1 - beta) * gamma) * self.wInterpolators[(i - 1)][(j - 1)][k](w[c]))) + ((beta * (1 - gamma)) * self.wInterpolators[(i - 1)][j][(k - 1)](w[c]))) + ((beta * gamma) * self.wInterpolators[(i - 1)][j][k](w[c])))) / (self.x_list[i] - self.x_list[(i - 1)]))
    return dfdx
|
def _derY(self, w, x, y, z):
    """
    Returns the derivative with respect to y of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeY.
    """
    if _isscalar(w):
        # Scalar case: bracket (x,y,z) in the grid; indices are clipped to
        # [1, n-1], so out-of-range points are linearly extrapolated.
        x_pos = max(min(np.searchsorted(self.x_list, x), (self.x_n - 1)), 1)
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # BUG FIX: the denominator previously read self.y_list[x_pos]; the
        # x-axis weight must be computed entirely from the x grid, matching
        # _derW/_evaluate and the array branch below.
        alpha = ((x - self.x_list[(x_pos - 1)]) / (self.x_list[x_pos] - self.x_list[(x_pos - 1)]))
        gamma = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        # df/dy is the finite difference across the y cell of the bilinear
        # (x,z) blend of the 1D interpolants.
        dfdy = ((((((((1 - alpha) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][y_pos][(z_pos - 1)](w)) + (((1 - alpha) * gamma) * self.wInterpolators[(x_pos - 1)][y_pos][z_pos](w))) + ((alpha * (1 - gamma)) * self.wInterpolators[x_pos][y_pos][(z_pos - 1)](w))) + ((alpha * gamma) * self.wInterpolators[x_pos][y_pos][z_pos](w))) - ((((((1 - alpha) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][(z_pos - 1)](w)) + (((1 - alpha) * gamma) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][z_pos](w))) + ((alpha * (1 - gamma)) * self.wInterpolators[x_pos][(y_pos - 1)][(z_pos - 1)](w))) + ((alpha * gamma) * self.wInterpolators[x_pos][(y_pos - 1)][z_pos](w)))) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
    else:
        # Array case: bucket every query point into its grid cell, then loop
        # over cells and evaluate all points sharing a cell at once.
        m = len(x)
        x_pos = np.searchsorted(self.x_list, x)
        x_pos[(x_pos > (self.x_n - 1))] = (self.x_n - 1)
        # BUG FIX: clip x_pos below at 1, as the scalar branch and the y/z
        # indices do; otherwise points with x below the grid returned NaN.
        x_pos[(x_pos < 1)] = 1
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdy = (np.zeros(m) + np.nan)
        for i in range(1, self.x_n):
            for j in range(1, self.y_n):
                for k in range(1, self.z_n):
                    c = np.logical_and(np.logical_and((i == x_pos), (j == y_pos)), (k == z_pos))
                    if np.any(c):
                        alpha = ((x[c] - self.x_list[(i - 1)]) / (self.x_list[i] - self.x_list[(i - 1)]))
                        gamma = ((z[c] - self.z_list[(k - 1)]) / (self.z_list[k] - self.z_list[(k - 1)]))
                        dfdy[c] = ((((((((1 - alpha) * (1 - gamma)) * self.wInterpolators[(i - 1)][j][(k - 1)](w[c])) + (((1 - alpha) * gamma) * self.wInterpolators[(i - 1)][j][k](w[c]))) + ((alpha * (1 - gamma)) * self.wInterpolators[i][j][(k - 1)](w[c]))) + ((alpha * gamma) * self.wInterpolators[i][j][k](w[c]))) - ((((((1 - alpha) * (1 - gamma)) * self.wInterpolators[(i - 1)][(j - 1)][(k - 1)](w[c])) + (((1 - alpha) * gamma) * self.wInterpolators[(i - 1)][(j - 1)][k](w[c]))) + ((alpha * (1 - gamma)) * self.wInterpolators[i][(j - 1)][(k - 1)](w[c]))) + ((alpha * gamma) * self.wInterpolators[i][(j - 1)][k](w[c])))) / (self.y_list[j] - self.y_list[(j - 1)]))
    return dfdy
| -7,388,617,949,752,141,000
|
Returns the derivative with respect to y of the interpolated function
at each value in w,x,y,z. Only called internally by HARKinterpolator4D.derivativeY.
|
HARK/interpolation.py
|
_derY
|
cohenimhuji/HARK
|
python
|
def _derY(self, w, x, y, z):
    """
    Returns the derivative with respect to y of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeY.
    """
    if _isscalar(w):
        # Scalar case: bracket (x,y,z) in the grid; indices are clipped to
        # [1, n-1], so out-of-range points are linearly extrapolated.
        x_pos = max(min(np.searchsorted(self.x_list, x), (self.x_n - 1)), 1)
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # BUG FIX: the denominator previously read self.y_list[x_pos]; the
        # x-axis weight must be computed entirely from the x grid, matching
        # _derW/_evaluate and the array branch below.
        alpha = ((x - self.x_list[(x_pos - 1)]) / (self.x_list[x_pos] - self.x_list[(x_pos - 1)]))
        gamma = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        # df/dy is the finite difference across the y cell of the bilinear
        # (x,z) blend of the 1D interpolants.
        dfdy = ((((((((1 - alpha) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][y_pos][(z_pos - 1)](w)) + (((1 - alpha) * gamma) * self.wInterpolators[(x_pos - 1)][y_pos][z_pos](w))) + ((alpha * (1 - gamma)) * self.wInterpolators[x_pos][y_pos][(z_pos - 1)](w))) + ((alpha * gamma) * self.wInterpolators[x_pos][y_pos][z_pos](w))) - ((((((1 - alpha) * (1 - gamma)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][(z_pos - 1)](w)) + (((1 - alpha) * gamma) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][z_pos](w))) + ((alpha * (1 - gamma)) * self.wInterpolators[x_pos][(y_pos - 1)][(z_pos - 1)](w))) + ((alpha * gamma) * self.wInterpolators[x_pos][(y_pos - 1)][z_pos](w)))) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
    else:
        # Array case: bucket every query point into its grid cell, then loop
        # over cells and evaluate all points sharing a cell at once.
        m = len(x)
        x_pos = np.searchsorted(self.x_list, x)
        x_pos[(x_pos > (self.x_n - 1))] = (self.x_n - 1)
        # BUG FIX: clip x_pos below at 1, as the scalar branch and the y/z
        # indices do; otherwise points with x below the grid returned NaN.
        x_pos[(x_pos < 1)] = 1
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdy = (np.zeros(m) + np.nan)
        for i in range(1, self.x_n):
            for j in range(1, self.y_n):
                for k in range(1, self.z_n):
                    c = np.logical_and(np.logical_and((i == x_pos), (j == y_pos)), (k == z_pos))
                    if np.any(c):
                        alpha = ((x[c] - self.x_list[(i - 1)]) / (self.x_list[i] - self.x_list[(i - 1)]))
                        gamma = ((z[c] - self.z_list[(k - 1)]) / (self.z_list[k] - self.z_list[(k - 1)]))
                        dfdy[c] = ((((((((1 - alpha) * (1 - gamma)) * self.wInterpolators[(i - 1)][j][(k - 1)](w[c])) + (((1 - alpha) * gamma) * self.wInterpolators[(i - 1)][j][k](w[c]))) + ((alpha * (1 - gamma)) * self.wInterpolators[i][j][(k - 1)](w[c]))) + ((alpha * gamma) * self.wInterpolators[i][j][k](w[c]))) - ((((((1 - alpha) * (1 - gamma)) * self.wInterpolators[(i - 1)][(j - 1)][(k - 1)](w[c])) + (((1 - alpha) * gamma) * self.wInterpolators[(i - 1)][(j - 1)][k](w[c]))) + ((alpha * (1 - gamma)) * self.wInterpolators[i][(j - 1)][(k - 1)](w[c]))) + ((alpha * gamma) * self.wInterpolators[i][(j - 1)][k](w[c])))) / (self.y_list[j] - self.y_list[(j - 1)]))
    return dfdy
|
def _derZ(self, w, x, y, z):
    """
    Returns the derivative with respect to z of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeZ.
    """
    if _isscalar(w):
        # Scalar case: bracket (x,y,z) in the grid; indices are clipped to
        # [1, n-1], so out-of-range points are linearly extrapolated.
        x_pos = max(min(np.searchsorted(self.x_list, x), (self.x_n - 1)), 1)
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # BUG FIX: the denominator previously read self.y_list[x_pos]; the
        # x-axis weight must be computed entirely from the x grid, matching
        # _derW/_evaluate and the array branch below.
        alpha = ((x - self.x_list[(x_pos - 1)]) / (self.x_list[x_pos] - self.x_list[(x_pos - 1)]))
        beta = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        # df/dz is the finite difference across the z cell of the bilinear
        # (x,y) blend of the 1D interpolants.
        dfdz = ((((((((1 - alpha) * (1 - beta)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][z_pos](w)) + (((1 - alpha) * beta) * self.wInterpolators[(x_pos - 1)][y_pos][z_pos](w))) + ((alpha * (1 - beta)) * self.wInterpolators[x_pos][(y_pos - 1)][z_pos](w))) + ((alpha * beta) * self.wInterpolators[x_pos][y_pos][z_pos](w))) - ((((((1 - alpha) * (1 - beta)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][(z_pos - 1)](w)) + (((1 - alpha) * beta) * self.wInterpolators[(x_pos - 1)][y_pos][(z_pos - 1)](w))) + ((alpha * (1 - beta)) * self.wInterpolators[x_pos][(y_pos - 1)][(z_pos - 1)](w))) + ((alpha * beta) * self.wInterpolators[x_pos][y_pos][(z_pos - 1)](w)))) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
    else:
        # Array case: bucket every query point into its grid cell, then loop
        # over cells and evaluate all points sharing a cell at once.
        m = len(x)
        x_pos = np.searchsorted(self.x_list, x)
        x_pos[(x_pos > (self.x_n - 1))] = (self.x_n - 1)
        # BUG FIX: clip x_pos below at 1, as the scalar branch and the y/z
        # indices do; otherwise points with x below the grid returned NaN.
        x_pos[(x_pos < 1)] = 1
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdz = (np.zeros(m) + np.nan)
        for i in range(1, self.x_n):
            for j in range(1, self.y_n):
                for k in range(1, self.z_n):
                    c = np.logical_and(np.logical_and((i == x_pos), (j == y_pos)), (k == z_pos))
                    if np.any(c):
                        alpha = ((x[c] - self.x_list[(i - 1)]) / (self.x_list[i] - self.x_list[(i - 1)]))
                        beta = ((y[c] - self.y_list[(j - 1)]) / (self.y_list[j] - self.y_list[(j - 1)]))
                        dfdz[c] = ((((((((1 - alpha) * (1 - beta)) * self.wInterpolators[(i - 1)][(j - 1)][k](w[c])) + (((1 - alpha) * beta) * self.wInterpolators[(i - 1)][j][k](w[c]))) + ((alpha * (1 - beta)) * self.wInterpolators[i][(j - 1)][k](w[c]))) + ((alpha * beta) * self.wInterpolators[i][j][k](w[c]))) - ((((((1 - alpha) * (1 - beta)) * self.wInterpolators[(i - 1)][(j - 1)][(k - 1)](w[c])) + (((1 - alpha) * beta) * self.wInterpolators[(i - 1)][j][(k - 1)](w[c]))) + ((alpha * (1 - beta)) * self.wInterpolators[i][(j - 1)][(k - 1)](w[c]))) + ((alpha * beta) * self.wInterpolators[i][j][(k - 1)](w[c])))) / (self.z_list[k] - self.z_list[(k - 1)]))
    return dfdz
| -2,644,368,838,800,340,500
|
Returns the derivative with respect to z of the interpolated function
at each value in w,x,y,z. Only called internally by HARKinterpolator4D.derivativeZ.
|
HARK/interpolation.py
|
_derZ
|
cohenimhuji/HARK
|
python
|
def _derZ(self, w, x, y, z):
    """
    Returns the derivative with respect to z of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeZ.
    """
    if _isscalar(w):
        # Scalar case: bracket (x,y,z) in the grid; indices are clipped to
        # [1, n-1], so out-of-range points are linearly extrapolated.
        x_pos = max(min(np.searchsorted(self.x_list, x), (self.x_n - 1)), 1)
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # BUG FIX: the denominator previously read self.y_list[x_pos]; the
        # x-axis weight must be computed entirely from the x grid, matching
        # _derW/_evaluate and the array branch below.
        alpha = ((x - self.x_list[(x_pos - 1)]) / (self.x_list[x_pos] - self.x_list[(x_pos - 1)]))
        beta = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        # df/dz is the finite difference across the z cell of the bilinear
        # (x,y) blend of the 1D interpolants.
        dfdz = ((((((((1 - alpha) * (1 - beta)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][z_pos](w)) + (((1 - alpha) * beta) * self.wInterpolators[(x_pos - 1)][y_pos][z_pos](w))) + ((alpha * (1 - beta)) * self.wInterpolators[x_pos][(y_pos - 1)][z_pos](w))) + ((alpha * beta) * self.wInterpolators[x_pos][y_pos][z_pos](w))) - ((((((1 - alpha) * (1 - beta)) * self.wInterpolators[(x_pos - 1)][(y_pos - 1)][(z_pos - 1)](w)) + (((1 - alpha) * beta) * self.wInterpolators[(x_pos - 1)][y_pos][(z_pos - 1)](w))) + ((alpha * (1 - beta)) * self.wInterpolators[x_pos][(y_pos - 1)][(z_pos - 1)](w))) + ((alpha * beta) * self.wInterpolators[x_pos][y_pos][(z_pos - 1)](w)))) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
    else:
        # Array case: bucket every query point into its grid cell, then loop
        # over cells and evaluate all points sharing a cell at once.
        m = len(x)
        x_pos = np.searchsorted(self.x_list, x)
        x_pos[(x_pos > (self.x_n - 1))] = (self.x_n - 1)
        # BUG FIX: clip x_pos below at 1, as the scalar branch and the y/z
        # indices do; otherwise points with x below the grid returned NaN.
        x_pos[(x_pos < 1)] = 1
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdz = (np.zeros(m) + np.nan)
        for i in range(1, self.x_n):
            for j in range(1, self.y_n):
                for k in range(1, self.z_n):
                    c = np.logical_and(np.logical_and((i == x_pos), (j == y_pos)), (k == z_pos))
                    if np.any(c):
                        alpha = ((x[c] - self.x_list[(i - 1)]) / (self.x_list[i] - self.x_list[(i - 1)]))
                        beta = ((y[c] - self.y_list[(j - 1)]) / (self.y_list[j] - self.y_list[(j - 1)]))
                        dfdz[c] = ((((((((1 - alpha) * (1 - beta)) * self.wInterpolators[(i - 1)][(j - 1)][k](w[c])) + (((1 - alpha) * beta) * self.wInterpolators[(i - 1)][j][k](w[c]))) + ((alpha * (1 - beta)) * self.wInterpolators[i][(j - 1)][k](w[c]))) + ((alpha * beta) * self.wInterpolators[i][j][k](w[c]))) - ((((((1 - alpha) * (1 - beta)) * self.wInterpolators[(i - 1)][(j - 1)][(k - 1)](w[c])) + (((1 - alpha) * beta) * self.wInterpolators[(i - 1)][j][(k - 1)](w[c]))) + ((alpha * (1 - beta)) * self.wInterpolators[i][(j - 1)][(k - 1)](w[c]))) + ((alpha * beta) * self.wInterpolators[i][j][(k - 1)](w[c])))) / (self.z_list[k] - self.z_list[(k - 1)]))
    return dfdz
|
def __init__(self, xyInterpolators, z_values):
'\n Constructor for the class, generating an approximation to a function of\n the form f(x,y,z) using interpolations over f(x,y,z_0) for a fixed grid\n of z_0 values.\n\n Parameters\n ----------\n xyInterpolators : [HARKinterpolator2D]\n A list of 2D interpolations over the x and y variables. The nth\n element of xyInterpolators represents f(x,y,z_values[n]).\n z_values: numpy.array\n An array of z values equal in length to xyInterpolators.\n\n Returns\n -------\n new instance of LinearInterpOnInterp2D\n '
self.xyInterpolators = xyInterpolators
self.z_list = z_values
self.z_n = z_values.size
| -4,900,033,517,917,075,000
|
Constructor for the class, generating an approximation to a function of
the form f(x,y,z) using interpolations over f(x,y,z_0) for a fixed grid
of z_0 values.
Parameters
----------
xyInterpolators : [HARKinterpolator2D]
A list of 2D interpolations over the x and y variables. The nth
element of xyInterpolators represents f(x,y,z_values[n]).
z_values: numpy.array
An array of z values equal in length to xyInterpolators.
Returns
-------
new instance of LinearInterpOnInterp2D
|
HARK/interpolation.py
|
__init__
|
cohenimhuji/HARK
|
python
|
def __init__(self, xyInterpolators, z_values):
'\n Constructor for the class, generating an approximation to a function of\n the form f(x,y,z) using interpolations over f(x,y,z_0) for a fixed grid\n of z_0 values.\n\n Parameters\n ----------\n xyInterpolators : [HARKinterpolator2D]\n A list of 2D interpolations over the x and y variables. The nth\n element of xyInterpolators represents f(x,y,z_values[n]).\n z_values: numpy.array\n An array of z values equal in length to xyInterpolators.\n\n Returns\n -------\n new instance of LinearInterpOnInterp2D\n '
self.xyInterpolators = xyInterpolators
self.z_list = z_values
self.z_n = z_values.size
|
def _evaluate(self, x, y, z):
    """
    Returns the level of the interpolated function at each value in x,y,z.
    Only called internally by HARKinterpolator3D.__call__ (etc).
    """
    if _isscalar(x):
        # Bracket z between two grid points (indices clipped to [1, z_n-1]
        # so out-of-range z extrapolates) and blend the two 2D interpolants.
        k = max(min(np.searchsorted(self.z_list, z), self.z_n - 1), 1)
        wgt = (z - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
        return (1 - wgt) * self.xyInterpolators[k - 1](x, y) + wgt * self.xyInterpolators[k](x, y)
    # Array case: group the points by bracketing z cell and evaluate each
    # group with one pair of 2D interpolator calls.
    f = np.zeros(len(x)) + np.nan
    k_arr = np.searchsorted(self.z_list, z)
    k_arr[k_arr > self.z_n - 1] = self.z_n - 1
    k_arr[k_arr < 1] = 1
    if x.size > 0:
        for k in range(1, self.z_n):
            sel = k_arr == k
            if np.any(sel):
                wgt = (z[sel] - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
                f[sel] = (1 - wgt) * self.xyInterpolators[k - 1](x[sel], y[sel]) + wgt * self.xyInterpolators[k](x[sel], y[sel])
    return f
| -7,647,440,318,575,433,000
|
Returns the level of the interpolated function at each value in x,y,z.
Only called internally by HARKinterpolator3D.__call__ (etc).
|
HARK/interpolation.py
|
_evaluate
|
cohenimhuji/HARK
|
python
|
def _evaluate(self, x, y, z):
    """
    Returns the level of the interpolated function at each value in x,y,z.
    Only called internally by HARKinterpolator3D.__call__ (etc).
    """
    if _isscalar(x):
        # Bracket z between two grid points (indices clipped to [1, z_n-1]
        # so out-of-range z extrapolates) and blend the two 2D interpolants.
        k = max(min(np.searchsorted(self.z_list, z), self.z_n - 1), 1)
        wgt = (z - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
        return (1 - wgt) * self.xyInterpolators[k - 1](x, y) + wgt * self.xyInterpolators[k](x, y)
    # Array case: group the points by bracketing z cell and evaluate each
    # group with one pair of 2D interpolator calls.
    f = np.zeros(len(x)) + np.nan
    k_arr = np.searchsorted(self.z_list, z)
    k_arr[k_arr > self.z_n - 1] = self.z_n - 1
    k_arr[k_arr < 1] = 1
    if x.size > 0:
        for k in range(1, self.z_n):
            sel = k_arr == k
            if np.any(sel):
                wgt = (z[sel] - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
                f[sel] = (1 - wgt) * self.xyInterpolators[k - 1](x[sel], y[sel]) + wgt * self.xyInterpolators[k](x[sel], y[sel])
    return f
|
def _derX(self, x, y, z):
    """
    Returns the derivative with respect to x of the interpolated function
    at each value in x,y,z. Only called internally by
    HARKinterpolator3D.derivativeX.
    """
    if _isscalar(x):
        # Bracket z (clipped so out-of-range z extrapolates) and blend the
        # x-derivatives of the two neighboring 2D interpolants.
        k = max(min(np.searchsorted(self.z_list, z), self.z_n - 1), 1)
        wgt = (z - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
        return (1 - wgt) * self.xyInterpolators[k - 1].derivativeX(x, y) + wgt * self.xyInterpolators[k].derivativeX(x, y)
    # Array case: group points by bracketing z cell, one pair of calls each.
    dfdx = np.zeros(len(x)) + np.nan
    k_arr = np.searchsorted(self.z_list, z)
    k_arr[k_arr > self.z_n - 1] = self.z_n - 1
    k_arr[k_arr < 1] = 1
    if x.size > 0:
        for k in range(1, self.z_n):
            sel = k_arr == k
            if np.any(sel):
                wgt = (z[sel] - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
                dfdx[sel] = (1 - wgt) * self.xyInterpolators[k - 1].derivativeX(x[sel], y[sel]) + wgt * self.xyInterpolators[k].derivativeX(x[sel], y[sel])
    return dfdx
| 1,322,114,914,739,417,900
|
Returns the derivative with respect to x of the interpolated function
at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeX.
|
HARK/interpolation.py
|
_derX
|
cohenimhuji/HARK
|
python
|
def _derX(self, x, y, z):
    """
    Returns the derivative with respect to x of the interpolated function
    at each value in x,y,z. Only called internally by
    HARKinterpolator3D.derivativeX.
    """
    if _isscalar(x):
        # Bracket z (clipped so out-of-range z extrapolates) and blend the
        # x-derivatives of the two neighboring 2D interpolants.
        k = max(min(np.searchsorted(self.z_list, z), self.z_n - 1), 1)
        wgt = (z - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
        return (1 - wgt) * self.xyInterpolators[k - 1].derivativeX(x, y) + wgt * self.xyInterpolators[k].derivativeX(x, y)
    # Array case: group points by bracketing z cell, one pair of calls each.
    dfdx = np.zeros(len(x)) + np.nan
    k_arr = np.searchsorted(self.z_list, z)
    k_arr[k_arr > self.z_n - 1] = self.z_n - 1
    k_arr[k_arr < 1] = 1
    if x.size > 0:
        for k in range(1, self.z_n):
            sel = k_arr == k
            if np.any(sel):
                wgt = (z[sel] - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
                dfdx[sel] = (1 - wgt) * self.xyInterpolators[k - 1].derivativeX(x[sel], y[sel]) + wgt * self.xyInterpolators[k].derivativeX(x[sel], y[sel])
    return dfdx
|
def _derY(self, x, y, z):
    """
    Returns the derivative with respect to y of the interpolated function
    at each value in x,y,z. Only called internally by
    HARKinterpolator3D.derivativeY.
    """
    if _isscalar(x):
        # Bracket z (clipped so out-of-range z extrapolates) and blend the
        # y-derivatives of the two neighboring 2D interpolants.
        k = max(min(np.searchsorted(self.z_list, z), self.z_n - 1), 1)
        wgt = (z - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
        return (1 - wgt) * self.xyInterpolators[k - 1].derivativeY(x, y) + wgt * self.xyInterpolators[k].derivativeY(x, y)
    # Array case: group points by bracketing z cell, one pair of calls each.
    dfdy = np.zeros(len(x)) + np.nan
    k_arr = np.searchsorted(self.z_list, z)
    k_arr[k_arr > self.z_n - 1] = self.z_n - 1
    k_arr[k_arr < 1] = 1
    if x.size > 0:
        for k in range(1, self.z_n):
            sel = k_arr == k
            if np.any(sel):
                wgt = (z[sel] - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
                dfdy[sel] = (1 - wgt) * self.xyInterpolators[k - 1].derivativeY(x[sel], y[sel]) + wgt * self.xyInterpolators[k].derivativeY(x[sel], y[sel])
    return dfdy
| -3,482,169,174,195,050,000
|
Returns the derivative with respect to y of the interpolated function
at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeY.
|
HARK/interpolation.py
|
_derY
|
cohenimhuji/HARK
|
python
|
def _derY(self, x, y, z):
    """
    Returns the derivative with respect to y of the interpolated function
    at each value in x,y,z. Only called internally by
    HARKinterpolator3D.derivativeY.
    """
    if _isscalar(x):
        # Bracket z (clipped so out-of-range z extrapolates) and blend the
        # y-derivatives of the two neighboring 2D interpolants.
        k = max(min(np.searchsorted(self.z_list, z), self.z_n - 1), 1)
        wgt = (z - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
        return (1 - wgt) * self.xyInterpolators[k - 1].derivativeY(x, y) + wgt * self.xyInterpolators[k].derivativeY(x, y)
    # Array case: group points by bracketing z cell, one pair of calls each.
    dfdy = np.zeros(len(x)) + np.nan
    k_arr = np.searchsorted(self.z_list, z)
    k_arr[k_arr > self.z_n - 1] = self.z_n - 1
    k_arr[k_arr < 1] = 1
    if x.size > 0:
        for k in range(1, self.z_n):
            sel = k_arr == k
            if np.any(sel):
                wgt = (z[sel] - self.z_list[k - 1]) / (self.z_list[k] - self.z_list[k - 1])
                dfdy[sel] = (1 - wgt) * self.xyInterpolators[k - 1].derivativeY(x[sel], y[sel]) + wgt * self.xyInterpolators[k].derivativeY(x[sel], y[sel])
    return dfdy
|
def _derZ(self, x, y, z):
    """
    Returns the derivative with respect to z of the interpolated function
    at each value in x,y,z. Only called internally by
    HARKinterpolator3D.derivativeZ.
    """
    if _isscalar(x):
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # BUG FIX: this branch previously differenced derivativeX of the two
        # neighboring 2D interpolants; df/dz is the finite difference of
        # their LEVELS across the z cell, matching the array branch below.
        dfdz = ((self.xyInterpolators[z_pos](x, y) - self.xyInterpolators[(z_pos - 1)](x, y)) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
    else:
        # Array case: bucket each point into its z cell (clipped so
        # out-of-range z extrapolates), then difference the bracketing
        # 2D interpolants for all points in each cell at once.
        m = len(x)
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdz = (np.zeros(m) + np.nan)
        if (x.size > 0):
            for i in range(1, self.z_n):
                c = (z_pos == i)
                if np.any(c):
                    dfdz[c] = ((self.xyInterpolators[i](x[c], y[c]) - self.xyInterpolators[(i - 1)](x[c], y[c])) / (self.z_list[i] - self.z_list[(i - 1)]))
    return dfdz
| 7,974,405,844,827,120,000
|
Returns the derivative with respect to z of the interpolated function
at each value in x,y,z. Only called internally by HARKinterpolator3D.derivativeZ.
|
HARK/interpolation.py
|
_derZ
|
cohenimhuji/HARK
|
python
|
def _derZ(self, x, y, z):
    """
    Returns the derivative with respect to z of the interpolated function
    at each value in x,y,z. Only called internally by
    HARKinterpolator3D.derivativeZ.

    Since interpolation is linear in z, df/dz is the slope between the
    *levels* of the two xy-interpolations bracketing z.
    """
    if _isscalar(x):
        # Bracketing z index, clipped to [1, z_n - 1] so points outside the
        # grid reuse the edge segment (linear extrapolation).
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # FIX: difference the function levels, not their x-derivatives, to
        # match the vector branch below (the old scalar branch returned
        # d2f/dxdz instead of df/dz).
        dfdz = ((self.xyInterpolators[z_pos](x, y) - self.xyInterpolators[(z_pos - 1)](x, y)) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
    else:
        m = len(x)
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdz = (np.zeros(m) + np.nan)  # entries left NaN if no sector matches
        if (x.size > 0):
            # Process all points bracketed by the same z-segment at once.
            for i in range(1, self.z_n):
                c = (z_pos == i)
                if np.any(c):
                    dfdz[c] = ((self.xyInterpolators[i](x[c], y[c]) - self.xyInterpolators[(i - 1)](x[c], y[c])) / (self.z_list[i] - self.z_list[(i - 1)]))
    return dfdz
|
def __init__(self, wxInterpolators, y_values, z_values):
    """
    Constructor for the class, generating an approximation to a function of
    the form f(w,x,y,z) using interpolations over f(w,x,y_0,z_0) for a fixed
    grid of y_0 and z_0 values.

    Parameters
    ----------
    wxInterpolators : [[HARKinterpolator2D]]
        A list of lists of 2D interpolations over the w and x variables.
        The i,j-th element of wxInterpolators represents
        f(w,x,y_values[i],z_values[j]).
    y_values: numpy.array
        An array of y values equal in length to wxInterpolators.
    z_values: numpy.array
        An array of z values equal in length to wxInterpolators[0].

    Returns
    -------
    new instance of BilinearInterpOnInterp2D
    """
    # Grid of precomputed (w,x)-interpolations, indexed [y][z].
    self.wxInterpolators = wxInterpolators
    # z nodes and node count.
    self.z_list = z_values
    self.z_n = z_values.size
    # y nodes and node count.
    self.y_list = y_values
    self.y_n = y_values.size
| 2,062,076,805,536,904,000
|
Constructor for the class, generating an approximation to a function of
the form f(w,x,y,z) using interpolations over f(w,x,y_0,z_0) for a fixed
grid of y_0 and z_0 values.
Parameters
----------
wxInterpolators : [[HARKinterpolator2D]]
A list of lists of 2D interpolations over the w and x variables.
The i,j-th element of wxInterpolators represents
f(w,x,y_values[i],z_values[j]).
y_values: numpy.array
An array of y values equal in length to wxInterpolators.
z_values: numpy.array
An array of z values equal in length to wxInterpolators[0].
Returns
-------
new instance of BilinearInterpOnInterp2D
|
HARK/interpolation.py
|
__init__
|
cohenimhuji/HARK
|
python
|
def __init__(self, wxInterpolators, y_values, z_values):
    """
    Constructor for the class, generating an approximation to a function of
    the form f(w,x,y,z) using interpolations over f(w,x,y_0,z_0) for a fixed
    grid of y_0 and z_0 values.

    Parameters
    ----------
    wxInterpolators : [[HARKinterpolator2D]]
        A list of lists of 2D interpolations over the w and x variables.
        The i,j-th element of wxInterpolators represents
        f(w,x,y_values[i],z_values[j]).
    y_values: numpy.array
        An array of y values equal in length to wxInterpolators.
    z_values: numpy.array
        An array of z values equal in length to wxInterpolators[0].

    Returns
    -------
    new instance of BilinearInterpOnInterp2D
    """
    # Grid of precomputed (w,x)-interpolations, indexed [y][z].
    self.wxInterpolators = wxInterpolators
    # z nodes and node count.
    self.z_list = z_values
    self.z_n = z_values.size
    # y nodes and node count.
    self.y_list = y_values
    self.y_n = y_values.size
|
def _evaluate(self, w, x, y, z):
    """
    Returns the level of the interpolated function at each value in
    w,x,y,z. Only called internally by HARKinterpolator4D.__call__ (etc).

    Bilinear interpolation over the (y,z) grid: the result is a weighted
    average of the four (w,x)-interpolators at the gridpoints bracketing
    (y,z).
    """
    if _isscalar(x):
        # Bracketing indices clipped to [1, n - 1] so points outside the
        # grid reuse the edge segment (linear extrapolation).
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # Relative positions within the bracketing y- and z-segments.
        alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        f = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(y_pos - 1)][(z_pos - 1)](w, x)) + (((1 - alpha) * beta) * self.wxInterpolators[(y_pos - 1)][z_pos](w, x))) + ((alpha * (1 - beta)) * self.wxInterpolators[y_pos][(z_pos - 1)](w, x))) + ((alpha * beta) * self.wxInterpolators[y_pos][z_pos](w, x)))
    else:
        m = len(x)
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        f = (np.zeros(m) + np.nan)  # entries left NaN if no sector matches
        # Evaluate each (y,z) sector's points as a vectorized batch.
        for i in range(1, self.y_n):
            for j in range(1, self.z_n):
                c = np.logical_and((i == y_pos), (j == z_pos))
                if np.any(c):
                    alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
                    beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
                    f[c] = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(i - 1)][(j - 1)](w[c], x[c])) + (((1 - alpha) * beta) * self.wxInterpolators[(i - 1)][j](w[c], x[c]))) + ((alpha * (1 - beta)) * self.wxInterpolators[i][(j - 1)](w[c], x[c]))) + ((alpha * beta) * self.wxInterpolators[i][j](w[c], x[c])))
    return f
| -7,212,439,489,972,631,000
|
Returns the level of the interpolated function at each value in x,y,z.
Only called internally by HARKinterpolator4D.__call__ (etc).
|
HARK/interpolation.py
|
_evaluate
|
cohenimhuji/HARK
|
python
|
def _evaluate(self, w, x, y, z):
    """
    Returns the level of the interpolated function at each value in
    w,x,y,z. Only called internally by HARKinterpolator4D.__call__ (etc).

    Bilinear interpolation over the (y,z) grid: the result is a weighted
    average of the four (w,x)-interpolators at the gridpoints bracketing
    (y,z).
    """
    if _isscalar(x):
        # Bracketing indices clipped to [1, n - 1] so points outside the
        # grid reuse the edge segment (linear extrapolation).
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        # Relative positions within the bracketing y- and z-segments.
        alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        f = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(y_pos - 1)][(z_pos - 1)](w, x)) + (((1 - alpha) * beta) * self.wxInterpolators[(y_pos - 1)][z_pos](w, x))) + ((alpha * (1 - beta)) * self.wxInterpolators[y_pos][(z_pos - 1)](w, x))) + ((alpha * beta) * self.wxInterpolators[y_pos][z_pos](w, x)))
    else:
        m = len(x)
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        f = (np.zeros(m) + np.nan)  # entries left NaN if no sector matches
        # Evaluate each (y,z) sector's points as a vectorized batch.
        for i in range(1, self.y_n):
            for j in range(1, self.z_n):
                c = np.logical_and((i == y_pos), (j == z_pos))
                if np.any(c):
                    alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
                    beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
                    f[c] = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(i - 1)][(j - 1)](w[c], x[c])) + (((1 - alpha) * beta) * self.wxInterpolators[(i - 1)][j](w[c], x[c]))) + ((alpha * (1 - beta)) * self.wxInterpolators[i][(j - 1)](w[c], x[c]))) + ((alpha * beta) * self.wxInterpolators[i][j](w[c], x[c])))
    return f
|
def _derW(self, w, x, y, z):
    """
    Returns the derivative with respect to w of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeW.

    Because w is the *first* argument of each 2D wx-interpolator, df/dw is
    the bilinear (y,z)-weighted average of the interpolators' derivativeX.
    """
    if _isscalar(x):
        # Bracketing indices clipped to [1, n - 1] (edge segments reused
        # for extrapolation).
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        dfdw = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(y_pos - 1)][(z_pos - 1)].derivativeX(w, x)) + (((1 - alpha) * beta) * self.wxInterpolators[(y_pos - 1)][z_pos].derivativeX(w, x))) + ((alpha * (1 - beta)) * self.wxInterpolators[y_pos][(z_pos - 1)].derivativeX(w, x))) + ((alpha * beta) * self.wxInterpolators[y_pos][z_pos].derivativeX(w, x)))
    else:
        m = len(x)
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdw = (np.zeros(m) + np.nan)  # entries left NaN if no sector matches
        # Evaluate each (y,z) sector's points as a vectorized batch.
        for i in range(1, self.y_n):
            for j in range(1, self.z_n):
                c = np.logical_and((i == y_pos), (j == z_pos))
                if np.any(c):
                    alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
                    beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
                    dfdw[c] = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(i - 1)][(j - 1)].derivativeX(w[c], x[c])) + (((1 - alpha) * beta) * self.wxInterpolators[(i - 1)][j].derivativeX(w[c], x[c]))) + ((alpha * (1 - beta)) * self.wxInterpolators[i][(j - 1)].derivativeX(w[c], x[c]))) + ((alpha * beta) * self.wxInterpolators[i][j].derivativeX(w[c], x[c])))
    return dfdw
| 3,454,840,866,681,943,600
|
Returns the derivative with respect to w of the interpolated function
at each value in w,x,y,z. Only called internally by HARKinterpolator4D.derivativeW.
|
HARK/interpolation.py
|
_derW
|
cohenimhuji/HARK
|
python
|
def _derW(self, w, x, y, z):
    """
    Returns the derivative with respect to w of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeW.

    Because w is the *first* argument of each 2D wx-interpolator, df/dw is
    the bilinear (y,z)-weighted average of the interpolators' derivativeX.
    """
    if _isscalar(x):
        # Bracketing indices clipped to [1, n - 1] (edge segments reused
        # for extrapolation).
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        dfdw = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(y_pos - 1)][(z_pos - 1)].derivativeX(w, x)) + (((1 - alpha) * beta) * self.wxInterpolators[(y_pos - 1)][z_pos].derivativeX(w, x))) + ((alpha * (1 - beta)) * self.wxInterpolators[y_pos][(z_pos - 1)].derivativeX(w, x))) + ((alpha * beta) * self.wxInterpolators[y_pos][z_pos].derivativeX(w, x)))
    else:
        m = len(x)
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdw = (np.zeros(m) + np.nan)  # entries left NaN if no sector matches
        # Evaluate each (y,z) sector's points as a vectorized batch.
        for i in range(1, self.y_n):
            for j in range(1, self.z_n):
                c = np.logical_and((i == y_pos), (j == z_pos))
                if np.any(c):
                    alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
                    beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
                    dfdw[c] = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(i - 1)][(j - 1)].derivativeX(w[c], x[c])) + (((1 - alpha) * beta) * self.wxInterpolators[(i - 1)][j].derivativeX(w[c], x[c]))) + ((alpha * (1 - beta)) * self.wxInterpolators[i][(j - 1)].derivativeX(w[c], x[c]))) + ((alpha * beta) * self.wxInterpolators[i][j].derivativeX(w[c], x[c])))
    return dfdw
|
def _derX(self, w, x, y, z):
    """
    Returns the derivative with respect to x of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeX.

    Because x is the *second* argument of each 2D wx-interpolator, df/dx is
    the bilinear (y,z)-weighted average of the interpolators' derivativeY.
    """
    if _isscalar(x):
        # Bracketing indices clipped to [1, n - 1] (edge segments reused
        # for extrapolation).
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        dfdx = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(y_pos - 1)][(z_pos - 1)].derivativeY(w, x)) + (((1 - alpha) * beta) * self.wxInterpolators[(y_pos - 1)][z_pos].derivativeY(w, x))) + ((alpha * (1 - beta)) * self.wxInterpolators[y_pos][(z_pos - 1)].derivativeY(w, x))) + ((alpha * beta) * self.wxInterpolators[y_pos][z_pos].derivativeY(w, x)))
    else:
        m = len(x)
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdx = (np.zeros(m) + np.nan)  # entries left NaN if no sector matches
        # Evaluate each (y,z) sector's points as a vectorized batch.
        for i in range(1, self.y_n):
            for j in range(1, self.z_n):
                c = np.logical_and((i == y_pos), (j == z_pos))
                if np.any(c):
                    alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
                    beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
                    dfdx[c] = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(i - 1)][(j - 1)].derivativeY(w[c], x[c])) + (((1 - alpha) * beta) * self.wxInterpolators[(i - 1)][j].derivativeY(w[c], x[c]))) + ((alpha * (1 - beta)) * self.wxInterpolators[i][(j - 1)].derivativeY(w[c], x[c]))) + ((alpha * beta) * self.wxInterpolators[i][j].derivativeY(w[c], x[c])))
    return dfdx
| 5,816,581,178,847,682,000
|
Returns the derivative with respect to x of the interpolated function
at each value in w,x,y,z. Only called internally by HARKinterpolator4D.derivativeX.
|
HARK/interpolation.py
|
_derX
|
cohenimhuji/HARK
|
python
|
def _derX(self, w, x, y, z):
    """
    Returns the derivative with respect to x of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeX.

    Because x is the *second* argument of each 2D wx-interpolator, df/dx is
    the bilinear (y,z)-weighted average of the interpolators' derivativeY.
    """
    if _isscalar(x):
        # Bracketing indices clipped to [1, n - 1] (edge segments reused
        # for extrapolation).
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        dfdx = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(y_pos - 1)][(z_pos - 1)].derivativeY(w, x)) + (((1 - alpha) * beta) * self.wxInterpolators[(y_pos - 1)][z_pos].derivativeY(w, x))) + ((alpha * (1 - beta)) * self.wxInterpolators[y_pos][(z_pos - 1)].derivativeY(w, x))) + ((alpha * beta) * self.wxInterpolators[y_pos][z_pos].derivativeY(w, x)))
    else:
        m = len(x)
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdx = (np.zeros(m) + np.nan)  # entries left NaN if no sector matches
        # Evaluate each (y,z) sector's points as a vectorized batch.
        for i in range(1, self.y_n):
            for j in range(1, self.z_n):
                c = np.logical_and((i == y_pos), (j == z_pos))
                if np.any(c):
                    alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
                    beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
                    dfdx[c] = ((((((1 - alpha) * (1 - beta)) * self.wxInterpolators[(i - 1)][(j - 1)].derivativeY(w[c], x[c])) + (((1 - alpha) * beta) * self.wxInterpolators[(i - 1)][j].derivativeY(w[c], x[c]))) + ((alpha * (1 - beta)) * self.wxInterpolators[i][(j - 1)].derivativeY(w[c], x[c]))) + ((alpha * beta) * self.wxInterpolators[i][j].derivativeY(w[c], x[c])))
    return dfdx
|
def _derY(self, w, x, y, z):
    """
    Returns the derivative with respect to y of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeY.

    Since interpolation is linear in y, df/dy is the finite difference
    (across the bracketing y-segment) of the z-interpolated levels.
    """
    if _isscalar(x):
        # Bracketing indices clipped to [1, n - 1] (edge segments reused
        # for extrapolation).
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        # Difference of z-interpolated levels at y_pos vs y_pos-1, divided
        # by the y-segment width.
        dfdy = (((((1 - beta) * self.wxInterpolators[y_pos][(z_pos - 1)](w, x)) + (beta * self.wxInterpolators[y_pos][z_pos](w, x))) - (((1 - beta) * self.wxInterpolators[(y_pos - 1)][(z_pos - 1)](w, x)) + (beta * self.wxInterpolators[(y_pos - 1)][z_pos](w, x)))) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
    else:
        m = len(x)
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdy = (np.zeros(m) + np.nan)  # entries left NaN if no sector matches
        # Evaluate each (y,z) sector's points as a vectorized batch.
        for i in range(1, self.y_n):
            for j in range(1, self.z_n):
                c = np.logical_and((i == y_pos), (j == z_pos))
                if np.any(c):
                    beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
                    dfdy[c] = (((((1 - beta) * self.wxInterpolators[i][(j - 1)](w[c], x[c])) + (beta * self.wxInterpolators[i][j](w[c], x[c]))) - (((1 - beta) * self.wxInterpolators[(i - 1)][(j - 1)](w[c], x[c])) + (beta * self.wxInterpolators[(i - 1)][j](w[c], x[c])))) / (self.y_list[i] - self.y_list[(i - 1)]))
    return dfdy
| -2,196,527,685,885,881,000
|
Returns the derivative with respect to y of the interpolated function
at each value in w,x,y,z. Only called internally by HARKinterpolator4D.derivativeY.
|
HARK/interpolation.py
|
_derY
|
cohenimhuji/HARK
|
python
|
def _derY(self, w, x, y, z):
    """
    Returns the derivative with respect to y of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeY.

    Since interpolation is linear in y, df/dy is the finite difference
    (across the bracketing y-segment) of the z-interpolated levels.
    """
    if _isscalar(x):
        # Bracketing indices clipped to [1, n - 1] (edge segments reused
        # for extrapolation).
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        beta = ((z - self.z_list[(z_pos - 1)]) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
        # Difference of z-interpolated levels at y_pos vs y_pos-1, divided
        # by the y-segment width.
        dfdy = (((((1 - beta) * self.wxInterpolators[y_pos][(z_pos - 1)](w, x)) + (beta * self.wxInterpolators[y_pos][z_pos](w, x))) - (((1 - beta) * self.wxInterpolators[(y_pos - 1)][(z_pos - 1)](w, x)) + (beta * self.wxInterpolators[(y_pos - 1)][z_pos](w, x)))) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
    else:
        m = len(x)
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdy = (np.zeros(m) + np.nan)  # entries left NaN if no sector matches
        # Evaluate each (y,z) sector's points as a vectorized batch.
        for i in range(1, self.y_n):
            for j in range(1, self.z_n):
                c = np.logical_and((i == y_pos), (j == z_pos))
                if np.any(c):
                    beta = ((z[c] - self.z_list[(j - 1)]) / (self.z_list[j] - self.z_list[(j - 1)]))
                    dfdy[c] = (((((1 - beta) * self.wxInterpolators[i][(j - 1)](w[c], x[c])) + (beta * self.wxInterpolators[i][j](w[c], x[c]))) - (((1 - beta) * self.wxInterpolators[(i - 1)][(j - 1)](w[c], x[c])) + (beta * self.wxInterpolators[(i - 1)][j](w[c], x[c])))) / (self.y_list[i] - self.y_list[(i - 1)]))
    return dfdy
|
def _derZ(self, w, x, y, z):
    """
    Returns the derivative with respect to z of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeZ.

    Since interpolation is linear in z, df/dz is the finite difference
    (across the bracketing z-segment) of the y-interpolated levels.
    """
    if _isscalar(x):
        # Bracketing indices clipped to [1, n - 1] (edge segments reused
        # for extrapolation).
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        # Difference of y-interpolated levels at z_pos vs z_pos-1, divided
        # by the z-segment width.
        dfdz = (((((1 - alpha) * self.wxInterpolators[(y_pos - 1)][z_pos](w, x)) + (alpha * self.wxInterpolators[y_pos][z_pos](w, x))) - (((1 - alpha) * self.wxInterpolators[(y_pos - 1)][(z_pos - 1)](w, x)) + (alpha * self.wxInterpolators[y_pos][(z_pos - 1)](w, x)))) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
    else:
        m = len(x)
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdz = (np.zeros(m) + np.nan)  # entries left NaN if no sector matches
        # Evaluate each (y,z) sector's points as a vectorized batch.
        for i in range(1, self.y_n):
            for j in range(1, self.z_n):
                c = np.logical_and((i == y_pos), (j == z_pos))
                if np.any(c):
                    alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
                    dfdz[c] = (((((1 - alpha) * self.wxInterpolators[(i - 1)][j](w[c], x[c])) + (alpha * self.wxInterpolators[i][j](w[c], x[c]))) - (((1 - alpha) * self.wxInterpolators[(i - 1)][(j - 1)](w[c], x[c])) + (alpha * self.wxInterpolators[i][(j - 1)](w[c], x[c])))) / (self.z_list[j] - self.z_list[(j - 1)]))
    return dfdz
| -3,309,131,212,846,705,700
|
Returns the derivative with respect to z of the interpolated function
at each value in w,x,y,z. Only called internally by HARKinterpolator4D.derivativeZ.
|
HARK/interpolation.py
|
_derZ
|
cohenimhuji/HARK
|
python
|
def _derZ(self, w, x, y, z):
    """
    Returns the derivative with respect to z of the interpolated function
    at each value in w,x,y,z. Only called internally by
    HARKinterpolator4D.derivativeZ.

    Since interpolation is linear in z, df/dz is the finite difference
    (across the bracketing z-segment) of the y-interpolated levels.
    """
    if _isscalar(x):
        # Bracketing indices clipped to [1, n - 1] (edge segments reused
        # for extrapolation).
        y_pos = max(min(np.searchsorted(self.y_list, y), (self.y_n - 1)), 1)
        z_pos = max(min(np.searchsorted(self.z_list, z), (self.z_n - 1)), 1)
        alpha = ((y - self.y_list[(y_pos - 1)]) / (self.y_list[y_pos] - self.y_list[(y_pos - 1)]))
        # Difference of y-interpolated levels at z_pos vs z_pos-1, divided
        # by the z-segment width.
        dfdz = (((((1 - alpha) * self.wxInterpolators[(y_pos - 1)][z_pos](w, x)) + (alpha * self.wxInterpolators[y_pos][z_pos](w, x))) - (((1 - alpha) * self.wxInterpolators[(y_pos - 1)][(z_pos - 1)](w, x)) + (alpha * self.wxInterpolators[y_pos][(z_pos - 1)](w, x)))) / (self.z_list[z_pos] - self.z_list[(z_pos - 1)]))
    else:
        m = len(x)
        y_pos = np.searchsorted(self.y_list, y)
        y_pos[(y_pos > (self.y_n - 1))] = (self.y_n - 1)
        y_pos[(y_pos < 1)] = 1
        z_pos = np.searchsorted(self.z_list, z)
        z_pos[(z_pos > (self.z_n - 1))] = (self.z_n - 1)
        z_pos[(z_pos < 1)] = 1
        dfdz = (np.zeros(m) + np.nan)  # entries left NaN if no sector matches
        # Evaluate each (y,z) sector's points as a vectorized batch.
        for i in range(1, self.y_n):
            for j in range(1, self.z_n):
                c = np.logical_and((i == y_pos), (j == z_pos))
                if np.any(c):
                    alpha = ((y[c] - self.y_list[(i - 1)]) / (self.y_list[i] - self.y_list[(i - 1)]))
                    dfdz[c] = (((((1 - alpha) * self.wxInterpolators[(i - 1)][j](w[c], x[c])) + (alpha * self.wxInterpolators[i][j](w[c], x[c]))) - (((1 - alpha) * self.wxInterpolators[(i - 1)][(j - 1)](w[c], x[c])) + (alpha * self.wxInterpolators[i][(j - 1)](w[c], x[c])))) / (self.z_list[j] - self.z_list[(j - 1)]))
    return dfdz
|
def __init__(self, f_values, x_values, y_values):
    """
    Constructor for 2D curvilinear interpolation for a function f(x,y)

    Parameters
    ----------
    f_values: numpy.array
        A 2D array of function values such that f_values[i,j] =
        f(x_values[i,j],y_values[i,j]).
    x_values: numpy.array
        A 2D array of x values of the same size as f_values.
    y_values: numpy.array
        A 2D array of y values of the same size as f_values.

    Returns
    -------
    new instance of Curvilinear2DInterp
    """
    # Store the function values and the (possibly non-rectilinear) grid.
    self.f_values = f_values
    self.x_values = x_values
    self.y_values = y_values
    # Grid dimensions come from the shape of the value array.
    shape = f_values.shape
    self.x_n = shape[0]
    self.y_n = shape[1]
    # Precompute which quadratic root to use in each sector.
    self.updatePolarity()
| 313,606,005,359,733,760
|
Constructor for 2D curvilinear interpolation for a function f(x,y)
Parameters
----------
f_values: numpy.array
A 2D array of function values such that f_values[i,j] =
f(x_values[i,j],y_values[i,j]).
x_values: numpy.array
A 2D array of x values of the same size as f_values.
y_values: numpy.array
A 2D array of y values of the same size as f_values.
Returns
-------
new instance of Curvilinear2DInterp
|
HARK/interpolation.py
|
__init__
|
cohenimhuji/HARK
|
python
|
def __init__(self, f_values, x_values, y_values):
    """
    Constructor for 2D curvilinear interpolation for a function f(x,y)

    Parameters
    ----------
    f_values: numpy.array
        A 2D array of function values such that f_values[i,j] =
        f(x_values[i,j],y_values[i,j]).
    x_values: numpy.array
        A 2D array of x values of the same size as f_values.
    y_values: numpy.array
        A 2D array of y values of the same size as f_values.

    Returns
    -------
    new instance of Curvilinear2DInterp
    """
    # Store the function values and the (possibly non-rectilinear) grid.
    self.f_values = f_values
    self.x_values = x_values
    self.y_values = y_values
    # Grid dimensions come from the shape of the value array.
    shape = f_values.shape
    self.x_n = shape[0]
    self.y_n = shape[1]
    # Precompute which quadratic root to use in each sector.
    self.updatePolarity()
|
def updatePolarity(self):
    """
    Fills in the polarity attribute of the interpolation, determining whether
    the "plus" (True) or "minus" (False) solution of the system of equations
    should be used for each sector. Needs to be called in __init__.

    Parameters
    ----------
    none

    Returns
    -------
    none
    """
    # Midpoint of each sector: average of its lower-left and upper-right
    # gridpoints.
    x_temp = (0.5 * (self.x_values[0:(self.x_n - 1), 0:(self.y_n - 1)] + self.x_values[1:self.x_n, 1:self.y_n]))
    y_temp = (0.5 * (self.y_values[0:(self.x_n - 1), 0:(self.y_n - 1)] + self.y_values[1:self.x_n, 1:self.y_n]))
    size = ((self.x_n - 1) * (self.y_n - 1))
    x_temp = np.reshape(x_temp, size)
    y_temp = np.reshape(y_temp, size)
    # Sector indices aligned with the flattened midpoints.
    y_pos = np.tile(np.arange(0, (self.y_n - 1)), (self.x_n - 1))
    x_pos = np.reshape(np.tile(np.arange(0, (self.x_n - 1)), ((self.y_n - 1), 1)).transpose(), size)
    # Provisionally assume "plus" everywhere so findCoords can be run.
    self.polarity = np.ones(((self.x_n - 1), (self.y_n - 1)), dtype=bool)
    (alpha, beta) = self.findCoords(x_temp, y_temp, x_pos, y_pos)
    # "Plus" is correct exactly where it maps the sector midpoint strictly
    # inside the unit square; elsewhere the "minus" root must be used.
    polarity = np.logical_and(np.logical_and((alpha > 0), (alpha < 1)), np.logical_and((beta > 0), (beta < 1)))
    self.polarity = np.reshape(polarity, ((self.x_n - 1), (self.y_n - 1)))
| 1,964,623,072,593,055,500
|
Fills in the polarity attribute of the interpolation, determining whether
the "plus" (True) or "minus" (False) solution of the system of equations
should be used for each sector. Needs to be called in __init__.
Parameters
----------
none
Returns
-------
none
|
HARK/interpolation.py
|
updatePolarity
|
cohenimhuji/HARK
|
python
|
def updatePolarity(self):
    """
    Fills in the polarity attribute of the interpolation, determining whether
    the "plus" (True) or "minus" (False) solution of the system of equations
    should be used for each sector. Needs to be called in __init__.

    Parameters
    ----------
    none

    Returns
    -------
    none
    """
    # Midpoint of each sector: average of its lower-left and upper-right
    # gridpoints.
    x_temp = (0.5 * (self.x_values[0:(self.x_n - 1), 0:(self.y_n - 1)] + self.x_values[1:self.x_n, 1:self.y_n]))
    y_temp = (0.5 * (self.y_values[0:(self.x_n - 1), 0:(self.y_n - 1)] + self.y_values[1:self.x_n, 1:self.y_n]))
    size = ((self.x_n - 1) * (self.y_n - 1))
    x_temp = np.reshape(x_temp, size)
    y_temp = np.reshape(y_temp, size)
    # Sector indices aligned with the flattened midpoints.
    y_pos = np.tile(np.arange(0, (self.y_n - 1)), (self.x_n - 1))
    x_pos = np.reshape(np.tile(np.arange(0, (self.x_n - 1)), ((self.y_n - 1), 1)).transpose(), size)
    # Provisionally assume "plus" everywhere so findCoords can be run.
    self.polarity = np.ones(((self.x_n - 1), (self.y_n - 1)), dtype=bool)
    (alpha, beta) = self.findCoords(x_temp, y_temp, x_pos, y_pos)
    # "Plus" is correct exactly where it maps the sector midpoint strictly
    # inside the unit square; elsewhere the "minus" root must be used.
    polarity = np.logical_and(np.logical_and((alpha > 0), (alpha < 1)), np.logical_and((beta > 0), (beta < 1)))
    self.polarity = np.reshape(polarity, ((self.x_n - 1), (self.y_n - 1)))
|
def findSector(self, x, y):
    """
    Finds the quadrilateral "sector" for each (x,y) point in the input.
    Only called as a subroutine of _evaluate().

    Parameters
    ----------
    x : np.array
        Values whose sector should be found.
    y : np.array
        Values whose sector should be found. Should be same size as x.

    Returns
    -------
    x_pos : np.array
        Sector x-coordinates for each point of the input, of the same size.
    y_pos : np.array
        Sector y-coordinates for each point of the input, of the same size.
    """
    # Start the walk from the middle of the grid for every point.
    m = x.size
    x_pos_guess = ((np.ones(m) * self.x_n) / 2).astype(int)
    y_pos_guess = ((np.ones(m) * self.y_n) / 2).astype(int)
    # Returns 1 when (x_check,y_check) lies on the "violating" side of the
    # directed edge from bound point 1 to bound point 2, else 0.
    violationCheck = (lambda x_check, y_check, x_bound_1, y_bound_1, x_bound_2, y_bound_2: (((((y_bound_2 - y_bound_1) * x_check) - ((x_bound_2 - x_bound_1) * y_check)) > ((x_bound_1 * y_bound_2) - (y_bound_1 * x_bound_2))) + 0))
    these = np.ones(m, dtype=bool)  # points whose sector is still unknown
    max_loops = (self.x_n + self.y_n)  # the walk can cross the grid at most once
    loops = 0
    while (np.any(these) and (loops < max_loops)):
        x_temp = x[these]
        y_temp = y[these]
        # Corners of each point's current candidate sector:
        # A = lower-left, B = lower-right, C = upper-left, D = upper-right.
        xA = self.x_values[(x_pos_guess[these], y_pos_guess[these])]
        xB = self.x_values[((x_pos_guess[these] + 1), y_pos_guess[these])]
        xC = self.x_values[(x_pos_guess[these], (y_pos_guess[these] + 1))]
        xD = self.x_values[((x_pos_guess[these] + 1), (y_pos_guess[these] + 1))]
        yA = self.y_values[(x_pos_guess[these], y_pos_guess[these])]
        yB = self.y_values[((x_pos_guess[these] + 1), y_pos_guess[these])]
        yC = self.y_values[(x_pos_guess[these], (y_pos_guess[these] + 1))]
        yD = self.y_values[((x_pos_guess[these] + 1), (y_pos_guess[these] + 1))]
        # Cheap bounding-box tests first: step toward points clearly outside
        # the candidate sector's corner extremes.
        move_down = ((y_temp < np.minimum(yA, yB)) + 0)
        move_right = ((x_temp > np.maximum(xB, xD)) + 0)
        move_up = ((y_temp > np.maximum(yC, yD)) + 0)
        move_left = ((x_temp < np.minimum(xA, xC)) + 0)
        # Points passing all bounding-box tests get the exact edge tests.
        c = ((((move_down + move_right) + move_up) + move_left) == 0)
        move_down[c] = violationCheck(x_temp[c], y_temp[c], xA[c], yA[c], xB[c], yB[c])
        move_right[c] = violationCheck(x_temp[c], y_temp[c], xB[c], yB[c], xD[c], yD[c])
        move_up[c] = violationCheck(x_temp[c], y_temp[c], xD[c], yD[c], xC[c], yC[c])
        move_left[c] = violationCheck(x_temp[c], y_temp[c], xC[c], yC[c], xA[c], yA[c])
        # Step one sector in each violated direction, staying in the grid.
        x_pos_next = ((x_pos_guess[these] - move_left) + move_right)
        x_pos_next[(x_pos_next < 0)] = 0
        x_pos_next[(x_pos_next > (self.x_n - 2))] = (self.x_n - 2)
        y_pos_next = ((y_pos_guess[these] - move_down) + move_up)
        y_pos_next[(y_pos_next < 0)] = 0
        y_pos_next[(y_pos_next > (self.y_n - 2))] = (self.y_n - 2)
        # A point whose candidate sector did not change has converged.
        no_move = np.array(np.logical_and((x_pos_guess[these] == x_pos_next), (y_pos_guess[these] == y_pos_next)))
        x_pos_guess[these] = x_pos_next
        y_pos_guess[these] = y_pos_next
        temp = these.nonzero()
        these[temp[0][no_move]] = False
        loops += 1
    x_pos = x_pos_guess
    y_pos = y_pos_guess
    return (x_pos, y_pos)
| -8,147,189,302,166,205,000
|
Finds the quadrilateral "sector" for each (x,y) point in the input.
Only called as a subroutine of _evaluate().
Parameters
----------
x : np.array
Values whose sector should be found.
y : np.array
Values whose sector should be found. Should be same size as x.
Returns
-------
x_pos : np.array
Sector x-coordinates for each point of the input, of the same size.
y_pos : np.array
Sector y-coordinates for each point of the input, of the same size.
|
HARK/interpolation.py
|
findSector
|
cohenimhuji/HARK
|
python
|
def findSector(self, x, y):
    """
    Finds the quadrilateral "sector" for each (x,y) point in the input.
    Only called as a subroutine of _evaluate().

    Parameters
    ----------
    x : np.array
        Values whose sector should be found.
    y : np.array
        Values whose sector should be found. Should be same size as x.

    Returns
    -------
    x_pos : np.array
        Sector x-coordinates for each point of the input, of the same size.
    y_pos : np.array
        Sector y-coordinates for each point of the input, of the same size.
    """
    # Start the walk from the middle of the grid for every point.
    m = x.size
    x_pos_guess = ((np.ones(m) * self.x_n) / 2).astype(int)
    y_pos_guess = ((np.ones(m) * self.y_n) / 2).astype(int)
    # Returns 1 when (x_check,y_check) lies on the "violating" side of the
    # directed edge from bound point 1 to bound point 2, else 0.
    violationCheck = (lambda x_check, y_check, x_bound_1, y_bound_1, x_bound_2, y_bound_2: (((((y_bound_2 - y_bound_1) * x_check) - ((x_bound_2 - x_bound_1) * y_check)) > ((x_bound_1 * y_bound_2) - (y_bound_1 * x_bound_2))) + 0))
    these = np.ones(m, dtype=bool)  # points whose sector is still unknown
    max_loops = (self.x_n + self.y_n)  # the walk can cross the grid at most once
    loops = 0
    while (np.any(these) and (loops < max_loops)):
        x_temp = x[these]
        y_temp = y[these]
        # Corners of each point's current candidate sector:
        # A = lower-left, B = lower-right, C = upper-left, D = upper-right.
        xA = self.x_values[(x_pos_guess[these], y_pos_guess[these])]
        xB = self.x_values[((x_pos_guess[these] + 1), y_pos_guess[these])]
        xC = self.x_values[(x_pos_guess[these], (y_pos_guess[these] + 1))]
        xD = self.x_values[((x_pos_guess[these] + 1), (y_pos_guess[these] + 1))]
        yA = self.y_values[(x_pos_guess[these], y_pos_guess[these])]
        yB = self.y_values[((x_pos_guess[these] + 1), y_pos_guess[these])]
        yC = self.y_values[(x_pos_guess[these], (y_pos_guess[these] + 1))]
        yD = self.y_values[((x_pos_guess[these] + 1), (y_pos_guess[these] + 1))]
        # Cheap bounding-box tests first: step toward points clearly outside
        # the candidate sector's corner extremes.
        move_down = ((y_temp < np.minimum(yA, yB)) + 0)
        move_right = ((x_temp > np.maximum(xB, xD)) + 0)
        move_up = ((y_temp > np.maximum(yC, yD)) + 0)
        move_left = ((x_temp < np.minimum(xA, xC)) + 0)
        # Points passing all bounding-box tests get the exact edge tests.
        c = ((((move_down + move_right) + move_up) + move_left) == 0)
        move_down[c] = violationCheck(x_temp[c], y_temp[c], xA[c], yA[c], xB[c], yB[c])
        move_right[c] = violationCheck(x_temp[c], y_temp[c], xB[c], yB[c], xD[c], yD[c])
        move_up[c] = violationCheck(x_temp[c], y_temp[c], xD[c], yD[c], xC[c], yC[c])
        move_left[c] = violationCheck(x_temp[c], y_temp[c], xC[c], yC[c], xA[c], yA[c])
        # Step one sector in each violated direction, staying in the grid.
        x_pos_next = ((x_pos_guess[these] - move_left) + move_right)
        x_pos_next[(x_pos_next < 0)] = 0
        x_pos_next[(x_pos_next > (self.x_n - 2))] = (self.x_n - 2)
        y_pos_next = ((y_pos_guess[these] - move_down) + move_up)
        y_pos_next[(y_pos_next < 0)] = 0
        y_pos_next[(y_pos_next > (self.y_n - 2))] = (self.y_n - 2)
        # A point whose candidate sector did not change has converged.
        no_move = np.array(np.logical_and((x_pos_guess[these] == x_pos_next), (y_pos_guess[these] == y_pos_next)))
        x_pos_guess[these] = x_pos_next
        y_pos_guess[these] = y_pos_next
        temp = these.nonzero()
        these[temp[0][no_move]] = False
        loops += 1
    x_pos = x_pos_guess
    y_pos = y_pos_guess
    return (x_pos, y_pos)
|
def findCoords(self, x, y, x_pos, y_pos):
    '\n Calculates the relative coordinates (alpha,beta) for each point (x,y),\n given the sectors (x_pos,y_pos) in which they reside. Only called as\n a subroutine of __call__().\n\n Parameters\n ----------\n x : np.array\n Values whose sector should be found.\n y : np.array\n Values whose sector should be found. Should be same size as x.\n x_pos : np.array\n Sector x-coordinates for each point in (x,y), of the same size.\n y_pos : np.array\n Sector y-coordinates for each point in (x,y), of the same size.\n\n Returns\n -------\n alpha : np.array\n Relative "horizontal" position of the input in their respective sectors.\n beta : np.array\n Relative "vertical" position of the input in their respective sectors.\n '
    # Corner coordinates of each point's sector:
    # A = (x_pos, y_pos), B = +1 in x-index, C = +1 in y-index, D = +1 in both.
    xA = self.x_values[(x_pos, y_pos)]
    xB = self.x_values[((x_pos + 1), y_pos)]
    xC = self.x_values[(x_pos, (y_pos + 1))]
    xD = self.x_values[((x_pos + 1), (y_pos + 1))]
    yA = self.y_values[(x_pos, y_pos)]
    yB = self.y_values[((x_pos + 1), y_pos)]
    yC = self.y_values[(x_pos, (y_pos + 1))]
    yD = self.y_values[((x_pos + 1), (y_pos + 1))]
    # polarity is remapped from {0, 1} to {-1, +1}; it selects which root of
    # the quadratic below is the valid one for this sector.
    polarity = ((2.0 * self.polarity[(x_pos, y_pos)]) - 1.0)
    # Coefficients of the bilinear map from sector coordinates to space:
    # x = a + b*alpha + c*beta + d*alpha*beta
    # y = e + f*alpha + g*beta + h*alpha*beta
    a = xA
    b = (xB - xA)
    c = (xC - xA)
    d = (((xA - xB) - xC) + xD)
    e = yA
    f = (yB - yA)
    g = (yC - yA)
    h = (((yA - yB) - yC) + yD)
    # Eliminate beta: beta = mu*alpha + tau, then alpha solves the quadratic
    # theta*alpha^2 + eta*alpha + zeta = 0.
    denom = ((d * g) - (h * c))
    mu = (((h * b) - (d * f)) / denom)
    tau = (((h * (a - x)) - (d * (e - y))) / denom)
    zeta = ((a - x) + (c * tau))
    eta = ((b + (c * mu)) + (d * tau))
    theta = (d * mu)
    # Quadratic formula; polarity picks the root that lies inside the sector.
    alpha = (((- eta) + (polarity * np.sqrt(((eta ** 2.0) - ((4.0 * zeta) * theta))))) / (2.0 * theta))
    beta = ((mu * alpha) + tau)
    # Fallback for degenerate sectors where the quadratic produced NaNs
    # (e.g. theta == 0 when top and bottom edges are parallel).
    z = np.logical_or(np.isnan(alpha), np.isnan(beta))
    if np.any(z):
        # NOTE(review): `these` is computed over ALL points with matching edge
        # slopes, not only those flagged NaN in z -- confirm this overwrite of
        # already-valid entries is intended.
        these = np.isclose((f / b), ((yD - yC) / (xD - xC)))
        if np.any(these):
            # Bottom and top edges share slope kappa: interpolate beta between
            # the two parallel edges, then alpha linearly along that cut.
            kappa = (f[these] / b[these])
            int_bot = (yA[these] - (kappa * xA[these]))
            int_top = (yC[these] - (kappa * xC[these]))
            int_these = (y[these] - (kappa * x[these]))
            beta_temp = ((int_these - int_bot) / (int_top - int_bot))
            x_left = ((beta_temp * xC[these]) + ((1.0 - beta_temp) * xA[these]))
            x_right = ((beta_temp * xD[these]) + ((1.0 - beta_temp) * xB[these]))
            alpha_temp = ((x[these] - x_left) / (x_right - x_left))
            beta[these] = beta_temp
            alpha[these] = alpha_temp
    return (alpha, beta)
| 6,835,459,731,216,263,000
|
Calculates the relative coordinates (alpha,beta) for each point (x,y),
given the sectors (x_pos,y_pos) in which they reside. Only called as
a subroutine of __call__().
Parameters
----------
x : np.array
Values whose sector should be found.
y : np.array
Values whose sector should be found. Should be same size as x.
x_pos : np.array
Sector x-coordinates for each point in (x,y), of the same size.
y_pos : np.array
Sector y-coordinates for each point in (x,y), of the same size.
Returns
-------
alpha : np.array
Relative "horizontal" position of the input in their respective sectors.
beta : np.array
Relative "vertical" position of the input in their respective sectors.
|
HARK/interpolation.py
|
findCoords
|
cohenimhuji/HARK
|
python
|
def findCoords(self, x, y, x_pos, y_pos):
    '\n Calculates the relative coordinates (alpha,beta) for each point (x,y),\n given the sectors (x_pos,y_pos) in which they reside. Only called as\n a subroutine of __call__().\n\n Parameters\n ----------\n x : np.array\n Values whose sector should be found.\n y : np.array\n Values whose sector should be found. Should be same size as x.\n x_pos : np.array\n Sector x-coordinates for each point in (x,y), of the same size.\n y_pos : np.array\n Sector y-coordinates for each point in (x,y), of the same size.\n\n Returns\n -------\n alpha : np.array\n Relative "horizontal" position of the input in their respective sectors.\n beta : np.array\n Relative "vertical" position of the input in their respective sectors.\n '
    # Sector corners: A = (x_pos, y_pos), B = next x, C = next y, D = next x and y.
    xA = self.x_values[(x_pos, y_pos)]
    xB = self.x_values[((x_pos + 1), y_pos)]
    xC = self.x_values[(x_pos, (y_pos + 1))]
    xD = self.x_values[((x_pos + 1), (y_pos + 1))]
    yA = self.y_values[(x_pos, y_pos)]
    yB = self.y_values[((x_pos + 1), y_pos)]
    yC = self.y_values[(x_pos, (y_pos + 1))]
    yD = self.y_values[((x_pos + 1), (y_pos + 1))]
    # Map stored polarity {0, 1} to a sign {-1, +1} choosing the quadratic root.
    polarity = ((2.0 * self.polarity[(x_pos, y_pos)]) - 1.0)
    # Bilinear map coefficients: x = a + b*alpha + c*beta + d*alpha*beta,
    # y = e + f*alpha + g*beta + h*alpha*beta.
    a = xA
    b = (xB - xA)
    c = (xC - xA)
    d = (((xA - xB) - xC) + xD)
    e = yA
    f = (yB - yA)
    g = (yC - yA)
    h = (((yA - yB) - yC) + yD)
    # Substitute beta = mu*alpha + tau into the x equation, yielding
    # theta*alpha^2 + eta*alpha + zeta = 0.
    denom = ((d * g) - (h * c))
    mu = (((h * b) - (d * f)) / denom)
    tau = (((h * (a - x)) - (d * (e - y))) / denom)
    zeta = ((a - x) + (c * tau))
    eta = ((b + (c * mu)) + (d * tau))
    theta = (d * mu)
    # Solve for alpha; the polarity sign selects the in-sector root.
    alpha = (((- eta) + (polarity * np.sqrt(((eta ** 2.0) - ((4.0 * zeta) * theta))))) / (2.0 * theta))
    beta = ((mu * alpha) + tau)
    # Repair NaN results from degenerate (parallel-edged) sectors.
    z = np.logical_or(np.isnan(alpha), np.isnan(beta))
    if np.any(z):
        # NOTE(review): mask covers every sector whose top/bottom edge slopes
        # match, not just the NaN entries in z -- verify intended.
        these = np.isclose((f / b), ((yD - yC) / (xD - xC)))
        if np.any(these):
            # Linear fallback: edges share slope kappa, so beta interpolates
            # between the two edge intercepts and alpha along that cut line.
            kappa = (f[these] / b[these])
            int_bot = (yA[these] - (kappa * xA[these]))
            int_top = (yC[these] - (kappa * xC[these]))
            int_these = (y[these] - (kappa * x[these]))
            beta_temp = ((int_these - int_bot) / (int_top - int_bot))
            x_left = ((beta_temp * xC[these]) + ((1.0 - beta_temp) * xA[these]))
            x_right = ((beta_temp * xD[these]) + ((1.0 - beta_temp) * xB[these]))
            alpha_temp = ((x[these] - x_left) / (x_right - x_left))
            beta[these] = beta_temp
            alpha[these] = alpha_temp
    return (alpha, beta)
|
def _evaluate(self, x, y):
    """Return the level of the interpolated function at each point (x, y).

    Only called internally by HARKinterpolator2D.__call__ (etc).
    """
    x_pos, y_pos = self.findSector(x, y)
    alpha, beta = self.findCoords(x, y, x_pos, y_pos)
    # Bilinear combination of the four sector corners, accumulated in the
    # same order as before: A=(x_pos,y_pos), C=+y, B=+x, D=+x+y.
    f = (1 - alpha) * (1 - beta) * self.f_values[x_pos, y_pos]
    f = f + (1 - alpha) * beta * self.f_values[x_pos, y_pos + 1]
    f = f + alpha * (1 - beta) * self.f_values[x_pos + 1, y_pos]
    f = f + alpha * beta * self.f_values[x_pos + 1, y_pos + 1]
    return f
| -7,214,506,177,621,786,000
|
Returns the level of the interpolated function at each value in x,y.
Only called internally by HARKinterpolator2D.__call__ (etc).
|
HARK/interpolation.py
|
_evaluate
|
cohenimhuji/HARK
|
python
|
def _evaluate(self, x, y):
    """Interpolated function level at each (x, y) pair.

    Only called internally by HARKinterpolator2D.__call__ (etc).
    """
    sector_x, sector_y = self.findSector(x, y)
    alpha, beta = self.findCoords(x, y, sector_x, sector_y)
    # Corner indices paired with their bilinear weights, in the original
    # accumulation order (A, C, B, D).
    weighted_corners = [
        ((sector_x, sector_y), (1 - alpha) * (1 - beta)),
        ((sector_x, sector_y + 1), (1 - alpha) * beta),
        ((sector_x + 1, sector_y), alpha * (1 - beta)),
        ((sector_x + 1, sector_y + 1), alpha * beta),
    ]
    total = 0.0
    for idx, weight in weighted_corners:
        total = total + weight * self.f_values[idx]
    return total
|
def _derX(self, x, y):
    '\n Returns the derivative with respect to x of the interpolated function\n at each value in x,y. Only called internally by HARKinterpolator2D.derivativeX.\n '
    (x_pos, y_pos) = self.findSector(x, y)
    (alpha, beta) = self.findCoords(x, y, x_pos, y_pos)
    # (x, y, f) at the sector corners: A=(x_pos,y_pos), B=+x, C=+y, D=+x+y.
    xA = self.x_values[(x_pos, y_pos)]
    xB = self.x_values[((x_pos + 1), y_pos)]
    xC = self.x_values[(x_pos, (y_pos + 1))]
    xD = self.x_values[((x_pos + 1), (y_pos + 1))]
    yA = self.y_values[(x_pos, y_pos)]
    yB = self.y_values[((x_pos + 1), y_pos)]
    yC = self.y_values[(x_pos, (y_pos + 1))]
    yD = self.y_values[((x_pos + 1), (y_pos + 1))]
    fA = self.f_values[(x_pos, y_pos)]
    fB = self.f_values[((x_pos + 1), y_pos)]
    fC = self.f_values[(x_pos, (y_pos + 1))]
    fD = self.f_values[((x_pos + 1), (y_pos + 1))]
    # Partials of the bilinear map (alpha, beta) -> (x, y):
    # alpha_x = dx/dalpha, alpha_y = dy/dalpha, beta_x = dx/dbeta, beta_y = dy/dbeta.
    alpha_x = (((1 - beta) * (xB - xA)) + (beta * (xD - xC)))
    alpha_y = (((1 - beta) * (yB - yA)) + (beta * (yD - yC)))
    beta_x = (((1 - alpha) * (xC - xA)) + (alpha * (xD - xB)))
    beta_y = (((1 - alpha) * (yC - yA)) + (alpha * (yD - yB)))
    # Invert the 2x2 Jacobian: x_alpha = dalpha/dx, x_beta = dbeta/dx.
    det = ((alpha_x * beta_y) - (beta_x * alpha_y))
    x_alpha = (beta_y / det)
    x_beta = ((- alpha_y) / det)
    # Chain rule: df/dx = (df/dalpha)*(dalpha/dx) + (df/dbeta)*(dbeta/dx).
    dfda = (((1 - beta) * (fB - fA)) + (beta * (fD - fC)))
    dfdb = (((1 - alpha) * (fC - fA)) + (alpha * (fD - fB)))
    dfdx = ((x_alpha * dfda) + (x_beta * dfdb))
    return dfdx
| 1,090,018,395,499,337,000
|
Returns the derivative with respect to x of the interpolated function
at each value in x,y. Only called internally by HARKinterpolator2D.derivativeX.
|
HARK/interpolation.py
|
_derX
|
cohenimhuji/HARK
|
python
|
def _derX(self, x, y):
    '\n Returns the derivative with respect to x of the interpolated function\n at each value in x,y. Only called internally by HARKinterpolator2D.derivativeX.\n '
    (x_pos, y_pos) = self.findSector(x, y)
    (alpha, beta) = self.findCoords(x, y, x_pos, y_pos)
    # Gather the four corner (x, y, f) values of each point's sector.
    xA = self.x_values[(x_pos, y_pos)]
    xB = self.x_values[((x_pos + 1), y_pos)]
    xC = self.x_values[(x_pos, (y_pos + 1))]
    xD = self.x_values[((x_pos + 1), (y_pos + 1))]
    yA = self.y_values[(x_pos, y_pos)]
    yB = self.y_values[((x_pos + 1), y_pos)]
    yC = self.y_values[(x_pos, (y_pos + 1))]
    yD = self.y_values[((x_pos + 1), (y_pos + 1))]
    fA = self.f_values[(x_pos, y_pos)]
    fB = self.f_values[((x_pos + 1), y_pos)]
    fC = self.f_values[(x_pos, (y_pos + 1))]
    fD = self.f_values[((x_pos + 1), (y_pos + 1))]
    # Jacobian entries of the (alpha, beta) -> (x, y) bilinear map.
    alpha_x = (((1 - beta) * (xB - xA)) + (beta * (xD - xC)))
    alpha_y = (((1 - beta) * (yB - yA)) + (beta * (yD - yC)))
    beta_x = (((1 - alpha) * (xC - xA)) + (alpha * (xD - xB)))
    beta_y = (((1 - alpha) * (yC - yA)) + (alpha * (yD - yB)))
    # Inverse-Jacobian column for x: dalpha/dx and dbeta/dx.
    det = ((alpha_x * beta_y) - (beta_x * alpha_y))
    x_alpha = (beta_y / det)
    x_beta = ((- alpha_y) / det)
    # Apply the chain rule through the sector coordinates.
    dfda = (((1 - beta) * (fB - fA)) + (beta * (fD - fC)))
    dfdb = (((1 - alpha) * (fC - fA)) + (alpha * (fD - fB)))
    dfdx = ((x_alpha * dfda) + (x_beta * dfdb))
    return dfdx
|
def _derY(self, x, y):
    '\n Returns the derivative with respect to y of the interpolated function\n at each value in x,y. Only called internally by HARKinterpolator2D.derivativeY.\n '
    (x_pos, y_pos) = self.findSector(x, y)
    (alpha, beta) = self.findCoords(x, y, x_pos, y_pos)
    # (x, y, f) at the sector corners: A=(x_pos,y_pos), B=+x, C=+y, D=+x+y.
    xA = self.x_values[(x_pos, y_pos)]
    xB = self.x_values[((x_pos + 1), y_pos)]
    xC = self.x_values[(x_pos, (y_pos + 1))]
    xD = self.x_values[((x_pos + 1), (y_pos + 1))]
    yA = self.y_values[(x_pos, y_pos)]
    yB = self.y_values[((x_pos + 1), y_pos)]
    yC = self.y_values[(x_pos, (y_pos + 1))]
    yD = self.y_values[((x_pos + 1), (y_pos + 1))]
    fA = self.f_values[(x_pos, y_pos)]
    fB = self.f_values[((x_pos + 1), y_pos)]
    fC = self.f_values[(x_pos, (y_pos + 1))]
    fD = self.f_values[((x_pos + 1), (y_pos + 1))]
    # Partials of the bilinear map (alpha, beta) -> (x, y).
    alpha_x = (((1 - beta) * (xB - xA)) + (beta * (xD - xC)))
    alpha_y = (((1 - beta) * (yB - yA)) + (beta * (yD - yC)))
    beta_x = (((1 - alpha) * (xC - xA)) + (alpha * (xD - xB)))
    beta_y = (((1 - alpha) * (yC - yA)) + (alpha * (yD - yB)))
    # Invert the 2x2 Jacobian: y_alpha = dalpha/dy, y_beta = dbeta/dy.
    det = ((alpha_x * beta_y) - (beta_x * alpha_y))
    y_alpha = ((- beta_x) / det)
    y_beta = (alpha_x / det)
    # Chain rule: df/dy = (df/dalpha)*(dalpha/dy) + (df/dbeta)*(dbeta/dy).
    dfda = (((1 - beta) * (fB - fA)) + (beta * (fD - fC)))
    dfdb = (((1 - alpha) * (fC - fA)) + (alpha * (fD - fB)))
    dfdy = ((y_alpha * dfda) + (y_beta * dfdb))
    return dfdy
| 6,395,438,805,271,595,000
|
Returns the derivative with respect to y of the interpolated function
at each value in x,y. Only called internally by HARKinterpolator2D.derivativeY.
|
HARK/interpolation.py
|
_derY
|
cohenimhuji/HARK
|
python
|
def _derY(self, x, y):
    '\n Returns the derivative with respect to y of the interpolated function\n at each value in x,y. Only called internally by HARKinterpolator2D.derivativeY.\n '
    (x_pos, y_pos) = self.findSector(x, y)
    (alpha, beta) = self.findCoords(x, y, x_pos, y_pos)
    # Gather the four corner (x, y, f) values of each point's sector.
    xA = self.x_values[(x_pos, y_pos)]
    xB = self.x_values[((x_pos + 1), y_pos)]
    xC = self.x_values[(x_pos, (y_pos + 1))]
    xD = self.x_values[((x_pos + 1), (y_pos + 1))]
    yA = self.y_values[(x_pos, y_pos)]
    yB = self.y_values[((x_pos + 1), y_pos)]
    yC = self.y_values[(x_pos, (y_pos + 1))]
    yD = self.y_values[((x_pos + 1), (y_pos + 1))]
    fA = self.f_values[(x_pos, y_pos)]
    fB = self.f_values[((x_pos + 1), y_pos)]
    fC = self.f_values[(x_pos, (y_pos + 1))]
    fD = self.f_values[((x_pos + 1), (y_pos + 1))]
    # Jacobian entries of the (alpha, beta) -> (x, y) bilinear map.
    alpha_x = (((1 - beta) * (xB - xA)) + (beta * (xD - xC)))
    alpha_y = (((1 - beta) * (yB - yA)) + (beta * (yD - yC)))
    beta_x = (((1 - alpha) * (xC - xA)) + (alpha * (xD - xB)))
    beta_y = (((1 - alpha) * (yC - yA)) + (alpha * (yD - yB)))
    # Inverse-Jacobian column for y: dalpha/dy and dbeta/dy.
    det = ((alpha_x * beta_y) - (beta_x * alpha_y))
    y_alpha = ((- beta_x) / det)
    y_beta = (alpha_x / det)
    # Apply the chain rule through the sector coordinates.
    dfda = (((1 - beta) * (fB - fA)) + (beta * (fD - fC)))
    dfdb = (((1 - alpha) * (fC - fA)) + (alpha * (fD - fB)))
    dfdy = ((y_alpha * dfda) + (y_beta * dfdb))
    return dfdy
|
def get_image(train, size=208):
    """Randomly pick one image from training data.

    Args:
        train: sequence of image file paths (train data).
        size: edge length in pixels to which the image is resized;
            defaults to 208 for backward compatibility.
    Return:
        image as a numpy array of shape (size, size, channels).
    Raises:
        ValueError: if `train` is empty (previously np.random.randint
            raised an opaque error for an empty range).
    """
    if not len(train):
        raise ValueError('train is empty: no image to pick')
    n = len(train)
    ind = np.random.randint(0, n)
    img_dir = train[ind]
    image = Image.open(img_dir)
    # Resize to a square `size` x `size` image as expected by the model.
    image = image.resize([size, size])
    image = np.array(image)
    return image
| -5,667,126,658,187,695,000
|
Randomly pick one image from training data
====================
Args:
train: train data
====================
Return:
image
|
cats_dogs/base.py
|
get_image
|
GPUworkstation/tensorflow-project
|
python
|
def get_image(train):
    """Randomly pick one image from the training data.

    Args:
        train: sequence of image file paths.
    Return:
        the chosen image, resized to 208x208, as a numpy array.
    """
    # Draw a uniform index into the training set (same call as before,
    # so the random stream is unchanged).
    idx = np.random.randint(0, len(train))
    img = Image.open(train[idx])
    img = img.resize([208, 208])
    return np.array(img)
|
def evaluate():
    """Test one image against the saved models and parameters.

    Picks a random training image, rebuilds the inference graph, restores
    the latest checkpoint from ./logs/train/, and prints the predicted
    class (cat vs dog) with its softmax probability.
    """
    train_dir = './data/train/'
    (train, train_label) = train_test_split.get_files(train_dir)
    image_array = get_image(train)
    with tf.Graph().as_default():
        batch_size = 1
        n_classes = 2
        # Standardize the picked image and shape it into a 1-image batch.
        image = tf.cast(image_array, tf.float32)
        image = tf.image.per_image_standardization(image)
        image = tf.reshape(image, [1, 208, 208, 3])
        logits = cnn.inference(image, batch_size, n_classes)
        logits = tf.nn.softmax(logits)
        # NOTE(review): `logits` is built from the `image` constant above, not
        # from this placeholder, so the feed_dict below is effectively ignored
        # (the result is still computed from image_array). Confirm intent.
        X = tf.placeholder(tf.float32, shape=[208, 208, 3])
        logs_train_dir = './logs/train/'
        saver = tf.train.Saver()
        with tf.Session() as sess:
            print('Reading checkpoints...')
            ckpt = tf.train.get_checkpoint_state(logs_train_dir)
            if (ckpt and ckpt.model_checkpoint_path):
                # Checkpoint paths end in '-<global_step>'.
                global_step = ckpt.model_checkpoint_path.split('/')[(- 1)].split('-')[(- 1)]
                saver.restore(sess, ckpt.model_checkpoint_path)
                # Bug fix: '%s'.format(...) printed the literal '%s'; use a
                # real {} placeholder instead.
                print('Loading success, global_step is {}'.format(global_step))
            else:
                print('No checkpoint file found')
            prediction = sess.run(logits, feed_dict={X: image_array})
            max_index = np.argmax(prediction)
            # Bug fix: index the scalar probability with [0, i]; the former
            # [:, i] slice is a 1-d array, which '{:.6f}' cannot format.
            if (max_index == 0):
                print('This is a cat with possibility {:.6f}'.format(prediction[0, 0]))
            else:
                print('This is a dog with possibility {:.6f}'.format(prediction[0, 1]))
| -4,895,543,533,726,775,000
|
Test one image against the saved models and parameters
|
cats_dogs/base.py
|
evaluate
|
GPUworkstation/tensorflow-project
|
python
|
def evaluate():
    """Test one random training image against the saved model checkpoint.

    Rebuilds the inference graph, restores the latest checkpoint from
    ./logs/train/, and prints the predicted class with its probability.
    """
    train_dir = './data/train/'
    (train, train_label) = train_test_split.get_files(train_dir)
    image_array = get_image(train)
    with tf.Graph().as_default():
        batch_size = 1
        n_classes = 2
        # Standardize and reshape the image into a batch of one.
        image = tf.cast(image_array, tf.float32)
        image = tf.image.per_image_standardization(image)
        image = tf.reshape(image, [1, 208, 208, 3])
        logits = cnn.inference(image, batch_size, n_classes)
        logits = tf.nn.softmax(logits)
        # NOTE(review): this placeholder is not connected to `logits` (which
        # was built from the `image` constant), so the feed below is ignored;
        # the prediction is still computed from image_array. Confirm intent.
        X = tf.placeholder(tf.float32, shape=[208, 208, 3])
        logs_train_dir = './logs/train/'
        saver = tf.train.Saver()
        with tf.Session() as sess:
            print('Reading checkpoints...')
            ckpt = tf.train.get_checkpoint_state(logs_train_dir)
            if (ckpt and ckpt.model_checkpoint_path):
                # The step number is the suffix after the last '-'.
                global_step = ckpt.model_checkpoint_path.split('/')[(- 1)].split('-')[(- 1)]
                saver.restore(sess, ckpt.model_checkpoint_path)
                # Bug fix: the original mixed %-style with str.format and
                # printed the literal '%s'.
                print('Loading success, global_step is {}'.format(global_step))
            else:
                print('No checkpoint file found')
            prediction = sess.run(logits, feed_dict={X: image_array})
            max_index = np.argmax(prediction)
            # Bug fix: [0, i] yields the scalar; [:, i] is an array that
            # '{:.6f}' rejects with a TypeError.
            if (max_index == 0):
                print('This is a cat with possibility {:.6f}'.format(prediction[0, 0]))
            else:
                print('This is a dog with possibility {:.6f}'.format(prediction[0, 1]))
|
def evaluate_state_from_last_coordinate(self, c):
    """Map a coordinate c (expected in [150, 450]) to a discrete state 0-9.

    Each state covers a 30-unit band starting at 150:
    state0: 150-179, state1: 180-209, ..., state8: 390-419;
    the last band, state9, is one unit wider (420-450).

    Returns None for c outside [150, 450], matching the original
    fall-through behaviour of the elif chain.
    """
    if (c < 150) or (c > 450):
        return None
    # Integer band arithmetic replaces the ten-branch elif chain; min()
    # folds the boundary value 450 into the final, wider band.
    return min(int(c - 150) // 30, 9)
| -6,452,695,814,198,387,000
|
cmax: 450
cmin: 150
c definitely will be between 150 and 450.
state0 - (150 - 179)
state1 - (180 - 209)
state2 - (210 - 239)
state3 - (240 - 269)
state4 - (270 - 299)
state5 - (300 - 329)
state6 - (330 - 359)
state7 - (360 - 389)
state8 - (390 - 419)
state9 - (420 - 450)
|
pytennis/play.py
|
evaluate_state_from_last_coordinate
|
elishatofunmi/ReinEnv
|
python
|
def evaluate_state_from_last_coordinate(self, c):
    """Discretize a coordinate c in [150, 450] into one of ten states.

    Bands are 30 units wide starting at 150 (state0: 150-179 through
    state8: 390-419); state9 absorbs 420-450 inclusive. Out-of-range
    c returns None, as the original elif chain fell through.
    """
    if 150 <= c <= 450:
        # Band index by integer division; clamp so 450 lands in state 9.
        return min(int(c - 150) // 30, 9)
    return None
|
def randomVal(self, action):
    """Sample a coordinate uniformly from the 30-unit band for `action`.

    Band k spans [150 + 30*k, 150 + 30*k + 29] for k in 0..9 (state0:
    150-179 through state9: 420-449). Any action outside 0..9 falls back
    to the last band, matching the original else branch.
    """
    if 0 <= action <= 9:
        low = 150 + (30 * action)
    else:
        # Original else branch: anything unrecognized draws from 420-449.
        low = 420
    # np.arange avoids building a throwaway Python list per call; the
    # uniform draw over 30 values is unchanged.
    return np.random.choice(np.arange(low, low + 30))
| 2,146,418,420,197,529,000
|
cmax: 450
cmin: 150
c definitely will be between 150 and 450.
state0 - (150 - 179)
state1 - (180 - 209)
state2 - (210 - 239)
state3 - (240 - 269)
state4 - (270 - 299)
state5 - (300 - 329)
state6 - (330 - 359)
state7 - (360 - 389)
state8 - (390 - 419)
state9 - (420 - 450)
|
pytennis/play.py
|
randomVal
|
elishatofunmi/ReinEnv
|
python
|
def randomVal(self, action):
    """Draw a uniform coordinate from the band associated with `action`.

    Bands are 30 units wide: state k covers [150 + 30*k, 150 + 30*k + 29]
    for k = 0..9. Unknown actions use the final band (420-449), exactly
    as the original else branch did.
    """
    # Compute the band's lower edge instead of a ten-branch elif chain.
    low = 150 + (30 * action) if 0 <= action <= 9 else 420
    # Uniform choice over the 30 band values without a temporary list.
    return np.random.choice(np.arange(low, low + 30))
|
def print_metrics(round_number, client_ids, metrics, hierarchies, num_samples, path):
    """Prints or appends the given metrics in a csv.

    The resulting dataframe is of the form:
        client_id, round_number, hierarchy, num_samples, metric1, metric2
        twebbstack, 0, , 18, 0.5, 0.89

    Args:
        round_number: Number of the round the metrics correspond to. If
            0, then the file in path is overwritten. If not 0, we append to
            that file.
        client_ids: Ids of the clients. Not all ids must be in the following
            dicts.
        metrics: Dict keyed by client id. Each element is a dict of metrics
            for that client in the specified round. The dicts for all clients
            are expected to have the same set of keys.
        hierarchies: Dict keyed by client id. Each element is a list of
            hierarchies to which the client belongs.
        num_samples: Dict keyed by client id. Each element is the number of
            test samples for the client.
        path: Destination csv path.
    """
    columns = (COLUMN_NAMES + get_metrics_names(metrics))
    # Build all rows first and construct the frame once: row-by-row
    # `.loc[len(df)]` appends are O(n^2) and were the former hot spot.
    rows = []
    for c_id in client_ids:  # the enumerate() index was unused
        row = {
            'client_id': c_id,
            'round_number': round_number,
            'hierarchy': ','.join(hierarchies.get(c_id, [])),
            'num_samples': num_samples.get(c_id, np.nan),
        }
        row.update(metrics.get(c_id, {}))
        rows.append(row)
    client_data = pd.DataFrame(rows, columns=columns)
    # Round 0 starts a fresh file; later rounds append.
    mode = ('w' if (round_number == 0) else 'a')
    print_dataframe(client_data, path, mode)
| -7,489,702,820,993,001,000
|
Prints or appends the given metrics in a csv.
The resulting dataframe is of the form:
client_id, round_number, hierarchy, num_samples, metric1, metric2
twebbstack, 0, , 18, 0.5, 0.89
Args:
round_number: Number of the round the metrics correspond to. If
0, then the file in path is overwritten. If not 0, we append to
that file.
client_ids: Ids of the clients. Not all ids must be in the following
dicts.
metrics: Dict keyed by client id. Each element is a dict of metrics
for that client in the specified round. The dicts for all clients
are expected to have the same set of keys.
hierarchies: Dict keyed by client id. Each element is a list of hierarchies
to which the client belongs.
num_samples: Dict keyed by client id. Each element is the number of test
samples for the client.
|
models/metrics/writer.py
|
print_metrics
|
slowbull/leaf
|
python
|
def print_metrics(round_number, client_ids, metrics, hierarchies, num_samples, path):
    """Print or append per-client metrics for one round as a csv.

    Columns are COLUMN_NAMES followed by the metric names found in
    `metrics`. Round 0 overwrites the file at `path`; later rounds append.

    Args:
        round_number: round the metrics correspond to (0 overwrites).
        client_ids: ids of the clients; not all need entries in the dicts.
        metrics: dict keyed by client id -> dict of metric name -> value;
            all clients are expected to share the same metric keys.
        hierarchies: dict keyed by client id -> list of hierarchy names.
        num_samples: dict keyed by client id -> number of test samples.
        path: destination csv path.
    """
    columns = (COLUMN_NAMES + get_metrics_names(metrics))
    # Collect records up front; a single DataFrame construction replaces the
    # quadratic per-row `.loc` append of the original.
    records = []
    for c_id in client_ids:  # index from enumerate() was never used
        record = {
            'client_id': c_id,
            'round_number': round_number,
            'hierarchy': ','.join(hierarchies.get(c_id, [])),
            'num_samples': num_samples.get(c_id, np.nan),
        }
        record.update(metrics.get(c_id, {}))
        records.append(record)
    client_data = pd.DataFrame(records, columns=columns)
    mode = ('w' if (round_number == 0) else 'a')
    print_dataframe(client_data, path, mode)
|
def print_dataframe(df, path, mode='w'):
    """Write the given dataframe in path as a csv.

    The header row is emitted only when overwriting ('w' mode), so
    appended rounds do not repeat it.
    """
    write_header = (mode == 'w')
    df.to_csv(path, mode=mode, header=write_header, index=False)
| 4,623,578,379,609,001,000
|
Writes the given dataframe in path as a csv
|
models/metrics/writer.py
|
print_dataframe
|
slowbull/leaf
|
python
|
def print_dataframe(df, path, mode='w'):
    """Write *df* to *path* as csv; include the header only in 'w' mode."""
    df.to_csv(path, mode=mode, header=(mode == 'w'), index=False)
|
def get_metrics_names(metrics):
    """Gets the names of the metrics.

    Args:
        metrics: Dict keyed by client id. Each element is a dict of metrics
            for that client in the specified round. The dicts for all
            clients are expected to have the same set of keys.
    """
    if not metrics:
        return []
    # All clients share the same metric keys, so any one entry suffices.
    first_client_metrics = next(iter(metrics.values()))
    return list(first_client_metrics)
| 97,297,999,199,947,840
|
Gets the names of the metrics.
Args:
metrics: Dict keyed by client id. Each element is a dict of metrics
for that client in the specified round. The dicts for all clients
are expected to have the same set of keys.
|
models/metrics/writer.py
|
get_metrics_names
|
slowbull/leaf
|
python
|
def get_metrics_names(metrics):
    """Return the metric names shared by all clients in `metrics`.

    Args:
        metrics: dict keyed by client id -> dict of metric name -> value;
            every client is expected to carry the same keys.
    """
    # The first client's keys stand in for everyone's; an empty dict
    # never enters the loop and yields [].
    for client_metrics in metrics.values():
        return list(client_metrics.keys())
    return []
|
def build_learner(agent, env_outputs, agent_outputs, env_id):
    'Builds the learner loop.\n\n Args:\n agent: A snt.RNNCore module outputting `AgentOutput` named tuples, with an\n `unroll` call for computing the outputs for a whole trajectory.\n agent_state: The initial agent state for each sequence in the batch.\n env_outputs: A `StepOutput` namedtuple where each field is of shape\n [T+1, ...].\n agent_outputs: An `AgentOutput` namedtuple where each field is of shape\n [T+1, ...].\n\n Returns:\n A tuple of (done, infos, and environment frames) where\n the environment frames tensor causes an update.\n '
    learner_outputs = agent.unroll(agent_outputs.action, env_outputs, env_id)
    # Bootstrap from the last un-normalized value estimate.
    bootstrap_value = learner_outputs.un_normalized_vf[(- 1)]
    # Align timesteps: drop the first step of actor/env tensors and the last
    # step of learner tensors so all have length T.
    agent_outputs = nest.map_structure((lambda t: t[1:]), agent_outputs)
    (rewards, infos, done, _) = nest.map_structure((lambda t: t[1:]), env_outputs)
    learner_outputs = nest.map_structure((lambda t: t[:(- 1)]), learner_outputs)
    # NOTE(review): clipped_rewards is only defined for these two flag values;
    # any other reward_clipping setting raises NameError below.
    if (FLAGS.reward_clipping == 'abs_one'):
        clipped_rewards = tf.clip_by_value(rewards, (- 1), 1)
    elif (FLAGS.reward_clipping == 'soft_asymmetric'):
        # Squash with tanh and damp negative rewards more than positive ones.
        squeezed = tf.tanh((rewards / 5.0))
        clipped_rewards = (tf.where((rewards < 0), (0.3 * squeezed), squeezed) * 5.0)
    # Zero the discount at episode boundaries.
    discounts = (tf.to_float((~ done)) * FLAGS.discounting)
    # Per-game PopArt normalization statistics.
    game_specific_mean = tf.gather(agent._mean, env_id)
    game_specific_std = tf.gather(agent._std, env_id)
    with tf.device('/cpu'):
        vtrace_returns = vtrace.from_logits(behaviour_policy_logits=agent_outputs.policy_logits, target_policy_logits=learner_outputs.policy_logits, actions=agent_outputs.action, discounts=discounts, rewards=clipped_rewards, un_normalized_values=learner_outputs.un_normalized_vf, normalized_values=learner_outputs.normalized_vf, mean=game_specific_mean, std=game_specific_std, bootstrap_value=bootstrap_value)
    # Normalize the v-trace targets and block gradients through them.
    normalized_vtrace = ((vtrace_returns.vs - game_specific_mean) / game_specific_std)
    normalized_vtrace = nest.map_structure(tf.stop_gradient, normalized_vtrace)
    # Total loss = policy gradient + weighted baseline + weighted entropy.
    total_loss = compute_policy_gradient_loss(learner_outputs.policy_logits, agent_outputs.action, vtrace_returns.pg_advantages)
    baseline_loss = compute_baseline_loss((normalized_vtrace - learner_outputs.normalized_vf))
    total_loss += (FLAGS.baseline_cost * baseline_loss)
    total_loss += (FLAGS.entropy_cost * compute_entropy_loss(learner_outputs.policy_logits))
    # Linearly decay the learning rate to 0 over the full frame budget.
    num_env_frames = tf.train.get_global_step()
    learning_rate = tf.train.polynomial_decay(FLAGS.learning_rate, num_env_frames, FLAGS.total_environment_frames, 0)
    optimizer = tf.train.RMSPropOptimizer(learning_rate, FLAGS.decay, FLAGS.momentum, FLAGS.epsilon)
    # Optional global-norm gradient clipping.
    if (FLAGS.gradient_clipping > 0.0):
        variables = tf.trainable_variables()
        gradients = tf.gradients(total_loss, variables)
        (gradients, _) = tf.clip_by_global_norm(gradients, FLAGS.gradient_clipping)
        train_op = optimizer.apply_gradients(zip(gradients, variables))
    else:
        train_op = optimizer.minimize(total_loss)
    # Advance the frame counter only after the train op has run.
    with tf.control_dependencies([train_op]):
        num_env_frames_and_train = num_env_frames.assign_add((FLAGS.batch_size * FLAGS.unroll_length))
    tf.summary.scalar('learning_rate', learning_rate)
    tf.summary.scalar('total_loss', total_loss)
    tf.summary.histogram('action', agent_outputs.action)
    # Update PopArt moments from the v-trace targets; the concatenation makes
    # this a 5-tuple (done, infos, frame counter, mean, mean_squared), not the
    # 3-tuple the docstring describes.
    with tf.device('/cpu'):
        (mean, mean_squared) = agent.update_moments(vtrace_returns.vs, env_id)
    return ((done, infos, num_env_frames_and_train) + (mean, mean_squared))
| -8,594,914,943,402,240,000
|
Builds the learner loop.
Args:
agent: A snt.RNNCore module outputting `AgentOutput` named tuples, with an
`unroll` call for computing the outputs for a whole trajectory.
agent_state: The initial agent state for each sequence in the batch.
env_outputs: A `StepOutput` namedtuple where each field is of shape
[T+1, ...].
agent_outputs: An `AgentOutput` namedtuple where each field is of shape
[T+1, ...].
Returns:
A tuple of (done, infos, and environment frames) where
the environment frames tensor causes an update.
|
popart/build_learner.py
|
build_learner
|
steffenvan/IMPALA-PopArt
|
python
|
def build_learner(agent, env_outputs, agent_outputs, env_id):
    'Builds the learner loop.\n\n Args:\n agent: A snt.RNNCore module outputting `AgentOutput` named tuples, with an\n `unroll` call for computing the outputs for a whole trajectory.\n agent_state: The initial agent state for each sequence in the batch.\n env_outputs: A `StepOutput` namedtuple where each field is of shape\n [T+1, ...].\n agent_outputs: An `AgentOutput` namedtuple where each field is of shape\n [T+1, ...].\n\n Returns:\n A tuple of (done, infos, and environment frames) where\n the environment frames tensor causes an update.\n '
    learner_outputs = agent.unroll(agent_outputs.action, env_outputs, env_id)
    # Last un-normalized value estimate bootstraps the v-trace targets.
    bootstrap_value = learner_outputs.un_normalized_vf[(- 1)]
    # Trim tensors to T aligned steps: actors/env lose step 0, learner loses
    # the final step.
    agent_outputs = nest.map_structure((lambda t: t[1:]), agent_outputs)
    (rewards, infos, done, _) = nest.map_structure((lambda t: t[1:]), env_outputs)
    learner_outputs = nest.map_structure((lambda t: t[:(- 1)]), learner_outputs)
    # NOTE(review): no fallback branch -- other reward_clipping values leave
    # clipped_rewards undefined and crash below.
    if (FLAGS.reward_clipping == 'abs_one'):
        clipped_rewards = tf.clip_by_value(rewards, (- 1), 1)
    elif (FLAGS.reward_clipping == 'soft_asymmetric'):
        # tanh squash; negative rewards are additionally scaled by 0.3.
        squeezed = tf.tanh((rewards / 5.0))
        clipped_rewards = (tf.where((rewards < 0), (0.3 * squeezed), squeezed) * 5.0)
    # Discount is zeroed where the episode ended.
    discounts = (tf.to_float((~ done)) * FLAGS.discounting)
    # PopArt statistics for each environment/game.
    game_specific_mean = tf.gather(agent._mean, env_id)
    game_specific_std = tf.gather(agent._std, env_id)
    with tf.device('/cpu'):
        vtrace_returns = vtrace.from_logits(behaviour_policy_logits=agent_outputs.policy_logits, target_policy_logits=learner_outputs.policy_logits, actions=agent_outputs.action, discounts=discounts, rewards=clipped_rewards, un_normalized_values=learner_outputs.un_normalized_vf, normalized_values=learner_outputs.normalized_vf, mean=game_specific_mean, std=game_specific_std, bootstrap_value=bootstrap_value)
    # Normalize targets per game and stop gradients flowing through them.
    normalized_vtrace = ((vtrace_returns.vs - game_specific_mean) / game_specific_std)
    normalized_vtrace = nest.map_structure(tf.stop_gradient, normalized_vtrace)
    # Loss = policy gradient + baseline_cost * baseline + entropy_cost * entropy.
    total_loss = compute_policy_gradient_loss(learner_outputs.policy_logits, agent_outputs.action, vtrace_returns.pg_advantages)
    baseline_loss = compute_baseline_loss((normalized_vtrace - learner_outputs.normalized_vf))
    total_loss += (FLAGS.baseline_cost * baseline_loss)
    total_loss += (FLAGS.entropy_cost * compute_entropy_loss(learner_outputs.policy_logits))
    # Polynomially decay learning rate to zero over the training budget.
    num_env_frames = tf.train.get_global_step()
    learning_rate = tf.train.polynomial_decay(FLAGS.learning_rate, num_env_frames, FLAGS.total_environment_frames, 0)
    optimizer = tf.train.RMSPropOptimizer(learning_rate, FLAGS.decay, FLAGS.momentum, FLAGS.epsilon)
    # Clip gradients by global norm when the flag is positive.
    if (FLAGS.gradient_clipping > 0.0):
        variables = tf.trainable_variables()
        gradients = tf.gradients(total_loss, variables)
        (gradients, _) = tf.clip_by_global_norm(gradients, FLAGS.gradient_clipping)
        train_op = optimizer.apply_gradients(zip(gradients, variables))
    else:
        train_op = optimizer.minimize(total_loss)
    # Count frames only after a successful training step.
    with tf.control_dependencies([train_op]):
        num_env_frames_and_train = num_env_frames.assign_add((FLAGS.batch_size * FLAGS.unroll_length))
    tf.summary.scalar('learning_rate', learning_rate)
    tf.summary.scalar('total_loss', total_loss)
    tf.summary.histogram('action', agent_outputs.action)
    # PopArt moment update; the return is actually a 5-tuple
    # (done, infos, frame counter, mean, mean_squared).
    with tf.device('/cpu'):
        (mean, mean_squared) = agent.update_moments(vtrace_returns.vs, env_id)
    return ((done, infos, num_env_frames_and_train) + (mean, mean_squared))
|
@mock_ec2
def test_request_spot_instances_default_arguments():
'\n Test that moto set the correct default arguments\n '
conn = boto.connect_ec2()
request = conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
requests = conn.get_all_spot_instance_requests()
requests.should.have.length_of(1)
request = requests[0]
request.state.should.equal('open')
request.price.should.equal(0.5)
request.launch_specification.image_id.should.equal('ami-abcd1234')
request.type.should.equal('one-time')
request.valid_from.should.equal(None)
request.valid_until.should.equal(None)
request.launch_group.should.equal(None)
request.availability_zone_group.should.equal(None)
request.launch_specification.key_name.should.equal(None)
security_group_names = [group.name for group in request.launch_specification.groups]
security_group_names.should.equal(['default'])
request.launch_specification.instance_type.should.equal('m1.small')
request.launch_specification.placement.should.equal(None)
request.launch_specification.kernel.should.equal(None)
request.launch_specification.ramdisk.should.equal(None)
request.launch_specification.subnet_id.should.equal(None)
| -7,028,861,979,922,277,000
|
Test that moto set the correct default arguments
|
tests/test_ec2/test_spot_instances.py
|
test_request_spot_instances_default_arguments
|
GoodRx/moto
|
python
|
@mock_ec2
def test_request_spot_instances_default_arguments():
'\n \n '
conn = boto.connect_ec2()
request = conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
requests = conn.get_all_spot_instance_requests()
requests.should.have.length_of(1)
request = requests[0]
request.state.should.equal('open')
request.price.should.equal(0.5)
request.launch_specification.image_id.should.equal('ami-abcd1234')
request.type.should.equal('one-time')
request.valid_from.should.equal(None)
request.valid_until.should.equal(None)
request.launch_group.should.equal(None)
request.availability_zone_group.should.equal(None)
request.launch_specification.key_name.should.equal(None)
security_group_names = [group.name for group in request.launch_specification.groups]
security_group_names.should.equal(['default'])
request.launch_specification.instance_type.should.equal('m1.small')
request.launch_specification.placement.should.equal(None)
request.launch_specification.kernel.should.equal(None)
request.launch_specification.ramdisk.should.equal(None)
request.launch_specification.subnet_id.should.equal(None)
|
@mock_ec2
def test_request_spot_instances_fulfilled():
'\n Test that moto correctly fullfills a spot instance request\n '
conn = boto.ec2.connect_to_region('us-east-1')
request = conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
requests = conn.get_all_spot_instance_requests()
requests.should.have.length_of(1)
request = requests[0]
request.state.should.equal('open')
get_model('SpotInstanceRequest')[0].state = 'active'
requests = conn.get_all_spot_instance_requests()
requests.should.have.length_of(1)
request = requests[0]
request.state.should.equal('active')
| -9,222,702,379,383,652,000
|
Test that moto correctly fullfills a spot instance request
|
tests/test_ec2/test_spot_instances.py
|
test_request_spot_instances_fulfilled
|
GoodRx/moto
|
python
|
@mock_ec2
def test_request_spot_instances_fulfilled():
'\n \n '
conn = boto.ec2.connect_to_region('us-east-1')
request = conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
requests = conn.get_all_spot_instance_requests()
requests.should.have.length_of(1)
request = requests[0]
request.state.should.equal('open')
get_model('SpotInstanceRequest')[0].state = 'active'
requests = conn.get_all_spot_instance_requests()
requests.should.have.length_of(1)
request = requests[0]
request.state.should.equal('active')
|
@mock_ec2
def test_tag_spot_instance_request():
'\n Test that moto correctly tags a spot instance request\n '
conn = boto.connect_ec2()
request = conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
request[0].add_tag('tag1', 'value1')
request[0].add_tag('tag2', 'value2')
requests = conn.get_all_spot_instance_requests()
requests.should.have.length_of(1)
request = requests[0]
tag_dict = dict(request.tags)
tag_dict.should.equal({'tag1': 'value1', 'tag2': 'value2'})
| 8,481,017,730,225,486,000
|
Test that moto correctly tags a spot instance request
|
tests/test_ec2/test_spot_instances.py
|
test_tag_spot_instance_request
|
GoodRx/moto
|
python
|
@mock_ec2
def test_tag_spot_instance_request():
'\n \n '
conn = boto.connect_ec2()
request = conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
request[0].add_tag('tag1', 'value1')
request[0].add_tag('tag2', 'value2')
requests = conn.get_all_spot_instance_requests()
requests.should.have.length_of(1)
request = requests[0]
tag_dict = dict(request.tags)
tag_dict.should.equal({'tag1': 'value1', 'tag2': 'value2'})
|
@mock_ec2
def test_get_all_spot_instance_requests_filtering():
'\n Test that moto correctly filters spot instance requests\n '
conn = boto.connect_ec2()
request1 = conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
request2 = conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
request1[0].add_tag('tag1', 'value1')
request1[0].add_tag('tag2', 'value2')
request2[0].add_tag('tag1', 'value1')
request2[0].add_tag('tag2', 'wrong')
requests = conn.get_all_spot_instance_requests(filters={'state': 'active'})
requests.should.have.length_of(0)
requests = conn.get_all_spot_instance_requests(filters={'state': 'open'})
requests.should.have.length_of(3)
requests = conn.get_all_spot_instance_requests(filters={'tag:tag1': 'value1'})
requests.should.have.length_of(2)
requests = conn.get_all_spot_instance_requests(filters={'tag:tag1': 'value1', 'tag:tag2': 'value2'})
requests.should.have.length_of(1)
| 1,229,120,870,811,941,400
|
Test that moto correctly filters spot instance requests
|
tests/test_ec2/test_spot_instances.py
|
test_get_all_spot_instance_requests_filtering
|
GoodRx/moto
|
python
|
@mock_ec2
def test_get_all_spot_instance_requests_filtering():
'\n \n '
conn = boto.connect_ec2()
request1 = conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
request2 = conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
conn.request_spot_instances(price=0.5, image_id='ami-abcd1234')
request1[0].add_tag('tag1', 'value1')
request1[0].add_tag('tag2', 'value2')
request2[0].add_tag('tag1', 'value1')
request2[0].add_tag('tag2', 'wrong')
requests = conn.get_all_spot_instance_requests(filters={'state': 'active'})
requests.should.have.length_of(0)
requests = conn.get_all_spot_instance_requests(filters={'state': 'open'})
requests.should.have.length_of(3)
requests = conn.get_all_spot_instance_requests(filters={'tag:tag1': 'value1'})
requests.should.have.length_of(2)
requests = conn.get_all_spot_instance_requests(filters={'tag:tag1': 'value1', 'tag:tag2': 'value2'})
requests.should.have.length_of(1)
|
def __init__(self, *args, **kwargs):
'This is a fake class to support current implemetation of MultiApiClientMixin."\n Will be removed in final version of multiapi azure-core based client\n '
pass
| -2,091,115,876,554,127,000
|
This is a fake class to support current implemetation of MultiApiClientMixin."
Will be removed in final version of multiapi azure-core based client
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
__init__
|
AFengKK/azure-sdk-for-python
|
python
|
def __init__(self, *args, **kwargs):
'This is a fake class to support current implemetation of MultiApiClientMixin."\n Will be removed in final version of multiapi azure-core based client\n '
pass
|
@classmethod
def models(cls, api_version=DEFAULT_API_VERSION):
'Module depends on the API version:\n\n * 2017-03-01: :mod:`v2017_03_01.models<azure.mgmt.containerregistry.v2017_03_01.models>`\n * 2017-10-01: :mod:`v2017_10_01.models<azure.mgmt.containerregistry.v2017_10_01.models>`\n * 2018-02-01-preview: :mod:`v2018_02_01_preview.models<azure.mgmt.containerregistry.v2018_02_01_preview.models>`\n * 2018-09-01: :mod:`v2018_09_01.models<azure.mgmt.containerregistry.v2018_09_01.models>`\n * 2019-04-01: :mod:`v2019_04_01.models<azure.mgmt.containerregistry.v2019_04_01.models>`\n * 2019-05-01: :mod:`v2019_05_01.models<azure.mgmt.containerregistry.v2019_05_01.models>`\n * 2019-05-01-preview: :mod:`v2019_05_01_preview.models<azure.mgmt.containerregistry.v2019_05_01_preview.models>`\n * 2019-06-01-preview: :mod:`v2019_06_01_preview.models<azure.mgmt.containerregistry.v2019_06_01_preview.models>`\n * 2019-12-01-preview: :mod:`v2019_12_01_preview.models<azure.mgmt.containerregistry.v2019_12_01_preview.models>`\n * 2020-11-01-preview: :mod:`v2020_11_01_preview.models<azure.mgmt.containerregistry.v2020_11_01_preview.models>`\n * 2021-06-01-preview: :mod:`v2021_06_01_preview.models<azure.mgmt.containerregistry.v2021_06_01_preview.models>`\n * 2021-08-01-preview: :mod:`v2021_08_01_preview.models<azure.mgmt.containerregistry.v2021_08_01_preview.models>`\n '
if (api_version == '2017-03-01'):
from ..v2017_03_01 import models
return models
elif (api_version == '2017-10-01'):
from ..v2017_10_01 import models
return models
elif (api_version == '2018-02-01-preview'):
from ..v2018_02_01_preview import models
return models
elif (api_version == '2018-09-01'):
from ..v2018_09_01 import models
return models
elif (api_version == '2019-04-01'):
from ..v2019_04_01 import models
return models
elif (api_version == '2019-05-01'):
from ..v2019_05_01 import models
return models
elif (api_version == '2019-05-01-preview'):
from ..v2019_05_01_preview import models
return models
elif (api_version == '2019-06-01-preview'):
from ..v2019_06_01_preview import models
return models
elif (api_version == '2019-12-01-preview'):
from ..v2019_12_01_preview import models
return models
elif (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview import models
return models
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview import models
return models
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview import models
return models
raise ValueError('API version {} is not available'.format(api_version))
| -7,498,642,931,209,086,000
|
Module depends on the API version:
* 2017-03-01: :mod:`v2017_03_01.models<azure.mgmt.containerregistry.v2017_03_01.models>`
* 2017-10-01: :mod:`v2017_10_01.models<azure.mgmt.containerregistry.v2017_10_01.models>`
* 2018-02-01-preview: :mod:`v2018_02_01_preview.models<azure.mgmt.containerregistry.v2018_02_01_preview.models>`
* 2018-09-01: :mod:`v2018_09_01.models<azure.mgmt.containerregistry.v2018_09_01.models>`
* 2019-04-01: :mod:`v2019_04_01.models<azure.mgmt.containerregistry.v2019_04_01.models>`
* 2019-05-01: :mod:`v2019_05_01.models<azure.mgmt.containerregistry.v2019_05_01.models>`
* 2019-05-01-preview: :mod:`v2019_05_01_preview.models<azure.mgmt.containerregistry.v2019_05_01_preview.models>`
* 2019-06-01-preview: :mod:`v2019_06_01_preview.models<azure.mgmt.containerregistry.v2019_06_01_preview.models>`
* 2019-12-01-preview: :mod:`v2019_12_01_preview.models<azure.mgmt.containerregistry.v2019_12_01_preview.models>`
* 2020-11-01-preview: :mod:`v2020_11_01_preview.models<azure.mgmt.containerregistry.v2020_11_01_preview.models>`
* 2021-06-01-preview: :mod:`v2021_06_01_preview.models<azure.mgmt.containerregistry.v2021_06_01_preview.models>`
* 2021-08-01-preview: :mod:`v2021_08_01_preview.models<azure.mgmt.containerregistry.v2021_08_01_preview.models>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
models
|
AFengKK/azure-sdk-for-python
|
python
|
@classmethod
def models(cls, api_version=DEFAULT_API_VERSION):
'Module depends on the API version:\n\n * 2017-03-01: :mod:`v2017_03_01.models<azure.mgmt.containerregistry.v2017_03_01.models>`\n * 2017-10-01: :mod:`v2017_10_01.models<azure.mgmt.containerregistry.v2017_10_01.models>`\n * 2018-02-01-preview: :mod:`v2018_02_01_preview.models<azure.mgmt.containerregistry.v2018_02_01_preview.models>`\n * 2018-09-01: :mod:`v2018_09_01.models<azure.mgmt.containerregistry.v2018_09_01.models>`\n * 2019-04-01: :mod:`v2019_04_01.models<azure.mgmt.containerregistry.v2019_04_01.models>`\n * 2019-05-01: :mod:`v2019_05_01.models<azure.mgmt.containerregistry.v2019_05_01.models>`\n * 2019-05-01-preview: :mod:`v2019_05_01_preview.models<azure.mgmt.containerregistry.v2019_05_01_preview.models>`\n * 2019-06-01-preview: :mod:`v2019_06_01_preview.models<azure.mgmt.containerregistry.v2019_06_01_preview.models>`\n * 2019-12-01-preview: :mod:`v2019_12_01_preview.models<azure.mgmt.containerregistry.v2019_12_01_preview.models>`\n * 2020-11-01-preview: :mod:`v2020_11_01_preview.models<azure.mgmt.containerregistry.v2020_11_01_preview.models>`\n * 2021-06-01-preview: :mod:`v2021_06_01_preview.models<azure.mgmt.containerregistry.v2021_06_01_preview.models>`\n * 2021-08-01-preview: :mod:`v2021_08_01_preview.models<azure.mgmt.containerregistry.v2021_08_01_preview.models>`\n '
if (api_version == '2017-03-01'):
from ..v2017_03_01 import models
return models
elif (api_version == '2017-10-01'):
from ..v2017_10_01 import models
return models
elif (api_version == '2018-02-01-preview'):
from ..v2018_02_01_preview import models
return models
elif (api_version == '2018-09-01'):
from ..v2018_09_01 import models
return models
elif (api_version == '2019-04-01'):
from ..v2019_04_01 import models
return models
elif (api_version == '2019-05-01'):
from ..v2019_05_01 import models
return models
elif (api_version == '2019-05-01-preview'):
from ..v2019_05_01_preview import models
return models
elif (api_version == '2019-06-01-preview'):
from ..v2019_06_01_preview import models
return models
elif (api_version == '2019-12-01-preview'):
from ..v2019_12_01_preview import models
return models
elif (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview import models
return models
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview import models
return models
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview import models
return models
raise ValueError('API version {} is not available'.format(api_version))
|
@property
def agent_pools(self):
'Instance depends on the API version:\n\n * 2019-06-01-preview: :class:`AgentPoolsOperations<azure.mgmt.containerregistry.v2019_06_01_preview.aio.operations.AgentPoolsOperations>`\n '
api_version = self._get_api_version('agent_pools')
if (api_version == '2019-06-01-preview'):
from ..v2019_06_01_preview.aio.operations import AgentPoolsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'agent_pools'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
| -5,085,916,263,640,466,000
|
Instance depends on the API version:
* 2019-06-01-preview: :class:`AgentPoolsOperations<azure.mgmt.containerregistry.v2019_06_01_preview.aio.operations.AgentPoolsOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
agent_pools
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def agent_pools(self):
'Instance depends on the API version:\n\n * 2019-06-01-preview: :class:`AgentPoolsOperations<azure.mgmt.containerregistry.v2019_06_01_preview.aio.operations.AgentPoolsOperations>`\n '
api_version = self._get_api_version('agent_pools')
if (api_version == '2019-06-01-preview'):
from ..v2019_06_01_preview.aio.operations import AgentPoolsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'agent_pools'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
@property
def build_steps(self):
'Instance depends on the API version:\n\n * 2018-02-01-preview: :class:`BuildStepsOperations<azure.mgmt.containerregistry.v2018_02_01_preview.aio.operations.BuildStepsOperations>`\n '
api_version = self._get_api_version('build_steps')
if (api_version == '2018-02-01-preview'):
from ..v2018_02_01_preview.aio.operations import BuildStepsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'build_steps'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
| -1,496,829,672,706,553,600
|
Instance depends on the API version:
* 2018-02-01-preview: :class:`BuildStepsOperations<azure.mgmt.containerregistry.v2018_02_01_preview.aio.operations.BuildStepsOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
build_steps
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def build_steps(self):
'Instance depends on the API version:\n\n * 2018-02-01-preview: :class:`BuildStepsOperations<azure.mgmt.containerregistry.v2018_02_01_preview.aio.operations.BuildStepsOperations>`\n '
api_version = self._get_api_version('build_steps')
if (api_version == '2018-02-01-preview'):
from ..v2018_02_01_preview.aio.operations import BuildStepsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'build_steps'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
@property
def build_tasks(self):
'Instance depends on the API version:\n\n * 2018-02-01-preview: :class:`BuildTasksOperations<azure.mgmt.containerregistry.v2018_02_01_preview.aio.operations.BuildTasksOperations>`\n '
api_version = self._get_api_version('build_tasks')
if (api_version == '2018-02-01-preview'):
from ..v2018_02_01_preview.aio.operations import BuildTasksOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'build_tasks'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
| -5,406,831,408,611,816,000
|
Instance depends on the API version:
* 2018-02-01-preview: :class:`BuildTasksOperations<azure.mgmt.containerregistry.v2018_02_01_preview.aio.operations.BuildTasksOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
build_tasks
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def build_tasks(self):
'Instance depends on the API version:\n\n * 2018-02-01-preview: :class:`BuildTasksOperations<azure.mgmt.containerregistry.v2018_02_01_preview.aio.operations.BuildTasksOperations>`\n '
api_version = self._get_api_version('build_tasks')
if (api_version == '2018-02-01-preview'):
from ..v2018_02_01_preview.aio.operations import BuildTasksOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'build_tasks'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
@property
def builds(self):
'Instance depends on the API version:\n\n * 2018-02-01-preview: :class:`BuildsOperations<azure.mgmt.containerregistry.v2018_02_01_preview.aio.operations.BuildsOperations>`\n '
api_version = self._get_api_version('builds')
if (api_version == '2018-02-01-preview'):
from ..v2018_02_01_preview.aio.operations import BuildsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'builds'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
| -6,728,441,991,317,288,000
|
Instance depends on the API version:
* 2018-02-01-preview: :class:`BuildsOperations<azure.mgmt.containerregistry.v2018_02_01_preview.aio.operations.BuildsOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
builds
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def builds(self):
'Instance depends on the API version:\n\n * 2018-02-01-preview: :class:`BuildsOperations<azure.mgmt.containerregistry.v2018_02_01_preview.aio.operations.BuildsOperations>`\n '
api_version = self._get_api_version('builds')
if (api_version == '2018-02-01-preview'):
from ..v2018_02_01_preview.aio.operations import BuildsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'builds'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
@property
def connected_registries(self):
'Instance depends on the API version:\n\n * 2020-11-01-preview: :class:`ConnectedRegistriesOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.ConnectedRegistriesOperations>`\n * 2021-06-01-preview: :class:`ConnectedRegistriesOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.ConnectedRegistriesOperations>`\n * 2021-08-01-preview: :class:`ConnectedRegistriesOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.ConnectedRegistriesOperations>`\n '
api_version = self._get_api_version('connected_registries')
if (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview.aio.operations import ConnectedRegistriesOperations as OperationClass
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview.aio.operations import ConnectedRegistriesOperations as OperationClass
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview.aio.operations import ConnectedRegistriesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'connected_registries'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
| 6,753,454,761,955,240,000
|
Instance depends on the API version:
* 2020-11-01-preview: :class:`ConnectedRegistriesOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.ConnectedRegistriesOperations>`
* 2021-06-01-preview: :class:`ConnectedRegistriesOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.ConnectedRegistriesOperations>`
* 2021-08-01-preview: :class:`ConnectedRegistriesOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.ConnectedRegistriesOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
connected_registries
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def connected_registries(self):
'Instance depends on the API version:\n\n * 2020-11-01-preview: :class:`ConnectedRegistriesOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.ConnectedRegistriesOperations>`\n * 2021-06-01-preview: :class:`ConnectedRegistriesOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.ConnectedRegistriesOperations>`\n * 2021-08-01-preview: :class:`ConnectedRegistriesOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.ConnectedRegistriesOperations>`\n '
api_version = self._get_api_version('connected_registries')
if (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview.aio.operations import ConnectedRegistriesOperations as OperationClass
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview.aio.operations import ConnectedRegistriesOperations as OperationClass
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview.aio.operations import ConnectedRegistriesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'connected_registries'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
@property
def export_pipelines(self):
'Instance depends on the API version:\n\n * 2019-12-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.ExportPipelinesOperations>`\n * 2020-11-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.ExportPipelinesOperations>`\n * 2021-06-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.ExportPipelinesOperations>`\n * 2021-08-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.ExportPipelinesOperations>`\n '
api_version = self._get_api_version('export_pipelines')
if (api_version == '2019-12-01-preview'):
from ..v2019_12_01_preview.aio.operations import ExportPipelinesOperations as OperationClass
elif (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview.aio.operations import ExportPipelinesOperations as OperationClass
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview.aio.operations import ExportPipelinesOperations as OperationClass
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview.aio.operations import ExportPipelinesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'export_pipelines'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
| 81,635,952,181,019,620
|
Instance depends on the API version:
* 2019-12-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.ExportPipelinesOperations>`
* 2020-11-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.ExportPipelinesOperations>`
* 2021-06-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.ExportPipelinesOperations>`
* 2021-08-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.ExportPipelinesOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
export_pipelines
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def export_pipelines(self):
'Instance depends on the API version:\n\n * 2019-12-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.ExportPipelinesOperations>`\n * 2020-11-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.ExportPipelinesOperations>`\n * 2021-06-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.ExportPipelinesOperations>`\n * 2021-08-01-preview: :class:`ExportPipelinesOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.ExportPipelinesOperations>`\n '
api_version = self._get_api_version('export_pipelines')
if (api_version == '2019-12-01-preview'):
from ..v2019_12_01_preview.aio.operations import ExportPipelinesOperations as OperationClass
elif (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview.aio.operations import ExportPipelinesOperations as OperationClass
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview.aio.operations import ExportPipelinesOperations as OperationClass
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview.aio.operations import ExportPipelinesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'export_pipelines'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
@property
def import_pipelines(self):
'Instance depends on the API version:\n\n * 2019-12-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.ImportPipelinesOperations>`\n * 2020-11-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.ImportPipelinesOperations>`\n * 2021-06-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.ImportPipelinesOperations>`\n * 2021-08-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.ImportPipelinesOperations>`\n '
api_version = self._get_api_version('import_pipelines')
if (api_version == '2019-12-01-preview'):
from ..v2019_12_01_preview.aio.operations import ImportPipelinesOperations as OperationClass
elif (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview.aio.operations import ImportPipelinesOperations as OperationClass
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview.aio.operations import ImportPipelinesOperations as OperationClass
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview.aio.operations import ImportPipelinesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'import_pipelines'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
| 2,839,135,378,482,908,000
|
Instance depends on the API version:
* 2019-12-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.ImportPipelinesOperations>`
* 2020-11-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.ImportPipelinesOperations>`
* 2021-06-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.ImportPipelinesOperations>`
* 2021-08-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.ImportPipelinesOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
import_pipelines
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def import_pipelines(self):
'Instance depends on the API version:\n\n * 2019-12-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.ImportPipelinesOperations>`\n * 2020-11-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.ImportPipelinesOperations>`\n * 2021-06-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.ImportPipelinesOperations>`\n * 2021-08-01-preview: :class:`ImportPipelinesOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.ImportPipelinesOperations>`\n '
api_version = self._get_api_version('import_pipelines')
if (api_version == '2019-12-01-preview'):
from ..v2019_12_01_preview.aio.operations import ImportPipelinesOperations as OperationClass
elif (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview.aio.operations import ImportPipelinesOperations as OperationClass
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview.aio.operations import ImportPipelinesOperations as OperationClass
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview.aio.operations import ImportPipelinesOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'import_pipelines'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
@property
def operations(self):
    """Operation group for service operations metadata.

    The concrete :class:`Operations` class is picked from the sub-package
    matching the configured API version. Supported versions: 2017-03-01,
    2017-10-01, 2019-05-01, 2019-12-01-preview, 2020-11-01-preview,
    2021-06-01-preview, 2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('operations')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2017-03-01':
        from ..v2017_03_01.aio.operations import Operations as OperationClass
    elif selected_version == '2017-10-01':
        from ..v2017_10_01.aio.operations import Operations as OperationClass
    elif selected_version == '2019-05-01':
        from ..v2019_05_01.aio.operations import Operations as OperationClass
    elif selected_version == '2019-12-01-preview':
        from ..v2019_12_01_preview.aio.operations import Operations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import Operations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import Operations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import Operations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'operations'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
| 2,949,915,227,870,088,000
|
Instance depends on the API version:
* 2017-03-01: :class:`Operations<azure.mgmt.containerregistry.v2017_03_01.aio.operations.Operations>`
* 2017-10-01: :class:`Operations<azure.mgmt.containerregistry.v2017_10_01.aio.operations.Operations>`
* 2019-05-01: :class:`Operations<azure.mgmt.containerregistry.v2019_05_01.aio.operations.Operations>`
* 2019-12-01-preview: :class:`Operations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.Operations>`
* 2020-11-01-preview: :class:`Operations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.Operations>`
* 2021-06-01-preview: :class:`Operations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.Operations>`
* 2021-08-01-preview: :class:`Operations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.Operations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
operations
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def operations(self):
    """Operation group for service operations metadata.

    The concrete :class:`Operations` class is picked from the sub-package
    matching the configured API version. Supported versions: 2017-03-01,
    2017-10-01, 2019-05-01, 2019-12-01-preview, 2020-11-01-preview,
    2021-06-01-preview, 2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('operations')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2017-03-01':
        from ..v2017_03_01.aio.operations import Operations as OperationClass
    elif selected_version == '2017-10-01':
        from ..v2017_10_01.aio.operations import Operations as OperationClass
    elif selected_version == '2019-05-01':
        from ..v2019_05_01.aio.operations import Operations as OperationClass
    elif selected_version == '2019-12-01-preview':
        from ..v2019_12_01_preview.aio.operations import Operations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import Operations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import Operations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import Operations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'operations'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
|
@property
def pipeline_runs(self):
    """Operation group for registry pipeline runs.

    The concrete :class:`PipelineRunsOperations` class is picked from the
    sub-package matching the configured API version. Supported versions:
    2019-12-01-preview, 2020-11-01-preview, 2021-06-01-preview,
    2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('pipeline_runs')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2019-12-01-preview':
        from ..v2019_12_01_preview.aio.operations import PipelineRunsOperations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import PipelineRunsOperations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import PipelineRunsOperations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import PipelineRunsOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'pipeline_runs'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
| 4,067,161,058,127,691,000
|
Instance depends on the API version:
* 2019-12-01-preview: :class:`PipelineRunsOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.PipelineRunsOperations>`
* 2020-11-01-preview: :class:`PipelineRunsOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.PipelineRunsOperations>`
* 2021-06-01-preview: :class:`PipelineRunsOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.PipelineRunsOperations>`
* 2021-08-01-preview: :class:`PipelineRunsOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.PipelineRunsOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
pipeline_runs
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def pipeline_runs(self):
    """Operation group for registry pipeline runs.

    The concrete :class:`PipelineRunsOperations` class is picked from the
    sub-package matching the configured API version. Supported versions:
    2019-12-01-preview, 2020-11-01-preview, 2021-06-01-preview,
    2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('pipeline_runs')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2019-12-01-preview':
        from ..v2019_12_01_preview.aio.operations import PipelineRunsOperations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import PipelineRunsOperations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import PipelineRunsOperations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import PipelineRunsOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'pipeline_runs'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
|
@property
def private_endpoint_connections(self):
    """Operation group for registry private endpoint connections.

    The concrete :class:`PrivateEndpointConnectionsOperations` class is
    picked from the sub-package matching the configured API version.
    Supported versions: 2019-12-01-preview, 2020-11-01-preview,
    2021-06-01-preview, 2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('private_endpoint_connections')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2019-12-01-preview':
        from ..v2019_12_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'private_endpoint_connections'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
| -1,083,633,000,285,955,200
|
Instance depends on the API version:
* 2019-12-01-preview: :class:`PrivateEndpointConnectionsOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.PrivateEndpointConnectionsOperations>`
* 2020-11-01-preview: :class:`PrivateEndpointConnectionsOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.PrivateEndpointConnectionsOperations>`
* 2021-06-01-preview: :class:`PrivateEndpointConnectionsOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.PrivateEndpointConnectionsOperations>`
* 2021-08-01-preview: :class:`PrivateEndpointConnectionsOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.PrivateEndpointConnectionsOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
private_endpoint_connections
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def private_endpoint_connections(self):
    """Operation group for registry private endpoint connections.

    The concrete :class:`PrivateEndpointConnectionsOperations` class is
    picked from the sub-package matching the configured API version.
    Supported versions: 2019-12-01-preview, 2020-11-01-preview,
    2021-06-01-preview, 2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('private_endpoint_connections')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2019-12-01-preview':
        from ..v2019_12_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import PrivateEndpointConnectionsOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'private_endpoint_connections'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
|
@property
def registries(self):
    """Operation group for container registries.

    The concrete :class:`RegistriesOperations` class is picked from the
    sub-package matching the configured API version. Supported versions:
    2017-03-01, 2017-10-01, 2018-02-01-preview, 2018-09-01, 2019-04-01,
    2019-05-01, 2019-05-01-preview, 2019-06-01-preview, 2019-12-01-preview,
    2020-11-01-preview, 2021-06-01-preview, 2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('registries')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2017-03-01':
        from ..v2017_03_01.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2017-10-01':
        from ..v2017_10_01.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2018-02-01-preview':
        from ..v2018_02_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2018-09-01':
        from ..v2018_09_01.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2019-04-01':
        from ..v2019_04_01.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2019-05-01':
        from ..v2019_05_01.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2019-05-01-preview':
        from ..v2019_05_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2019-06-01-preview':
        from ..v2019_06_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2019-12-01-preview':
        from ..v2019_12_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import RegistriesOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'registries'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
| 6,331,585,228,933,197,000
|
Instance depends on the API version:
* 2017-03-01: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2017_03_01.aio.operations.RegistriesOperations>`
* 2017-10-01: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2017_10_01.aio.operations.RegistriesOperations>`
* 2018-02-01-preview: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2018_02_01_preview.aio.operations.RegistriesOperations>`
* 2018-09-01: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2018_09_01.aio.operations.RegistriesOperations>`
* 2019-04-01: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2019_04_01.aio.operations.RegistriesOperations>`
* 2019-05-01: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2019_05_01.aio.operations.RegistriesOperations>`
* 2019-05-01-preview: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2019_05_01_preview.aio.operations.RegistriesOperations>`
* 2019-06-01-preview: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2019_06_01_preview.aio.operations.RegistriesOperations>`
* 2019-12-01-preview: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.RegistriesOperations>`
* 2020-11-01-preview: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.RegistriesOperations>`
* 2021-06-01-preview: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.RegistriesOperations>`
* 2021-08-01-preview: :class:`RegistriesOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.RegistriesOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
registries
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def registries(self):
    """Operation group for container registries.

    The concrete :class:`RegistriesOperations` class is picked from the
    sub-package matching the configured API version. Supported versions:
    2017-03-01, 2017-10-01, 2018-02-01-preview, 2018-09-01, 2019-04-01,
    2019-05-01, 2019-05-01-preview, 2019-06-01-preview, 2019-12-01-preview,
    2020-11-01-preview, 2021-06-01-preview, 2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('registries')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2017-03-01':
        from ..v2017_03_01.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2017-10-01':
        from ..v2017_10_01.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2018-02-01-preview':
        from ..v2018_02_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2018-09-01':
        from ..v2018_09_01.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2019-04-01':
        from ..v2019_04_01.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2019-05-01':
        from ..v2019_05_01.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2019-05-01-preview':
        from ..v2019_05_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2019-06-01-preview':
        from ..v2019_06_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2019-12-01-preview':
        from ..v2019_12_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import RegistriesOperations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import RegistriesOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'registries'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
|
@property
def replications(self):
    """Operation group for registry replications.

    The concrete :class:`ReplicationsOperations` class is picked from the
    sub-package matching the configured API version. Supported versions:
    2017-10-01, 2019-05-01, 2019-12-01-preview, 2020-11-01-preview,
    2021-06-01-preview, 2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('replications')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2017-10-01':
        from ..v2017_10_01.aio.operations import ReplicationsOperations as OperationClass
    elif selected_version == '2019-05-01':
        from ..v2019_05_01.aio.operations import ReplicationsOperations as OperationClass
    elif selected_version == '2019-12-01-preview':
        from ..v2019_12_01_preview.aio.operations import ReplicationsOperations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import ReplicationsOperations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import ReplicationsOperations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import ReplicationsOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'replications'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
| 1,605,084,691,358,823,000
|
Instance depends on the API version:
* 2017-10-01: :class:`ReplicationsOperations<azure.mgmt.containerregistry.v2017_10_01.aio.operations.ReplicationsOperations>`
* 2019-05-01: :class:`ReplicationsOperations<azure.mgmt.containerregistry.v2019_05_01.aio.operations.ReplicationsOperations>`
* 2019-12-01-preview: :class:`ReplicationsOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.ReplicationsOperations>`
* 2020-11-01-preview: :class:`ReplicationsOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.ReplicationsOperations>`
* 2021-06-01-preview: :class:`ReplicationsOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.ReplicationsOperations>`
* 2021-08-01-preview: :class:`ReplicationsOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.ReplicationsOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
replications
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def replications(self):
    """Operation group for registry replications.

    The concrete :class:`ReplicationsOperations` class is picked from the
    sub-package matching the configured API version. Supported versions:
    2017-10-01, 2019-05-01, 2019-12-01-preview, 2020-11-01-preview,
    2021-06-01-preview, 2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('replications')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2017-10-01':
        from ..v2017_10_01.aio.operations import ReplicationsOperations as OperationClass
    elif selected_version == '2019-05-01':
        from ..v2019_05_01.aio.operations import ReplicationsOperations as OperationClass
    elif selected_version == '2019-12-01-preview':
        from ..v2019_12_01_preview.aio.operations import ReplicationsOperations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import ReplicationsOperations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import ReplicationsOperations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import ReplicationsOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'replications'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
|
@property
def runs(self):
    """Operation group for task runs history.

    The concrete :class:`RunsOperations` class is picked from the
    sub-package matching the configured API version. Supported versions:
    2018-09-01, 2019-04-01, 2019-06-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('runs')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2018-09-01':
        from ..v2018_09_01.aio.operations import RunsOperations as OperationClass
    elif selected_version == '2019-04-01':
        from ..v2019_04_01.aio.operations import RunsOperations as OperationClass
    elif selected_version == '2019-06-01-preview':
        from ..v2019_06_01_preview.aio.operations import RunsOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'runs'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
| -318,249,756,799,848,060
|
Instance depends on the API version:
* 2018-09-01: :class:`RunsOperations<azure.mgmt.containerregistry.v2018_09_01.aio.operations.RunsOperations>`
* 2019-04-01: :class:`RunsOperations<azure.mgmt.containerregistry.v2019_04_01.aio.operations.RunsOperations>`
* 2019-06-01-preview: :class:`RunsOperations<azure.mgmt.containerregistry.v2019_06_01_preview.aio.operations.RunsOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
runs
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def runs(self):
    """Operation group for task runs history.

    The concrete :class:`RunsOperations` class is picked from the
    sub-package matching the configured API version. Supported versions:
    2018-09-01, 2019-04-01, 2019-06-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('runs')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2018-09-01':
        from ..v2018_09_01.aio.operations import RunsOperations as OperationClass
    elif selected_version == '2019-04-01':
        from ..v2019_04_01.aio.operations import RunsOperations as OperationClass
    elif selected_version == '2019-06-01-preview':
        from ..v2019_06_01_preview.aio.operations import RunsOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'runs'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
|
@property
def scope_maps(self):
    """Operation group for registry scope maps.

    The concrete :class:`ScopeMapsOperations` class is picked from the
    sub-package matching the configured API version. Supported versions:
    2019-05-01-preview, 2020-11-01-preview, 2021-06-01-preview,
    2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('scope_maps')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2019-05-01-preview':
        from ..v2019_05_01_preview.aio.operations import ScopeMapsOperations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import ScopeMapsOperations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import ScopeMapsOperations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import ScopeMapsOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'scope_maps'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
| -7,855,342,236,790,188,000
|
Instance depends on the API version:
* 2019-05-01-preview: :class:`ScopeMapsOperations<azure.mgmt.containerregistry.v2019_05_01_preview.aio.operations.ScopeMapsOperations>`
* 2020-11-01-preview: :class:`ScopeMapsOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.ScopeMapsOperations>`
* 2021-06-01-preview: :class:`ScopeMapsOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.ScopeMapsOperations>`
* 2021-08-01-preview: :class:`ScopeMapsOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.ScopeMapsOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
scope_maps
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def scope_maps(self):
    """Operation group for registry scope maps.

    The concrete :class:`ScopeMapsOperations` class is picked from the
    sub-package matching the configured API version. Supported versions:
    2019-05-01-preview, 2020-11-01-preview, 2021-06-01-preview,
    2021-08-01-preview.
    """
    # Ask the client profile which API version applies to this group.
    selected_version = self._get_api_version('scope_maps')
    # Import lazily so only the chosen version's module is loaded.
    if selected_version == '2019-05-01-preview':
        from ..v2019_05_01_preview.aio.operations import ScopeMapsOperations as OperationClass
    elif selected_version == '2020-11-01-preview':
        from ..v2020_11_01_preview.aio.operations import ScopeMapsOperations as OperationClass
    elif selected_version == '2021-06-01-preview':
        from ..v2021_06_01_preview.aio.operations import ScopeMapsOperations as OperationClass
    elif selected_version == '2021-08-01-preview':
        from ..v2021_08_01_preview.aio.operations import ScopeMapsOperations as OperationClass
    else:
        raise ValueError("API version {} does not have operation group 'scope_maps'".format(selected_version))
    serializer = Serializer(self._models_dict(selected_version))
    deserializer = Deserializer(self._models_dict(selected_version))
    return OperationClass(self._client, self._config, serializer, deserializer)
|
@property
def task_runs(self):
'Instance depends on the API version:\n\n * 2019-06-01-preview: :class:`TaskRunsOperations<azure.mgmt.containerregistry.v2019_06_01_preview.aio.operations.TaskRunsOperations>`\n '
api_version = self._get_api_version('task_runs')
if (api_version == '2019-06-01-preview'):
from ..v2019_06_01_preview.aio.operations import TaskRunsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'task_runs'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
| 8,284,909,358,070,862,000
|
Instance depends on the API version:
* 2019-06-01-preview: :class:`TaskRunsOperations<azure.mgmt.containerregistry.v2019_06_01_preview.aio.operations.TaskRunsOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
task_runs
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def task_runs(self):
'Instance depends on the API version:\n\n * 2019-06-01-preview: :class:`TaskRunsOperations<azure.mgmt.containerregistry.v2019_06_01_preview.aio.operations.TaskRunsOperations>`\n '
api_version = self._get_api_version('task_runs')
if (api_version == '2019-06-01-preview'):
from ..v2019_06_01_preview.aio.operations import TaskRunsOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'task_runs'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
@property
def tasks(self):
'Instance depends on the API version:\n\n * 2018-09-01: :class:`TasksOperations<azure.mgmt.containerregistry.v2018_09_01.aio.operations.TasksOperations>`\n * 2019-04-01: :class:`TasksOperations<azure.mgmt.containerregistry.v2019_04_01.aio.operations.TasksOperations>`\n * 2019-06-01-preview: :class:`TasksOperations<azure.mgmt.containerregistry.v2019_06_01_preview.aio.operations.TasksOperations>`\n '
api_version = self._get_api_version('tasks')
if (api_version == '2018-09-01'):
from ..v2018_09_01.aio.operations import TasksOperations as OperationClass
elif (api_version == '2019-04-01'):
from ..v2019_04_01.aio.operations import TasksOperations as OperationClass
elif (api_version == '2019-06-01-preview'):
from ..v2019_06_01_preview.aio.operations import TasksOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'tasks'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
| -8,231,908,233,438,440,000
|
Instance depends on the API version:
* 2018-09-01: :class:`TasksOperations<azure.mgmt.containerregistry.v2018_09_01.aio.operations.TasksOperations>`
* 2019-04-01: :class:`TasksOperations<azure.mgmt.containerregistry.v2019_04_01.aio.operations.TasksOperations>`
* 2019-06-01-preview: :class:`TasksOperations<azure.mgmt.containerregistry.v2019_06_01_preview.aio.operations.TasksOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
tasks
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def tasks(self):
'Instance depends on the API version:\n\n * 2018-09-01: :class:`TasksOperations<azure.mgmt.containerregistry.v2018_09_01.aio.operations.TasksOperations>`\n * 2019-04-01: :class:`TasksOperations<azure.mgmt.containerregistry.v2019_04_01.aio.operations.TasksOperations>`\n * 2019-06-01-preview: :class:`TasksOperations<azure.mgmt.containerregistry.v2019_06_01_preview.aio.operations.TasksOperations>`\n '
api_version = self._get_api_version('tasks')
if (api_version == '2018-09-01'):
from ..v2018_09_01.aio.operations import TasksOperations as OperationClass
elif (api_version == '2019-04-01'):
from ..v2019_04_01.aio.operations import TasksOperations as OperationClass
elif (api_version == '2019-06-01-preview'):
from ..v2019_06_01_preview.aio.operations import TasksOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'tasks'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
@property
def tokens(self):
'Instance depends on the API version:\n\n * 2019-05-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2019_05_01_preview.aio.operations.TokensOperations>`\n * 2020-11-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.TokensOperations>`\n * 2021-06-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.TokensOperations>`\n * 2021-08-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.TokensOperations>`\n '
api_version = self._get_api_version('tokens')
if (api_version == '2019-05-01-preview'):
from ..v2019_05_01_preview.aio.operations import TokensOperations as OperationClass
elif (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview.aio.operations import TokensOperations as OperationClass
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview.aio.operations import TokensOperations as OperationClass
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview.aio.operations import TokensOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'tokens'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
| -6,086,204,719,500,567,000
|
Instance depends on the API version:
* 2019-05-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2019_05_01_preview.aio.operations.TokensOperations>`
* 2020-11-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.TokensOperations>`
* 2021-06-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.TokensOperations>`
* 2021-08-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.TokensOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
tokens
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def tokens(self):
'Instance depends on the API version:\n\n * 2019-05-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2019_05_01_preview.aio.operations.TokensOperations>`\n * 2020-11-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.TokensOperations>`\n * 2021-06-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.TokensOperations>`\n * 2021-08-01-preview: :class:`TokensOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.TokensOperations>`\n '
api_version = self._get_api_version('tokens')
if (api_version == '2019-05-01-preview'):
from ..v2019_05_01_preview.aio.operations import TokensOperations as OperationClass
elif (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview.aio.operations import TokensOperations as OperationClass
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview.aio.operations import TokensOperations as OperationClass
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview.aio.operations import TokensOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'tokens'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
@property
def webhooks(self):
'Instance depends on the API version:\n\n * 2017-10-01: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2017_10_01.aio.operations.WebhooksOperations>`\n * 2019-05-01: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2019_05_01.aio.operations.WebhooksOperations>`\n * 2019-12-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.WebhooksOperations>`\n * 2020-11-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.WebhooksOperations>`\n * 2021-06-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.WebhooksOperations>`\n * 2021-08-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.WebhooksOperations>`\n '
api_version = self._get_api_version('webhooks')
if (api_version == '2017-10-01'):
from ..v2017_10_01.aio.operations import WebhooksOperations as OperationClass
elif (api_version == '2019-05-01'):
from ..v2019_05_01.aio.operations import WebhooksOperations as OperationClass
elif (api_version == '2019-12-01-preview'):
from ..v2019_12_01_preview.aio.operations import WebhooksOperations as OperationClass
elif (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview.aio.operations import WebhooksOperations as OperationClass
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview.aio.operations import WebhooksOperations as OperationClass
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview.aio.operations import WebhooksOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'webhooks'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
| -5,219,705,358,391,033,000
|
Instance depends on the API version:
* 2017-10-01: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2017_10_01.aio.operations.WebhooksOperations>`
* 2019-05-01: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2019_05_01.aio.operations.WebhooksOperations>`
* 2019-12-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.WebhooksOperations>`
* 2020-11-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.WebhooksOperations>`
* 2021-06-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.WebhooksOperations>`
* 2021-08-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.WebhooksOperations>`
|
sdk/containerregistry/azure-mgmt-containerregistry/azure/mgmt/containerregistry/aio/_container_registry_management_client.py
|
webhooks
|
AFengKK/azure-sdk-for-python
|
python
|
@property
def webhooks(self):
'Instance depends on the API version:\n\n * 2017-10-01: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2017_10_01.aio.operations.WebhooksOperations>`\n * 2019-05-01: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2019_05_01.aio.operations.WebhooksOperations>`\n * 2019-12-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2019_12_01_preview.aio.operations.WebhooksOperations>`\n * 2020-11-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2020_11_01_preview.aio.operations.WebhooksOperations>`\n * 2021-06-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2021_06_01_preview.aio.operations.WebhooksOperations>`\n * 2021-08-01-preview: :class:`WebhooksOperations<azure.mgmt.containerregistry.v2021_08_01_preview.aio.operations.WebhooksOperations>`\n '
api_version = self._get_api_version('webhooks')
if (api_version == '2017-10-01'):
from ..v2017_10_01.aio.operations import WebhooksOperations as OperationClass
elif (api_version == '2019-05-01'):
from ..v2019_05_01.aio.operations import WebhooksOperations as OperationClass
elif (api_version == '2019-12-01-preview'):
from ..v2019_12_01_preview.aio.operations import WebhooksOperations as OperationClass
elif (api_version == '2020-11-01-preview'):
from ..v2020_11_01_preview.aio.operations import WebhooksOperations as OperationClass
elif (api_version == '2021-06-01-preview'):
from ..v2021_06_01_preview.aio.operations import WebhooksOperations as OperationClass
elif (api_version == '2021-08-01-preview'):
from ..v2021_08_01_preview.aio.operations import WebhooksOperations as OperationClass
else:
raise ValueError("API version {} does not have operation group 'webhooks'".format(api_version))
return OperationClass(self._client, self._config, Serializer(self._models_dict(api_version)), Deserializer(self._models_dict(api_version)))
|
def set_axes_equal(ax: plt.Axes, limits=None):
"Set 3D plot axes to equal scale.\n\n Make axes of 3D plot have equal scale so that spheres appear as\n spheres and cubes as cubes. Required since `ax.axis('equal')`\n and `ax.set_aspect('equal')` don't work on 3D.\n "
if (limits is None):
limits = np.array([ax.get_xlim3d(), ax.get_ylim3d(), ax.get_zlim3d()])
origin = np.mean(limits, axis=1)
radius = (0.5 * np.max(np.abs((limits[:, 1] - limits[:, 0]))))
_set_axes_radius(ax, origin, radius)
| -8,363,108,060,664,645,000
|
Set 3D plot axes to equal scale.
Make axes of 3D plot have equal scale so that spheres appear as
spheres and cubes as cubes. Required since `ax.axis('equal')`
and `ax.set_aspect('equal')` don't work on 3D.
|
env_pyrep/utils.py
|
set_axes_equal
|
Kexin-Wei/spinnup
|
python
|
def set_axes_equal(ax: plt.Axes, limits=None):
"Set 3D plot axes to equal scale.\n\n Make axes of 3D plot have equal scale so that spheres appear as\n spheres and cubes as cubes. Required since `ax.axis('equal')`\n and `ax.set_aspect('equal')` don't work on 3D.\n "
if (limits is None):
limits = np.array([ax.get_xlim3d(), ax.get_ylim3d(), ax.get_zlim3d()])
origin = np.mean(limits, axis=1)
radius = (0.5 * np.max(np.abs((limits[:, 1] - limits[:, 0]))))
_set_axes_radius(ax, origin, radius)
|
def __init__(self, account_moid=None, ancestors=None, create_time=None, mod_time=None, moid=None, object_type=None, owners=None, parent=None, tags=None, version_context=None, device_mo_id=None, dn=None, rn=None, model=None, revision=None, serial=None, vendor=None, blades=None, fanmodules=None, ioms=None, oper_state=None, psus=None, registered_device=None, sasexpanders=None, siocs=None, storage_enclosures=None):
'\n EquipmentChassis - a model defined in Swagger\n '
self._account_moid = None
self._ancestors = None
self._create_time = None
self._mod_time = None
self._moid = None
self._object_type = None
self._owners = None
self._parent = None
self._tags = None
self._version_context = None
self._device_mo_id = None
self._dn = None
self._rn = None
self._model = None
self._revision = None
self._serial = None
self._vendor = None
self._blades = None
self._fanmodules = None
self._ioms = None
self._oper_state = None
self._psus = None
self._registered_device = None
self._sasexpanders = None
self._siocs = None
self._storage_enclosures = None
if (account_moid is not None):
self.account_moid = account_moid
if (ancestors is not None):
self.ancestors = ancestors
if (create_time is not None):
self.create_time = create_time
if (mod_time is not None):
self.mod_time = mod_time
if (moid is not None):
self.moid = moid
if (object_type is not None):
self.object_type = object_type
if (owners is not None):
self.owners = owners
if (parent is not None):
self.parent = parent
if (tags is not None):
self.tags = tags
if (version_context is not None):
self.version_context = version_context
if (device_mo_id is not None):
self.device_mo_id = device_mo_id
if (dn is not None):
self.dn = dn
if (rn is not None):
self.rn = rn
if (model is not None):
self.model = model
if (revision is not None):
self.revision = revision
if (serial is not None):
self.serial = serial
if (vendor is not None):
self.vendor = vendor
if (blades is not None):
self.blades = blades
if (fanmodules is not None):
self.fanmodules = fanmodules
if (ioms is not None):
self.ioms = ioms
if (oper_state is not None):
self.oper_state = oper_state
if (psus is not None):
self.psus = psus
if (registered_device is not None):
self.registered_device = registered_device
if (sasexpanders is not None):
self.sasexpanders = sasexpanders
if (siocs is not None):
self.siocs = siocs
if (storage_enclosures is not None):
self.storage_enclosures = storage_enclosures
| 8,111,156,070,151,296,000
|
EquipmentChassis - a model defined in Swagger
|
intersight/models/equipment_chassis.py
|
__init__
|
fdemello/intersight-python
|
python
|
def __init__(self, account_moid=None, ancestors=None, create_time=None, mod_time=None, moid=None, object_type=None, owners=None, parent=None, tags=None, version_context=None, device_mo_id=None, dn=None, rn=None, model=None, revision=None, serial=None, vendor=None, blades=None, fanmodules=None, ioms=None, oper_state=None, psus=None, registered_device=None, sasexpanders=None, siocs=None, storage_enclosures=None):
'\n \n '
self._account_moid = None
self._ancestors = None
self._create_time = None
self._mod_time = None
self._moid = None
self._object_type = None
self._owners = None
self._parent = None
self._tags = None
self._version_context = None
self._device_mo_id = None
self._dn = None
self._rn = None
self._model = None
self._revision = None
self._serial = None
self._vendor = None
self._blades = None
self._fanmodules = None
self._ioms = None
self._oper_state = None
self._psus = None
self._registered_device = None
self._sasexpanders = None
self._siocs = None
self._storage_enclosures = None
if (account_moid is not None):
self.account_moid = account_moid
if (ancestors is not None):
self.ancestors = ancestors
if (create_time is not None):
self.create_time = create_time
if (mod_time is not None):
self.mod_time = mod_time
if (moid is not None):
self.moid = moid
if (object_type is not None):
self.object_type = object_type
if (owners is not None):
self.owners = owners
if (parent is not None):
self.parent = parent
if (tags is not None):
self.tags = tags
if (version_context is not None):
self.version_context = version_context
if (device_mo_id is not None):
self.device_mo_id = device_mo_id
if (dn is not None):
self.dn = dn
if (rn is not None):
self.rn = rn
if (model is not None):
self.model = model
if (revision is not None):
self.revision = revision
if (serial is not None):
self.serial = serial
if (vendor is not None):
self.vendor = vendor
if (blades is not None):
self.blades = blades
if (fanmodules is not None):
self.fanmodules = fanmodules
if (ioms is not None):
self.ioms = ioms
if (oper_state is not None):
self.oper_state = oper_state
if (psus is not None):
self.psus = psus
if (registered_device is not None):
self.registered_device = registered_device
if (sasexpanders is not None):
self.sasexpanders = sasexpanders
if (siocs is not None):
self.siocs = siocs
if (storage_enclosures is not None):
self.storage_enclosures = storage_enclosures
|
@property
def account_moid(self):
'\n Gets the account_moid of this EquipmentChassis.\n The Account ID for this managed object. \n\n :return: The account_moid of this EquipmentChassis.\n :rtype: str\n '
return self._account_moid
| 2,961,041,142,107,580,000
|
Gets the account_moid of this EquipmentChassis.
The Account ID for this managed object.
:return: The account_moid of this EquipmentChassis.
:rtype: str
|
intersight/models/equipment_chassis.py
|
account_moid
|
fdemello/intersight-python
|
python
|
@property
def account_moid(self):
'\n Gets the account_moid of this EquipmentChassis.\n The Account ID for this managed object. \n\n :return: The account_moid of this EquipmentChassis.\n :rtype: str\n '
return self._account_moid
|
@account_moid.setter
def account_moid(self, account_moid):
'\n Sets the account_moid of this EquipmentChassis.\n The Account ID for this managed object. \n\n :param account_moid: The account_moid of this EquipmentChassis.\n :type: str\n '
self._account_moid = account_moid
| 4,616,591,780,404,822,000
|
Sets the account_moid of this EquipmentChassis.
The Account ID for this managed object.
:param account_moid: The account_moid of this EquipmentChassis.
:type: str
|
intersight/models/equipment_chassis.py
|
account_moid
|
fdemello/intersight-python
|
python
|
@account_moid.setter
def account_moid(self, account_moid):
'\n Sets the account_moid of this EquipmentChassis.\n The Account ID for this managed object. \n\n :param account_moid: The account_moid of this EquipmentChassis.\n :type: str\n '
self._account_moid = account_moid
|
@property
def ancestors(self):
'\n Gets the ancestors of this EquipmentChassis.\n Ancestors is an array containing the MO references of the ancestors in the object containment hierarchy. \n\n :return: The ancestors of this EquipmentChassis.\n :rtype: list[MoBaseMoRef]\n '
return self._ancestors
| 2,814,781,568,520,714,000
|
Gets the ancestors of this EquipmentChassis.
Ancestors is an array containing the MO references of the ancestors in the object containment hierarchy.
:return: The ancestors of this EquipmentChassis.
:rtype: list[MoBaseMoRef]
|
intersight/models/equipment_chassis.py
|
ancestors
|
fdemello/intersight-python
|
python
|
@property
def ancestors(self):
'\n Gets the ancestors of this EquipmentChassis.\n Ancestors is an array containing the MO references of the ancestors in the object containment hierarchy. \n\n :return: The ancestors of this EquipmentChassis.\n :rtype: list[MoBaseMoRef]\n '
return self._ancestors
|
@ancestors.setter
def ancestors(self, ancestors):
'\n Sets the ancestors of this EquipmentChassis.\n Ancestors is an array containing the MO references of the ancestors in the object containment hierarchy. \n\n :param ancestors: The ancestors of this EquipmentChassis.\n :type: list[MoBaseMoRef]\n '
self._ancestors = ancestors
| -3,336,301,955,569,473,500
|
Sets the ancestors of this EquipmentChassis.
Ancestors is an array containing the MO references of the ancestors in the object containment hierarchy.
:param ancestors: The ancestors of this EquipmentChassis.
:type: list[MoBaseMoRef]
|
intersight/models/equipment_chassis.py
|
ancestors
|
fdemello/intersight-python
|
python
|
@ancestors.setter
def ancestors(self, ancestors):
'\n Sets the ancestors of this EquipmentChassis.\n Ancestors is an array containing the MO references of the ancestors in the object containment hierarchy. \n\n :param ancestors: The ancestors of this EquipmentChassis.\n :type: list[MoBaseMoRef]\n '
self._ancestors = ancestors
|
@property
def create_time(self):
'\n Gets the create_time of this EquipmentChassis.\n The time when this managed object was created. \n\n :return: The create_time of this EquipmentChassis.\n :rtype: datetime\n '
return self._create_time
| -6,835,582,992,143,373,000
|
Gets the create_time of this EquipmentChassis.
The time when this managed object was created.
:return: The create_time of this EquipmentChassis.
:rtype: datetime
|
intersight/models/equipment_chassis.py
|
create_time
|
fdemello/intersight-python
|
python
|
@property
def create_time(self):
'\n Gets the create_time of this EquipmentChassis.\n The time when this managed object was created. \n\n :return: The create_time of this EquipmentChassis.\n :rtype: datetime\n '
return self._create_time
|
@create_time.setter
def create_time(self, create_time):
'\n Sets the create_time of this EquipmentChassis.\n The time when this managed object was created. \n\n :param create_time: The create_time of this EquipmentChassis.\n :type: datetime\n '
self._create_time = create_time
| 8,204,742,738,341,537,000
|
Sets the create_time of this EquipmentChassis.
The time when this managed object was created.
:param create_time: The create_time of this EquipmentChassis.
:type: datetime
|
intersight/models/equipment_chassis.py
|
create_time
|
fdemello/intersight-python
|
python
|
@create_time.setter
def create_time(self, create_time):
'\n Sets the create_time of this EquipmentChassis.\n The time when this managed object was created. \n\n :param create_time: The create_time of this EquipmentChassis.\n :type: datetime\n '
self._create_time = create_time
|
@property
def mod_time(self):
'\n Gets the mod_time of this EquipmentChassis.\n The time when this managed object was last modified. \n\n :return: The mod_time of this EquipmentChassis.\n :rtype: datetime\n '
return self._mod_time
| -792,131,247,139,084,400
|
Gets the mod_time of this EquipmentChassis.
The time when this managed object was last modified.
:return: The mod_time of this EquipmentChassis.
:rtype: datetime
|
intersight/models/equipment_chassis.py
|
mod_time
|
fdemello/intersight-python
|
python
|
@property
def mod_time(self):
'\n Gets the mod_time of this EquipmentChassis.\n The time when this managed object was last modified. \n\n :return: The mod_time of this EquipmentChassis.\n :rtype: datetime\n '
return self._mod_time
|
@mod_time.setter
def mod_time(self, mod_time):
'\n Sets the mod_time of this EquipmentChassis.\n The time when this managed object was last modified. \n\n :param mod_time: The mod_time of this EquipmentChassis.\n :type: datetime\n '
self._mod_time = mod_time
| -2,044,530,991,584,952,800
|
Sets the mod_time of this EquipmentChassis.
The time when this managed object was last modified.
:param mod_time: The mod_time of this EquipmentChassis.
:type: datetime
|
intersight/models/equipment_chassis.py
|
mod_time
|
fdemello/intersight-python
|
python
|
@mod_time.setter
def mod_time(self, mod_time):
'\n Sets the mod_time of this EquipmentChassis.\n The time when this managed object was last modified. \n\n :param mod_time: The mod_time of this EquipmentChassis.\n :type: datetime\n '
self._mod_time = mod_time
|
@property
def moid(self):
'\n Gets the moid of this EquipmentChassis.\n A unique identifier of this Managed Object instance. \n\n :return: The moid of this EquipmentChassis.\n :rtype: str\n '
return self._moid
| 7,679,648,597,472,186,000
|
Gets the moid of this EquipmentChassis.
A unique identifier of this Managed Object instance.
:return: The moid of this EquipmentChassis.
:rtype: str
|
intersight/models/equipment_chassis.py
|
moid
|
fdemello/intersight-python
|
python
|
@property
def moid(self):
'\n Gets the moid of this EquipmentChassis.\n A unique identifier of this Managed Object instance. \n\n :return: The moid of this EquipmentChassis.\n :rtype: str\n '
return self._moid
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.