hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f731817c0c67a924ce2ccfc634deaf68c68b5c57 | 54,791 | py | Python | numpy/polynomial/hermite.py | ivanov/numpy | 6d2665626e40f346bb5af8d780579f5a429ff9ba | [
"BSD-3-Clause"
] | null | null | null | numpy/polynomial/hermite.py | ivanov/numpy | 6d2665626e40f346bb5af8d780579f5a429ff9ba | [
"BSD-3-Clause"
] | null | null | null | numpy/polynomial/hermite.py | ivanov/numpy | 6d2665626e40f346bb5af8d780579f5a429ff9ba | [
"BSD-3-Clause"
] | null | null | null | """
Objects for dealing with Hermite series.
This module provides a number of objects (mostly functions) useful for
dealing with Hermite series, including a `Hermite` class that
encapsulates the usual arithmetic operations. (General information
on how this module represents and works with such polynomials is in the
docstring for its "parent" sub-package, `numpy.polynomial`).
Constants
---------
- `hermdomain` -- Hermite series default domain, [-1,1].
- `hermzero` -- Hermite series that evaluates identically to 0.
- `hermone` -- Hermite series that evaluates identically to 1.
- `hermx` -- Hermite series for the identity map, ``f(x) = x``.
Arithmetic
----------
- `hermmulx` -- multiply a Hermite series in ``P_i(x)`` by ``x``.
- `hermadd` -- add two Hermite series.
- `hermsub` -- subtract one Hermite series from another.
- `hermmul` -- multiply two Hermite series.
- `hermdiv` -- divide one Hermite series by another.
- `hermval` -- evaluate a Hermite series at given points.
- `hermval2d` -- evaluate a 2D Hermite series at given points.
- `hermval3d` -- evaluate a 3D Hermite series at given points.
- `hermgrid2d` -- evaluate a 2D Hermite series on a Cartesian product.
- `hermgrid3d` -- evaluate a 3D Hermite series on a Cartesian product.
Calculus
--------
- `hermder` -- differentiate a Hermite series.
- `hermint` -- integrate a Hermite series.
Misc Functions
--------------
- `hermfromroots` -- create a Hermite series with specified roots.
- `hermroots` -- find the roots of a Hermite series.
- `hermvander` -- Vandermonde-like matrix for Hermite polynomials.
- `hermvander2d` -- Vandermonde-like matrix for 2D power series.
- `hermvander3d` -- Vandermonde-like matrix for 3D power series.
- `hermgauss` -- Gauss-Hermite quadrature, points and weights.
- `hermweight` -- Hermite weight function.
- `hermcompanion` -- symmetrized companion matrix in Hermite form.
- `hermfit` -- least-squares fit returning a Hermite series.
- `hermtrim` -- trim leading coefficients from a Hermite series.
- `hermline` -- Hermite series of given straight line.
- `herm2poly` -- convert a Hermite series to a polynomial.
- `poly2herm` -- convert a polynomial to a Hermite series.
Classes
-------
- `Hermite` -- A Hermite series class.
See also
--------
`numpy.polynomial`
"""
from __future__ import division, absolute_import
import numpy as np
import numpy.linalg as la
from . import polyutils as pu
import warnings
from .polytemplate import polytemplate
# Names exported by ``from numpy.polynomial.hermite import *``.
__all__ = ['hermzero', 'hermone', 'hermx', 'hermdomain', 'hermline',
    'hermadd', 'hermsub', 'hermmulx', 'hermmul', 'hermdiv', 'hermpow',
    'hermval', 'hermder', 'hermint', 'herm2poly', 'poly2herm',
    'hermfromroots', 'hermvander', 'hermfit', 'hermtrim', 'hermroots',
    'Hermite', 'hermval2d', 'hermval3d', 'hermgrid2d', 'hermgrid3d',
    'hermvander2d', 'hermvander3d', 'hermcompanion', 'hermgauss',
    'hermweight']

# Alias for the generic coefficient-trimming helper shared by all the
# polynomial modules; removes small trailing coefficients from a series.
hermtrim = pu.trimcoef
def poly2herm(pol) :
    """
    poly2herm(pol)

    Convert a polynomial to a Hermite series.

    Convert an array representing the coefficients of a polynomial (relative
    to the "standard" basis) ordered from lowest degree to highest, to an
    array of the coefficients of the equivalent Hermite series, ordered
    from lowest to highest degree.

    Parameters
    ----------
    pol : array_like
        1-D array containing the polynomial coefficients

    Returns
    -------
    c : ndarray
        1-D array containing the coefficients of the equivalent Hermite
        series.

    See Also
    --------
    herm2poly

    Notes
    -----
    The easy way to do conversions between polynomial basis sets
    is to use the convert method of a class instance.

    Examples
    --------
    >>> from numpy.polynomial.hermite import poly2herm
    >>> poly2herm(np.arange(4))
    array([ 1.   ,  2.75 ,  0.5  ,  0.375])

    """
    [pol] = pu.as_series([pol])
    deg = len(pol) - 1
    res = 0
    # Horner's scheme carried out in the Hermite basis: repeatedly
    # multiply the accumulated series by x and add the next lower
    # power-basis coefficient.
    for i in range(deg, -1, -1) :
        res = hermadd(hermmulx(res), pol[i])
    return res
def herm2poly(c) :
    """
    Convert a Hermite series to a polynomial.

    Convert an array representing the coefficients of a Hermite series,
    ordered from lowest degree to highest, to an array of the coefficients
    of the equivalent polynomial (relative to the "standard" basis) ordered
    from lowest to highest degree.

    Parameters
    ----------
    c : array_like
        1-D array containing the Hermite series coefficients, ordered
        from lowest order term to highest.

    Returns
    -------
    pol : ndarray
        1-D array containing the coefficients of the equivalent polynomial
        (relative to the "standard" basis) ordered from lowest order term
        to highest.

    See Also
    --------
    poly2herm

    Notes
    -----
    The easy way to do conversions between polynomial basis sets
    is to use the convert method of a class instance.

    Examples
    --------
    >>> from numpy.polynomial.hermite import herm2poly
    >>> herm2poly([ 1.   ,  2.75 ,  0.5  ,  0.375])
    array([ 0.,  1.,  2.,  3.])

    """
    from .polynomial import polyadd, polysub, polymulx
    [c] = pu.as_series([c])
    n = len(c)
    if n < 3:
        # Degree 0 passes through; degree 1 only needs the H_1(x) = 2x
        # scaling applied to its coefficient.
        if n == 2:
            c[1] *= 2
        return c
    c0 = c[-2]
    c1 = c[-1]
    # Run the Clenshaw-style recurrence backwards; i is the current
    # degree of c1 at the top of each iteration.
    for i in range(n - 1, 1, -1):
        c0, c1 = (polysub(c[i - 2], c1*(2*(i - 1))),
                  polyadd(c0, polymulx(c1)*2))
    return polyadd(c0, polymulx(c1)*2)
#
# These constant arrays are of integer type so as to be compatible
# with the widest range of other types, such as Decimal.
#
# Hermite default domain.
hermdomain = np.array([-1,1])
# Hermite coefficients representing zero.
hermzero = np.array([0])
# Hermite coefficients representing one.
hermone = np.array([1])
# Hermite coefficients representing the identity x.  Note this one is
# float, not integer: H_1(x) = 2x, so x = 0.5*H_1(x) (true division is
# in effect via the __future__ import at the top of the file).
hermx = np.array([0, 1/2])
def hermline(off, scl) :
    """
    Hermite series whose graph is a straight line.

    Parameters
    ----------
    off, scl : scalars
        The specified line is given by ``off + scl*x``.

    Returns
    -------
    y : ndarray
        This module's representation of the Hermite series for
        ``off + scl*x``.

    See Also
    --------
    polyline, chebline

    Examples
    --------
    >>> from numpy.polynomial.hermite import hermline, hermval
    >>> hermval(0,hermline(3, 2))
    3.0
    >>> hermval(1,hermline(3, 2))
    5.0

    """
    # H_1(x) = 2*x, so the slope coefficient is halved; a horizontal
    # line is represented by the constant term alone.
    if scl == 0:
        return np.array([off])
    return np.array([off, scl/2])
def hermfromroots(roots) :
    """
    Generate a Hermite series with given roots.

    The function returns the coefficients of the polynomial

    .. math:: p(x) = (x - r_0) * (x - r_1) * ... * (x - r_n),

    in Hermite form, where the `r_n` are the roots specified in `roots`.
    If a zero has multiplicity n, then it must appear in `roots` n times.
    The roots can appear in any order.

    If the returned coefficients are `c`, then

    .. math:: p(x) = c_0 + c_1 * H_1(x) + ... + c_n * H_n(x)

    The coefficient of the last term is not generally 1 for monic
    polynomials in Hermite form.

    Parameters
    ----------
    roots : array_like
        Sequence containing the roots.

    Returns
    -------
    out : ndarray
        1-D array of coefficients.  If all roots are real then `out` is a
        real array, if some of the roots are complex, then `out` is complex
        even if all the coefficients in the result are real (see Examples
        below).

    See Also
    --------
    polyfromroots, legfromroots, lagfromroots, chebfromroots,
    hermefromroots.

    Examples
    --------
    >>> from numpy.polynomial.hermite import hermfromroots, hermval
    >>> coef = hermfromroots((-1, 0, 1))
    >>> hermval((-1, 0, 1), coef)
    array([ 0.,  0.,  0.])
    >>> coef = hermfromroots((-1j, 1j))
    >>> hermval((-1j, 1j), coef)
    array([ 0.+0.j,  0.+0.j])

    """
    if len(roots) == 0:
        # An empty product is the constant polynomial 1.
        return np.ones(1)
    [roots] = pu.as_series([roots], trim=False)
    roots.sort()
    # One linear factor per root; combine them pairwise so the sizes of
    # the multiplied series stay balanced (fewer, cheaper hermmul calls
    # than a straight left-to-right product).
    factors = [hermline(-r, 1) for r in roots]
    m = len(factors)
    while m > 1:
        half, odd = divmod(m, 2)
        paired = [hermmul(factors[i], factors[i + half]) for i in range(half)]
        if odd:
            paired[0] = hermmul(paired[0], factors[-1])
        factors = paired
        m = half
    return factors[0]
def hermadd(c1, c2):
    """
    Add one Hermite series to another.

    Returns the sum of two Hermite series `c1` + `c2`.  The arguments
    are sequences of coefficients ordered from lowest order term to
    highest, i.e., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Hermite series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Array representing the Hermite series of their sum.

    See Also
    --------
    hermsub, hermmul, hermdiv, hermpow

    Notes
    -----
    The sum of two Hermite series is again a Hermite series (no
    "reprojection" onto the basis is needed), so addition is simply
    component-wise, just as for "standard" polynomials.

    Examples
    --------
    >>> from numpy.polynomial.hermite import hermadd
    >>> hermadd([1, 2, 3], [1, 2, 3, 4])
    array([ 2.,  4.,  6.,  4.])

    """
    # as_series returns trimmed copies, so in-place accumulation is safe.
    [c1, c2] = pu.as_series([c1, c2])
    if len(c2) > len(c1):
        c1, c2 = c2, c1
    # c1 now holds the longer (or equal-length) array; fold the shorter
    # one into its low-order end.
    c1[:c2.size] += c2
    return pu.trimseq(c1)
def hermsub(c1, c2):
    """
    Subtract one Hermite series from another.

    Returns the difference of two Hermite series `c1` - `c2`.  The
    sequences of coefficients are from lowest order term to highest, i.e.,
    [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Hermite series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Of Hermite series coefficients representing their difference.

    See Also
    --------
    hermadd, hermmul, hermdiv, hermpow

    Notes
    -----
    The difference of two Hermite series is again a Hermite series (no
    "reprojection" onto the basis is needed), so subtraction is simply
    component-wise, just as for "standard" polynomials.

    Examples
    --------
    >>> from numpy.polynomial.hermite import hermsub
    >>> hermsub([1, 2, 3, 4], [1, 2, 3])
    array([ 0.,  0.,  0.,  4.])

    """
    # as_series returns trimmed copies, so in-place updates are safe.
    [c1, c2] = pu.as_series([c1, c2])
    if len(c2) >= len(c1):
        # Negate the (not shorter) subtrahend, then add c1 into its
        # low-order end.
        diff = -c2
        diff[:c1.size] += c1
    else:
        diff = c1
        diff[:c2.size] -= c2
    return pu.trimseq(diff)
def hermmulx(c):
    """Multiply a Hermite series by x.

    Multiply the Hermite series `c` by x, where x is the independent
    variable.

    Parameters
    ----------
    c : array_like
        1-D array of Hermite series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Array representing the result of the multiplication.

    Notes
    -----
    The multiplication uses the recursion relationship for Hermite
    polynomials in the form

    .. math::

    xP_i(x) = (P_{i + 1}(x)/2 + i*P_{i - 1}(x))

    Examples
    --------
    >>> from numpy.polynomial.hermite import hermmulx
    >>> hermmulx([1, 2, 3])
    array([ 2. ,  6.5,  1. ,  1.5])

    """
    # c is a trimmed copy.
    [c] = pu.as_series([c])
    # The zero series maps to itself; avoid growing it by one term.
    if len(c) == 1 and c[0] == 0:
        return c
    n = len(c)
    out = np.empty(n + 1, dtype=c.dtype)
    out[0] = c[0]*0
    out[1] = c[0]/2
    for k in range(1, n):
        # x*H_k = H_{k+1}/2 + k*H_{k-1}
        out[k + 1] = c[k]/2
        out[k - 1] += c[k]*k
    return out
def hermmul(c1, c2):
    """
    Multiply one Hermite series by another.

    Returns the product of two Hermite series `c1` * `c2`.  The arguments
    are sequences of coefficients, from lowest order "term" to highest,
    e.g., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Hermite series coefficients ordered from low to
        high.

    Returns
    -------
    out : ndarray
        Of Hermite series coefficients representing their product.

    See Also
    --------
    hermadd, hermsub, hermdiv, hermpow

    Notes
    -----
    In general, the (polynomial) product of two C-series results in terms
    that are not in the Hermite polynomial basis set.  Thus, to express
    the product as a Hermite series, it is necessary to "reproject" the
    product onto said basis set, which may produce "unintuitive" (but
    correct) results; see Examples section below.

    Examples
    --------
    >>> from numpy.polynomial.hermite import hermmul
    >>> hermmul([1, 2, 3], [0, 1, 2])
    array([ 52.,  29.,  52.,   7.,   6.])

    """
    # c1, c2 are trimmed copies.
    [c1, c2] = pu.as_series([c1, c2])
    # Run the Clenshaw-style recurrence over the shorter coefficient
    # list, with the longer one playing the role of the evaluated value.
    if len(c1) > len(c2):
        small, big = c2, c1
    else:
        small, big = c1, c2
    if len(small) == 1:
        c0 = small[0]*big
        c1 = 0
    elif len(small) == 2:
        c0 = small[0]*big
        c1 = small[1]*big
    else:
        nd = len(small)
        c0 = small[-2]*big
        c1 = small[-1]*big
        for i in range(3, len(small) + 1):
            tmp = c0
            nd = nd - 1
            # H_{n+1} = 2x*H_n - 2n*H_{n-1}, expressed via hermmulx.
            c0 = hermsub(small[-i]*big, c1*(2*(nd - 1)))
            c1 = hermadd(tmp, hermmulx(c1)*2)
    return hermadd(c0, hermmulx(c1)*2)
def hermdiv(c1, c2):
    """
    Divide one Hermite series by another.

    Returns the quotient-with-remainder of two Hermite series
    `c1` / `c2`.  The arguments are sequences of coefficients from lowest
    order "term" to highest, e.g., [1,2,3] represents the series
    ``P_0 + 2*P_1 + 3*P_2``.

    Parameters
    ----------
    c1, c2 : array_like
        1-D arrays of Hermite series coefficients ordered from low to
        high.

    Returns
    -------
    [quo, rem] : ndarrays
        Of Hermite series coefficients representing the quotient and
        remainder.

    See Also
    --------
    hermadd, hermsub, hermmul, hermpow

    Notes
    -----
    In general, the (polynomial) division of one Hermite series by another
    results in quotient and remainder terms that are not in the Hermite
    polynomial basis set.  Thus, to express these results as a Hermite
    series, it is necessary to "reproject" the results onto the Hermite
    basis set, which may produce "unintuitive" (but correct) results; see
    Examples section below.

    Examples
    --------
    >>> from numpy.polynomial.hermite import hermdiv
    >>> hermdiv([ 52.,  29.,  52.,   7.,   6.], [0, 1, 2])
    (array([ 1.,  2.,  3.]), array([ 0.]))
    >>> hermdiv([ 54.,  31.,  52.,   7.,   6.], [0, 1, 2])
    (array([ 1.,  2.,  3.]), array([ 2.,  2.]))

    """
    # c1, c2 are trimmed copies.
    [c1, c2] = pu.as_series([c1, c2])
    if c2[-1] == 0:
        raise ZeroDivisionError()
    n1 = len(c1)
    n2 = len(c2)
    if n1 < n2:
        # Divisor has higher degree: zero quotient, dividend is remainder.
        return c1[:1]*0, c1
    if n2 == 1:
        # Division by a constant series.
        return c1/c2[-1], c1[:1]*0
    quo = np.empty(n1 - n2 + 1, dtype=c1.dtype)
    rem = c1
    # Peel off the remainder's leading coefficient one degree at a time,
    # using [0]*k + [1] as a degree-k "shift" of the divisor.
    for k in range(n1 - n2, -1, -1):
        shifted = hermmul([0]*k + [1], c2)
        scale = rem[-1]/shifted[-1]
        rem = rem[:-1] - scale*shifted[:-1]
        quo[k] = scale
    return quo, pu.trimseq(rem)
def hermpow(c, pow, maxpower=16) :
    """Raise a Hermite series to a power.

    Returns the Hermite series `c` raised to the power `pow`.  The
    argument `c` is a sequence of coefficients ordered from low to high.
    i.e., [1,2,3] is the series  ``P_0 + 2*P_1 + 3*P_2.``

    Parameters
    ----------
    c : array_like
        1-D array of Hermite series coefficients ordered from low to
        high.
    pow : integer
        Power to which the series will be raised
    maxpower : integer, optional
        Maximum power allowed.  This is mainly to limit growth of the
        series to unmanageable size.  Default is 16

    Returns
    -------
    coef : ndarray
        Hermite series of power.

    See Also
    --------
    hermadd, hermsub, hermmul, hermdiv

    Examples
    --------
    >>> from numpy.polynomial.hermite import hermpow
    >>> hermpow([1, 2, 3], 2)
    array([ 81.,  52.,  82.,  12.,   9.])

    """
    # c is a trimmed copy.
    [c] = pu.as_series([c])
    power = int(pow)
    if power != pow or power < 0:
        raise ValueError("Power must be a non-negative integer.")
    if maxpower is not None and power > maxpower:
        raise ValueError("Power is too large")
    if power == 0:
        return np.array([1], dtype=c.dtype)
    if power == 1:
        return c
    # Repeated multiplication; binary exponentiation would reduce the
    # number of hermmul calls but is unnecessary at the allowed sizes.
    prd = c
    for _ in range(2, power + 1):
        prd = hermmul(prd, c)
    return prd
def hermder(c, m=1, scl=1, axis=0) :
    """
    Differentiate a Hermite series.

    Returns the Hermite series coefficients `c` differentiated `m` times
    along `axis`.  At each iteration the result is multiplied by `scl` (the
    scaling factor is for use in a linear change of variable).  The
    argument `c` is an array of coefficients from low to high degree along
    each axis, e.g., [1,2,3] represents the series ``1*H_0 + 2*H_1 + 3*H_2``.

    Parameters
    ----------
    c : array_like
        Array of Hermite series coefficients.  If `c` is multidimensional
        the different axis correspond to different variables with the
        degree in each axis given by the corresponding index.
    m : int, optional
        Number of derivatives taken, must be non-negative. (Default: 1)
    scl : scalar, optional
        Each differentiation is multiplied by `scl`.  The end result is
        multiplication by ``scl**m``.  This is for use in a linear change
        of variable. (Default: 1)
    axis : int, optional
        Axis over which the derivative is taken. (Default: 0).

        .. versionadded:: 1.7.0

    Returns
    -------
    der : ndarray
        Hermite series of the derivative.

    See Also
    --------
    hermint

    Examples
    --------
    >>> from numpy.polynomial.hermite import hermder
    >>> hermder([ 1. ,  0.5,  0.5,  0.5])
    array([ 1.,  2.,  3.])

    """
    c = np.array(c, ndmin=1, copy=1)
    if c.dtype.char in '?bBhHiIlLqQpP':
        # Boolean/integer coefficients would truncate; promote to double.
        c = c.astype(np.double)
    cnt = int(m)
    iaxis = int(axis)
    if cnt != m:
        raise ValueError("The order of derivation must be integer")
    if cnt < 0:
        raise ValueError("The order of derivation must be non-negative")
    if iaxis != axis:
        raise ValueError("The axis must be integer")
    if not -c.ndim <= iaxis < c.ndim:
        raise ValueError("The axis is out of range")
    if iaxis < 0:
        iaxis += c.ndim
    if cnt == 0:
        return c
    # Bring the differentiation axis to the front so c[j] indexes degree j.
    c = np.rollaxis(c, iaxis)
    n = len(c)
    if cnt >= n:
        # Differentiating at least deg+1 times annihilates the series.
        c = c[:1]*0
    else:
        for _ in range(cnt):
            n -= 1
            c *= scl
            der = np.empty((n,) + c.shape[1:], dtype=c.dtype)
            # d/dx H_j(x) = 2*j*H_{j-1}(x)
            for j in range(n, 0, -1):
                der[j - 1] = (2*j)*c[j]
            c = der
    return np.rollaxis(c, 0, iaxis + 1)
def hermint(c, m=1, k=[], lbnd=0, scl=1, axis=0):
    """
    Integrate a Hermite series.

    Returns the Hermite series coefficients `c` integrated `m` times from
    `lbnd` along `axis`.  At each iteration the resulting series is
    **multiplied** by `scl` and an integration constant, `k`, is added.
    The scaling factor is for use in a linear change of variable.  ("Buyer
    beware": note that, depending on what one is doing, one may want `scl`
    to be the reciprocal of what one might expect; for more information,
    see the Notes section below.)  The argument `c` is an array of
    coefficients from low to high degree along each axis, e.g., [1,2,3]
    represents the series ``H_0 + 2*H_1 + 3*H_2`` while [[1,2],[1,2]]
    represents ``1*H_0(x)*H_0(y) + 1*H_1(x)*H_0(y) + 2*H_0(x)*H_1(y) +
    2*H_1(x)*H_1(y)`` if axis=0 is ``x`` and axis=1 is ``y``.

    Parameters
    ----------
    c : array_like
        Array of Hermite series coefficients. If c is multidimensional the
        different axis correspond to different variables with the degree in
        each axis given by the corresponding index.
    m : int, optional
        Order of integration, must be positive. (Default: 1)
    k : {[], list, scalar}, optional
        Integration constant(s).  The value of the first integral at
        ``lbnd`` is the first value in the list, the value of the second
        integral at ``lbnd`` is the second value, etc.  If ``k == []`` (the
        default), all constants are set to zero.  If ``m == 1``, a single
        scalar can be given instead of a list.
    lbnd : scalar, optional
        The lower bound of the integral. (Default: 0)
    scl : scalar, optional
        Following each integration the result is *multiplied* by `scl`
        before the integration constant is added. (Default: 1)
    axis : int, optional
        Axis over which the integral is taken. (Default: 0).

        .. versionadded:: 1.7.0

    Returns
    -------
    S : ndarray
        Hermite series coefficients of the integral.

    Raises
    ------
    ValueError
        If ``m < 0``, ``len(k) > m``, ``np.isscalar(lbnd) == False``, or
        ``np.isscalar(scl) == False``.

    See Also
    --------
    hermder

    Notes
    -----
    Note that the result of each integration is *multiplied* by `scl`.
    Why is this important to note?  Say one is making a linear change of
    variable :math:`u = ax + b` in an integral relative to `x`.  Then
    :math:`dx = du/a`, so one will need to set `scl` equal to
    :math:`1/a` - perhaps not what one would have first thought.

    Also note that, in general, the result of integrating a C-series needs
    to be "reprojected" onto the C-series basis set.  Thus, typically,
    the result of this function is "unintuitive," albeit correct; see
    Examples section below.

    Examples
    --------
    >>> from numpy.polynomial.hermite import hermint
    >>> hermint([1,2,3]) # integrate once, value 0 at 0.
    array([ 1. ,  0.5,  0.5,  0.5])
    >>> hermint([1,2,3], m=2) # integrate twice, value & deriv 0 at 0
    array([-0.5       ,  0.5       ,  0.125     ,  0.08333333,  0.0625    ])
    >>> hermint([1,2,3], k=1) # integrate once, value 1 at 0.
    array([ 2. ,  0.5,  0.5,  0.5])
    >>> hermint([1,2,3], lbnd=-1) # integrate once, value 0 at -1
    array([-2. ,  0.5,  0.5,  0.5])
    >>> hermint([1,2,3], m=2, k=[1,2], lbnd=-1)
    array([ 1.66666667, -0.5       ,  0.125     ,  0.08333333,  0.0625    ])

    """
    # NOTE: the mutable default ``k=[]`` is safe here because k is only
    # ever rebound (``k = [k]`` / ``k = list(k) + ...``), never mutated.
    c = np.array(c, ndmin=1, copy=1)
    if c.dtype.char in '?bBhHiIlLqQpP':
        # Integer/boolean coefficients cannot hold the divisions below.
        c = c.astype(np.double)
    if not np.iterable(k):
        k = [k]
    cnt, iaxis = [int(t) for t in [m, axis]]
    if cnt != m:
        raise ValueError("The order of integration must be integer")
    if cnt < 0:
        raise ValueError("The order of integration must be non-negative")
    if len(k) > cnt:
        raise ValueError("Too many integration constants")
    if iaxis != axis:
        raise ValueError("The axis must be integer")
    if not -c.ndim <= iaxis < c.ndim:
        raise ValueError("The axis is out of range")
    if iaxis < 0:
        iaxis += c.ndim
    if cnt == 0:
        return c
    # Bring the integration axis to the front so c[j] indexes degree j.
    c = np.rollaxis(c, iaxis)
    # Pad the constants list with zeros so there is one per integration.
    k = list(k) + [0]*(cnt - len(k))
    for i in range(cnt):
        n = len(c)
        c *= scl
        if n == 1 and np.all(c[0] == 0):
            # Integrating the zero series only adds the constant.
            c[0] += k[i]
        else:
            tmp = np.empty((n + 1,) + c.shape[1:], dtype=c.dtype)
            tmp[0] = c[0]*0
            tmp[1] = c[0]/2
            for j in range(1, n):
                # Antiderivative of H_j is H_{j+1}/(2*(j + 1)).
                tmp[j + 1] = c[j]/(2*(j + 1))
            # Adjust the constant term so the integral equals k[i] at lbnd.
            tmp[0] += k[i] - hermval(lbnd, tmp)
            c = tmp
    c = np.rollaxis(c, 0, iaxis + 1)
    return c
def hermval(x, c, tensor=True):
    """
    Evaluate an Hermite series at points x.

    If `c` is of length `n + 1`, this function returns the value:

    .. math:: p(x) = c_0 * H_0(x) + c_1 * H_1(x) + ... + c_n * H_n(x)

    The parameter `x` is converted to an array only if it is a tuple or a
    list, otherwise it is treated as a scalar.  In either case, either `x`
    or its elements must support multiplication and addition both with
    themselves and with the elements of `c`.

    If `c` is a 1-D array, then `p(x)` will have the same shape as `x`.  If
    `c` is multidimensional, then the shape of the result depends on the
    value of `tensor`.

    Parameters
    ----------
    x : array_like, compatible object
        If `x` is a list or tuple, it is converted to an ndarray, otherwise
        it is left unchanged and treated as a scalar.
    c : array_like
        Array of coefficients ordered so that the coefficients for terms of
        degree n are contained in c[n].  If `c` is multidimensional the
        remaining indices enumerate multiple polynomials.
    tensor : boolean, optional
        If True, the shape of the coefficient array is extended with ones
        on the right, one for each dimension of `x`, so every column of
        coefficients in `c` is evaluated for every element of `x`.  If
        False, `x` is broadcast over the columns of `c`.  Default is True.

        .. versionadded:: 1.7.0

    Returns
    -------
    values : ndarray, algebra_like
        The shape of the return value is described above.

    See Also
    --------
    hermval2d, hermgrid2d, hermval3d, hermgrid3d

    Notes
    -----
    The evaluation uses Clenshaw recursion, aka synthetic division.

    Examples
    --------
    >>> from numpy.polynomial.hermite import hermval
    >>> coef = [1,2,3]
    >>> hermval(1, coef)
    11.0

    """
    c = np.array(c, ndmin=1, copy=0)
    if c.dtype.char in '?bBhHiIlLqQpP':
        c = c.astype(np.double)
    if isinstance(x, (tuple, list)):
        x = np.asarray(x)
    if isinstance(x, np.ndarray) and tensor:
        # Append singleton axes so the coefficients broadcast over x.
        c = c.reshape(c.shape + (1,)*x.ndim)

    x2 = x*2
    if len(c) == 1:
        c0, c1 = c[0], 0
    elif len(c) == 2:
        c0, c1 = c[0], c[1]
    else:
        nd = len(c)
        c0, c1 = c[-2], c[-1]
        # Clenshaw recurrence built on H_{n+1} = 2x*H_n - 2n*H_{n-1};
        # nd tracks the degree associated with c1.
        for i in range(3, len(c) + 1):
            nd = nd - 1
            c0, c1 = c[-i] - c1*(2*(nd - 1)), c0 + c1*x2
    return c0 + c1*x2
def hermval2d(x, y, c):
    """
    Evaluate a 2-D Hermite series at points (x, y).

    This function returns the values:

    .. math:: p(x,y) = \\sum_{i,j} c_{i,j} * H_i(x) * H_j(y)

    The parameters `x` and `y` are converted to arrays only if they are
    tuples or a lists, otherwise they are treated as a scalars and they
    must have the same shape after conversion.  In either case, either `x`
    and `y` or their elements must support multiplication and addition both
    with themselves and with the elements of `c`.

    If `c` is a 1-D array a one is implicitly appended to its shape to make
    it 2-D.  The shape of the result will be c.shape[2:] + x.shape.

    Parameters
    ----------
    x, y : array_like, compatible objects
        The two dimensional series is evaluated at the points `(x, y)`,
        where `x` and `y` must have the same shape.  If `x` or `y` is a
        list or tuple, it is first converted to an ndarray, otherwise it is
        left unchanged and if it isn't an ndarray it is treated as a
        scalar.
    c : array_like
        Array of coefficients ordered so that the coefficient of the term
        of multi-degree i,j is contained in ``c[i,j]``.  If `c` has
        dimension greater than two the remaining indices enumerate multiple
        sets of coefficients.

    Returns
    -------
    values : ndarray, compatible object
        The values of the two dimensional polynomial at points formed with
        pairs of corresponding values from `x` and `y`.

    See Also
    --------
    hermval, hermgrid2d, hermval3d, hermgrid3d

    Notes
    -----

    .. versionadded::1.7.0

    """
    try:
        x, y = np.array((x, y), copy=0)
    # A bare ``except:`` here would also swallow KeyboardInterrupt and
    # SystemExit; only conversion failures should become a ValueError.
    except Exception:
        raise ValueError('x, y are incompatible')

    c = hermval(x, c)
    c = hermval(y, c, tensor=False)
    return c
def hermgrid2d(x, y, c):
    """
    Evaluate a 2-D Hermite series on the Cartesian product of x and y.

    This function returns the values:

    .. math:: p(a,b) = \\sum_{i,j} c_{i,j} * H_i(a) * H_j(b)

    where the points `(a, b)` consist of all pairs formed by taking
    `a` from `x` and `b` from `y`.  The resulting points form a grid with
    `x` in the first dimension and `y` in the second.

    The parameters `x` and `y` are converted to arrays only if they are
    tuples or a lists, otherwise they are treated as a scalars.  In either
    case, either `x` and `y` or their elements must support multiplication
    and addition both with themselves and with the elements of `c`.

    Parameters
    ----------
    x, y : array_like, compatible objects
        The two dimensional series is evaluated at the points in the
        Cartesian product of `x` and `y`.  If `x` or `y` is a list or
        tuple, it is first converted to an ndarray, otherwise it is left
        unchanged and, if it isn't an ndarray, it is treated as a scalar.
    c : array_like
        Array of coefficients ordered so that the coefficients for terms
        of degree i,j are contained in ``c[i,j]``.

    Returns
    -------
    values : ndarray, compatible object
        The values of the two dimensional polynomial at points in the
        Cartesian product of `x` and `y`.

    See Also
    --------
    hermval, hermval2d, hermval3d, hermgrid3d

    Notes
    -----

    .. versionadded::1.7.0

    """
    # Evaluate along x first (which appends an axis for x's shape), then
    # along y; the result carries one axis per input, i.e. the grid.
    return hermval(y, hermval(x, c))
def hermval3d(x, y, z, c):
    """
    Evaluate a 3-D Hermite series at points (x, y, z).

    This function returns the values:

    .. math:: p(x,y,z) = \\sum_{i,j,k} c_{i,j,k} * H_i(x) * H_j(y) * H_k(z)

    The parameters `x`, `y`, and `z` are converted to arrays only if
    they are tuples or a lists, otherwise they are treated as a scalars and
    they must have the same shape after conversion.  In either case, either
    `x`, `y`, and `z` or their elements must support multiplication and
    addition both with themselves and with the elements of `c`.

    If `c` has fewer than 3 dimensions, ones are implicitly appended to its
    shape to make it 3-D.  The shape of the result will be c.shape[3:] +
    x.shape.

    Parameters
    ----------
    x, y, z : array_like, compatible object
        The three dimensional series is evaluated at the points
        `(x, y, z)`, where `x`, `y`, and `z` must have the same shape.  If
        any of `x`, `y`, or `z` is a list or tuple, it is first converted
        to an ndarray, otherwise it is left unchanged and if it isn't an
        ndarray it is treated as a scalar.
    c : array_like
        Array of coefficients ordered so that the coefficient of the term
        of multi-degree i,j,k is contained in ``c[i,j,k]``.  If `c` has
        dimension greater than 3 the remaining indices enumerate multiple
        sets of coefficients.

    Returns
    -------
    values : ndarray, compatible object
        The values of the multidimensional polynomial on points formed
        with triples of corresponding values from `x`, `y`, and `z`.

    See Also
    --------
    hermval, hermval2d, hermgrid2d, hermgrid3d

    Notes
    -----

    .. versionadded::1.7.0

    """
    try:
        x, y, z = np.array((x, y, z), copy=0)
    # A bare ``except:`` here would also swallow KeyboardInterrupt and
    # SystemExit; only conversion failures should become a ValueError.
    except Exception:
        raise ValueError('x, y, z are incompatible')

    c = hermval(x, c)
    c = hermval(y, c, tensor=False)
    c = hermval(z, c, tensor=False)
    return c
def hermgrid3d(x, y, z, c):
"""
Evaluate a 3-D Hermite series on the Cartesian product of x, y, and z.
This function returns the values:
.. math:: p(a,b,c) = \\sum_{i,j,k} c_{i,j,k} * H_i(a) * H_j(b) * H_k(c)
where the points `(a, b, c)` consist of all triples formed by taking
`a` from `x`, `b` from `y`, and `c` from `z`. The resulting points form
a grid with `x` in the first dimension, `y` in the second, and `z` in
the third.
The parameters `x`, `y`, and `z` are converted to arrays only if they
are tuples or a lists, otherwise they are treated as a scalars. In
either case, either `x`, `y`, and `z` or their elements must support
multiplication and addition both with themselves and with the elements
of `c`.
If `c` has fewer than three dimensions, ones are implicitly appended to
its shape to make it 3-D. The shape of the result will be c.shape[3:] +
x.shape + y.shape + z.shape.
Parameters
----------
x, y, z : array_like, compatible objects
The three dimensional series is evaluated at the points in the
Cartesian product of `x`, `y`, and `z`. If `x`,`y`, or `z` is a
list or tuple, it is first converted to an ndarray, otherwise it is
left unchanged and, if it isn't an ndarray, it is treated as a
scalar.
c : array_like
Array of coefficients ordered so that the coefficients for terms of
degree i,j are contained in ``c[i,j]``. If `c` has dimension
greater than two the remaining indices enumerate multiple sets of
coefficients.
Returns
-------
values : ndarray, compatible object
The values of the two dimensional polynomial at points in the Cartesian
product of `x` and `y`.
See Also
--------
hermval, hermval2d, hermgrid2d, hermval3d
Notes
-----
.. versionadded::1.7.0
"""
c = hermval(x, c)
c = hermval(y, c)
c = hermval(z, c)
return c
def hermvander(x, deg) :
"""Pseudo-Vandermonde matrix of given degree.
Returns the pseudo-Vandermonde matrix of degree `deg` and sample points
`x`. The pseudo-Vandermonde matrix is defined by
.. math:: V[..., i] = H_i(x),
where `0 <= i <= deg`. The leading indices of `V` index the elements of
`x` and the last index is the degree of the Hermite polynomial.
If `c` is a 1-D array of coefficients of length `n + 1` and `V` is the
array ``V = hermvander(x, n)``, then ``np.dot(V, c)`` and
``hermval(x, c)`` are the same up to roundoff. This equivalence is
useful both for least squares fitting and for the evaluation of a large
number of Hermite series of the same degree and sample points.
Parameters
----------
x : array_like
Array of points. The dtype is converted to float64 or complex128
depending on whether any of the elements are complex. If `x` is
scalar it is converted to a 1-D array.
deg : int
Degree of the resulting matrix.
Returns
-------
vander : ndarray
The pseudo-Vandermonde matrix. The shape of the returned matrix is
``x.shape + (deg + 1,)``, where The last index is the degree of the
corresponding Hermite polynomial. The dtype will be the same as
the converted `x`.
Examples
--------
>>> from numpy.polynomial.hermite import hermvander
>>> x = np.array([-1, 0, 1])
>>> hermvander(x, 3)
array([[ 1., -2., 2., 4.],
[ 1., 0., -2., -0.],
[ 1., 2., 2., -4.]])
"""
ideg = int(deg)
if ideg != deg:
raise ValueError("deg must be integer")
if ideg < 0:
raise ValueError("deg must be non-negative")
x = np.array(x, copy=0, ndmin=1) + 0.0
dims = (ideg + 1,) + x.shape
dtyp = x.dtype
v = np.empty(dims, dtype=dtyp)
v[0] = x*0 + 1
if ideg > 0 :
x2 = x*2
v[1] = x2
for i in range(2, ideg + 1) :
v[i] = (v[i-1]*x2 - v[i-2]*(2*(i - 1)))
return np.rollaxis(v, 0, v.ndim)
def hermvander2d(x, y, deg) :
"""Pseudo-Vandermonde matrix of given degrees.
Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
points `(x, y)`. The pseudo-Vandermonde matrix is defined by
.. math:: V[..., deg[1]*i + j] = H_i(x) * H_j(y),
where `0 <= i <= deg[0]` and `0 <= j <= deg[1]`. The leading indices of
`V` index the points `(x, y)` and the last index encodes the degrees of
the Hermite polynomials.
If ``V = hermvander2d(x, y, [xdeg, ydeg])``, then the columns of `V`
correspond to the elements of a 2-D coefficient array `c` of shape
(xdeg + 1, ydeg + 1) in the order
.. math:: c_{00}, c_{01}, c_{02} ... , c_{10}, c_{11}, c_{12} ...
and ``np.dot(V, c.flat)`` and ``hermval2d(x, y, c)`` will be the same
up to roundoff. This equivalence is useful both for least squares
fitting and for the evaluation of a large number of 2-D Hermite
series of the same degrees and sample points.
Parameters
----------
x, y : array_like
Arrays of point coordinates, all of the same shape. The dtypes
will be converted to either float64 or complex128 depending on
whether any of the elements are complex. Scalars are converted to 1-D
arrays.
deg : list of ints
List of maximum degrees of the form [x_deg, y_deg].
Returns
-------
vander2d : ndarray
The shape of the returned matrix is ``x.shape + (order,)``, where
:math:`order = (deg[0]+1)*(deg([1]+1)`. The dtype will be the same
as the converted `x` and `y`.
See Also
--------
hermvander, hermvander3d. hermval2d, hermval3d
Notes
-----
.. versionadded::1.7.0
"""
ideg = [int(d) for d in deg]
is_valid = [id == d and id >= 0 for id, d in zip(ideg, deg)]
if is_valid != [1, 1]:
raise ValueError("degrees must be non-negative integers")
degx, degy = ideg
x, y = np.array((x, y), copy=0) + 0.0
vx = hermvander(x, degx)
vy = hermvander(y, degy)
v = vx[..., None]*vy[..., None, :]
return v.reshape(v.shape[:-2] + (-1,))
def hermvander3d(x, y, z, deg) :
"""Pseudo-Vandermonde matrix of given degrees.
Returns the pseudo-Vandermonde matrix of degrees `deg` and sample
points `(x, y, z)`. If `l, m, n` are the given degrees in `x, y, z`,
then The pseudo-Vandermonde matrix is defined by
.. math:: V[..., (m+1)(n+1)i + (n+1)j + k] = H_i(x)*H_j(y)*H_k(z),
where `0 <= i <= l`, `0 <= j <= m`, and `0 <= j <= n`. The leading
indices of `V` index the points `(x, y, z)` and the last index encodes
the degrees of the Hermite polynomials.
If ``V = hermvander3d(x, y, z, [xdeg, ydeg, zdeg])``, then the columns
of `V` correspond to the elements of a 3-D coefficient array `c` of
shape (xdeg + 1, ydeg + 1, zdeg + 1) in the order
.. math:: c_{000}, c_{001}, c_{002},... , c_{010}, c_{011}, c_{012},...
and ``np.dot(V, c.flat)`` and ``hermval3d(x, y, z, c)`` will be the
same up to roundoff. This equivalence is useful both for least squares
fitting and for the evaluation of a large number of 3-D Hermite
series of the same degrees and sample points.
Parameters
----------
x, y, z : array_like
Arrays of point coordinates, all of the same shape. The dtypes will
be converted to either float64 or complex128 depending on whether
any of the elements are complex. Scalars are converted to 1-D
arrays.
deg : list of ints
List of maximum degrees of the form [x_deg, y_deg, z_deg].
Returns
-------
vander3d : ndarray
The shape of the returned matrix is ``x.shape + (order,)``, where
:math:`order = (deg[0]+1)*(deg([1]+1)*(deg[2]+1)`. The dtype will
be the same as the converted `x`, `y`, and `z`.
See Also
--------
hermvander, hermvander3d. hermval2d, hermval3d
Notes
-----
.. versionadded::1.7.0
"""
ideg = [int(d) for d in deg]
is_valid = [id == d and id >= 0 for id, d in zip(ideg, deg)]
if is_valid != [1, 1, 1]:
raise ValueError("degrees must be non-negative integers")
degx, degy, degz = ideg
x, y, z = np.array((x, y, z), copy=0) + 0.0
vx = hermvander(x, degx)
vy = hermvander(y, degy)
vz = hermvander(z, degz)
v = vx[..., None, None]*vy[..., None, :, None]*vz[..., None, None, :]
return v.reshape(v.shape[:-3] + (-1,))
def hermfit(x, y, deg, rcond=None, full=False, w=None):
"""
Least squares fit of Hermite series to data.
Return the coefficients of a Hermite series of degree `deg` that is the
least squares fit to the data values `y` given at points `x`. If `y` is
1-D the returned coefficients will also be 1-D. If `y` is 2-D multiple
fits are done, one for each column of `y`, and the resulting
coefficients are stored in the corresponding columns of a 2-D return.
The fitted polynomial(s) are in the form
.. math:: p(x) = c_0 + c_1 * H_1(x) + ... + c_n * H_n(x),
where `n` is `deg`.
Since numpy version 1.7.0, hermfit also supports NA. If any of the
elements of `x`, `y`, or `w` are NA, then the corresponding rows of the
linear least squares problem (see Notes) are set to 0. If `y` is 2-D,
then an NA in any row of `y` invalidates that whole row.
Parameters
----------
x : array_like, shape (M,)
x-coordinates of the M sample points ``(x[i], y[i])``.
y : array_like, shape (M,) or (M, K)
y-coordinates of the sample points. Several data sets of sample
points sharing the same x-coordinates can be fitted at once by
passing in a 2D-array that contains one dataset per column.
deg : int
Degree of the fitting polynomial
rcond : float, optional
Relative condition number of the fit. Singular values smaller than
this relative to the largest singular value will be ignored. The
default value is len(x)*eps, where eps is the relative precision of
the float type, about 2e-16 in most cases.
full : bool, optional
Switch determining nature of return value. When it is False (the
default) just the coefficients are returned, when True diagnostic
information from the singular value decomposition is also returned.
w : array_like, shape (`M`,), optional
Weights. If not None, the contribution of each point
``(x[i],y[i])`` to the fit is weighted by `w[i]`. Ideally the
weights are chosen so that the errors of the products ``w[i]*y[i]``
all have the same variance. The default value is None.
Returns
-------
coef : ndarray, shape (M,) or (M, K)
Hermite coefficients ordered from low to high. If `y` was 2-D,
the coefficients for the data in column k of `y` are in column
`k`.
[residuals, rank, singular_values, rcond] : present when `full` = True
Residuals of the least-squares fit, the effective rank of the
scaled Vandermonde matrix and its singular values, and the
specified value of `rcond`. For more details, see `linalg.lstsq`.
Warns
-----
RankWarning
The rank of the coefficient matrix in the least-squares fit is
deficient. The warning is only raised if `full` = False. The
warnings can be turned off by
>>> import warnings
>>> warnings.simplefilter('ignore', RankWarning)
See Also
--------
chebfit, legfit, lagfit, polyfit, hermefit
hermval : Evaluates a Hermite series.
hermvander : Vandermonde matrix of Hermite series.
hermweight : Hermite weight function
linalg.lstsq : Computes a least-squares fit from the matrix.
scipy.interpolate.UnivariateSpline : Computes spline fits.
Notes
-----
The solution is the coefficients of the Hermite series `p` that
minimizes the sum of the weighted squared errors
.. math:: E = \\sum_j w_j^2 * |y_j - p(x_j)|^2,
where the :math:`w_j` are the weights. This problem is solved by
setting up the (typically) overdetermined matrix equation
.. math:: V(x) * c = w * y,
where `V` is the weighted pseudo Vandermonde matrix of `x`, `c` are the
coefficients to be solved for, `w` are the weights, `y` are the
observed values. This equation is then solved using the singular value
decomposition of `V`.
If some of the singular values of `V` are so small that they are
neglected, then a `RankWarning` will be issued. This means that the
coefficient values may be poorly determined. Using a lower order fit
will usually get rid of the warning. The `rcond` parameter can also be
set to a value smaller than its default, but the resulting fit may be
spurious and have large contributions from roundoff error.
Fits using Hermite series are probably most useful when the data can be
approximated by ``sqrt(w(x)) * p(x)``, where `w(x)` is the Hermite
weight. In that case the weight ``sqrt(w(x[i])`` should be used
together with data values ``y[i]/sqrt(w(x[i])``. The weight function is
available as `hermweight`.
References
----------
.. [1] Wikipedia, "Curve fitting",
http://en.wikipedia.org/wiki/Curve_fitting
Examples
--------
>>> from numpy.polynomial.hermite import hermfit, hermval
>>> x = np.linspace(-10, 10)
>>> err = np.random.randn(len(x))/10
>>> y = hermval(x, [1, 2, 3]) + err
>>> hermfit(x, y, 2)
array([ 0.97902637, 1.99849131, 3.00006 ])
"""
order = int(deg) + 1
x = np.asarray(x) + 0.0
y = np.asarray(y) + 0.0
# check arguments.
if deg < 0 :
raise ValueError("expected deg >= 0")
if x.ndim != 1:
raise TypeError("expected 1D vector for x")
if x.size == 0:
raise TypeError("expected non-empty vector for x")
if y.ndim < 1 or y.ndim > 2 :
raise TypeError("expected 1D or 2D array for y")
if len(x) != len(y):
raise TypeError("expected x and y to have same length")
# set up the least squares matrices in transposed form
lhs = hermvander(x, deg).T
rhs = y.T
if w is not None:
w = np.asarray(w) + 0.0
if w.ndim != 1:
raise TypeError("expected 1D vector for w")
if len(x) != len(w):
raise TypeError("expected x and w to have same length")
# apply weights. Don't use inplace operations as they
# can cause problems with NA.
lhs = lhs * w
rhs = rhs * w
# set rcond
if rcond is None :
rcond = len(x)*np.finfo(x.dtype).eps
# Determine the norms of the design matrix columns.
if issubclass(lhs.dtype.type, np.complexfloating):
scl = np.sqrt((np.square(lhs.real) + np.square(lhs.imag)).sum(1))
else:
scl = np.sqrt(np.square(lhs).sum(1))
scl[scl == 0] = 1
# Solve the least squares problem.
c, resids, rank, s = la.lstsq(lhs.T/scl, rhs.T, rcond)
c = (c.T/scl).T
# warn on rank reduction
if rank != order and not full:
msg = "The fit may be poorly conditioned"
warnings.warn(msg, pu.RankWarning)
if full :
return c, [resids, rank, s, rcond]
else :
return c
def hermcompanion(c):
"""Return the scaled companion matrix of c.
The basis polynomials are scaled so that the companion matrix is
symmetric when `c` is an Hermite basis polynomial. This provides
better eigenvalue estimates than the unscaled case and for basis
polynomials the eigenvalues are guaranteed to be real if
`numpy.linalg.eigvalsh` is used to obtain them.
Parameters
----------
c : array_like
1-D array of Hermite series coefficients ordered from low to high
degree.
Returns
-------
mat : ndarray
Scaled companion matrix of dimensions (deg, deg).
Notes
-----
.. versionadded::1.7.0
"""
accprod = np.multiply.accumulate
# c is a trimmed copy
[c] = pu.as_series([c])
if len(c) < 2:
raise ValueError('Series must have maximum degree of at least 1.')
if len(c) == 2:
return np.array(-.5*c[0]/c[1])
n = len(c) - 1
mat = np.zeros((n, n), dtype=c.dtype)
scl = np.hstack((1., np.sqrt(2.*np.arange(1,n))))
scl = np.multiply.accumulate(scl)
top = mat.reshape(-1)[1::n+1]
bot = mat.reshape(-1)[n::n+1]
top[...] = np.sqrt(.5*np.arange(1,n))
bot[...] = top
mat[:,-1] -= (c[:-1]/c[-1])*(scl/scl[-1])*.5
return mat
def hermroots(c):
"""
Compute the roots of a Hermite series.
Return the roots (a.k.a. "zeros") of the polynomial
.. math:: p(x) = \\sum_i c[i] * H_i(x).
Parameters
----------
c : 1-D array_like
1-D array of coefficients.
Returns
-------
out : ndarray
Array of the roots of the series. If all the roots are real,
then `out` is also real, otherwise it is complex.
See Also
--------
polyroots, legroots, lagroots, chebroots, hermeroots
Notes
-----
The root estimates are obtained as the eigenvalues of the companion
matrix, Roots far from the origin of the complex plane may have large
errors due to the numerical instability of the series for such
values. Roots with multiplicity greater than 1 will also show larger
errors as the value of the series near such points is relatively
insensitive to errors in the roots. Isolated roots near the origin can
be improved by a few iterations of Newton's method.
The Hermite series basis polynomials aren't powers of `x` so the
results of this function may seem unintuitive.
Examples
--------
>>> from numpy.polynomial.hermite import hermroots, hermfromroots
>>> coef = hermfromroots([-1, 0, 1])
>>> coef
array([ 0. , 0.25 , 0. , 0.125])
>>> hermroots(coef)
array([ -1.00000000e+00, -1.38777878e-17, 1.00000000e+00])
"""
# c is a trimmed copy
[c] = pu.as_series([c])
if len(c) <= 1 :
return np.array([], dtype=c.dtype)
if len(c) == 2 :
return np.array([-.5*c[0]/c[1]])
m = hermcompanion(c)
r = la.eigvals(m)
r.sort()
return r
def hermgauss(deg):
"""
Gauss-Hermite quadrature.
Computes the sample points and weights for Gauss-Hermite quadrature.
These sample points and weights will correctly integrate polynomials of
degree :math:`2*deg - 1` or less over the interval :math:`[-\inf, \inf]`
with the weight function :math:`f(x) = \exp(-x^2)`.
Parameters
----------
deg : int
Number of sample points and weights. It must be >= 1.
Returns
-------
x : ndarray
1-D ndarray containing the sample points.
y : ndarray
1-D ndarray containing the weights.
Notes
-----
.. versionadded::1.7.0
The results have only been tested up to degree 100, higher degrees may
be problematic. The weights are determined by using the fact that
.. math:: w_k = c / (H'_n(x_k) * H_{n-1}(x_k))
where :math:`c` is a constant independent of :math:`k` and :math:`x_k`
is the k'th root of :math:`H_n`, and then scaling the results to get
the right value when integrating 1.
"""
ideg = int(deg)
if ideg != deg or ideg < 1:
raise ValueError("deg must be a non-negative integer")
# first approximation of roots. We use the fact that the companion
# matrix is symmetric in this case in order to obtain better zeros.
c = np.array([0]*deg + [1])
m = hermcompanion(c)
x = la.eigvals(m)
x.sort()
# improve roots by one application of Newton
dy = hermval(x, c)
df = hermval(x, hermder(c))
x -= dy/df
# compute the weights. We scale the factor to avoid possible numerical
# overflow.
fm = hermval(x, c[1:])
fm /= np.abs(fm).max()
df /= np.abs(df).max()
w = 1/(fm * df)
# for Hermite we can also symmetrize
w = (w + w[::-1])/2
x = (x - x[::-1])/2
# scale w to get the right value
w *= np.sqrt(np.pi) / w.sum()
return x, w
def hermweight(x):
"""
Weight function of the Hermite polynomials.
The weight function is :math:`\exp(-x^2)` and the interval of
integration is :math:`[-\inf, \inf]`. the Hermite polynomials are
orthogonal, but not normalized, with respect to this weight function.
Parameters
----------
x : array_like
Values at which the weight function will be computed.
Returns
-------
w : ndarray
The weight function at `x`.
Notes
-----
.. versionadded::1.7.0
"""
w = np.exp(-x**2)
return w
#
# Hermite series class
#
exec(polytemplate.substitute(name='Hermite', nick='herm', domain='[-1,1]'))
| 31.291262 | 79 | 0.597416 | from __future__ import division, absolute_import
import numpy as np
import numpy.linalg as la
from . import polyutils as pu
import warnings
from .polytemplate import polytemplate
__all__ = ['hermzero', 'hermone', 'hermx', 'hermdomain', 'hermline',
'hermadd', 'hermsub', 'hermmulx', 'hermmul', 'hermdiv', 'hermpow',
'hermval', 'hermder', 'hermint', 'herm2poly', 'poly2herm',
'hermfromroots', 'hermvander', 'hermfit', 'hermtrim', 'hermroots',
'Hermite', 'hermval2d', 'hermval3d', 'hermgrid2d', 'hermgrid3d',
'hermvander2d', 'hermvander3d', 'hermcompanion', 'hermgauss',
'hermweight']
hermtrim = pu.trimcoef
def poly2herm(pol) :
[pol] = pu.as_series([pol])
deg = len(pol) - 1
res = 0
for i in range(deg, -1, -1) :
res = hermadd(hermmulx(res), pol[i])
return res
def herm2poly(c) :
from .polynomial import polyadd, polysub, polymulx
[c] = pu.as_series([c])
n = len(c)
if n == 1:
return c
if n == 2:
c[1] *= 2
return c
else:
c0 = c[-2]
c1 = c[-1]
for i in range(n - 1, 1, -1) :
tmp = c0
c0 = polysub(c[i - 2], c1*(2*(i - 1)))
c1 = polyadd(tmp, polymulx(c1)*2)
return polyadd(c0, polymulx(c1)*2)
hermdomain = np.array([-1,1])
hermzero = np.array([0])
hermone = np.array([1])
hermx = np.array([0, 1/2])
def hermline(off, scl) :
if scl != 0 :
return np.array([off,scl/2])
else :
return np.array([off])
def hermfromroots(roots) :
if len(roots) == 0 :
return np.ones(1)
else :
[roots] = pu.as_series([roots], trim=False)
roots.sort()
p = [hermline(-r, 1) for r in roots]
n = len(p)
while n > 1:
m, r = divmod(n, 2)
tmp = [hermmul(p[i], p[i+m]) for i in range(m)]
if r:
tmp[0] = hermmul(tmp[0], p[-1])
p = tmp
n = m
return p[0]
def hermadd(c1, c2):
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2) :
c1[:c2.size] += c2
ret = c1
else :
c2[:c1.size] += c1
ret = c2
return pu.trimseq(ret)
def hermsub(c1, c2):
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2) :
c1[:c2.size] -= c2
ret = c1
else :
c2 = -c2
c2[:c1.size] += c1
ret = c2
return pu.trimseq(ret)
def hermmulx(c):
[c] = pu.as_series([c])
if len(c) == 1 and c[0] == 0:
return c
prd = np.empty(len(c) + 1, dtype=c.dtype)
prd[0] = c[0]*0
prd[1] = c[0]/2
for i in range(1, len(c)):
prd[i + 1] = c[i]/2
prd[i - 1] += c[i]*i
return prd
def hermmul(c1, c2):
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2):
c = c2
xs = c1
else:
c = c1
xs = c2
if len(c) == 1:
c0 = c[0]*xs
c1 = 0
elif len(c) == 2:
c0 = c[0]*xs
c1 = c[1]*xs
else :
nd = len(c)
c0 = c[-2]*xs
c1 = c[-1]*xs
for i in range(3, len(c) + 1) :
tmp = c0
nd = nd - 1
c0 = hermsub(c[-i]*xs, c1*(2*(nd - 1)))
c1 = hermadd(tmp, hermmulx(c1)*2)
return hermadd(c0, hermmulx(c1)*2)
def hermdiv(c1, c2):
[c1, c2] = pu.as_series([c1, c2])
if c2[-1] == 0 :
raise ZeroDivisionError()
lc1 = len(c1)
lc2 = len(c2)
if lc1 < lc2 :
return c1[:1]*0, c1
elif lc2 == 1 :
return c1/c2[-1], c1[:1]*0
else :
quo = np.empty(lc1 - lc2 + 1, dtype=c1.dtype)
rem = c1
for i in range(lc1 - lc2, - 1, -1):
p = hermmul([0]*i + [1], c2)
q = rem[-1]/p[-1]
rem = rem[:-1] - q*p[:-1]
quo[i] = q
return quo, pu.trimseq(rem)
def hermpow(c, pow, maxpower=16) :
[c] = pu.as_series([c])
power = int(pow)
if power != pow or power < 0 :
raise ValueError("Power must be a non-negative integer.")
elif maxpower is not None and power > maxpower :
raise ValueError("Power is too large")
elif power == 0 :
return np.array([1], dtype=c.dtype)
elif power == 1 :
return c
else :
prd = c
for i in range(2, power + 1) :
prd = hermmul(prd, c)
return prd
def hermder(c, m=1, scl=1, axis=0) :
c = np.array(c, ndmin=1, copy=1)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
cnt, iaxis = [int(t) for t in [m, axis]]
if cnt != m:
raise ValueError("The order of derivation must be integer")
if cnt < 0:
raise ValueError("The order of derivation must be non-negative")
if iaxis != axis:
raise ValueError("The axis must be integer")
if not -c.ndim <= iaxis < c.ndim:
raise ValueError("The axis is out of range")
if iaxis < 0:
iaxis += c.ndim
if cnt == 0:
return c
c = np.rollaxis(c, iaxis)
n = len(c)
if cnt >= n:
c = c[:1]*0
else :
for i in range(cnt):
n = n - 1
c *= scl
der = np.empty((n,) + c.shape[1:], dtype=c.dtype)
for j in range(n, 0, -1):
der[j - 1] = (2*j)*c[j]
c = der
c = np.rollaxis(c, 0, iaxis + 1)
return c
def hermint(c, m=1, k=[], lbnd=0, scl=1, axis=0):
c = np.array(c, ndmin=1, copy=1)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
if not np.iterable(k):
k = [k]
cnt, iaxis = [int(t) for t in [m, axis]]
if cnt != m:
raise ValueError("The order of integration must be integer")
if cnt < 0 :
raise ValueError("The order of integration must be non-negative")
if len(k) > cnt :
raise ValueError("Too many integration constants")
if iaxis != axis:
raise ValueError("The axis must be integer")
if not -c.ndim <= iaxis < c.ndim:
raise ValueError("The axis is out of range")
if iaxis < 0:
iaxis += c.ndim
if cnt == 0:
return c
c = np.rollaxis(c, iaxis)
k = list(k) + [0]*(cnt - len(k))
for i in range(cnt) :
n = len(c)
c *= scl
if n == 1 and np.all(c[0] == 0):
c[0] += k[i]
else:
tmp = np.empty((n + 1,) + c.shape[1:], dtype=c.dtype)
tmp[0] = c[0]*0
tmp[1] = c[0]/2
for j in range(1, n):
tmp[j + 1] = c[j]/(2*(j + 1))
tmp[0] += k[i] - hermval(lbnd, tmp)
c = tmp
c = np.rollaxis(c, 0, iaxis + 1)
return c
def hermval(x, c, tensor=True):
c = np.array(c, ndmin=1, copy=0)
if c.dtype.char in '?bBhHiIlLqQpP':
c = c.astype(np.double)
if isinstance(x, (tuple, list)):
x = np.asarray(x)
if isinstance(x, np.ndarray) and tensor:
c = c.reshape(c.shape + (1,)*x.ndim)
x2 = x*2
if len(c) == 1 :
c0 = c[0]
c1 = 0
elif len(c) == 2 :
c0 = c[0]
c1 = c[1]
else :
nd = len(c)
c0 = c[-2]
c1 = c[-1]
for i in range(3, len(c) + 1) :
tmp = c0
nd = nd - 1
c0 = c[-i] - c1*(2*(nd - 1))
c1 = tmp + c1*x2
return c0 + c1*x2
def hermval2d(x, y, c):
try:
x, y = np.array((x, y), copy=0)
except:
raise ValueError('x, y are incompatible')
c = hermval(x, c)
c = hermval(y, c, tensor=False)
return c
def hermgrid2d(x, y, c):
c = hermval(x, c)
c = hermval(y, c)
return c
def hermval3d(x, y, z, c):
try:
x, y, z = np.array((x, y, z), copy=0)
except:
raise ValueError('x, y, z are incompatible')
c = hermval(x, c)
c = hermval(y, c, tensor=False)
c = hermval(z, c, tensor=False)
return c
def hermgrid3d(x, y, z, c):
c = hermval(x, c)
c = hermval(y, c)
c = hermval(z, c)
return c
def hermvander(x, deg) :
ideg = int(deg)
if ideg != deg:
raise ValueError("deg must be integer")
if ideg < 0:
raise ValueError("deg must be non-negative")
x = np.array(x, copy=0, ndmin=1) + 0.0
dims = (ideg + 1,) + x.shape
dtyp = x.dtype
v = np.empty(dims, dtype=dtyp)
v[0] = x*0 + 1
if ideg > 0 :
x2 = x*2
v[1] = x2
for i in range(2, ideg + 1) :
v[i] = (v[i-1]*x2 - v[i-2]*(2*(i - 1)))
return np.rollaxis(v, 0, v.ndim)
def hermvander2d(x, y, deg) :
ideg = [int(d) for d in deg]
is_valid = [id == d and id >= 0 for id, d in zip(ideg, deg)]
if is_valid != [1, 1]:
raise ValueError("degrees must be non-negative integers")
degx, degy = ideg
x, y = np.array((x, y), copy=0) + 0.0
vx = hermvander(x, degx)
vy = hermvander(y, degy)
v = vx[..., None]*vy[..., None, :]
return v.reshape(v.shape[:-2] + (-1,))
def hermvander3d(x, y, z, deg) :
ideg = [int(d) for d in deg]
is_valid = [id == d and id >= 0 for id, d in zip(ideg, deg)]
if is_valid != [1, 1, 1]:
raise ValueError("degrees must be non-negative integers")
degx, degy, degz = ideg
x, y, z = np.array((x, y, z), copy=0) + 0.0
vx = hermvander(x, degx)
vy = hermvander(y, degy)
vz = hermvander(z, degz)
v = vx[..., None, None]*vy[..., None, :, None]*vz[..., None, None, :]
return v.reshape(v.shape[:-3] + (-1,))
def hermfit(x, y, deg, rcond=None, full=False, w=None):
order = int(deg) + 1
x = np.asarray(x) + 0.0
y = np.asarray(y) + 0.0
if deg < 0 :
raise ValueError("expected deg >= 0")
if x.ndim != 1:
raise TypeError("expected 1D vector for x")
if x.size == 0:
raise TypeError("expected non-empty vector for x")
if y.ndim < 1 or y.ndim > 2 :
raise TypeError("expected 1D or 2D array for y")
if len(x) != len(y):
raise TypeError("expected x and y to have same length")
lhs = hermvander(x, deg).T
rhs = y.T
if w is not None:
w = np.asarray(w) + 0.0
if w.ndim != 1:
raise TypeError("expected 1D vector for w")
if len(x) != len(w):
raise TypeError("expected x and w to have same length")
# can cause problems with NA.
lhs = lhs * w
rhs = rhs * w
# set rcond
if rcond is None :
rcond = len(x)*np.finfo(x.dtype).eps
# Determine the norms of the design matrix columns.
if issubclass(lhs.dtype.type, np.complexfloating):
scl = np.sqrt((np.square(lhs.real) + np.square(lhs.imag)).sum(1))
else:
scl = np.sqrt(np.square(lhs).sum(1))
scl[scl == 0] = 1
# Solve the least squares problem.
c, resids, rank, s = la.lstsq(lhs.T/scl, rhs.T, rcond)
c = (c.T/scl).T
# warn on rank reduction
if rank != order and not full:
msg = "The fit may be poorly conditioned"
warnings.warn(msg, pu.RankWarning)
if full :
return c, [resids, rank, s, rcond]
else :
return c
def hermcompanion(c):
accprod = np.multiply.accumulate
# c is a trimmed copy
[c] = pu.as_series([c])
if len(c) < 2:
raise ValueError('Series must have maximum degree of at least 1.')
if len(c) == 2:
return np.array(-.5*c[0]/c[1])
n = len(c) - 1
mat = np.zeros((n, n), dtype=c.dtype)
scl = np.hstack((1., np.sqrt(2.*np.arange(1,n))))
scl = np.multiply.accumulate(scl)
top = mat.reshape(-1)[1::n+1]
bot = mat.reshape(-1)[n::n+1]
top[...] = np.sqrt(.5*np.arange(1,n))
bot[...] = top
mat[:,-1] -= (c[:-1]/c[-1])*(scl/scl[-1])*.5
return mat
def hermroots(c):
# c is a trimmed copy
[c] = pu.as_series([c])
if len(c) <= 1 :
return np.array([], dtype=c.dtype)
if len(c) == 2 :
return np.array([-.5*c[0]/c[1]])
m = hermcompanion(c)
r = la.eigvals(m)
r.sort()
return r
def hermgauss(deg):
ideg = int(deg)
if ideg != deg or ideg < 1:
raise ValueError("deg must be a non-negative integer")
# first approximation of roots. We use the fact that the companion
# matrix is symmetric in this case in order to obtain better zeros.
c = np.array([0]*deg + [1])
m = hermcompanion(c)
x = la.eigvals(m)
x.sort()
# improve roots by one application of Newton
dy = hermval(x, c)
df = hermval(x, hermder(c))
x -= dy/df
# compute the weights. We scale the factor to avoid possible numerical
# overflow.
fm = hermval(x, c[1:])
fm /= np.abs(fm).max()
df /= np.abs(df).max()
w = 1/(fm * df)
# for Hermite we can also symmetrize
w = (w + w[::-1])/2
x = (x - x[::-1])/2
# scale w to get the right value
w *= np.sqrt(np.pi) / w.sum()
return x, w
def hermweight(x):
w = np.exp(-x**2)
return w
#
# Hermite series class
#
exec(polytemplate.substitute(name='Hermite', nick='herm', domain='[-1,1]'))
| true | true |
f73182b901161d28b67b66626d7189fdce69c7df | 368 | py | Python | utilities/models.py | codes-dev/qubwebs-blog | 0ba9372b2ec83ad181dbd31eb009a17a4a7acaf0 | [
"MIT"
] | null | null | null | utilities/models.py | codes-dev/qubwebs-blog | 0ba9372b2ec83ad181dbd31eb009a17a4a7acaf0 | [
"MIT"
] | null | null | null | utilities/models.py | codes-dev/qubwebs-blog | 0ba9372b2ec83ad181dbd31eb009a17a4a7acaf0 | [
"MIT"
] | null | null | null | from django.db import models
# Create your models here.
class TimeStampMixin(models.Model):
"""
An abstract base class model that provides self-updating
``created_at`` and ``updated_at`` fields.
"""
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
abstract = True | 24.533333 | 60 | 0.701087 | from django.db import models
class TimeStampMixin(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Meta:
abstract = True | true | true |
f73182c1280def3ee7b87cc1f8310cdf39c87b12 | 16,051 | py | Python | fairseq/fairseq/data/indexed_dataset.py | oguzdemirbasci/DynamicVocabAbstractiveSummariser | 2e8ba9efd6eddd7d1870d540638f05c80bfe9894 | [
"MIT"
] | null | null | null | fairseq/fairseq/data/indexed_dataset.py | oguzdemirbasci/DynamicVocabAbstractiveSummariser | 2e8ba9efd6eddd7d1870d540638f05c80bfe9894 | [
"MIT"
] | null | null | null | fairseq/fairseq/data/indexed_dataset.py | oguzdemirbasci/DynamicVocabAbstractiveSummariser | 2e8ba9efd6eddd7d1870d540638f05c80bfe9894 | [
"MIT"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from functools import lru_cache
import os
import shutil
import struct
import numpy as np
import torch
from . import FairseqDataset
def __best_fitting_dtype(vocab_size=None):
if vocab_size is not None and vocab_size < 65500:
return np.uint16
else:
return np.int32
def get_available_dataset_impl():
return ['raw', 'lazy', 'cached', 'mmap']
def infer_dataset_impl(path):
if IndexedRawTextDataset.exists(path):
return 'raw'
elif IndexedDataset.exists(path):
with open(index_file_path(path), 'rb') as f:
magic = f.read(8)
if magic == IndexedDataset._HDR_MAGIC:
return 'cached'
elif magic == MMapIndexedDataset.Index._HDR_MAGIC[:8]:
return 'mmap'
else:
return None
else:
return None
def make_builder(out_file, impl, vocab_size=None):
if impl == 'mmap':
return MMapIndexedDatasetBuilder(out_file, dtype=__best_fitting_dtype(vocab_size))
else:
return IndexedDatasetBuilder(out_file)
def make_dataset(path, impl, fix_lua_indexing=False, dictionary=None):
if impl == 'raw' and IndexedRawTextDataset.exists(path):
assert dictionary is not None
return IndexedRawTextDataset(path, dictionary)
elif impl == 'lazy' and IndexedDataset.exists(path):
return IndexedDataset(path, fix_lua_indexing=fix_lua_indexing)
elif impl == 'cached' and IndexedDataset.exists(path):
return IndexedCachedDataset(path, fix_lua_indexing=fix_lua_indexing)
elif impl == 'mmap' and MMapIndexedDataset.exists(path):
return MMapIndexedDataset(path)
return None
def dataset_exists(path, impl):
if impl == 'raw':
return IndexedRawTextDataset.exists(path)
elif impl == 'mmap':
return MMapIndexedDataset.exists(path)
else:
return IndexedDataset.exists(path)
def read_longs(f, n):
a = np.empty(n, dtype=np.int64)
f.readinto(a)
return a
def write_longs(f, a):
f.write(np.array(a, dtype=np.int64))
dtypes = {
1: np.uint8,
2: np.int8,
3: np.int16,
4: np.int32,
5: np.int64,
6: np.float,
7: np.double,
8: np.uint16
}
def code(dtype):
for k in dtypes.keys():
if dtypes[k] == dtype:
return k
raise ValueError(dtype)
def index_file_path(prefix_path):
return prefix_path + '.idx'
def data_file_path(prefix_path):
return prefix_path + '.bin'
class IndexedDataset(FairseqDataset):
"""Loader for TorchNet IndexedDataset"""
_HDR_MAGIC = b'TNTIDX\x00\x00'
def __init__(self, path, fix_lua_indexing=False):
super().__init__()
self.path = path
self.fix_lua_indexing = fix_lua_indexing
self.data_file = None
self.read_index(path)
def read_index(self, path):
with open(index_file_path(path), 'rb') as f:
magic = f.read(8)
assert magic == self._HDR_MAGIC, (
'Index file doesn\'t match expected format. '
'Make sure that --dataset-impl is configured properly.'
)
version = f.read(8)
assert struct.unpack('<Q', version) == (1,)
code, self.element_size = struct.unpack('<QQ', f.read(16))
self.dtype = dtypes[code]
self._len, self.s = struct.unpack('<QQ', f.read(16))
self.dim_offsets = read_longs(f, self._len + 1)
self.data_offsets = read_longs(f, self._len + 1)
self.sizes = read_longs(f, self.s)
def read_data(self, path):
self.data_file = open(data_file_path(path), 'rb', buffering=0)
def check_index(self, i):
if i < 0 or i >= self._len:
raise IndexError('index out of range')
def __del__(self):
if self.data_file:
self.data_file.close()
@lru_cache(maxsize=8)
def __getitem__(self, i):
if not self.data_file:
self.read_data(self.path)
self.check_index(i)
tensor_size = self.sizes[self.dim_offsets[i]:self.dim_offsets[i + 1]]
a = np.empty(tensor_size, dtype=self.dtype)
self.data_file.seek(self.data_offsets[i] * self.element_size)
self.data_file.readinto(a)
item = torch.from_numpy(a).long()
if self.fix_lua_indexing:
item -= 1 # subtract 1 for 0-based indexing
return item
def __len__(self):
return self._len
def num_tokens(self, index):
return self.sizes[index]
def size(self, index):
return self.sizes[index]
@staticmethod
def exists(path):
return (
os.path.exists(index_file_path(path)) and os.path.exists(data_file_path(path))
)
@property
def supports_prefetch(self):
return False # avoid prefetching to save memory
class IndexedCachedDataset(IndexedDataset):
def __init__(self, path, fix_lua_indexing=False):
super().__init__(path, fix_lua_indexing=fix_lua_indexing)
self.cache = None
self.cache_index = {}
@property
def supports_prefetch(self):
return True
def prefetch(self, indices):
if all(i in self.cache_index for i in indices):
return
if not self.data_file:
self.read_data(self.path)
indices = sorted(set(indices))
total_size = 0
for i in indices:
total_size += self.data_offsets[i + 1] - self.data_offsets[i]
self.cache = np.empty(total_size, dtype=self.dtype)
ptx = 0
self.cache_index.clear()
for i in indices:
self.cache_index[i] = ptx
size = self.data_offsets[i + 1] - self.data_offsets[i]
a = self.cache[ptx: ptx + size]
self.data_file.seek(self.data_offsets[i] * self.element_size)
self.data_file.readinto(a)
ptx += size
if self.data_file:
# close and delete data file after prefetch so we can pickle
self.data_file.close()
self.data_file = None
@lru_cache(maxsize=8)
def __getitem__(self, i):
self.check_index(i)
tensor_size = self.sizes[self.dim_offsets[i]:self.dim_offsets[i + 1]]
a = np.empty(tensor_size, dtype=self.dtype)
ptx = self.cache_index[i]
np.copyto(a, np.reshape(self.cache[ptx: ptx + a.size], tensor_size))
item = torch.from_numpy(a).long()
if self.fix_lua_indexing:
item -= 1 # subtract 1 for 0-based indexing
return item
class IndexedRawTextDataset(FairseqDataset):
"""Takes a text file as input and binarizes it in memory at instantiation.
Original lines are also kept in memory"""
def __init__(self, path, dictionary, append_eos=True, reverse_order=False):
self.tokens_list = []
self.lines = []
self.sizes = []
self.append_eos = append_eos
self.reverse_order = reverse_order
self.read_data(path, dictionary)
self.size = len(self.tokens_list)
def read_data(self, path, dictionary):
with open(path, 'r', encoding='utf-8') as f:
for line in f:
self.lines.append(line.strip('\n'))
tokens = dictionary.encode_line(
line, add_if_not_exist=False,
append_eos=self.append_eos, reverse_order=self.reverse_order,
).long()
self.tokens_list.append(tokens)
self.sizes.append(len(tokens))
self.sizes = np.array(self.sizes)
def check_index(self, i):
if i < 0 or i >= self.size:
raise IndexError('index out of range')
@lru_cache(maxsize=8)
def __getitem__(self, i):
self.check_index(i)
return self.tokens_list[i]
def get_original_text(self, i):
self.check_index(i)
return self.lines[i]
def __del__(self):
pass
def __len__(self):
return self.size
def num_tokens(self, index):
return self.sizes[index]
def size(self, index):
return self.sizes[index]
@staticmethod
def exists(path):
return os.path.exists(path)
class IndexedDatasetBuilder(object):
element_sizes = {
np.uint8: 1,
np.int8: 1,
np.int16: 2,
np.int32: 4,
np.int64: 8,
np.float: 4,
np.double: 8
}
def __init__(self, out_file, dtype=np.int32):
self.out_file = open(out_file, 'wb')
self.dtype = dtype
self.data_offsets = [0]
self.dim_offsets = [0]
self.sizes = []
self.element_size = self.element_sizes[self.dtype]
def add_item(self, tensor):
# +1 for Lua compatibility
bytes = self.out_file.write(np.array(tensor.numpy() + 1, dtype=self.dtype))
self.data_offsets.append(self.data_offsets[-1] + bytes / self.element_size)
for s in tensor.size():
self.sizes.append(s)
self.dim_offsets.append(self.dim_offsets[-1] + len(tensor.size()))
def merge_file_(self, another_file):
index = IndexedDataset(another_file)
assert index.dtype == self.dtype
begin = self.data_offsets[-1]
for offset in index.data_offsets[1:]:
self.data_offsets.append(begin + offset)
self.sizes.extend(index.sizes)
begin = self.dim_offsets[-1]
for dim_offset in index.dim_offsets[1:]:
self.dim_offsets.append(begin + dim_offset)
with open(data_file_path(another_file), 'rb') as f:
while True:
data = f.read(1024)
if data:
self.out_file.write(data)
else:
break
def finalize(self, index_file):
self.out_file.close()
index = open(index_file, 'wb')
index.write(b'TNTIDX\x00\x00')
index.write(struct.pack('<Q', 1))
index.write(struct.pack('<QQ', code(self.dtype), self.element_size))
index.write(struct.pack('<QQ', len(self.data_offsets) - 1, len(self.sizes)))
write_longs(index, self.dim_offsets)
write_longs(index, self.data_offsets)
write_longs(index, self.sizes)
index.close()
def _warmup_mmap_file(path):
with open(path, 'rb') as stream:
while stream.read(100 * 1024 * 1024):
pass
class MMapIndexedDataset(torch.utils.data.Dataset):
class Index(object):
_HDR_MAGIC = b'MMIDIDX\x00\x00'
@classmethod
def writer(cls, path, dtype):
class _Writer(object):
def __enter__(self):
self._file = open(path, 'wb')
self._file.write(cls._HDR_MAGIC)
self._file.write(struct.pack('<Q', 1))
self._file.write(struct.pack('<B', code(dtype)))
return self
@staticmethod
def _get_pointers(sizes):
dtype_size = dtype().itemsize
address = 0
pointers = []
for size in sizes:
pointers.append(address)
address += size * dtype_size
return pointers
def write(self, sizes):
pointers = self._get_pointers(sizes)
self._file.write(struct.pack('<Q', len(sizes)))
sizes = np.array(sizes, dtype=np.int32)
self._file.write(sizes.tobytes(order='C'))
del sizes
pointers = np.array(pointers, dtype=np.int64)
self._file.write(pointers.tobytes(order='C'))
del pointers
def __exit__(self, exc_type, exc_val, exc_tb):
self._file.close()
return _Writer()
def __init__(self, path):
with open(path, 'rb') as stream:
magic_test = stream.read(9)
assert self._HDR_MAGIC == magic_test, (
'Index file doesn\'t match expected format. '
'Make sure that --dataset-impl is configured properly.'
)
version = struct.unpack('<Q', stream.read(8))
assert (1,) == version
dtype_code, = struct.unpack('<B', stream.read(1))
self._dtype = dtypes[dtype_code]
self._dtype_size = self._dtype().itemsize
self._len = struct.unpack('<Q', stream.read(8))[0]
offset = stream.tell()
_warmup_mmap_file(path)
self._bin_buffer_mmap = np.memmap(path, mode='r', order='C')
self._bin_buffer = memoryview(self._bin_buffer_mmap)
self._sizes = np.frombuffer(self._bin_buffer, dtype=np.int32, count=self._len, offset=offset)
self._pointers = np.frombuffer(self._bin_buffer, dtype=np.int64, count=self._len,
offset=offset + self._sizes.nbytes)
def __del__(self):
self._bin_buffer_mmap._mmap.close()
del self._bin_buffer_mmap
@property
def dtype(self):
return self._dtype
@property
def sizes(self):
return self._sizes
@lru_cache(maxsize=8)
def __getitem__(self, i):
return self._pointers[i], self._sizes[i]
def __len__(self):
return self._len
def __init__(self, path):
super().__init__()
self._path = None
self._index = None
self._bin_buffer = None
self._do_init(path)
def __getstate__(self):
return self._path
def __setstate__(self, state):
self._do_init(state)
def _do_init(self, path):
self._path = path
self._index = self.Index(index_file_path(self._path))
_warmup_mmap_file(data_file_path(self._path))
self._bin_buffer_mmap = np.memmap(data_file_path(self._path), mode='r', order='C')
self._bin_buffer = memoryview(self._bin_buffer_mmap)
def __del__(self):
self._bin_buffer_mmap._mmap.close()
del self._bin_buffer_mmap
del self._index
def __len__(self):
return len(self._index)
@lru_cache(maxsize=8)
def __getitem__(self, i):
ptr, size = self._index[i]
np_array = np.frombuffer(self._bin_buffer, dtype=self._index.dtype, count=size, offset=ptr)
if self._index.dtype != np.int64:
np_array = np_array.astype(np.int64)
return torch.from_numpy(np_array)
@property
def sizes(self):
return self._index.sizes
@property
def supports_prefetch(self):
return False
@staticmethod
def exists(path):
return (
os.path.exists(index_file_path(path)) and os.path.exists(data_file_path(path))
)
class MMapIndexedDatasetBuilder(object):
def __init__(self, out_file, dtype=np.int64):
self._data_file = open(out_file, 'wb')
self._dtype = dtype
self._sizes = []
def add_item(self, tensor):
np_array = np.array(tensor.numpy(), dtype=self._dtype)
self._data_file.write(np_array.tobytes(order='C'))
self._sizes.append(np_array.size)
def merge_file_(self, another_file):
# Concatenate index
index = MMapIndexedDataset.Index(index_file_path(another_file))
assert index.dtype == self._dtype
for size in index.sizes:
self._sizes.append(size)
# Concatenate data
with open(data_file_path(another_file), 'rb') as f:
shutil.copyfileobj(f, self._data_file)
def finalize(self, index_file):
self._data_file.close()
with MMapIndexedDataset.Index.writer(index_file, self._dtype) as index:
index.write(self._sizes)
| 30.631679 | 105 | 0.592611 |
from functools import lru_cache
import os
import shutil
import struct
import numpy as np
import torch
from . import FairseqDataset
def __best_fitting_dtype(vocab_size=None):
if vocab_size is not None and vocab_size < 65500:
return np.uint16
else:
return np.int32
def get_available_dataset_impl():
return ['raw', 'lazy', 'cached', 'mmap']
def infer_dataset_impl(path):
if IndexedRawTextDataset.exists(path):
return 'raw'
elif IndexedDataset.exists(path):
with open(index_file_path(path), 'rb') as f:
magic = f.read(8)
if magic == IndexedDataset._HDR_MAGIC:
return 'cached'
elif magic == MMapIndexedDataset.Index._HDR_MAGIC[:8]:
return 'mmap'
else:
return None
else:
return None
def make_builder(out_file, impl, vocab_size=None):
if impl == 'mmap':
return MMapIndexedDatasetBuilder(out_file, dtype=__best_fitting_dtype(vocab_size))
else:
return IndexedDatasetBuilder(out_file)
def make_dataset(path, impl, fix_lua_indexing=False, dictionary=None):
if impl == 'raw' and IndexedRawTextDataset.exists(path):
assert dictionary is not None
return IndexedRawTextDataset(path, dictionary)
elif impl == 'lazy' and IndexedDataset.exists(path):
return IndexedDataset(path, fix_lua_indexing=fix_lua_indexing)
elif impl == 'cached' and IndexedDataset.exists(path):
return IndexedCachedDataset(path, fix_lua_indexing=fix_lua_indexing)
elif impl == 'mmap' and MMapIndexedDataset.exists(path):
return MMapIndexedDataset(path)
return None
def dataset_exists(path, impl):
if impl == 'raw':
return IndexedRawTextDataset.exists(path)
elif impl == 'mmap':
return MMapIndexedDataset.exists(path)
else:
return IndexedDataset.exists(path)
def read_longs(f, n):
a = np.empty(n, dtype=np.int64)
f.readinto(a)
return a
def write_longs(f, a):
f.write(np.array(a, dtype=np.int64))
dtypes = {
1: np.uint8,
2: np.int8,
3: np.int16,
4: np.int32,
5: np.int64,
6: np.float,
7: np.double,
8: np.uint16
}
def code(dtype):
for k in dtypes.keys():
if dtypes[k] == dtype:
return k
raise ValueError(dtype)
def index_file_path(prefix_path):
return prefix_path + '.idx'
def data_file_path(prefix_path):
return prefix_path + '.bin'
class IndexedDataset(FairseqDataset):
_HDR_MAGIC = b'TNTIDX\x00\x00'
def __init__(self, path, fix_lua_indexing=False):
super().__init__()
self.path = path
self.fix_lua_indexing = fix_lua_indexing
self.data_file = None
self.read_index(path)
def read_index(self, path):
with open(index_file_path(path), 'rb') as f:
magic = f.read(8)
assert magic == self._HDR_MAGIC, (
'Index file doesn\'t match expected format. '
'Make sure that --dataset-impl is configured properly.'
)
version = f.read(8)
assert struct.unpack('<Q', version) == (1,)
code, self.element_size = struct.unpack('<QQ', f.read(16))
self.dtype = dtypes[code]
self._len, self.s = struct.unpack('<QQ', f.read(16))
self.dim_offsets = read_longs(f, self._len + 1)
self.data_offsets = read_longs(f, self._len + 1)
self.sizes = read_longs(f, self.s)
def read_data(self, path):
self.data_file = open(data_file_path(path), 'rb', buffering=0)
def check_index(self, i):
if i < 0 or i >= self._len:
raise IndexError('index out of range')
def __del__(self):
if self.data_file:
self.data_file.close()
@lru_cache(maxsize=8)
def __getitem__(self, i):
if not self.data_file:
self.read_data(self.path)
self.check_index(i)
tensor_size = self.sizes[self.dim_offsets[i]:self.dim_offsets[i + 1]]
a = np.empty(tensor_size, dtype=self.dtype)
self.data_file.seek(self.data_offsets[i] * self.element_size)
self.data_file.readinto(a)
item = torch.from_numpy(a).long()
if self.fix_lua_indexing:
item -= 1 # subtract 1 for 0-based indexing
return item
def __len__(self):
return self._len
def num_tokens(self, index):
return self.sizes[index]
def size(self, index):
return self.sizes[index]
@staticmethod
def exists(path):
return (
os.path.exists(index_file_path(path)) and os.path.exists(data_file_path(path))
)
@property
def supports_prefetch(self):
return False # avoid prefetching to save memory
class IndexedCachedDataset(IndexedDataset):
def __init__(self, path, fix_lua_indexing=False):
super().__init__(path, fix_lua_indexing=fix_lua_indexing)
self.cache = None
self.cache_index = {}
@property
def supports_prefetch(self):
return True
def prefetch(self, indices):
if all(i in self.cache_index for i in indices):
return
if not self.data_file:
self.read_data(self.path)
indices = sorted(set(indices))
total_size = 0
for i in indices:
total_size += self.data_offsets[i + 1] - self.data_offsets[i]
self.cache = np.empty(total_size, dtype=self.dtype)
ptx = 0
self.cache_index.clear()
for i in indices:
self.cache_index[i] = ptx
size = self.data_offsets[i + 1] - self.data_offsets[i]
a = self.cache[ptx: ptx + size]
self.data_file.seek(self.data_offsets[i] * self.element_size)
self.data_file.readinto(a)
ptx += size
if self.data_file:
# close and delete data file after prefetch so we can pickle
self.data_file.close()
self.data_file = None
@lru_cache(maxsize=8)
def __getitem__(self, i):
self.check_index(i)
tensor_size = self.sizes[self.dim_offsets[i]:self.dim_offsets[i + 1]]
a = np.empty(tensor_size, dtype=self.dtype)
ptx = self.cache_index[i]
np.copyto(a, np.reshape(self.cache[ptx: ptx + a.size], tensor_size))
item = torch.from_numpy(a).long()
if self.fix_lua_indexing:
item -= 1 # subtract 1 for 0-based indexing
return item
class IndexedRawTextDataset(FairseqDataset):
def __init__(self, path, dictionary, append_eos=True, reverse_order=False):
self.tokens_list = []
self.lines = []
self.sizes = []
self.append_eos = append_eos
self.reverse_order = reverse_order
self.read_data(path, dictionary)
self.size = len(self.tokens_list)
def read_data(self, path, dictionary):
with open(path, 'r', encoding='utf-8') as f:
for line in f:
self.lines.append(line.strip('\n'))
tokens = dictionary.encode_line(
line, add_if_not_exist=False,
append_eos=self.append_eos, reverse_order=self.reverse_order,
).long()
self.tokens_list.append(tokens)
self.sizes.append(len(tokens))
self.sizes = np.array(self.sizes)
def check_index(self, i):
if i < 0 or i >= self.size:
raise IndexError('index out of range')
@lru_cache(maxsize=8)
def __getitem__(self, i):
self.check_index(i)
return self.tokens_list[i]
def get_original_text(self, i):
self.check_index(i)
return self.lines[i]
def __del__(self):
pass
def __len__(self):
return self.size
def num_tokens(self, index):
return self.sizes[index]
def size(self, index):
return self.sizes[index]
@staticmethod
def exists(path):
return os.path.exists(path)
class IndexedDatasetBuilder(object):
element_sizes = {
np.uint8: 1,
np.int8: 1,
np.int16: 2,
np.int32: 4,
np.int64: 8,
np.float: 4,
np.double: 8
}
def __init__(self, out_file, dtype=np.int32):
self.out_file = open(out_file, 'wb')
self.dtype = dtype
self.data_offsets = [0]
self.dim_offsets = [0]
self.sizes = []
self.element_size = self.element_sizes[self.dtype]
def add_item(self, tensor):
# +1 for Lua compatibility
bytes = self.out_file.write(np.array(tensor.numpy() + 1, dtype=self.dtype))
self.data_offsets.append(self.data_offsets[-1] + bytes / self.element_size)
for s in tensor.size():
self.sizes.append(s)
self.dim_offsets.append(self.dim_offsets[-1] + len(tensor.size()))
def merge_file_(self, another_file):
index = IndexedDataset(another_file)
assert index.dtype == self.dtype
begin = self.data_offsets[-1]
for offset in index.data_offsets[1:]:
self.data_offsets.append(begin + offset)
self.sizes.extend(index.sizes)
begin = self.dim_offsets[-1]
for dim_offset in index.dim_offsets[1:]:
self.dim_offsets.append(begin + dim_offset)
with open(data_file_path(another_file), 'rb') as f:
while True:
data = f.read(1024)
if data:
self.out_file.write(data)
else:
break
def finalize(self, index_file):
self.out_file.close()
index = open(index_file, 'wb')
index.write(b'TNTIDX\x00\x00')
index.write(struct.pack('<Q', 1))
index.write(struct.pack('<QQ', code(self.dtype), self.element_size))
index.write(struct.pack('<QQ', len(self.data_offsets) - 1, len(self.sizes)))
write_longs(index, self.dim_offsets)
write_longs(index, self.data_offsets)
write_longs(index, self.sizes)
index.close()
def _warmup_mmap_file(path):
with open(path, 'rb') as stream:
while stream.read(100 * 1024 * 1024):
pass
class MMapIndexedDataset(torch.utils.data.Dataset):
class Index(object):
_HDR_MAGIC = b'MMIDIDX\x00\x00'
@classmethod
def writer(cls, path, dtype):
class _Writer(object):
def __enter__(self):
self._file = open(path, 'wb')
self._file.write(cls._HDR_MAGIC)
self._file.write(struct.pack('<Q', 1))
self._file.write(struct.pack('<B', code(dtype)))
return self
@staticmethod
def _get_pointers(sizes):
dtype_size = dtype().itemsize
address = 0
pointers = []
for size in sizes:
pointers.append(address)
address += size * dtype_size
return pointers
def write(self, sizes):
pointers = self._get_pointers(sizes)
self._file.write(struct.pack('<Q', len(sizes)))
sizes = np.array(sizes, dtype=np.int32)
self._file.write(sizes.tobytes(order='C'))
del sizes
pointers = np.array(pointers, dtype=np.int64)
self._file.write(pointers.tobytes(order='C'))
del pointers
def __exit__(self, exc_type, exc_val, exc_tb):
self._file.close()
return _Writer()
def __init__(self, path):
with open(path, 'rb') as stream:
magic_test = stream.read(9)
assert self._HDR_MAGIC == magic_test, (
'Index file doesn\'t match expected format. '
'Make sure that --dataset-impl is configured properly.'
)
version = struct.unpack('<Q', stream.read(8))
assert (1,) == version
dtype_code, = struct.unpack('<B', stream.read(1))
self._dtype = dtypes[dtype_code]
self._dtype_size = self._dtype().itemsize
self._len = struct.unpack('<Q', stream.read(8))[0]
offset = stream.tell()
_warmup_mmap_file(path)
self._bin_buffer_mmap = np.memmap(path, mode='r', order='C')
self._bin_buffer = memoryview(self._bin_buffer_mmap)
self._sizes = np.frombuffer(self._bin_buffer, dtype=np.int32, count=self._len, offset=offset)
self._pointers = np.frombuffer(self._bin_buffer, dtype=np.int64, count=self._len,
offset=offset + self._sizes.nbytes)
def __del__(self):
self._bin_buffer_mmap._mmap.close()
del self._bin_buffer_mmap
@property
def dtype(self):
return self._dtype
@property
def sizes(self):
return self._sizes
@lru_cache(maxsize=8)
def __getitem__(self, i):
return self._pointers[i], self._sizes[i]
def __len__(self):
return self._len
def __init__(self, path):
super().__init__()
self._path = None
self._index = None
self._bin_buffer = None
self._do_init(path)
def __getstate__(self):
return self._path
def __setstate__(self, state):
self._do_init(state)
def _do_init(self, path):
self._path = path
self._index = self.Index(index_file_path(self._path))
_warmup_mmap_file(data_file_path(self._path))
self._bin_buffer_mmap = np.memmap(data_file_path(self._path), mode='r', order='C')
self._bin_buffer = memoryview(self._bin_buffer_mmap)
def __del__(self):
self._bin_buffer_mmap._mmap.close()
del self._bin_buffer_mmap
del self._index
def __len__(self):
return len(self._index)
@lru_cache(maxsize=8)
def __getitem__(self, i):
ptr, size = self._index[i]
np_array = np.frombuffer(self._bin_buffer, dtype=self._index.dtype, count=size, offset=ptr)
if self._index.dtype != np.int64:
np_array = np_array.astype(np.int64)
return torch.from_numpy(np_array)
@property
def sizes(self):
return self._index.sizes
@property
def supports_prefetch(self):
return False
@staticmethod
def exists(path):
return (
os.path.exists(index_file_path(path)) and os.path.exists(data_file_path(path))
)
class MMapIndexedDatasetBuilder(object):
def __init__(self, out_file, dtype=np.int64):
self._data_file = open(out_file, 'wb')
self._dtype = dtype
self._sizes = []
def add_item(self, tensor):
np_array = np.array(tensor.numpy(), dtype=self._dtype)
self._data_file.write(np_array.tobytes(order='C'))
self._sizes.append(np_array.size)
def merge_file_(self, another_file):
index = MMapIndexedDataset.Index(index_file_path(another_file))
assert index.dtype == self._dtype
for size in index.sizes:
self._sizes.append(size)
with open(data_file_path(another_file), 'rb') as f:
shutil.copyfileobj(f, self._data_file)
def finalize(self, index_file):
self._data_file.close()
with MMapIndexedDataset.Index.writer(index_file, self._dtype) as index:
index.write(self._sizes)
| true | true |
f731833e514a80da7c83975fe621231e7f8586e1 | 994 | py | Python | assets/code/practice-example-four.py | david-story/david-story.github.io | 4a02854207b86946befd933d6f23773f6239d7da | [
"MIT"
] | null | null | null | assets/code/practice-example-four.py | david-story/david-story.github.io | 4a02854207b86946befd933d6f23773f6239d7da | [
"MIT"
] | null | null | null | assets/code/practice-example-four.py | david-story/david-story.github.io | 4a02854207b86946befd933d6f23773f6239d7da | [
"MIT"
] | null | null | null | """
Practice File 4
Created by David Story
Description: Some nice examples of things you can do with functions from the following libraries:
- sys
- os
- time
- datetime
"""
import sys
import os
import time
import datetime
# get the time
print(time.time())
# get the current date
print(datetime.date.today())
# prints the current working directory
print(os.getcwd())
# saves your current working directory
your_working_directory = os.getcwd()
# changes directory to the C:\\ drive
os.chdir("C:\\")
# prints the current working directory
print(os.getcwd())
# change to the original current working directory
os.chdir(your_working_directory)
# prints the directory
print(os.getcwd())
# gets number of cores on your cpu
computer_cpu_count = os.cpu_count()
print("This computer has this many CPU cores:", computer_cpu_count)
seconds = 2
time_1 = time.time()
time.sleep(seconds)
time_2 = time.time()
print("The sleep operation took this many seconds:", (time_2-time_1)) | 19.490196 | 97 | 0.740443 |
import sys
import os
import time
import datetime
print(time.time())
print(datetime.date.today())
print(os.getcwd())
your_working_directory = os.getcwd()
os.chdir("C:\\")
print(os.getcwd())
os.chdir(your_working_directory)
print(os.getcwd())
computer_cpu_count = os.cpu_count()
print("This computer has this many CPU cores:", computer_cpu_count)
seconds = 2
time_1 = time.time()
time.sleep(seconds)
time_2 = time.time()
print("The sleep operation took this many seconds:", (time_2-time_1)) | true | true |
f731835a4a4d1abb694ef9dec2e9777f59218f60 | 390 | py | Python | NewRelicApiParser/REST/AlertsViolations/__init__.py | Bharat23/newrelic-api-parser | c55d508387fde33af9bdc93f16aae3cb2a2e5f13 | [
"MIT"
] | null | null | null | NewRelicApiParser/REST/AlertsViolations/__init__.py | Bharat23/newrelic-api-parser | c55d508387fde33af9bdc93f16aae3cb2a2e5f13 | [
"MIT"
] | 1 | 2021-07-30T17:32:37.000Z | 2021-07-30T17:32:37.000Z | NewRelicApiParser/REST/AlertsViolations/__init__.py | Bharat23/newrelic-api-parser | c55d508387fde33af9bdc93f16aae3cb2a2e5f13 | [
"MIT"
] | null | null | null | from NewRelicApiParser.Base import BaseNewRelic
class AlertsViolations(BaseNewRelic):
def __init__(self, API_KEY):
super().__init__(API_KEY)
def get_list(self, options: dict = {}) -> dict:
"""
fetch the alert violations for new relic
"""
url = self.BASE_URI + '/alerts_violations.json'
return super().get_data(url, options=options) | 30 | 55 | 0.651282 | from NewRelicApiParser.Base import BaseNewRelic
class AlertsViolations(BaseNewRelic):
def __init__(self, API_KEY):
super().__init__(API_KEY)
def get_list(self, options: dict = {}) -> dict:
url = self.BASE_URI + '/alerts_violations.json'
return super().get_data(url, options=options) | true | true |
f731839f44074592ff4c8fc120911ce2b706f06b | 2,652 | py | Python | utils/audio.py | deciding/Voicefilter | dda34da9d1cfca48102b2d1b4274bfd76e5a2e1c | [
"Apache-2.0",
"MIT"
] | 3 | 2020-03-25T06:27:20.000Z | 2021-02-28T12:59:14.000Z | utils/audio.py | deciding/Voicefilter | dda34da9d1cfca48102b2d1b4274bfd76e5a2e1c | [
"Apache-2.0",
"MIT"
] | null | null | null | utils/audio.py | deciding/Voicefilter | dda34da9d1cfca48102b2d1b4274bfd76e5a2e1c | [
"Apache-2.0",
"MIT"
] | null | null | null | # adapted from Keith Ito's tacotron implementation
# https://github.com/keithito/tacotron/blob/master/util/audio.py
import librosa
import numpy as np
class Audio():
def __init__(self, hp):
self.hp = hp
self.mel_basis = librosa.filters.mel(sr=hp.audio.sample_rate,
n_fft=hp.embedder.n_fft,
n_mels=hp.embedder.num_mels)
self.audio_mel_basis = librosa.filters.mel(sr=hp.audio.sample_rate,
n_fft=hp.audio.n_fft,
n_mels=80)
def get_mel(self, y):
y = librosa.core.stft(y=y, n_fft=self.hp.embedder.n_fft,
hop_length=self.hp.audio.hop_length,
win_length=self.hp.audio.win_length,
window='hann')
magnitudes = np.abs(y) ** 2
mel = np.log10(np.dot(self.mel_basis, magnitudes) + 1e-6)
return mel
def wav2spec(self, y):
D = self.stft(y)
S = self.amp_to_db(np.abs(D)) - self.hp.audio.ref_level_db
S, D = self.normalize(S), np.angle(D)
S, D = S.T, D.T # to make [time, freq]
return S, D
def wav2mel(self, y):
D = self.stft(y)
S = np.dot(self.audio_mel_basis, np.abs(D))
S = self.amp_to_db(S) - self.hp.audio.ref_level_db
S = self.normalize(S)
return S.T
def spec2wav(self, spectrogram, phase):
spectrogram, phase = spectrogram.T, phase.T
# used during inference only
# spectrogram: enhanced output
# phase: use noisy input's phase, so no GLA is required
S = self.db_to_amp(self.denormalize(spectrogram) + self.hp.audio.ref_level_db)
return self.istft(S, phase)
def stft(self, y):
return librosa.stft(y=y, n_fft=self.hp.audio.n_fft,
hop_length=self.hp.audio.hop_length,
win_length=self.hp.audio.win_length)
def istft(self, mag, phase):
stft_matrix = mag * np.exp(1j*phase)
return librosa.istft(stft_matrix,
hop_length=self.hp.audio.hop_length,
win_length=self.hp.audio.win_length)
def amp_to_db(self, x):
return 20.0 * np.log10(np.maximum(1e-5, x))
def db_to_amp(self, x):
return np.power(10.0, x * 0.05)
def normalize(self, S):
return np.clip(S / -self.hp.audio.min_level_db, -1.0, 0.0) + 1.0
def denormalize(self, S):
return (np.clip(S, 0.0, 1.0) - 1.0) * -self.hp.audio.min_level_db
| 36.833333 | 86 | 0.549397 |
# https://github.com/keithito/tacotron/blob/master/util/audio.py
import librosa
import numpy as np
class Audio():
def __init__(self, hp):
self.hp = hp
self.mel_basis = librosa.filters.mel(sr=hp.audio.sample_rate,
n_fft=hp.embedder.n_fft,
n_mels=hp.embedder.num_mels)
self.audio_mel_basis = librosa.filters.mel(sr=hp.audio.sample_rate,
n_fft=hp.audio.n_fft,
n_mels=80)
def get_mel(self, y):
y = librosa.core.stft(y=y, n_fft=self.hp.embedder.n_fft,
hop_length=self.hp.audio.hop_length,
win_length=self.hp.audio.win_length,
window='hann')
magnitudes = np.abs(y) ** 2
mel = np.log10(np.dot(self.mel_basis, magnitudes) + 1e-6)
return mel
def wav2spec(self, y):
D = self.stft(y)
S = self.amp_to_db(np.abs(D)) - self.hp.audio.ref_level_db
S, D = self.normalize(S), np.angle(D)
S, D = S.T, D.T # to make [time, freq]
return S, D
def wav2mel(self, y):
D = self.stft(y)
S = np.dot(self.audio_mel_basis, np.abs(D))
S = self.amp_to_db(S) - self.hp.audio.ref_level_db
S = self.normalize(S)
return S.T
def spec2wav(self, spectrogram, phase):
spectrogram, phase = spectrogram.T, phase.T
# used during inference only
# spectrogram: enhanced output
# phase: use noisy input's phase, so no GLA is required
S = self.db_to_amp(self.denormalize(spectrogram) + self.hp.audio.ref_level_db)
return self.istft(S, phase)
def stft(self, y):
return librosa.stft(y=y, n_fft=self.hp.audio.n_fft,
hop_length=self.hp.audio.hop_length,
win_length=self.hp.audio.win_length)
def istft(self, mag, phase):
stft_matrix = mag * np.exp(1j*phase)
return librosa.istft(stft_matrix,
hop_length=self.hp.audio.hop_length,
win_length=self.hp.audio.win_length)
def amp_to_db(self, x):
return 20.0 * np.log10(np.maximum(1e-5, x))
def db_to_amp(self, x):
return np.power(10.0, x * 0.05)
def normalize(self, S):
return np.clip(S / -self.hp.audio.min_level_db, -1.0, 0.0) + 1.0
def denormalize(self, S):
return (np.clip(S, 0.0, 1.0) - 1.0) * -self.hp.audio.min_level_db
| true | true |
f73183abb453edfa3f42627d23629fd71241bbd0 | 1,790 | py | Python | keymint_cli/verb/__init__.py | keymint/keymint_cli | 977995ade32cf2b3a6394bda1c05f80a9fcc3369 | [
"Apache-2.0"
] | null | null | null | keymint_cli/verb/__init__.py | keymint/keymint_cli | 977995ade32cf2b3a6394bda1c05f80a9fcc3369 | [
"Apache-2.0"
] | null | null | null | keymint_cli/verb/__init__.py | keymint/keymint_cli | 977995ade32cf2b3a6394bda1c05f80a9fcc3369 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016-2017 Dirk Thomas
# Copyright 2017 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from keymint_cli.plugin_system import instantiate_extensions
from keymint_cli.plugin_system import PLUGIN_SYSTEM_VERSION
from keymint_cli.plugin_system import satisfies_version
class VerbExtension:
    """
    The interface for verb extensions.
    The following properties must be defined:
    * `NAME` (will be set to the entry point name)
    The following methods must be defined:
    * `main`
    """

    # Filled in by get_verb_extensions() with the entry-point name.
    NAME = None
    # Version of this extension interface; plugins are matched against it.
    EXTENSION_POINT_VERSION = '0.1'

    def __init__(self):
        super(VerbExtension, self).__init__()
        # Fail fast if the host plugin system is incompatible with ^0.1.
        satisfies_version(PLUGIN_SYSTEM_VERSION, '^0.1')
def get_verb_extensions(name):
    """Instantiate the verb extensions registered under group *name*.

    Each extension gets its ``NAME`` attribute set to its entry-point name
    so callers can identify it.  Returns the dict of extension name to
    extension instance.
    """
    extensions = instantiate_extensions(name)
    # Use a distinct loop variable: the original code shadowed the `name`
    # parameter, which is confusing and breaks any later use of it.
    for extension_name, extension in extensions.items():
        extension.NAME = extension_name
    return extensions
def add_task_arguments(parser, task_name):
    """Let each '<task_name>' verb extension register its CLI arguments.

    A dedicated argument group is created per plugin; the plugin's optional
    ``add_<task_name>_arguments(group)`` hook is invoked if it exists.
    """
    for plugin_name, plugin in get_verb_extensions(task_name).items():
        group = parser.add_argument_group(
            title="Arguments for '{plugin_name}' packages"
            .format_map(locals()))
        add_args = getattr(plugin, 'add_%s_arguments' % task_name, None)
        if add_args:
            add_args(group)
| 31.964286 | 74 | 0.721229 |
from keymint_cli.plugin_system import instantiate_extensions
from keymint_cli.plugin_system import PLUGIN_SYSTEM_VERSION
from keymint_cli.plugin_system import satisfies_version
class VerbExtension:
    """The interface for verb extensions.

    Subclasses must define a ``main`` method; ``NAME`` is filled in with
    the entry-point name by ``get_verb_extensions``.
    """

    # Set by get_verb_extensions() to the entry-point name.
    NAME = None
    # Version of this extension interface.
    EXTENSION_POINT_VERSION = '0.1'

    def __init__(self):
        super(VerbExtension, self).__init__()
        # Fail fast if the host plugin system is incompatible with ^0.1.
        satisfies_version(PLUGIN_SYSTEM_VERSION, '^0.1')
def get_verb_extensions(name):
    """Instantiate the verb extensions registered under group *name*.

    Each extension gets its ``NAME`` attribute set to its entry-point name.
    Returns the dict of extension name to extension instance.
    """
    extensions = instantiate_extensions(name)
    # Use a distinct loop variable: the original code shadowed the `name`
    # parameter, which is confusing and breaks any later use of it.
    for extension_name, extension in extensions.items():
        extension.NAME = extension_name
    return extensions
def add_task_arguments(parser, task_name):
    """Let each '<task_name>' verb extension register its CLI arguments.

    Creates a dedicated argument group per plugin and calls the plugin's
    optional ``add_<task_name>_arguments(group)`` hook if present.
    """
    plugins = get_verb_extensions(task_name)
    for plugin_name, plugin in plugins.items():
        # One argument group per plugin, titled with the plugin name.
        group = parser.add_argument_group(
            title="Arguments for '{plugin_name}' packages"
            .format_map(locals()))
        # The hook is optional; silently skip plugins that do not define it.
        func = getattr(plugin, 'add_%s_arguments' % task_name, None)
        if func:
            func(group)
| true | true |
f7318473c64262e9daec3211788af8404125cccc | 722 | py | Python | webview/monitor/migrations/0002_auto_20191022_1952.py | dw0rdptr/2019_IoT_GoToDouble | 7f8a3005710b57199e918be1a0d8b9047918bce2 | [
"MIT"
] | 4 | 2019-10-23T04:43:09.000Z | 2019-10-24T11:25:37.000Z | webview/monitor/migrations/0002_auto_20191022_1952.py | dw0rdptr/2019_IoT_GoToDouble | 7f8a3005710b57199e918be1a0d8b9047918bce2 | [
"MIT"
] | 10 | 2019-10-22T12:06:42.000Z | 2019-10-24T17:39:31.000Z | webview/monitor/migrations/0002_auto_20191022_1952.py | epikjjh/2019_IoT_GoToDouble | 1d80eb87b7d59da90a7d0a9225209bfb4704a045 | [
"MIT"
] | 5 | 2019-10-22T08:04:26.000Z | 2019-10-24T17:31:40.000Z | # Generated by Django 2.2.6 on 2019-10-22 10:52
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds the Sposition model and renames
    Position to Fposition."""

    # Must be applied after the app's initial migration.
    dependencies = [
        ('monitor', '0001_initial'),
    ]

    operations = [
        # New table storing a polar position sample with a creation timestamp.
        migrations.CreateModel(
            name='Sposition',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('angle', models.FloatField()),
                ('distance', models.FloatField()),
                # auto_now_add: set once at row creation, not on updates.
                ('time', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        # Existing Position table becomes Fposition.
        migrations.RenameModel(
            old_name='Position',
            new_name='Fposition',
        ),
    ]
| 26.740741 | 114 | 0.548476 |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds the Sposition model and renames
    Position to Fposition."""

    # Must be applied after the app's initial migration.
    dependencies = [
        ('monitor', '0001_initial'),
    ]

    operations = [
        # New table storing a polar position sample with a creation timestamp.
        migrations.CreateModel(
            name='Sposition',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('angle', models.FloatField()),
                ('distance', models.FloatField()),
                # auto_now_add: set once at row creation, not on updates.
                ('time', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        # Existing Position table becomes Fposition.
        migrations.RenameModel(
            old_name='Position',
            new_name='Fposition',
        ),
    ]
| true | true |
f73184b05282cc90c8544a41fce552460381a5d0 | 17,311 | py | Python | src/k2hash/tests/test_k2hash.py | ggtakec/k2hash_python | 9d20d36aaf28d0dd2497d39b43286d50e7200fcc | [
"MIT"
] | 1 | 2022-03-02T10:27:43.000Z | 2022-03-02T10:27:43.000Z | src/k2hash/tests/test_k2hash.py | ggtakec/k2hash_python | 9d20d36aaf28d0dd2497d39b43286d50e7200fcc | [
"MIT"
] | null | null | null | src/k2hash/tests/test_k2hash.py | ggtakec/k2hash_python | 9d20d36aaf28d0dd2497d39b43286d50e7200fcc | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# K2hash Python Driver
#
# Copyright (c) 2022 Yahoo Japan Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# AUTHOR: Hirotaka Wakabayashi
# CREATE: Tue Feb 08 2022
# REVISION:
#
import unittest
import k2hash
import logging
import ctypes
import time
class TestK2hashIterator(unittest.TestCase):
    """Tests for K2hashIterator construction and iteration order."""

    def test_K2hashIterator_construct(self):
        """An iterator over the whole db yields the stored key first."""
        db = k2hash.K2hash()
        self.assertTrue(isinstance(db, k2hash.K2hash))
        key = "hello"
        val = "world"
        # NOTE(review): assertTrue's second positional argument is the
        # failure *message*, not an expected value — these calls only check
        # truthiness. They were presumably meant to be assertEqual; confirm
        # against the real return values before changing.
        self.assertTrue(db.set(key, val), True)
        self.assertTrue(db.get(key), val)
        ki = k2hash.K2hashIterator(db)
        self.assertTrue(isinstance(ki, k2hash.K2hashIterator))
        self.assertTrue(key == next(ki))
        db.close()

    def test_K2hashIterator_construct_key(self):
        """An iterator rooted at a key yields that key's subkeys."""
        db = k2hash.K2hash()
        self.assertTrue(isinstance(db, k2hash.K2hash))
        key = "hello"
        val = "world"
        # NOTE(review): same assertTrue-with-message pattern as above.
        self.assertTrue(db.set(key, val), True)
        self.assertTrue(db.get(key), val)
        subkey = "subkey1"
        subval = "subval1"
        self.assertTrue(db.add_subkey(key, subkey, subval), True)
        ki = k2hash.K2hashIterator(db, key)
        self.assertTrue(isinstance(ki, k2hash.K2hashIterator))
        self.assertTrue(subkey == next(ki))
        db.close()
class TestK2hash(unittest.TestCase):
def test_K2hash_construct(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
db.close()
def test_K2hash_get_iterator(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
ki = db.get_iterator()
# Note: handle should be undefined before setting no keys.
self.assertTrue(isinstance(ki, k2hash.K2hashIterator))
db.close()
def test_K2hash_set(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.close()
def test_K2hash_get(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.close()
@unittest.skip("skipping because no plugin lib prepared")
def test_K2hash_add_attribute_plugin_lib(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.close()
def test_K2hash_add_decryption_password(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
password = "secretstring"
self.assertTrue(db.add_decryption_password(password), True)
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key, password), val)
self.assertTrue(db.get(key), "")
db.close()
def test_K2hash_add_subkey(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
subkey = "subkey"
subval = "subval"
self.assertTrue(db.set(subkey, subval), True)
self.assertTrue(db.get(subkey), subval)
self.assertTrue(db.add_subkey(key, subkey, subval), True)
self.assertTrue(db.get_subkeys(key) == [subkey])
db.close()
def test_K2hash_begin_tx(self):
db = k2hash.K2hash()
# for debugging, uncomment the following line.
# db.set_log_level(k2hash.LogLevel.ERROR)
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
k2h_tx_log = "test.log"
self.assertTrue(db.begin_tx(k2h_tx_log), True)
# TODO how to check whether transaction is enabled.
db.close()
def test_K2hash_close(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.close(), True)
def test_K2hash_create(self):
k2h_file = "test.k2h"
self.assertTrue(k2hash.K2hash.create(k2h_file), True)
def test_K2hash_dump_to_file(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
k2h_file = "test.k2h"
self.assertTrue(db.dump_to_file(k2h_file), val)
db.close()
def test_K2hash_enable_encryption(self):
db = k2hash.K2hash()
# db.set_log_level(k2hash.LogLevel.ERROR)
self.assertTrue(isinstance(db, k2hash.K2hash))
password = "secretstring"
# Calls set_default_encryption_password before calling enable_encryption
self.assertTrue(db.set_default_encryption_password(password), True)
self.assertTrue(db.enable_encryption(), True)
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key, password), val)
# for debugging, uncomment the following line.
db.close()
def test_K2hash_enable_history(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
self.assertTrue(db.enable_history(), True)
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.close()
def test_K2hash_enable_mtime(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
self.assertTrue(db.enable_mtime(), True)
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.close()
def test_K2hash_get_attributes(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
attr_key = "attrkey1"
attr_val = "attrval1"
attrs = {attr_key: attr_val}
self.assertTrue(db.set_attribute(key, attr_key, attr_val), True)
self.assertTrue(db.get_attributes(key), attrs)
db.close()
def test_K2hash_handle(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
# TODO insufficient check
self.assertTrue(db.handle != k2hash.K2hash.K2H_INVALID_HANDLE)
db.close()
def test_K2hash_get_subkeys(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
subkey = "subkey"
subval = "subval"
self.assertTrue(db.set(subkey, subval), True)
self.assertTrue(db.get(subkey), subval)
self.assertTrue(db.add_subkey(key, subkey, subval), True)
self.assertTrue(db.get_subkeys(key) == [subkey])
db.close()
def test_K2hash_get_tx_file_fd(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
k2h_tx_log = "test.log"
self.assertTrue(db.begin_tx(k2h_tx_log), True)
# TODO how to check whether transaction is enabled.
self.assertTrue(db.get_tx_file_fd() != k2hash.K2hash.K2H_INVALID_HANDLE)
db.close()
def test_K2hash_get_tx_pool_size(self):
self.assertTrue(k2hash.K2hash.get_tx_pool_size() == 0)
def test_K2hash_libk2hash(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(isinstance(db.libk2hash, ctypes.CDLL))
db.close()
def test_K2hash_libc(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(isinstance(db.libc, ctypes.CDLL))
db.close()
def test_K2hash_load_from_file(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
k2h_file = "test.k2h"
self.assertTrue(db.dump_to_file(k2h_file), val)
db.close()
db = None
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
self.assertTrue(db.load_from_file(k2h_file), val)
self.assertTrue(db.get(key), val)
db.close()
def test_K2hash_print_attribute_plugins(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.print_attribute_plugins(), True)
db.close()
def test_K2hash_print_attributes(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.print_attributes(), True)
db.close()
def test_K2hash_print_data_stats(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.print_data_stats(), True)
db.close()
def test_K2hash_print_table_stats(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.print_table_stats(), True)
db.close()
def test_K2hash_remove(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.remove(key), True)
self.assertTrue(db.get(key) == "")
db.close()
def test_K2hash_remove_subkeys(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
subkey = "subkey"
subval = "subval"
self.assertTrue(db.set(subkey, subval), True)
self.assertTrue(db.get(subkey), subval)
self.assertTrue(db.add_subkey(key, subkey, subval), True)
self.assertTrue(db.get_subkeys(key) == [subkey])
self.assertTrue(db.remove_subkeys(key, [subkey]), True)
self.assertTrue(db.get_subkeys(key) == [])
self.assertTrue(db.get(subkey) == "")
db.close()
def test_K2hash_rename(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
newkey = key[::-1]
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.rename(key, newkey), val)
self.assertTrue(db.get(newkey), val)
db.close()
def test_K2hash_set_attribute(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
attr_key = "attrkey1"
attr_val = "attrval1"
attrs = {attr_key: attr_val}
self.assertTrue(db.set_attribute(key, attr_key, attr_val), True)
self.assertTrue(db.get_attributes(key), attrs)
db.close()
def test_K2hash_set_encryption_password_file(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
# 1. make the password file for test
password_file="password.txt"
password = "secretstring"
import os
with open(password_file, 'w') as f:
print("{}".format(password), file=f)
# 2. call the api
self.assertTrue(db.set_encryption_password_file(password_file), True)
db.close()
def test_K2hash_set_expiration_duration(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
duration = 3
self.assertTrue(db.set_expiration_duration(duration), True)
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
time.sleep(duration + 1)
self.assertTrue(db.get(key) == "")
db.close()
def test_K2hash_set_log_level(self):
db = k2hash.K2hash()
db.set_log_level(k2hash.LogLevel.ERROR)
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.set_log_level(k2hash.LogLevel.WARNING)
db.close()
def test_K2hash_set_subkeys(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
subkey = "subkey"
subval = "subval"
self.assertTrue(db.set_subkeys(key, {subkey: subval}), True)
self.assertTrue(db.get_subkeys(key) == [subkey])
db.close()
def test_K2hash_set_tx_pool_size(self):
self.assertTrue(k2hash.K2hash.set_tx_pool_size(1), True)
self.assertTrue(k2hash.K2hash.get_tx_pool_size() == 1)
def test_K2hash_stop_tx(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
k2h_tx_log = "test.log"
self.assertTrue(db.begin_tx(k2h_tx_log), True)
# TODO how to check whether transaction is enabled.
self.assertTrue(db.get_tx_file_fd() != k2hash.K2hash.K2H_INVALID_HANDLE)
self.assertTrue(db.stop_tx(), True)
db.close()
def test_K2hash_repr(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertRegex(repr(db), "<_K2hash _k2hfile=.*")
db.close()
def test_K2hash_version(self):
self.assertTrue(k2hash.K2hash.version() == None)
if __name__ == '__main__':
unittest.main()
#
# Local variables:
# tab-width: 4
# c-basic-offset: 4
# End:
# vim600: expandtab sw=4 ts=4 fdm=marker
# vim<600: expandtab sw=4 ts=4
#
| 34.691383 | 80 | 0.617122 |
import unittest
import k2hash
import logging
import ctypes
import time
class TestK2hashIterator(unittest.TestCase):
def test_K2hashIterator_construct(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
ki = k2hash.K2hashIterator(db)
self.assertTrue(isinstance(ki, k2hash.K2hashIterator))
self.assertTrue(key == next(ki))
db.close()
def test_K2hashIterator_construct_key(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
subkey = "subkey1"
subval = "subval1"
self.assertTrue(db.add_subkey(key, subkey, subval), True)
ki = k2hash.K2hashIterator(db, key)
self.assertTrue(isinstance(ki, k2hash.K2hashIterator))
self.assertTrue(subkey == next(ki))
db.close()
class TestK2hash(unittest.TestCase):
def test_K2hash_construct(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
db.close()
def test_K2hash_get_iterator(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
ki = db.get_iterator()
self.assertTrue(isinstance(ki, k2hash.K2hashIterator))
db.close()
def test_K2hash_set(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.close()
def test_K2hash_get(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.close()
@unittest.skip("skipping because no plugin lib prepared")
def test_K2hash_add_attribute_plugin_lib(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.close()
def test_K2hash_add_decryption_password(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
password = "secretstring"
self.assertTrue(db.add_decryption_password(password), True)
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key, password), val)
self.assertTrue(db.get(key), "")
db.close()
def test_K2hash_add_subkey(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
subkey = "subkey"
subval = "subval"
self.assertTrue(db.set(subkey, subval), True)
self.assertTrue(db.get(subkey), subval)
self.assertTrue(db.add_subkey(key, subkey, subval), True)
self.assertTrue(db.get_subkeys(key) == [subkey])
db.close()
def test_K2hash_begin_tx(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
k2h_tx_log = "test.log"
self.assertTrue(db.begin_tx(k2h_tx_log), True)
db.close()
def test_K2hash_close(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.close(), True)
def test_K2hash_create(self):
k2h_file = "test.k2h"
self.assertTrue(k2hash.K2hash.create(k2h_file), True)
def test_K2hash_dump_to_file(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
k2h_file = "test.k2h"
self.assertTrue(db.dump_to_file(k2h_file), val)
db.close()
def test_K2hash_enable_encryption(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
password = "secretstring"
self.assertTrue(db.set_default_encryption_password(password), True)
self.assertTrue(db.enable_encryption(), True)
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key, password), val)
db.close()
def test_K2hash_enable_history(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
self.assertTrue(db.enable_history(), True)
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.close()
def test_K2hash_enable_mtime(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
self.assertTrue(db.enable_mtime(), True)
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.close()
def test_K2hash_get_attributes(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
attr_key = "attrkey1"
attr_val = "attrval1"
attrs = {attr_key: attr_val}
self.assertTrue(db.set_attribute(key, attr_key, attr_val), True)
self.assertTrue(db.get_attributes(key), attrs)
db.close()
def test_K2hash_handle(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.handle != k2hash.K2hash.K2H_INVALID_HANDLE)
db.close()
def test_K2hash_get_subkeys(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
subkey = "subkey"
subval = "subval"
self.assertTrue(db.set(subkey, subval), True)
self.assertTrue(db.get(subkey), subval)
self.assertTrue(db.add_subkey(key, subkey, subval), True)
self.assertTrue(db.get_subkeys(key) == [subkey])
db.close()
def test_K2hash_get_tx_file_fd(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
k2h_tx_log = "test.log"
self.assertTrue(db.begin_tx(k2h_tx_log), True)
self.assertTrue(db.get_tx_file_fd() != k2hash.K2hash.K2H_INVALID_HANDLE)
db.close()
def test_K2hash_get_tx_pool_size(self):
self.assertTrue(k2hash.K2hash.get_tx_pool_size() == 0)
def test_K2hash_libk2hash(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(isinstance(db.libk2hash, ctypes.CDLL))
db.close()
def test_K2hash_libc(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(isinstance(db.libc, ctypes.CDLL))
db.close()
def test_K2hash_load_from_file(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
k2h_file = "test.k2h"
self.assertTrue(db.dump_to_file(k2h_file), val)
db.close()
db = None
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
self.assertTrue(db.load_from_file(k2h_file), val)
self.assertTrue(db.get(key), val)
db.close()
def test_K2hash_print_attribute_plugins(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.print_attribute_plugins(), True)
db.close()
def test_K2hash_print_attributes(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.print_attributes(), True)
db.close()
def test_K2hash_print_data_stats(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.print_data_stats(), True)
db.close()
def test_K2hash_print_table_stats(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.print_table_stats(), True)
db.close()
def test_K2hash_remove(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.remove(key), True)
self.assertTrue(db.get(key) == "")
db.close()
def test_K2hash_remove_subkeys(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
subkey = "subkey"
subval = "subval"
self.assertTrue(db.set(subkey, subval), True)
self.assertTrue(db.get(subkey), subval)
self.assertTrue(db.add_subkey(key, subkey, subval), True)
self.assertTrue(db.get_subkeys(key) == [subkey])
self.assertTrue(db.remove_subkeys(key, [subkey]), True)
self.assertTrue(db.get_subkeys(key) == [])
self.assertTrue(db.get(subkey) == "")
db.close()
def test_K2hash_rename(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
newkey = key[::-1]
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertTrue(db.rename(key, newkey), val)
self.assertTrue(db.get(newkey), val)
db.close()
def test_K2hash_set_attribute(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
attr_key = "attrkey1"
attr_val = "attrval1"
attrs = {attr_key: attr_val}
self.assertTrue(db.set_attribute(key, attr_key, attr_val), True)
self.assertTrue(db.get_attributes(key), attrs)
db.close()
def test_K2hash_set_encryption_password_file(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
password_file="password.txt"
password = "secretstring"
import os
with open(password_file, 'w') as f:
print("{}".format(password), file=f)
self.assertTrue(db.set_encryption_password_file(password_file), True)
db.close()
def test_K2hash_set_expiration_duration(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
duration = 3
self.assertTrue(db.set_expiration_duration(duration), True)
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
time.sleep(duration + 1)
self.assertTrue(db.get(key) == "")
db.close()
def test_K2hash_set_log_level(self):
db = k2hash.K2hash()
db.set_log_level(k2hash.LogLevel.ERROR)
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
db.set_log_level(k2hash.LogLevel.WARNING)
db.close()
def test_K2hash_set_subkeys(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
subkey = "subkey"
subval = "subval"
self.assertTrue(db.set_subkeys(key, {subkey: subval}), True)
self.assertTrue(db.get_subkeys(key) == [subkey])
db.close()
def test_K2hash_set_tx_pool_size(self):
self.assertTrue(k2hash.K2hash.set_tx_pool_size(1), True)
self.assertTrue(k2hash.K2hash.get_tx_pool_size() == 1)
def test_K2hash_stop_tx(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
k2h_tx_log = "test.log"
self.assertTrue(db.begin_tx(k2h_tx_log), True)
self.assertTrue(db.get_tx_file_fd() != k2hash.K2hash.K2H_INVALID_HANDLE)
self.assertTrue(db.stop_tx(), True)
db.close()
def test_K2hash_repr(self):
db = k2hash.K2hash()
self.assertTrue(isinstance(db, k2hash.K2hash))
key = "hello"
val = "world"
self.assertTrue(db.set(key, val), True)
self.assertTrue(db.get(key), val)
self.assertRegex(repr(db), "<_K2hash _k2hfile=.*")
db.close()
def test_K2hash_version(self):
self.assertTrue(k2hash.K2hash.version() == None)
if __name__ == '__main__':
unittest.main()
| true | true |
f73184b337ca7e743751238f2a113e71bb8c75e0 | 7,364 | py | Python | tests/rundb/test_sqldb.py | EdmondIguazio/mlrun | e63b34a610788ebe522ce7a46642e26927e39882 | [
"Apache-2.0"
] | null | null | null | tests/rundb/test_sqldb.py | EdmondIguazio/mlrun | e63b34a610788ebe522ce7a46642e26927e39882 | [
"Apache-2.0"
] | null | null | null | tests/rundb/test_sqldb.py | EdmondIguazio/mlrun | e63b34a610788ebe522ce7a46642e26927e39882 | [
"Apache-2.0"
] | 1 | 2021-05-05T14:19:46.000Z | 2021-05-05T14:19:46.000Z | # Copyright 2019 Iguazio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""SQLDB specific tests, common tests should be in test_dbs.py"""
from collections import defaultdict
from contextlib import contextmanager
from datetime import datetime, timedelta
from unittest.mock import Mock
import pytest
from sqlalchemy.orm import Session
from mlrun.api.db.sqldb.db import SQLDB
from mlrun.api.db.sqldb.models import _tagged
from tests.conftest import new_run
@contextmanager
def patch(obj, **kw):
    """Temporarily set attributes on *obj*; restore the originals on exit.

    Yields *obj* with every keyword argument applied as an attribute.
    Originals are saved as each attribute is overwritten and always put
    back, even if the body raises.
    """
    saved = {}
    for attr, value in kw.items():
        saved[attr] = getattr(obj, attr)
        setattr(obj, attr, value)
    try:
        yield obj
    finally:
        for attr, value in saved.items():
            setattr(obj, attr, value)
def test_list_artifact_tags(db: SQLDB, db_session: Session):
    # Store artifacts under two tags in project "p1" and one in "p2";
    # listing tags for "p1" must return only that project's tags.
    db.store_artifact(db_session, "k1", {}, "1", tag="t1", project="p1")
    db.store_artifact(db_session, "k1", {}, "2", tag="t2", project="p1")
    db.store_artifact(db_session, "k1", {}, "2", tag="t2", project="p2")
    tags = db.list_artifact_tags(db_session, "p1")
    assert {"t1", "t2"} == set(tags), "bad tags"
def test_list_artifact_date(db: SQLDB, db_session: Session):
    # Three artifacts whose "updated" timestamps are one week apart, so the
    # since/until filters of list_artifacts can be checked at the boundaries.
    t1 = datetime(2020, 2, 16)
    t2 = t1 - timedelta(days=7)
    t3 = t2 - timedelta(days=7)
    prj = "p7"
    db.store_artifact(db_session, "k1", {"updated": t1}, "u1", project=prj)
    db.store_artifact(db_session, "k2", {"updated": t2}, "u2", project=prj)
    db.store_artifact(db_session, "k3", {"updated": t3}, "u3", project=prj)
    # since == oldest timestamp matches all three artifacts
    arts = db.list_artifacts(db_session, project=prj, since=t3, tag="*")
    assert 3 == len(arts), "since t3"
    arts = db.list_artifacts(db_session, project=prj, since=t2, tag="*")
    assert 2 == len(arts), "since t2"
    # a lower bound newer than the newest artifact matches nothing
    arts = db.list_artifacts(
        db_session, project=prj, since=t1 + timedelta(days=1), tag="*"
    )
    assert not arts, "since t1+"
    arts = db.list_artifacts(db_session, project=prj, until=t2, tag="*")
    assert 2 == len(arts), "until t2"
    # since == until == t2 narrows the window down to exactly one artifact
    arts = db.list_artifacts(db_session, project=prj, since=t2, until=t2, tag="*")
    assert 1 == len(arts), "since/until t2"
def test_list_projects(db: SQLDB, db_session: Session):
    # Spread ten runs across three projects (prj0..prj2); storing a run
    # registers its project, and list_projects must report each exactly once.
    for i in range(10):
        run = new_run("s1", {"l1": "v1", "l2": "v2"}, x=1)
        db.store_run(db_session, run, "u7", project=f"prj{i % 3}", iter=i)
    assert {"prj0", "prj1", "prj2"} == {p.name for p in db.list_projects(db_session)}
def test_run_iter0(db: SQLDB, db_session: Session):
    # Regression test: after storing iterations 0..6 under the same uid,
    # fetching iteration 0 must still work (it failed before, see issue 140).
    uid, prj = "uid39", "lemon"
    run = new_run("s1", {"l1": "v1", "l2": "v2"}, x=1)
    for i in range(7):
        db.store_run(db_session, run, uid, prj, i)
    db._get_run(db_session, uid, prj, 0)  # See issue 140
def test_artifacts_latest(db: SQLDB, db_session: Session):
    """The "latest" tag must resolve to every upload stored under it.

    Fixed: the intermediate assertion indexed into the result list
    (``arts[1]["a"]``), which silently depends on the database returning
    rows in insertion order; compare as a set instead, exactly like the
    final assertion already does.
    """
    k1, u1, art1 = "k1", "u1", {"a": 1}
    prj = "p38"
    db.store_artifact(db_session, k1, art1, u1, project=prj)
    arts = db.list_artifacts(db_session, project=prj, tag="latest")
    assert art1["a"] == arts[0]["a"], "bad artifact"
    u2, art2 = "u2", {"a": 17}
    db.store_artifact(db_session, k1, art2, u2, project=prj)
    arts = db.list_artifacts(db_session, project=prj, tag="latest")
    assert 2 == len(arts), "count"
    # order-independent: both uploads of k1 are reported under "latest"
    assert {art1["a"], art2["a"]} == set(art["a"] for art in arts), "bad artifact"
    k2, u3, art3 = "k2", "u3", {"a": 99}
    db.store_artifact(db_session, k2, art3, u3, project=prj)
    arts = db.list_artifacts(db_session, project=prj, tag="latest")
    assert 3 == len(arts), "number"
    assert {1, 17, 99} == set(art["a"] for art in arts), "latest"
@pytest.mark.parametrize("cls", _tagged)
def test_tags(db: SQLDB, db_session: Session, cls):
    # For every taggable model class: tag two of three persisted objects,
    # check find_tagged returns exactly those two, then delete the tag and
    # check the lookup comes back empty.
    p1, n1 = "prj1", "name1"
    obj1, obj2, obj3 = cls(), cls(), cls()
    db_session.add(obj1)
    db_session.add(obj2)
    db_session.add(obj3)
    db_session.commit()
    db.tag_objects(db_session, [obj1, obj2], p1, n1)
    objs = db.find_tagged(db_session, p1, n1)
    assert {obj1, obj2} == set(objs), "find tags"
    db.del_tag(db_session, p1, n1)
    objs = db.find_tagged(db_session, p1, n1)
    assert [] == objs, "find tags after del"
def _tag_objs(db: SQLDB, db_session: Session, count, project, tags):
    """Persist *count* objects (round-robin over the taggable model classes)
    and tag them in *project*, cycling through *tags*."""
    grouped = defaultdict(list)
    for idx in range(count):
        model = _tagged[idx % len(_tagged)]
        instance = model()
        grouped[tags[idx % len(tags)]].append(instance)
        db_session.add(instance)
    db_session.commit()
    for tag_name, instances in grouped.items():
        db.tag_objects(db_session, instances, project, tag_name)
def test_list_tags(db: SQLDB, db_session: Session):
    # Tags created in one project must not leak into another project's
    # tag listing, even when the tag names overlap ("b", "c").
    p1, tags1 = "prj1", ["a", "b", "c"]
    _tag_objs(db, db_session, 17, p1, tags1)
    p2, tags2 = "prj2", ["b", "c", "d", "e"]
    _tag_objs(db, db_session, 11, p2, tags2)
    tags = db.list_tags(db_session, p1)
    assert set(tags) == set(tags1), "tags"
def test_projects(db: SQLDB, db_session: Session):
    # Round-trip a project through add/get, update one field, then add a
    # second project and verify both appear in list_projects.
    prj1 = {
        "name": "p1",
        "description": "banana",
        # 'users': ['u1', 'u2'],
        "spec": {"company": "ACME"},
        "state": "active",
        "created": datetime.now(),
    }
    pid1 = db.add_project(db_session, prj1)
    p1 = db.get_project(db_session, project_id=pid1)
    assert p1, f"project {pid1} not found"
    # rebuild a dict from the stored record so it can be compared to the input
    out = {
        "name": p1.name,
        "description": p1.description,
        # 'users': sorted(u.name for u in p1.users),
        "spec": p1.spec,
        "state": p1.state,
        "created": p1.created,
    }
    assert prj1 == out, "bad project"
    data = {"description": "lemon"}
    db.update_project(db_session, p1.name, data)
    p1 = db.get_project(db_session, project_id=pid1)
    assert data["description"] == p1.description, "bad update"
    prj2 = {"name": "p2"}
    db.add_project(db_session, prj2)
    prjs = {p.name for p in db.list_projects(db_session)}
    assert {prj1["name"], prj2["name"]} == prjs, "list"
def test_cache_projects(db: SQLDB, db_session: Session):
    # Exercises SQLDB's in-memory project-name cache (db._projects):
    # _ensure_project must only call add_project when the name is neither
    # cached nor already present in the database.
    assert 0 == len(db._projects), "empty cache"
    name = "prj348"
    db.add_project(db_session, {"name": name})
    assert {name} == db._projects, "project"
    # cached name -> add_project must not be called
    mock = Mock()
    with patch(db, add_project=mock):
        db._ensure_project(db_session, name)
    mock.assert_not_called()
    # unknown name -> add_project is called exactly once
    mock = Mock()
    with patch(db, add_project=mock):
        db._ensure_project(db_session, name + "-new")
    mock.assert_called_once()
    project_2_name = "project-2"
    db.add_project(db_session, {"name": project_2_name})
    # even with a cleared cache, a project already stored in the DB must not
    # be re-added
    db._projects = set()
    mock = Mock()
    with patch(db, add_project=mock):
        db._ensure_project(db_session, name)
    mock.assert_not_called()
# def test_function_latest(db: SQLDB, db_session: Session):
# fn1, t1 = {'x': 1}, 'u83'
# fn2, t2 = {'x': 2}, 'u23'
# prj, name = 'p388', 'n3023'
# db.store_function(db_session, fn1, name, prj, t1)
# db.store_function(db_session, fn2, name, prj, t2)
#
# fn = db.get_function(db_session, name, prj, 'latest')
# assert fn2 == fn, 'latest'
| 33.022422 | 85 | 0.629957 |
from collections import defaultdict
from contextlib import contextmanager
from datetime import datetime, timedelta
from unittest.mock import Mock
import pytest
from sqlalchemy.orm import Session
from mlrun.api.db.sqldb.db import SQLDB
from mlrun.api.db.sqldb.models import _tagged
from tests.conftest import new_run
@contextmanager
def patch(obj, **kw):
old = {}
for k, v in kw.items():
old[k] = getattr(obj, k)
setattr(obj, k, v)
try:
yield obj
finally:
for k, v in old.items():
setattr(obj, k, v)
def test_list_artifact_tags(db: SQLDB, db_session: Session):
db.store_artifact(db_session, "k1", {}, "1", tag="t1", project="p1")
db.store_artifact(db_session, "k1", {}, "2", tag="t2", project="p1")
db.store_artifact(db_session, "k1", {}, "2", tag="t2", project="p2")
tags = db.list_artifact_tags(db_session, "p1")
assert {"t1", "t2"} == set(tags), "bad tags"
def test_list_artifact_date(db: SQLDB, db_session: Session):
t1 = datetime(2020, 2, 16)
t2 = t1 - timedelta(days=7)
t3 = t2 - timedelta(days=7)
prj = "p7"
db.store_artifact(db_session, "k1", {"updated": t1}, "u1", project=prj)
db.store_artifact(db_session, "k2", {"updated": t2}, "u2", project=prj)
db.store_artifact(db_session, "k3", {"updated": t3}, "u3", project=prj)
arts = db.list_artifacts(db_session, project=prj, since=t3, tag="*")
assert 3 == len(arts), "since t3"
arts = db.list_artifacts(db_session, project=prj, since=t2, tag="*")
assert 2 == len(arts), "since t2"
arts = db.list_artifacts(
db_session, project=prj, since=t1 + timedelta(days=1), tag="*"
)
assert not arts, "since t1+"
arts = db.list_artifacts(db_session, project=prj, until=t2, tag="*")
assert 2 == len(arts), "until t2"
arts = db.list_artifacts(db_session, project=prj, since=t2, until=t2, tag="*")
assert 1 == len(arts), "since/until t2"
def test_list_projects(db: SQLDB, db_session: Session):
for i in range(10):
run = new_run("s1", {"l1": "v1", "l2": "v2"}, x=1)
db.store_run(db_session, run, "u7", project=f"prj{i % 3}", iter=i)
assert {"prj0", "prj1", "prj2"} == {p.name for p in db.list_projects(db_session)}
def test_run_iter0(db: SQLDB, db_session: Session):
uid, prj = "uid39", "lemon"
run = new_run("s1", {"l1": "v1", "l2": "v2"}, x=1)
for i in range(7):
db.store_run(db_session, run, uid, prj, i)
db._get_run(db_session, uid, prj, 0)
def test_artifacts_latest(db: SQLDB, db_session: Session):
k1, u1, art1 = "k1", "u1", {"a": 1}
prj = "p38"
db.store_artifact(db_session, k1, art1, u1, project=prj)
arts = db.list_artifacts(db_session, project=prj, tag="latest")
assert art1["a"] == arts[0]["a"], "bad artifact"
u2, art2 = "u2", {"a": 17}
db.store_artifact(db_session, k1, art2, u2, project=prj)
arts = db.list_artifacts(db_session, project=prj, tag="latest")
assert 2 == len(arts), "count"
assert art2["a"] == arts[1]["a"], "bad artifact"
k2, u3, art3 = "k2", "u3", {"a": 99}
db.store_artifact(db_session, k2, art3, u3, project=prj)
arts = db.list_artifacts(db_session, project=prj, tag="latest")
assert 3 == len(arts), "number"
assert {1, 17, 99} == set(art["a"] for art in arts), "latest"
@pytest.mark.parametrize("cls", _tagged)
def test_tags(db: SQLDB, db_session: Session, cls):
p1, n1 = "prj1", "name1"
obj1, obj2, obj3 = cls(), cls(), cls()
db_session.add(obj1)
db_session.add(obj2)
db_session.add(obj3)
db_session.commit()
db.tag_objects(db_session, [obj1, obj2], p1, n1)
objs = db.find_tagged(db_session, p1, n1)
assert {obj1, obj2} == set(objs), "find tags"
db.del_tag(db_session, p1, n1)
objs = db.find_tagged(db_session, p1, n1)
assert [] == objs, "find tags after del"
def _tag_objs(db: SQLDB, db_session: Session, count, project, tags):
by_tag = defaultdict(list)
for i in range(count):
cls = _tagged[i % len(_tagged)]
obj = cls()
by_tag[tags[i % len(tags)]].append(obj)
db_session.add(obj)
db_session.commit()
for tag, objs in by_tag.items():
db.tag_objects(db_session, objs, project, tag)
def test_list_tags(db: SQLDB, db_session: Session):
p1, tags1 = "prj1", ["a", "b", "c"]
_tag_objs(db, db_session, 17, p1, tags1)
p2, tags2 = "prj2", ["b", "c", "d", "e"]
_tag_objs(db, db_session, 11, p2, tags2)
tags = db.list_tags(db_session, p1)
assert set(tags) == set(tags1), "tags"
def test_projects(db: SQLDB, db_session: Session):
prj1 = {
"name": "p1",
"description": "banana",
"spec": {"company": "ACME"},
"state": "active",
"created": datetime.now(),
}
pid1 = db.add_project(db_session, prj1)
p1 = db.get_project(db_session, project_id=pid1)
assert p1, f"project {pid1} not found"
out = {
"name": p1.name,
"description": p1.description,
"spec": p1.spec,
"state": p1.state,
"created": p1.created,
}
assert prj1 == out, "bad project"
data = {"description": "lemon"}
db.update_project(db_session, p1.name, data)
p1 = db.get_project(db_session, project_id=pid1)
assert data["description"] == p1.description, "bad update"
prj2 = {"name": "p2"}
db.add_project(db_session, prj2)
prjs = {p.name for p in db.list_projects(db_session)}
assert {prj1["name"], prj2["name"]} == prjs, "list"
def test_cache_projects(db: SQLDB, db_session: Session):
assert 0 == len(db._projects), "empty cache"
name = "prj348"
db.add_project(db_session, {"name": name})
assert {name} == db._projects, "project"
mock = Mock()
with patch(db, add_project=mock):
db._ensure_project(db_session, name)
mock.assert_not_called()
mock = Mock()
with patch(db, add_project=mock):
db._ensure_project(db_session, name + "-new")
mock.assert_called_once()
project_2_name = "project-2"
db.add_project(db_session, {"name": project_2_name})
db._projects = set()
mock = Mock()
with patch(db, add_project=mock):
db._ensure_project(db_session, name)
mock.assert_not_called()
| true | true |
f731851b0bbc67a1b6af6e831923eba84d6c28aa | 10,964 | py | Python | rustplus/api/rust_api.py | olijeffers0n/rustplus.py | 0ecaaecc345848b47abe27bc1fd8b7ef7aebfd23 | [
"MIT"
] | 25 | 2021-05-03T11:08:55.000Z | 2022-03-14T00:56:50.000Z | rustplus/api/rust_api.py | olijeffers0n/rustplus.py | 0ecaaecc345848b47abe27bc1fd8b7ef7aebfd23 | [
"MIT"
] | 9 | 2021-06-15T10:38:42.000Z | 2022-03-26T11:45:03.000Z | rustplus/api/rust_api.py | olijeffers0n/rustplus.py | 0ecaaecc345848b47abe27bc1fd8b7ef7aebfd23 | [
"MIT"
] | 13 | 2021-06-08T18:35:17.000Z | 2022-03-26T00:44:08.000Z | import asyncio
from asyncio.futures import Future
from typing import List
from PIL import Image
from io import BytesIO
from importlib import resources
from datetime import datetime
from collections import defaultdict
from .base_rust_api import BaseRustSocket
from .structures import RustTime, RustInfo, RustMap, RustMarker, RustChatMessage, RustTeamInfo, RustTeamMember, RustTeamNote, RustEntityInfo, RustContents, RustItem
from .remote.rustplus_pb2 import *
from .remote import HeartBeat
from ..commands import CommandOptions
from ..exceptions import *
from ..utils import *
class RustSocket(BaseRustSocket):
    """Concrete Rust+ (Rust companion app) API client.

    Every public coroutine follows the same pattern: consume rate-limit
    tokens via ``_handle_ratelimit``, build a protobuf ``AppRequest``,
    send it over the websocket held by ``self.remote`` and — where the
    server replies — await the response matched by sequence number before
    converting it into one of the ``Rust*`` structures.
    """
    def __init__(self, ip: str = None, port: str = None, steamid: int = None, playertoken: int = None, command_options : CommandOptions = None, raise_ratelimit_exception : bool = True, ratelimit_limit : int = 25, ratelimit_refill : int = 3) -> None:
        # All connection/rate-limit arguments are forwarded unchanged; the
        # only addition is the HeartBeat helper bound to this socket.
        super().__init__(ip=ip, port=port, steamid=steamid, playertoken=playertoken, command_options=command_options, raise_ratelimit_exception=raise_ratelimit_exception, ratelimit_limit=ratelimit_limit, ratelimit_refill=ratelimit_refill, heartbeat=HeartBeat(self))
    def entity_event(self, eid):
        """
        Decorator to register a smart device listener
        """
        def wrap_func(coro):
            # Runs once the entity lookup completes; registers the decorated
            # coroutine as a listener for entity `eid`.
            def entity_event_callback(future : Future):
                try:
                    entity_info : RustEntityInfo = future.result()
                    self.remote.event_handler.register_event(eid, (coro, loop, entity_info.type))
                except:
                    # NOTE(review): bare except also swallows unrelated
                    # errors (connection failures etc.) and reports them all
                    # as "Not Found" — consider `except Exception`.
                    raise SmartDeviceRegistrationError("Not Found")
            # Validate the entity asynchronously; the actual registration
            # happens in the completion callback above.
            loop = asyncio.get_event_loop()
            future = asyncio.run_coroutine_threadsafe(self.get_entity_info(eid), loop)
            future.add_done_callback(entity_event_callback)
        return wrap_func
    async def get_time(self) -> RustTime:
        """Return the current in-game time on the server."""
        await self._handle_ratelimit()
        app_request = self._generate_protobuf()
        app_request.getTime.CopyFrom(AppEmpty())
        await self.remote.send_message(app_request)
        response = await self.remote.get_response(app_request.seq, app_request)
        return format_time(response)
    async def send_team_message(self, message: str) -> None:
        """Send *message* to the team chat (fire-and-forget; costs 2 tokens)."""
        await self._handle_ratelimit(2)
        app_send_message = AppSendMessage()
        app_send_message.message = message
        app_request = self._generate_protobuf()
        app_request.sendTeamMessage.CopyFrom(app_send_message)
        # No payload comes back for this request — drop its response.
        self.remote.ignored_responses.append(app_request.seq)
        await self.remote.send_message(app_request)
    async def get_info(self) -> RustInfo:
        """Return general server information (name, map size, players, ...)."""
        await self._handle_ratelimit()
        app_request = self._generate_protobuf()
        app_request.getInfo.CopyFrom(AppEmpty())
        await self.remote.send_message(app_request)
        response = await self.remote.get_response(app_request.seq, app_request)
        return RustInfo(response.response.info)
    async def get_team_chat(self) -> List[RustChatMessage]:
        """Return the recent team-chat history as RustChatMessage objects."""
        await self._handle_ratelimit()
        app_request = self._generate_protobuf()
        app_request.getTeamChat.CopyFrom(AppEmpty())
        await self.remote.send_message(app_request)
        messages = (await self.remote.get_response(app_request.seq, app_request)).response.teamChat.messages
        return [RustChatMessage(message) for message in messages]
    async def get_team_info(self):
        """Return the current team's state (members, notes, leader)."""
        await self._handle_ratelimit()
        app_request = self._generate_protobuf()
        app_request.getTeamInfo.CopyFrom(AppEmpty())
        await self.remote.send_message(app_request)
        app_message = await self.remote.get_response(app_request.seq, app_request)
        return RustTeamInfo(app_message.response.teamInfo)
    async def get_markers(self) -> List[RustMarker]:
        """Return all map markers currently known to the server."""
        await self._handle_ratelimit()
        app_request = self._generate_protobuf()
        app_request.getMapMarkers.CopyFrom(AppEmpty())
        await self.remote.send_message(app_request)
        app_message = await self.remote.get_response(app_request.seq, app_request)
        return [RustMarker(marker) for marker in app_message.response.mapMarkers.markers]
    async def get_raw_map_data(self) -> RustMap:
        """Return the raw map payload (costs 5 tokens — large response)."""
        await self._handle_ratelimit(5)
        app_request = self._generate_protobuf()
        app_request.getMap.CopyFrom(AppEmpty())
        await self.remote.send_message(app_request)
        app_message = await self.remote.get_response(app_request.seq, app_request)
        return RustMap(app_message.response.map)
    async def get_map(self, add_icons: bool = False, add_events: bool = False, add_vending_machines: bool = False, override_images: dict = {}) -> Image:
        """Fetch the server map as a 2000x2000 PIL image, optionally overlaying
        monument icons, event markers and vending machines.

        NOTE(review): ``override_images: dict = {}`` is a mutable default
        argument (shared across calls), and the ternary below parses as
        ``(5 + 1) if ... else 0`` — i.e. 6 or 0 tokens, probably meant to be
        5 or 6.  Also ``map`` shadows the builtin.
        """
        MAPSIZE = int((await self.get_info()).size)
        await self._handle_ratelimit(5 + 1 if [add_icons, add_events, add_vending_machines].count(True) >= 1 else 0)
        app_request = self._generate_protobuf()
        app_request.getMap.CopyFrom(AppEmpty())
        await self.remote.send_message(app_request)
        app_message = await self.remote.get_response(app_request.seq, app_request)
        map = app_message.response.map
        monuments = list(map.monuments)
        try:
            image = Image.open(BytesIO(map.jpgImage))
        except:
            raise ImageError("Invalid bytes for the image")
        # Trim the 500px ocean border, then scale to the in-game map size so
        # that marker/monument world coordinates map onto pixels.
        image = image.crop((500,500,map.height-500,map.width-500))
        map = image.resize((MAPSIZE,MAPSIZE), Image.ANTIALIAS)
        if add_icons or add_events or add_vending_machines:
            mapMarkers = await self.get_markers()
            if add_icons:
                for monument in monuments:
                    if str(monument.token) == "DungeonBase":
                        continue
                    icon = convert_monument(monument.token, override_images)
                    if monument.token in override_images:
                        icon = icon.resize((150, 150))
                    if str(monument.token) == "train_tunnel_display_name":
                        icon = icon.resize((100, 125))
                    map.paste(icon, (format_cood(int(monument.x), int(monument.y), MAPSIZE)), icon)
            if add_vending_machines:
                with resources.path("rustplus.api.icons", "vending_machine.png") as path:
                    vendingMachine = Image.open(path).convert("RGBA")
                    vendingMachine = vendingMachine.resize((100, 100))
            for marker in mapMarkers:
                if add_events:
                    if marker.type == 2 or marker.type == 4 or marker.type == 5 or marker.type == 6:
                        icon = convert_marker(str(marker.type), marker.rotation)
                        if marker.type == 6:
                            # Clamp this marker type's position onto the map
                            # bounds before pasting.
                            x = marker.x
                            y = marker.y
                            if y > MAPSIZE: y = MAPSIZE
                            if y < 0: y = 100
                            if x > MAPSIZE: x = MAPSIZE - 75
                            if x < 0: x = 50
                            map.paste(icon, (int(x), MAPSIZE - int(y)), icon)
                        else:
                            map.paste(icon, (format_cood(int(marker.x), int(marker.y), MAPSIZE)), icon)
                if add_vending_machines and marker.type == 3:
                    map.paste(vendingMachine, (int(marker.x) - 50, MAPSIZE - int(marker.y) - 50), vendingMachine)
        return map.resize((2000, 2000), Image.ANTIALIAS)
    async def get_entity_info(self, eid: int = None) -> RustEntityInfo:
        """Return the state of the smart entity with id *eid*.

        Raises ValueError if *eid* is None.
        """
        await self._handle_ratelimit()
        if eid is None:
            raise ValueError("EID cannot be None")
        app_request = self._generate_protobuf()
        app_request.entityId = eid
        app_request.getEntityInfo.CopyFrom(AppEmpty())
        await self.remote.send_message(app_request)
        app_message = await self.remote.get_response(app_request.seq, app_request)
        return RustEntityInfo(app_message.response.entityInfo)
    async def _update_smart_device(self, eid : int, value : bool) -> None:
        # Shared implementation for turn_on/turn_off: sets the entity's value
        # and ignores the (empty) server acknowledgement.
        await self._handle_ratelimit()
        entityValue = AppSetEntityValue()
        entityValue.value = value
        app_request = self._generate_protobuf()
        app_request.entityId = eid
        app_request.setEntityValue.CopyFrom(entityValue)
        self.remote.ignored_responses.append(app_request.seq)
        await self.remote.send_message(app_request)
    async def turn_on_smart_switch(self, eid: int = None) -> None:
        """Turn the smart switch *eid* on. Raises ValueError if eid is None."""
        if eid is None:
            raise ValueError("EID cannot be None")
        await self._update_smart_device(eid, True)
    async def turn_off_smart_switch(self, eid: int = None) -> None:
        """Turn the smart switch *eid* off. Raises ValueError if eid is None."""
        if eid is None:
            raise ValueError("EID cannot be None")
        await self._update_smart_device(eid, False)
    async def promote_to_team_leader(self, steamid: int = None) -> None:
        """Promote the team member with *steamid* to team leader."""
        if steamid is None:
            raise ValueError("SteamID cannot be None")
        await self._handle_ratelimit()
        leaderPacket = AppPromoteToLeader()
        leaderPacket.steamId = steamid
        app_request = self._generate_protobuf()
        app_request.promoteToLeader.CopyFrom(leaderPacket)
        self.remote.ignored_responses.append(app_request.seq)
        await self.remote.send_message(app_request)
    async def get_current_events(self) -> List[RustMarker]:
        """Return the markers with type 2, 4, 5 or 6 — presumably the
        in-game event markers; confirm the type mapping against RustMarker."""
        return [marker for marker in (await self.get_markers()) if marker.type == 2 or marker.type == 4 or marker.type == 5 or marker.type == 6]
    async def get_tc_storage_contents(self, eid: int = None, combine_stacks: bool = False) -> RustContents:
        """Return the contents of the tool cupboard entity *eid*.

        The result carries the remaining protection time, the protection
        flag and the stored items; with combine_stacks=True, stacks of the
        same item id are merged into a single entry.
        """
        if eid is None:
            raise ValueError("EID cannot be None")
        returnedData = await self.get_entity_info(eid)
        # Remaining protection = expiry timestamp (UTC) minus "now".
        targetTime = datetime.utcfromtimestamp(int(returnedData.protectionExpiry))
        difference = targetTime - datetime.utcnow()
        items = []
        for item in returnedData.items:
            items.append(RustItem(translate_id_to_stack(item.itemId), item.itemId, item.quantity, item.itemIsBlueprint))
        if combine_stacks:
            # Fold stacks with the same item id into one (quantity, blueprint)
            # tuple keyed by the stringified item id.
            mergedMap = defaultdict(tuple)
            for item in items:
                data = mergedMap[str(item.itemId)]
                if data:
                    count = int(data[0]) + int(item.quantity)
                    mergedMap[str(item.itemId)] = (count, bool(item.isBlueprint))
                else:
                    mergedMap[str(item.itemId)] = (int(item.quantity), bool(item.isBlueprint))
            items = []
            for key in mergedMap.keys():
                items.append(RustItem(translate_id_to_stack(key), key, int(mergedMap[key][0]), bool(mergedMap[key][1])))
        return RustContents(difference, bool(returnedData.hasProtection), items)
| 36.915825 | 265 | 0.639639 | import asyncio
from asyncio.futures import Future
from typing import List
from PIL import Image
from io import BytesIO
from importlib import resources
from datetime import datetime
from collections import defaultdict
from .base_rust_api import BaseRustSocket
from .structures import RustTime, RustInfo, RustMap, RustMarker, RustChatMessage, RustTeamInfo, RustTeamMember, RustTeamNote, RustEntityInfo, RustContents, RustItem
from .remote.rustplus_pb2 import *
from .remote import HeartBeat
from ..commands import CommandOptions
from ..exceptions import *
from ..utils import *
class RustSocket(BaseRustSocket):
def __init__(self, ip: str = None, port: str = None, steamid: int = None, playertoken: int = None, command_options : CommandOptions = None, raise_ratelimit_exception : bool = True, ratelimit_limit : int = 25, ratelimit_refill : int = 3) -> None:
super().__init__(ip=ip, port=port, steamid=steamid, playertoken=playertoken, command_options=command_options, raise_ratelimit_exception=raise_ratelimit_exception, ratelimit_limit=ratelimit_limit, ratelimit_refill=ratelimit_refill, heartbeat=HeartBeat(self))
def entity_event(self, eid):
def wrap_func(coro):
def entity_event_callback(future : Future):
try:
entity_info : RustEntityInfo = future.result()
self.remote.event_handler.register_event(eid, (coro, loop, entity_info.type))
except:
raise SmartDeviceRegistrationError("Not Found")
loop = asyncio.get_event_loop()
future = asyncio.run_coroutine_threadsafe(self.get_entity_info(eid), loop)
future.add_done_callback(entity_event_callback)
return wrap_func
async def get_time(self) -> RustTime:
await self._handle_ratelimit()
app_request = self._generate_protobuf()
app_request.getTime.CopyFrom(AppEmpty())
await self.remote.send_message(app_request)
response = await self.remote.get_response(app_request.seq, app_request)
return format_time(response)
async def send_team_message(self, message: str) -> None:
await self._handle_ratelimit(2)
app_send_message = AppSendMessage()
app_send_message.message = message
app_request = self._generate_protobuf()
app_request.sendTeamMessage.CopyFrom(app_send_message)
self.remote.ignored_responses.append(app_request.seq)
await self.remote.send_message(app_request)
async def get_info(self) -> RustInfo:
await self._handle_ratelimit()
app_request = self._generate_protobuf()
app_request.getInfo.CopyFrom(AppEmpty())
await self.remote.send_message(app_request)
response = await self.remote.get_response(app_request.seq, app_request)
return RustInfo(response.response.info)
async def get_team_chat(self) -> List[RustChatMessage]:
await self._handle_ratelimit()
app_request = self._generate_protobuf()
app_request.getTeamChat.CopyFrom(AppEmpty())
await self.remote.send_message(app_request)
messages = (await self.remote.get_response(app_request.seq, app_request)).response.teamChat.messages
return [RustChatMessage(message) for message in messages]
async def get_team_info(self):
await self._handle_ratelimit()
app_request = self._generate_protobuf()
app_request.getTeamInfo.CopyFrom(AppEmpty())
await self.remote.send_message(app_request)
app_message = await self.remote.get_response(app_request.seq, app_request)
return RustTeamInfo(app_message.response.teamInfo)
async def get_markers(self) -> List[RustMarker]:
await self._handle_ratelimit()
app_request = self._generate_protobuf()
app_request.getMapMarkers.CopyFrom(AppEmpty())
await self.remote.send_message(app_request)
app_message = await self.remote.get_response(app_request.seq, app_request)
return [RustMarker(marker) for marker in app_message.response.mapMarkers.markers]
async def get_raw_map_data(self) -> RustMap:
await self._handle_ratelimit(5)
app_request = self._generate_protobuf()
app_request.getMap.CopyFrom(AppEmpty())
await self.remote.send_message(app_request)
app_message = await self.remote.get_response(app_request.seq, app_request)
return RustMap(app_message.response.map)
async def get_map(self, add_icons: bool = False, add_events: bool = False, add_vending_machines: bool = False, override_images: dict = {}) -> Image:
MAPSIZE = int((await self.get_info()).size)
await self._handle_ratelimit(5 + 1 if [add_icons, add_events, add_vending_machines].count(True) >= 1 else 0)
app_request = self._generate_protobuf()
app_request.getMap.CopyFrom(AppEmpty())
await self.remote.send_message(app_request)
app_message = await self.remote.get_response(app_request.seq, app_request)
map = app_message.response.map
monuments = list(map.monuments)
try:
image = Image.open(BytesIO(map.jpgImage))
except:
raise ImageError("Invalid bytes for the image")
image = image.crop((500,500,map.height-500,map.width-500))
map = image.resize((MAPSIZE,MAPSIZE), Image.ANTIALIAS)
if add_icons or add_events or add_vending_machines:
mapMarkers = await self.get_markers()
if add_icons:
for monument in monuments:
if str(monument.token) == "DungeonBase":
continue
icon = convert_monument(monument.token, override_images)
if monument.token in override_images:
icon = icon.resize((150, 150))
if str(monument.token) == "train_tunnel_display_name":
icon = icon.resize((100, 125))
map.paste(icon, (format_cood(int(monument.x), int(monument.y), MAPSIZE)), icon)
if add_vending_machines:
with resources.path("rustplus.api.icons", "vending_machine.png") as path:
vendingMachine = Image.open(path).convert("RGBA")
vendingMachine = vendingMachine.resize((100, 100))
for marker in mapMarkers:
if add_events:
if marker.type == 2 or marker.type == 4 or marker.type == 5 or marker.type == 6:
icon = convert_marker(str(marker.type), marker.rotation)
if marker.type == 6:
x = marker.x
y = marker.y
if y > MAPSIZE: y = MAPSIZE
if y < 0: y = 100
if x > MAPSIZE: x = MAPSIZE - 75
if x < 0: x = 50
map.paste(icon, (int(x), MAPSIZE - int(y)), icon)
else:
map.paste(icon, (format_cood(int(marker.x), int(marker.y), MAPSIZE)), icon)
if add_vending_machines and marker.type == 3:
map.paste(vendingMachine, (int(marker.x) - 50, MAPSIZE - int(marker.y) - 50), vendingMachine)
return map.resize((2000, 2000), Image.ANTIALIAS)
async def get_entity_info(self, eid: int = None) -> RustEntityInfo:
await self._handle_ratelimit()
if eid is None:
raise ValueError("EID cannot be None")
app_request = self._generate_protobuf()
app_request.entityId = eid
app_request.getEntityInfo.CopyFrom(AppEmpty())
await self.remote.send_message(app_request)
app_message = await self.remote.get_response(app_request.seq, app_request)
return RustEntityInfo(app_message.response.entityInfo)
async def _update_smart_device(self, eid : int, value : bool) -> None:
await self._handle_ratelimit()
entityValue = AppSetEntityValue()
entityValue.value = value
app_request = self._generate_protobuf()
app_request.entityId = eid
app_request.setEntityValue.CopyFrom(entityValue)
self.remote.ignored_responses.append(app_request.seq)
await self.remote.send_message(app_request)
async def turn_on_smart_switch(self, eid: int = None) -> None:
if eid is None:
raise ValueError("EID cannot be None")
await self._update_smart_device(eid, True)
async def turn_off_smart_switch(self, eid: int = None) -> None:
if eid is None:
raise ValueError("EID cannot be None")
await self._update_smart_device(eid, False)
async def promote_to_team_leader(self, steamid: int = None) -> None:
if steamid is None:
raise ValueError("SteamID cannot be None")
await self._handle_ratelimit()
leaderPacket = AppPromoteToLeader()
leaderPacket.steamId = steamid
app_request = self._generate_protobuf()
app_request.promoteToLeader.CopyFrom(leaderPacket)
self.remote.ignored_responses.append(app_request.seq)
await self.remote.send_message(app_request)
async def get_current_events(self) -> List[RustMarker]:
return [marker for marker in (await self.get_markers()) if marker.type == 2 or marker.type == 4 or marker.type == 5 or marker.type == 6]
async def get_tc_storage_contents(self, eid: int = None, combine_stacks: bool = False) -> RustContents:
if eid is None:
raise ValueError("EID cannot be None")
returnedData = await self.get_entity_info(eid)
targetTime = datetime.utcfromtimestamp(int(returnedData.protectionExpiry))
difference = targetTime - datetime.utcnow()
items = []
for item in returnedData.items:
items.append(RustItem(translate_id_to_stack(item.itemId), item.itemId, item.quantity, item.itemIsBlueprint))
if combine_stacks:
mergedMap = defaultdict(tuple)
for item in items:
data = mergedMap[str(item.itemId)]
if data:
count = int(data[0]) + int(item.quantity)
mergedMap[str(item.itemId)] = (count, bool(item.isBlueprint))
else:
mergedMap[str(item.itemId)] = (int(item.quantity), bool(item.isBlueprint))
items = []
for key in mergedMap.keys():
items.append(RustItem(translate_id_to_stack(key), key, int(mergedMap[key][0]), bool(mergedMap[key][1])))
return RustContents(difference, bool(returnedData.hasProtection), items)
| true | true |
f73185ddbd238f0f843033c23cdf25d8077121c5 | 989 | py | Python | tests/sparseml/onnx/test_imports.py | clementpoiret/sparseml | 8442a6ef8ba11fb02f5e51472dd68b72438539b9 | [
"Apache-2.0"
] | 922 | 2021-02-04T17:51:54.000Z | 2022-03-31T20:49:26.000Z | tests/sparseml/onnx/test_imports.py | clementpoiret/sparseml | 8442a6ef8ba11fb02f5e51472dd68b72438539b9 | [
"Apache-2.0"
] | 197 | 2021-02-04T22:17:21.000Z | 2022-03-31T13:58:55.000Z | tests/sparseml/onnx/test_imports.py | clementpoiret/sparseml | 8442a6ef8ba11fb02f5e51472dd68b72438539b9 | [
"Apache-2.0"
] | 80 | 2021-02-04T22:20:14.000Z | 2022-03-30T19:36:15.000Z | # Copyright (c) 2021 - present / Neuralmagic, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def test_imports():
    """Smoke test: the sparseml.onnx package exposes its public API names
    and importing them does not raise."""
    # flake8: noqa
    from sparseml.onnx import (
        check_onnx_install,
        check_onnxruntime_install,
        detect_framework,
        framework_info,
        is_supported,
        onnx,
        onnx_err,
        onnxruntime,
        onnxruntime_err,
        require_onnx,
        require_onnxruntime,
        sparsification_info,
    )
| 30.90625 | 76 | 0.695652 |
def test_imports():
from sparseml.onnx import (
check_onnx_install,
check_onnxruntime_install,
detect_framework,
framework_info,
is_supported,
onnx,
onnx_err,
onnxruntime,
onnxruntime_err,
require_onnx,
require_onnxruntime,
sparsification_info,
)
| true | true |
f73186d8775e20db4e181ff54bb6713045cc02fc | 7,745 | py | Python | demo/demo.py | hummat/detectron2 | ef2f4df474b4a07049cada4793392e8e36c3e746 | [
"Apache-2.0"
] | null | null | null | demo/demo.py | hummat/detectron2 | ef2f4df474b4a07049cada4793392e8e36c3e746 | [
"Apache-2.0"
] | null | null | null | demo/demo.py | hummat/detectron2 | ef2f4df474b4a07049cada4793392e8e36c3e746 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates.
import argparse
import glob
import multiprocessing as mp
import numpy as np
import os
import tempfile
import time
import warnings
import cv2
import tqdm
from detectron2.config import get_cfg
from detectron2.data.detection_utils import read_image
from detectron2.utils.logger import setup_logger
from predictor import VisualizationDemo
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.data.datasets import load_coco_json
# Hard-coded locations of the COCO-format annotations and image root for the
# custom "porta_filter" dataset registered below.
path_to_coco_json = "/home/matthias/Data/Ubuntu/data/datasets/porta_filter/front3d/coco_data/coco_annotations.json"
path_to_images = "/home/matthias/Data/Ubuntu/data/datasets/porta_filter/front3d"
# path_to_config_yaml = "/home/matthias/Data/Ubuntu/git/detectron2/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml"
# Register the dataset with detectron2's catalogs so configs can refer to it
# by the name "porta_filter"; the lambda defers loading until first use.
DatasetCatalog.register("porta_filter", lambda: load_coco_json(path_to_coco_json, path_to_images))
MetadataCatalog.get("porta_filter").set(thing_classes=["porta filter"], json_file=path_to_coco_json, image_root=path_to_images)
# constants
WINDOW_NAME = "COCO detections"
def setup_cfg(args):
    """Create a frozen detectron2 config from the parsed CLI arguments.

    Merges the YAML config file and any trailing KEY VALUE overrides, then
    applies the CLI confidence threshold to all builtin model heads.
    """
    cfg = get_cfg()
    # To use demo for Panoptic-DeepLab, please uncomment the following two lines.
    # from detectron2.projects.panoptic_deeplab import add_panoptic_deeplab_config  # noqa
    # add_panoptic_deeplab_config(cfg)
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    # The same score threshold is applied to every builtin model head.
    threshold = args.confidence_threshold
    cfg.MODEL.RETINANET.SCORE_THRESH_TEST = threshold
    cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = threshold
    cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = threshold
    cfg.freeze()
    return cfg
def get_parser():
    """Build the command-line argument parser for the demo script."""
    p = argparse.ArgumentParser(description="Detectron2 demo for builtin configs")
    p.add_argument(
        "--config-file",
        default="configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml",
        metavar="FILE",
        help="path to config file",
    )
    # Input sources; mutual exclusivity is enforced in the __main__ block.
    p.add_argument("--webcam", action="store_true", help="Take inputs from webcam.")
    p.add_argument("--video-input", help="Path to video file.")
    p.add_argument(
        "--input",
        nargs="+",
        help="A list of space separated input images; "
        "or a single glob pattern such as 'directory/*.jpg'",
    )
    p.add_argument(
        "--output",
        help="A file or directory to save output visualizations. "
        "If not given, will show output in an OpenCV window.",
    )
    p.add_argument(
        "--confidence-threshold",
        type=float,
        default=0.5,
        help="Minimum score for instance predictions to be shown",
    )
    # REMAINDER: everything after --opts is forwarded verbatim as config overrides.
    p.add_argument(
        "--opts",
        help="Modify config options using the command-line 'KEY VALUE' pairs",
        default=[],
        nargs=argparse.REMAINDER,
    )
    return p
def test_opencv_video_format(codec, file_ext):
    """Return True if OpenCV can write a video with the given codec/extension.

    Writes 30 tiny black frames into a temporary file; cv2.VideoWriter fails
    silently when a codec is unavailable, so success is detected by checking
    whether the output file actually materialized.
    """
    with tempfile.TemporaryDirectory(prefix="video_format_test") as tmpdir:
        probe_path = os.path.join(tmpdir, "test_file" + file_ext)
        writer = cv2.VideoWriter(
            filename=probe_path,
            fourcc=cv2.VideoWriter_fourcc(*codec),
            fps=30.0,
            frameSize=(10, 10),
            isColor=True,
        )
        for _ in range(30):
            writer.write(np.zeros((10, 10, 3), np.uint8))
        writer.release()
        return os.path.isfile(probe_path)
if __name__ == "__main__":
    # "spawn" avoids inheriting state (e.g. CUDA context) into worker processes.
    mp.set_start_method("spawn", force=True)
    args = get_parser().parse_args()
    setup_logger(name="fvcore")
    logger = setup_logger()
    logger.info("Arguments: " + str(args))

    cfg = setup_cfg(args)
    demo = VisualizationDemo(cfg)

    if args.input:
        if len(args.input) == 1:
            # A single argument may be a glob pattern; expand it to a file list.
            args.input = glob.glob(os.path.expanduser(args.input[0]))
            assert args.input, "The input path(s) was not found"
        for path in tqdm.tqdm(args.input, disable=not args.output):
            # use PIL, to be consistent with evaluation
            img = read_image(path, format="BGR")
            start_time = time.time()
            predictions, visualized_output = demo.run_on_image(img)
            logger.info(
                "{}: {} in {:.2f}s".format(
                    path,
                    "detected {} instances".format(len(predictions["instances"]))
                    if "instances" in predictions
                    else "finished",
                    time.time() - start_time,
                )
            )
            if args.output:
                if os.path.isdir(args.output):
                    assert os.path.isdir(args.output), args.output
                    out_filename = os.path.join(args.output, os.path.basename(path))
                else:
                    assert len(args.input) == 1, "Please specify a directory with args.output"
                    out_filename = args.output
                visualized_output.save(out_filename)
            else:
                cv2.namedWindow(WINDOW_NAME, cv2.WINDOW_NORMAL)
                cv2.imshow(WINDOW_NAME, visualized_output.get_image()[:, :, ::-1])
                if cv2.waitKey(0) == 27:
                    break  # esc to quit
    elif args.webcam:
        assert args.input is None, "Cannot have both --input and --webcam!"
        assert args.output is None, "output not yet supported with --webcam!"
        cam = cv2.VideoCapture(0)
        for vis in tqdm.tqdm(demo.run_on_video(cam)):
            cv2.namedWindow(WINDOW_NAME, cv2.WINDOW_NORMAL)
            cv2.imshow(WINDOW_NAME, vis)
            if cv2.waitKey(1) == 27:
                break  # esc to quit
        cam.release()
        cv2.destroyAllWindows()
    elif args.video_input:
        video = cv2.VideoCapture(args.video_input)
        width = int(video.get(cv2.CAP_PROP_FRAME_WIDTH))
        height = int(video.get(cv2.CAP_PROP_FRAME_HEIGHT))
        frames_per_second = video.get(cv2.CAP_PROP_FPS)
        num_frames = int(video.get(cv2.CAP_PROP_FRAME_COUNT))
        basename = os.path.basename(args.video_input)
        codec, file_ext = (
            ("x264", ".mkv") if test_opencv_video_format("x264", ".mkv") else ("mp4v", ".mp4")
        )
        # BUGFIX: `codec` holds "mp4v" (no leading dot), so the original
        # comparison `codec == ".mp4v"` could never be true and the fallback
        # warning was never emitted.
        if codec == "mp4v":
            warnings.warn("x264 codec not available, switching to mp4v")
        if args.output:
            if os.path.isdir(args.output):
                output_fname = os.path.join(args.output, basename)
                output_fname = os.path.splitext(output_fname)[0] + file_ext
            else:
                output_fname = args.output
            assert not os.path.isfile(output_fname), output_fname
            output_file = cv2.VideoWriter(
                filename=output_fname,
                # some installation of opencv may not support x264 (due to its license),
                # you can try other format (e.g. MPEG)
                fourcc=cv2.VideoWriter_fourcc(*codec),
                fps=float(frames_per_second),
                frameSize=(width, height),
                isColor=True,
            )
        assert os.path.isfile(args.video_input)
        for vis_frame in tqdm.tqdm(demo.run_on_video(video), total=num_frames):
            if args.output:
                output_file.write(vis_frame)
            else:
                cv2.namedWindow(basename, cv2.WINDOW_NORMAL)
                cv2.imshow(basename, vis_frame)
                if cv2.waitKey(1) == 27:
                    break  # esc to quit
        video.release()
        if args.output:
            output_file.release()
        else:
            cv2.destroyAllWindows()
| 39.116162 | 128 | 0.628922 |
import argparse
import glob
import multiprocessing as mp
import numpy as np
import os
import tempfile
import time
import warnings
import cv2
import tqdm
from detectron2.config import get_cfg
from detectron2.data.detection_utils import read_image
from detectron2.utils.logger import setup_logger
from predictor import VisualizationDemo
from detectron2.data import DatasetCatalog, MetadataCatalog
from detectron2.data.datasets import load_coco_json
# Hard-coded locations of the COCO-format annotations and image root for the
# custom "porta_filter" dataset registered below.
path_to_coco_json = "/home/matthias/Data/Ubuntu/data/datasets/porta_filter/front3d/coco_data/coco_annotations.json"
path_to_images = "/home/matthias/Data/Ubuntu/data/datasets/porta_filter/front3d"
# Register the dataset with detectron2's catalogs so configs can refer to it
# by the name "porta_filter"; the lambda defers loading until first use.
DatasetCatalog.register("porta_filter", lambda: load_coco_json(path_to_coco_json, path_to_images))
MetadataCatalog.get("porta_filter").set(thing_classes=["porta filter"], json_file=path_to_coco_json, image_root=path_to_images)
# Title of the cv2 display window used when no --output is given.
WINDOW_NAME = "COCO detections"
def setup_cfg(args):
    """Create a frozen detectron2 config from the parsed CLI arguments."""
    cfg = get_cfg()
    # Merge the YAML config file, then any trailing KEY VALUE overrides.
    cfg.merge_from_file(args.config_file)
    cfg.merge_from_list(args.opts)
    # Apply the same CLI confidence threshold to every builtin model head.
    cfg.MODEL.RETINANET.SCORE_THRESH_TEST = args.confidence_threshold
    cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = args.confidence_threshold
    cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = args.confidence_threshold
    cfg.freeze()
    return cfg
def get_parser():
    """Build the command-line argument parser for the demo script."""
    parser = argparse.ArgumentParser(description="Detectron2 demo for builtin configs")
    parser.add_argument(
        "--config-file",
        default="configs/quick_schedules/mask_rcnn_R_50_FPN_inference_acc_test.yaml",
        metavar="FILE",
        help="path to config file",
    )
    # Input sources; mutual exclusivity is enforced in the __main__ block.
    parser.add_argument("--webcam", action="store_true", help="Take inputs from webcam.")
    parser.add_argument("--video-input", help="Path to video file.")
    parser.add_argument(
        "--input",
        nargs="+",
        help="A list of space separated input images; "
        "or a single glob pattern such as 'directory/*.jpg'",
    )
    parser.add_argument(
        "--output",
        help="A file or directory to save output visualizations. "
        "If not given, will show output in an OpenCV window.",
    )
    parser.add_argument(
        "--confidence-threshold",
        type=float,
        default=0.5,
        help="Minimum score for instance predictions to be shown",
    )
    # REMAINDER: everything after --opts is forwarded verbatim as config overrides.
    parser.add_argument(
        "--opts",
        help="Modify config options using the command-line 'KEY VALUE' pairs",
        default=[],
        nargs=argparse.REMAINDER,
    )
    return parser
def test_opencv_video_format(codec, file_ext):
    """Return True if OpenCV can write a video with the given codec/extension.

    Writes 30 tiny black frames into a temporary file; cv2.VideoWriter fails
    silently when a codec is unavailable, so success is detected by checking
    whether the output file actually materialized.
    """
    with tempfile.TemporaryDirectory(prefix="video_format_test") as dir:
        filename = os.path.join(dir, "test_file" + file_ext)
        writer = cv2.VideoWriter(
            filename=filename,
            fourcc=cv2.VideoWriter_fourcc(*codec),
            fps=float(30),
            frameSize=(10, 10),
            isColor=True,
        )
        # Side-effect list comprehension: write 30 identical 10x10 frames.
        [writer.write(np.zeros((10, 10, 3), np.uint8)) for _ in range(30)]
        writer.release()
        if os.path.isfile(filename):
            return True
        return False
if __name__ == "__main__":
    # "spawn" avoids inheriting state (e.g. CUDA context) into worker processes.
    mp.set_start_method("spawn", force=True)
    args = get_parser().parse_args()
    setup_logger(name="fvcore")
    logger = setup_logger()
    logger.info("Arguments: " + str(args))

    cfg = setup_cfg(args)
    demo = VisualizationDemo(cfg)

    if args.input:
        if len(args.input) == 1:
            # A single argument may be a glob pattern; expand it to a file list.
            args.input = glob.glob(os.path.expanduser(args.input[0]))
            assert args.input, "The input path(s) was not found"
        for path in tqdm.tqdm(args.input, disable=not args.output):
            img = read_image(path, format="BGR")
            start_time = time.time()
            predictions, visualized_output = demo.run_on_image(img)
            logger.info(
                "{}: {} in {:.2f}s".format(
                    path,
                    "detected {} instances".format(len(predictions["instances"]))
                    if "instances" in predictions
                    else "finished",
                    time.time() - start_time,
                )
            )
            if args.output:
                if os.path.isdir(args.output):
                    assert os.path.isdir(args.output), args.output
                    out_filename = os.path.join(args.output, os.path.basename(path))
                else:
                    assert len(args.input) == 1, "Please specify a directory with args.output"
                    out_filename = args.output
                visualized_output.save(out_filename)
            else:
                cv2.namedWindow(WINDOW_NAME, cv2.WINDOW_NORMAL)
                cv2.imshow(WINDOW_NAME, visualized_output.get_image()[:, :, ::-1])
                if cv2.waitKey(0) == 27:
                    break  # esc to quit
    elif args.webcam:
        assert args.input is None, "Cannot have both --input and --webcam!"
        assert args.output is None, "output not yet supported with --webcam!"
        cam = cv2.VideoCapture(0)
        for vis in tqdm.tqdm(demo.run_on_video(cam)):
            cv2.namedWindow(WINDOW_NAME, cv2.WINDOW_NORMAL)
            cv2.imshow(WINDOW_NAME, vis)
            if cv2.waitKey(1) == 27:
                break  # esc to quit
        cam.release()
        cv2.destroyAllWindows()
    elif args.video_input:
        video = cv2.VideoCapture(args.video_input)
        width = int(video.get(cv2.CAP_PROP_FRAME_WIDTH))
        height = int(video.get(cv2.CAP_PROP_FRAME_HEIGHT))
        frames_per_second = video.get(cv2.CAP_PROP_FPS)
        num_frames = int(video.get(cv2.CAP_PROP_FRAME_COUNT))
        basename = os.path.basename(args.video_input)
        codec, file_ext = (
            ("x264", ".mkv") if test_opencv_video_format("x264", ".mkv") else ("mp4v", ".mp4")
        )
        # BUGFIX: `codec` holds "mp4v" (no leading dot), so the original
        # comparison `codec == ".mp4v"` could never be true and the fallback
        # warning was never emitted.
        if codec == "mp4v":
            warnings.warn("x264 codec not available, switching to mp4v")
        if args.output:
            if os.path.isdir(args.output):
                output_fname = os.path.join(args.output, basename)
                output_fname = os.path.splitext(output_fname)[0] + file_ext
            else:
                output_fname = args.output
            assert not os.path.isfile(output_fname), output_fname
            output_file = cv2.VideoWriter(
                filename=output_fname,
                fourcc=cv2.VideoWriter_fourcc(*codec),
                fps=float(frames_per_second),
                frameSize=(width, height),
                isColor=True,
            )
        assert os.path.isfile(args.video_input)
        for vis_frame in tqdm.tqdm(demo.run_on_video(video), total=num_frames):
            if args.output:
                output_file.write(vis_frame)
            else:
                cv2.namedWindow(basename, cv2.WINDOW_NORMAL)
                cv2.imshow(basename, vis_frame)
                if cv2.waitKey(1) == 27:
                    break  # esc to quit
        video.release()
        if args.output:
            output_file.release()
        else:
            cv2.destroyAllWindows()
| true | true |
f731877388c6e4f2d7121cb542ca1fe6ff75cc62 | 1,466 | py | Python | xcc/util.py | BastianZim/xanadu-cloud-client | 3a2d9d5373d90339a1047ee939bacef1bd4019ed | [
"Apache-2.0"
] | 3 | 2021-11-12T21:44:35.000Z | 2022-02-10T15:07:20.000Z | xcc/util.py | BastianZim/xanadu-cloud-client | 3a2d9d5373d90339a1047ee939bacef1bd4019ed | [
"Apache-2.0"
] | 2 | 2021-11-08T16:39:56.000Z | 2022-01-20T14:47:29.000Z | xcc/util.py | BastianZim/xanadu-cloud-client | 3a2d9d5373d90339a1047ee939bacef1bd4019ed | [
"Apache-2.0"
] | 2 | 2021-12-01T19:07:05.000Z | 2022-01-20T14:26:47.000Z | """
This module contains utilities which are shared between other modules.
"""
from typing import Any, Callable
class cached_property:  # pylint: disable=invalid-name
    """Descriptor caching a method's return value per instance.

    The wrapped method runs at most once per instance; subsequent attribute
    reads return the stored result. Assigning to the attribute replaces the
    cached value, and deleting it clears the cache for that instance.

    Args:
        func (Callable[[Any], Any]): class method whose value should be cached

    .. note::
        Each class instance is associated with an independent cache entry.

    .. warning::
        Unlike ``functools.cached_property``, this descriptor is *not* safe for
        concurrent use.
    """

    def __init__(self, func: Callable[[Any], Any]) -> None:
        self.func = func
        self.caches = {}
        self.__doc__ = func.__doc__

    def __get__(self, instance: Any, _) -> Any:
        """Return the cached value for *instance*, computing it on first access."""
        if instance is None:
            # Class-level access (getattr() / Sphinx docs): expose the descriptor.
            return self
        try:
            return self.caches[instance]
        except KeyError:
            result = self.func(instance)
            self.caches[instance] = result
            return result

    def __set__(self, instance: Any, value: Any) -> None:
        """Overwrite the cached value associated with *instance*."""
        self.caches[instance] = value

    def __delete__(self, instance: Any) -> None:
        """Drop the cached value associated with *instance*, if any."""
        self.caches.pop(instance, None)
| 31.191489 | 79 | 0.645975 |
from typing import Any, Callable
class cached_property:
    """Descriptor that computes a method's value once per instance and caches it.

    NOTE(review): the cache is keyed by the instance object itself, so
    instances must be hashable and are kept alive as long as the class (and
    this descriptor) exists; not safe for concurrent use.
    """

    def __init__(self, func: Callable[[Any], Any]) -> None:
        self.func = func
        self.caches = {}
        self.__doc__ = func.__doc__

    def __get__(self, instance: Any, _) -> Any:
        """Return the cached value for this instance, computing it on first access."""
        if instance is None:
            # Class-level access (e.g. getattr or doc generation): return the descriptor.
            return self
        if instance not in self.caches:
            self.caches[instance] = self.func(instance)
        return self.caches[instance]

    def __set__(self, instance: Any, value: Any) -> None:
        """Replace the cached value for this instance."""
        self.caches[instance] = value

    def __delete__(self, instance: Any) -> None:
        """Discard the cached value for this instance, if present."""
        self.caches.pop(instance, None)
| true | true |
f73187898e8bb312c2b27b9f5d0d2fa9a2a19d87 | 79,267 | py | Python | flaml/model.py | rserran/FLAML | 7d6822aa40883550e72c4ee24adb765c6e937ce7 | [
"MIT"
] | null | null | null | flaml/model.py | rserran/FLAML | 7d6822aa40883550e72c4ee24adb765c6e937ce7 | [
"MIT"
] | null | null | null | flaml/model.py | rserran/FLAML | 7d6822aa40883550e72c4ee24adb765c6e937ce7 | [
"MIT"
] | null | null | null | # !
# * Copyright (c) FLAML authors. All rights reserved.
# * Licensed under the MIT License. See LICENSE file in the
# * project root for license information.
from contextlib import contextmanager
from functools import partial
import signal
import os
from typing import Callable, List
import numpy as np
import time
from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier
from sklearn.ensemble import ExtraTreesRegressor, ExtraTreesClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.dummy import DummyClassifier, DummyRegressor
from scipy.sparse import issparse
import logging
import shutil
from pandas import DataFrame, Series, to_datetime
import sys
import math
from . import tune
from .data import (
group_counts,
CLASSIFICATION,
TS_FORECASTREGRESSION,
TS_TIMESTAMP_COL,
TS_VALUE_COL,
SEQCLASSIFICATION,
SEQREGRESSION,
TOKENCLASSIFICATION,
SUMMARIZATION,
NLG_TASKS,
MULTICHOICECLASSIFICATION,
)
try:
    import psutil  # optional: used to size the training memory limit from free RAM
except ImportError:
    psutil = None
try:
    import resource  # Unix-only: used to enforce an address-space (RLIMIT_AS) limit
except ImportError:
    resource = None
logger = logging.getLogger("flaml.automl")
# Fraction of currently-available memory to keep free when capping training memory.
FREE_MEM_RATIO = 0.2
def TimeoutHandler(sig, frame):
    """SIGALRM handler: abort the interrupted work by raising TimeoutError.

    The received signal number and interrupted stack frame are attached to the
    exception's args for diagnostics.
    """
    raise TimeoutError(sig, frame)
@contextmanager
def limit_resource(memory_limit, time_limit):
    """Context manager that soft-limits address space and wall-clock time.

    Args:
        memory_limit: Maximum address-space size in bytes; values <= 0 disable
            the memory limit.
        time_limit: Wall-clock budget in seconds; None disables the alarm. On
            expiry SIGALRM fires and TimeoutHandler raises TimeoutError inside
            the managed block.

    NOTE(review): depends on the Unix-only `resource` module and SIGALRM; the
    module imports `resource` under try/except, so this assumes it is
    available when memory_limit > 0 — confirm at call sites.
    """
    if memory_limit > 0:
        soft, hard = resource.getrlimit(resource.RLIMIT_AS)
        # Only tighten the limit: apply when the current soft limit is
        # unlimited (and the request fits under the hard limit), or when the
        # requested limit is below the current soft limit.
        if soft < 0 and (hard < 0 or memory_limit <= hard) or memory_limit < soft:
            try:
                resource.setrlimit(resource.RLIMIT_AS, (int(memory_limit), hard))
            except ValueError:
                # According to https://bugs.python.org/issue40518, it's a mac-specific error.
                pass
    main_thread = False
    if time_limit is not None:
        try:
            # signal.signal raises ValueError outside the main thread; in that
            # case the time limit is silently skipped.
            signal.signal(signal.SIGALRM, TimeoutHandler)
            signal.alarm(int(time_limit) or 1)
            main_thread = True
        except ValueError:
            pass
    try:
        yield
    finally:
        # Always cancel the pending alarm and restore the original memory limit.
        if main_thread:
            signal.alarm(0)
        if memory_limit > 0:
            resource.setrlimit(resource.RLIMIT_AS, (soft, hard))
class BaseEstimator:
    """The abstract class for all learners.

    Typical examples:
    * XGBoostEstimator: for regression.
    * XGBoostSklearnEstimator: for classification.
    * LGBMEstimator, RandomForestEstimator, LRL1Classifier, LRL2Classifier:
      for both regression and classification.
    """

    def __init__(self, task="binary", **config):
        """Constructor.

        Args:
            task: A string of the task type, one of
                'binary', 'multiclass', 'regression', 'rank', 'seq-classification',
                'seq-regression', 'token-classification', 'multichoice-classification',
                'summarization', 'ts_forecast', 'ts_forecast_classification'.
            config: A dictionary containing the hyperparameter names, 'n_jobs' as keys.
                n_jobs is the number of parallel threads.
        """
        self._task = task
        # config2params may be overridden by subclasses to translate the
        # search-space config into the underlying estimator's parameters.
        self.params = self.config2params(config)
        self.estimator_class = self._model = None
        if "_estimator_type" in config:
            self._estimator_type = self.params.pop("_estimator_type")
        else:
            # Infer the sklearn estimator type from the task.
            self._estimator_type = (
                "classifier" if task in CLASSIFICATION else "regressor"
            )

    def get_params(self, deep=False):
        # Mirror sklearn's get_params API so the wrapper can be cloned.
        params = self.params.copy()
        params["task"] = self._task
        if hasattr(self, "_estimator_type"):
            params["_estimator_type"] = self._estimator_type
        return params

    @property
    def classes_(self):
        # Class labels of the fitted underlying model (classification only).
        return self._model.classes_

    @property
    def n_features_in_(self):
        return self._model.n_features_in_

    @property
    def model(self):
        """Trained model after fit() is called, or None before fit() is called."""
        return self._model

    @property
    def estimator(self):
        """Trained model after fit() is called, or None before fit() is called."""
        return self._model

    def _preprocess(self, X):
        # Hook for subclasses; identity by default.
        return X

    def _fit(self, X_train, y_train, **kwargs):
        current_time = time.time()
        if "groups" in kwargs:
            # Copy before popping so the caller's kwargs are not mutated.
            kwargs = kwargs.copy()
            groups = kwargs.pop("groups")
            if self._task == "rank":
                kwargs["group"] = group_counts(groups)
                # groups_val = kwargs.get('groups_val')
                # if groups_val is not None:
                #     kwargs['eval_group'] = [group_counts(groups_val)]
                #     kwargs['eval_set'] = [
                #         (kwargs['X_val'], kwargs['y_val'])]
                #     kwargs['verbose'] = False
                #     del kwargs['groups_val'], kwargs['X_val'], kwargs['y_val']
        X_train = self._preprocess(X_train)
        model = self.estimator_class(**self.params)
        if logger.level == logging.DEBUG:
            # xgboost 1.6 doesn't display all the params in the model str
            logger.debug(f"flaml.model - {model} fit started with params {self.params}")
        model.fit(X_train, y_train, **kwargs)
        if logger.level == logging.DEBUG:
            logger.debug(f"flaml.model - {model} fit finished")
        train_time = time.time() - current_time
        self._model = model
        return train_time

    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Train the model from given training data.

        Args:
            X_train: A numpy array or a dataframe of training data in shape n*m.
            y_train: A numpy array or a series of labels in shape n*1.
            budget: A float of the time budget in seconds.

        Returns:
            train_time: A float of the training time in seconds.
        """
        # Only apply memory/time limiting when the subclass opts in via
        # `limit_resource`, `resource` is importable, and there is either a
        # time budget or psutil to size a memory cap.
        if (
            getattr(self, "limit_resource", None)
            and resource is not None
            and (budget is not None or psutil is not None)
        ):
            start_time = time.time()
            mem = psutil.virtual_memory() if psutil is not None else None
            try:
                # Cap address space at current usage plus a share of free RAM.
                with limit_resource(
                    mem.available * (1 - FREE_MEM_RATIO)
                    + psutil.Process(os.getpid()).memory_info().rss
                    if mem is not None
                    else -1,
                    budget,
                ):
                    train_time = self._fit(X_train, y_train, **kwargs)
            except (MemoryError, TimeoutError) as e:
                # Out of memory/time: fall back to a trivial dummy model so
                # the AutoML loop can continue instead of crashing.
                logger.warning(f"{e.__class__} {e}")
                if self._task in CLASSIFICATION:
                    model = DummyClassifier()
                else:
                    model = DummyRegressor()
                X_train = self._preprocess(X_train)
                model.fit(X_train, y_train)
                self._model = model
                train_time = time.time() - start_time
        else:
            train_time = self._fit(X_train, y_train, **kwargs)
        return train_time

    def predict(self, X, **kwargs):
        """Predict label from features.

        Args:
            X: A numpy array or a dataframe of featurized instances, shape n*m.

        Returns:
            A numpy array of shape n*1.
            Each element is the label for a instance.
        """
        if self._model is not None:
            X = self._preprocess(X)
            return self._model.predict(X)
        else:
            # Not fitted: warn and return a constant prediction of ones.
            logger.warning(
                "Estimator is not fit yet. Please run fit() before predict()."
            )
            return np.ones(X.shape[0])

    def predict_proba(self, X, **kwargs):
        """Predict the probability of each class from features.

        Only works for classification problems

        Args:
            X: A numpy array of featurized instances, shape n*m.

        Returns:
            A numpy array of shape n*c. c is the # classes.
            Each element at (i,j) is the probability for instance i to be in
            class j.
        """
        assert self._task in CLASSIFICATION, "predict_proba() only for classification."
        X = self._preprocess(X)
        return self._model.predict_proba(X)

    def score(self, X_val: DataFrame, y_val: Series, **kwargs):
        """Report the evaluation score of a trained estimator.

        Args:
            X_val: A pandas dataframe of the validation input data.
            y_val: A pandas series of the validation label.
            kwargs: keyword argument of the evaluation function, for example:
                - metric: A string of the metric name or a function
                e.g., 'accuracy', 'roc_auc', 'roc_auc_ovr', 'roc_auc_ovo',
                'f1', 'micro_f1', 'macro_f1', 'log_loss', 'mae', 'mse', 'r2',
                'mape'. Default is 'auto'.
                If metric is given, the score will report the user specified metric.
                If metric is not given, the metric is set to accuracy for classification and r2
                for regression.
                You can also pass a customized metric function, for examples on how to pass a
                customized metric function, please check
                [test/nlp/test_autohf_custom_metric.py](https://github.com/microsoft/FLAML/blob/main/test/nlp/test_autohf_custom_metric.py) and
                [test/automl/test_multiclass.py](https://github.com/microsoft/FLAML/blob/main/test/automl/test_multiclass.py).

        Returns:
            The evaluation score on the validation dataset.
        """
        from .ml import metric_loss_score
        from .ml import is_min_metric

        if self._model is not None:
            if self._task == "rank":
                raise NotImplementedError(
                    "AutoML.score() is not implemented for ranking"
                )
            else:
                # NOTE(review): predict() preprocesses X again after this call;
                # assumes _preprocess is idempotent — confirm for subclasses.
                X_val = self._preprocess(X_val)
                metric = kwargs.get("metric", None)
                if metric:
                    y_pred = self.predict(X_val, **kwargs)
                    # Minimization metrics report the loss directly; for
                    # maximization metrics report 1 - loss.
                    if is_min_metric(metric):
                        return metric_loss_score(metric, y_pred, y_val)
                    else:
                        return 1.0 - metric_loss_score(metric, y_pred, y_val)
                else:
                    return self._model.score(X_val, y_val, **kwargs)
        else:
            logger.warning(
                "Estimator is not fit yet. Please run fit() before predict()."
            )
            return 0.0

    def cleanup(self):
        # Release the trained model to free memory.
        del self._model
        self._model = None

    @classmethod
    def search_space(cls, data_size, task, **params):
        """[required method] search space.

        Args:
            data_size: A tuple of two integers, number of rows and columns.
            task: A str of the task type, e.g., "binary", "multiclass", "regression".

        Returns:
            A dictionary of the search space.
            Each key is the name of a hyperparameter, and value is a dict with
                its domain (required) and low_cost_init_value, init_value,
                cat_hp_cost (if applicable).
                e.g., ```{'domain': tune.randint(lower=1, upper=10), 'init_value': 1}```.
        """
        return {}

    @classmethod
    def size(cls, config: dict) -> float:
        """[optional method] memory size of the estimator in bytes.

        Args:
            config: A dict of the hyperparameter config.

        Returns:
            A float of the memory size required by the estimator to train the
            given config.
        """
        return 1.0

    @classmethod
    def cost_relative2lgbm(cls) -> float:
        """[optional method] relative cost compared to lightgbm."""
        return 1.0

    @classmethod
    def init(cls):
        """[optional method] initialize the class."""
        pass

    def config2params(self, config: dict) -> dict:
        """[optional method] config dict to params dict

        Args:
            config: A dict of the hyperparameter config.

        Returns:
            A dict that will be passed to self.estimator_class's constructor.
        """
        params = config.copy()
        # FLAML_sample_size is a search-space control knob, not an estimator
        # parameter, so it must not be forwarded to the constructor.
        if "FLAML_sample_size" in params:
            params.pop("FLAML_sample_size")
        return params
class TransformersEstimator(BaseEstimator):
"""The class for fine-tuning language models, using huggingface transformers API."""
ITER_HP = "global_max_steps"
def __init__(self, task="seq-classification", **config):
super().__init__(task, **config)
import uuid
self.trial_id = str(uuid.uuid1().hex)[:8]
if task not in NLG_TASKS: # TODO: not in NLG_TASKS
from .nlp.huggingface.training_args import (
TrainingArgumentsForAuto as TrainingArguments,
)
else:
from .nlp.huggingface.training_args import (
Seq2SeqTrainingArgumentsForAuto as TrainingArguments,
)
self._TrainingArguments = TrainingArguments
@staticmethod
def _join(X_train, y_train, task):
y_train = DataFrame(y_train, index=X_train.index)
y_train.columns = ["label"] if task != TOKENCLASSIFICATION else ["labels"]
train_df = X_train.join(y_train)
return train_df
@classmethod
def search_space(cls, data_size, task, **params):
search_space_dict = {
"learning_rate": {
"domain": tune.loguniform(lower=1e-6, upper=1e-3),
"init_value": 1e-5,
},
"num_train_epochs": {
"domain": tune.loguniform(lower=0.1, upper=10.0),
"init_value": 3.0, # to be consistent with roberta
},
"per_device_train_batch_size": {
"domain": tune.choice([4, 8, 16, 32]),
"init_value": 32,
},
"warmup_ratio": {
"domain": tune.uniform(lower=0.0, upper=0.3),
"init_value": 0.0,
},
"weight_decay": {
"domain": tune.uniform(lower=0.0, upper=0.3),
"init_value": 0.0,
},
"adam_epsilon": {
"domain": tune.loguniform(lower=1e-8, upper=1e-6),
"init_value": 1e-6,
},
"seed": {"domain": tune.choice(list(range(40, 45))), "init_value": 42},
"global_max_steps": {
"domain": sys.maxsize,
"init_value": sys.maxsize,
},
}
return search_space_dict
@property
def checkpoint_freq(self):
return (
int(
min(self._training_args.num_train_epochs, 1)
* len(self._X_train)
/ self._training_args.per_device_train_batch_size
/ self._training_args.ckpt_per_epoch
)
+ 1
)
@property
def fp16(self):
return self._kwargs.get("gpu_per_trial") and self._training_args.fp16
@property
def no_cuda(self):
return not self._kwargs.get("gpu_per_trial")
def _set_training_args(self, **kwargs):
from .nlp.utils import date_str, Counter
for (key, val) in kwargs.items():
assert key not in self.params, (
"Since {} is in the search space, it cannot exist in 'custom_fit_kwargs' at the same time."
"If you need to fix the value of {} to {}, the only way is to add a single-value domain in the search "
"space by adding:\n '{}': {{ 'domain': {} }} to 'custom_hp'. For example:"
'automl_settings["custom_hp"] = {{ "transformer": {{ "model_path": {{ "domain" : '
'"google/electra-small-discriminator" }} }} }}'.format(
key, key, val, key, val
)
)
"""
If use has specified any custom args for TrainingArguments, update these arguments
"""
self._training_args = self._TrainingArguments(**kwargs)
"""
Update the attributes in TrainingArguments with self.params values
"""
for key, val in self.params.items():
if hasattr(self._training_args, key):
setattr(self._training_args, key, val)
"""
Update the attributes in TrainingArguments that depends on the values of self.params
"""
local_dir = os.path.join(
self._training_args.output_dir, "train_{}".format(date_str())
)
if self._use_ray is True:
import ray
self._training_args.output_dir = ray.tune.get_trial_dir()
else:
self._training_args.output_dir = Counter.get_trial_fold_name(
local_dir, self.params, self.trial_id
)
self._training_args.eval_steps = (
self._training_args.logging_steps
) = self._training_args.saving_steps = self.checkpoint_freq
self._training_args.fp16 = self.fp16
self._training_args.no_cuda = self.no_cuda
def _preprocess(self, X, y=None, **kwargs):
from .nlp.utils import tokenize_text, is_a_list_of_str
is_str = str(X.dtypes[0]) in ("string", "str")
is_list_of_str = is_a_list_of_str(X[list(X.keys())[0]].to_list()[0])
if is_str or is_list_of_str:
return tokenize_text(
X=X,
Y=y,
task=self._task,
hf_args=self._training_args,
tokenizer=self.tokenizer,
)
else:
return X, None
def _model_init(self):
from .nlp.utils import load_model
this_model = load_model(
checkpoint_path=self._training_args.model_path,
task=self._task,
num_labels=self.num_labels,
)
return this_model
def preprocess_data(self, X, y):
from datasets import Dataset
if (self._task not in NLG_TASKS) and (self._task != TOKENCLASSIFICATION):
processed_X, _ = self._preprocess(X=X, **self._kwargs)
processed_y = y
else:
processed_X, processed_y = self._preprocess(X=X, y=y, **self._kwargs)
processed_dataset = Dataset.from_pandas(
TransformersEstimator._join(processed_X, processed_y, self._task)
)
return processed_dataset, processed_X, processed_y
@property
def num_labels(self):
from .data import SEQCLASSIFICATION, SEQREGRESSION, TOKENCLASSIFICATION
if self._task == SEQREGRESSION:
return 1
elif self._task == SEQCLASSIFICATION:
return len(set(self._y_train))
elif self._task == TOKENCLASSIFICATION:
return len(set([a for b in self._y_train.tolist() for a in b]))
else:
return None
@property
def tokenizer(self):
from transformers import AutoTokenizer
if self._task == SUMMARIZATION:
return AutoTokenizer.from_pretrained(
pretrained_model_name_or_path=self._training_args.model_path,
cache_dir=None,
use_fast=True,
revision="main",
use_auth_token=None,
)
else:
return AutoTokenizer.from_pretrained(
self._training_args.model_path,
use_fast=True,
add_prefix_space=True
if "roberta" in self._training_args.model_path
else False, # If roberta model, must set add_prefix_space to True to avoid the assertion error at
# https://github.com/huggingface/transformers/blob/main/src/transformers/models/roberta/tokenization_roberta_fast.py#L249
)
@property
def data_collator(self):
from .nlp.huggingface.data_collator import task_to_datacollator_class
return (
task_to_datacollator_class[self._task](
tokenizer=self.tokenizer,
pad_to_multiple_of=8, # if self._training_args.fp16 else None,
)
if self._task in (MULTICHOICECLASSIFICATION, TOKENCLASSIFICATION)
else None
)
def fit(
self,
X_train: DataFrame,
y_train: Series,
budget=None,
X_val=None,
y_val=None,
gpu_per_trial=None,
metric=None,
**kwargs,
):
import transformers
transformers.logging.set_verbosity_error()
from transformers import TrainerCallback
from transformers.trainer_utils import set_seed
from .nlp.huggingface.trainer import TrainerForAuto
try:
from ray.tune import is_session_enabled
self._use_ray = is_session_enabled()
except ImportError:
self._use_ray = False
this_params = self.params
self._kwargs = kwargs
self._X_train, self._y_train = X_train, y_train
self._set_training_args(**kwargs)
train_dataset, self._X_train, self._y_train = self.preprocess_data(
X_train, y_train
)
if X_val is not None:
eval_dataset, self._X_val, self._y_val = self.preprocess_data(X_val, y_val)
else:
eval_dataset, self._X_val, self._y_val = None, None, None
set_seed(self.params.get("seed", self._training_args.seed))
self._metric = metric
class EarlyStoppingCallbackForAuto(TrainerCallback):
def on_train_begin(self, args, state, control, **callback_kwargs):
self.train_begin_time = time.time()
def on_step_begin(self, args, state, control, **callback_kwargs):
self.step_begin_time = time.time()
def on_step_end(self, args, state, control, **callback_kwargs):
if state.global_step == 1:
self.time_per_iter = time.time() - self.step_begin_time
if (
budget
and (
time.time() + self.time_per_iter
> self.train_begin_time + budget
)
or state.global_step >= this_params[TransformersEstimator.ITER_HP]
):
control.should_training_stop = True
control.should_save = True
control.should_evaluate = True
return control
def on_epoch_end(self, args, state, control, **callback_kwargs):
if (
control.should_training_stop
or state.epoch + 1 >= args.num_train_epochs
):
control.should_save = True
control.should_evaluate = True
self._trainer = TrainerForAuto(
args=self._training_args,
model_init=self._model_init,
train_dataset=train_dataset,
eval_dataset=eval_dataset,
tokenizer=self.tokenizer,
data_collator=self.data_collator,
compute_metrics=self._compute_metrics_by_dataset_name,
callbacks=[EarlyStoppingCallbackForAuto],
)
if self._task in NLG_TASKS:
setattr(self._trainer, "_is_seq2seq", True)
"""
When not using ray for tuning, set the limit of CUDA_VISIBLE_DEVICES to math.ceil(gpu_per_trial),
so each estimator does not see all the GPUs
"""
if gpu_per_trial is not None:
tmp_cuda_visible_devices = os.environ.get("CUDA_VISIBLE_DEVICES", "")
self._trainer.args._n_gpu = gpu_per_trial
# if gpu_per_trial == 0:
# os.environ["CUDA_VISIBLE_DEVICES"] = ""
if tmp_cuda_visible_devices.count(",") != math.ceil(gpu_per_trial) - 1:
os.environ["CUDA_VISIBLE_DEVICES"] = ",".join(
[str(x) for x in range(math.ceil(gpu_per_trial))]
)
import time
start_time = time.time()
self._trainer.train()
if gpu_per_trial is not None:
os.environ["CUDA_VISIBLE_DEVICES"] = tmp_cuda_visible_devices
self.params[self.ITER_HP] = self._trainer.state.global_step
self._checkpoint_path = self._select_checkpoint(self._trainer)
self._ckpt_remains = list(self._trainer.ckpt_to_metric.keys())
if hasattr(self._trainer, "intermediate_results"):
self.intermediate_results = [
x[1]
for x in sorted(
self._trainer.intermediate_results.items(), key=lambda x: x[0]
)
]
self._trainer = None
return time.time() - start_time
def _delete_one_ckpt(self, ckpt_location):
if self._use_ray is False:
try:
shutil.rmtree(ckpt_location)
except FileNotFoundError:
logger.warning("checkpoint {} not found".format(ckpt_location))
def cleanup(self):
super().cleanup()
if hasattr(self, "_ckpt_remains"):
for each_ckpt in self._ckpt_remains:
self._delete_one_ckpt(each_ckpt)
def _select_checkpoint(self, trainer):
from transformers.trainer_utils import PREFIX_CHECKPOINT_DIR
if trainer.ckpt_to_metric:
best_ckpt, _ = min(
trainer.ckpt_to_metric.items(), key=lambda x: x[1]["eval_loss"]
)
best_ckpt_global_step = trainer.ckpt_to_global_step[best_ckpt]
for each_ckpt in list(trainer.ckpt_to_metric):
if each_ckpt != best_ckpt:
del trainer.ckpt_to_metric[each_ckpt]
del trainer.ckpt_to_global_step[each_ckpt]
self._delete_one_ckpt(each_ckpt)
else:
best_ckpt_global_step = trainer.state.global_step
best_ckpt = os.path.join(
trainer.args.output_dir,
f"{PREFIX_CHECKPOINT_DIR}-{best_ckpt_global_step}",
)
self.params[self.ITER_HP] = best_ckpt_global_step
logger.debug(trainer.state.global_step)
logger.debug(trainer.ckpt_to_global_step)
return best_ckpt
    def _compute_metrics_by_dataset_name(self, eval_pred):
        """Compute the evaluation metric dict for a (predictions, labels) pair.

        When ``self._metric`` is a string, predictions are decoded/reshaped
        per task type and scored with ``metric_loss_score``; otherwise the
        user-supplied metric callable is invoked on the cached validation
        split. The returned dict always contains the key "automl_metric".
        """
        if isinstance(self._metric, str):
            from .ml import metric_loss_score
            from .nlp.utils import postprocess_text
            predictions, labels = eval_pred
            if self._task in NLG_TASKS:
                if isinstance(predictions, tuple):
                    predictions = np.argmax(predictions[0], axis=2)
                decoded_preds = self.tokenizer.batch_decode(
                    predictions, skip_special_tokens=True
                )
                # -100 marks positions ignored by the loss; replace them so
                # the tokenizer can decode the label sequence.
                labels = np.where(labels != -100, labels, self.tokenizer.pad_token_id)
                decoded_labels = self.tokenizer.batch_decode(
                    labels, skip_special_tokens=True
                )
                predictions, labels = postprocess_text(decoded_preds, decoded_labels)
            else:
                # Regression keeps raw scores; token classification argmaxes
                # per token (axis 2); other classification per example (axis 1).
                predictions = (
                    np.squeeze(predictions)
                    if self._task == SEQREGRESSION
                    else np.argmax(predictions, axis=2)
                    if self._task == TOKENCLASSIFICATION
                    else np.argmax(predictions, axis=1)
                )
            metric_dict = {
                "automl_metric": metric_loss_score(
                    metric_name=self._metric,
                    y_predict=predictions,
                    y_true=labels,
                    labels=self._training_args.label_list,
                )
            }
        else:
            # Custom metric callable: score against the stored validation data.
            loss, metric_dict = self._metric(
                X_test=self._X_val,
                y_test=self._y_val,
                estimator=self,
                labels=None,
                X_train=self._X_train,
                y_train=self._y_train,
            )
            metric_dict["automl_metric"] = loss
        return metric_dict
    def _init_model_for_predict(self):
        """Build a fresh TrainerForAuto loaded from the best checkpoint.

        Returns a trainer configured for predict/evaluate calls, with the
        seq2seq flag set for generation tasks.
        """
        from .nlp.huggingface.trainer import TrainerForAuto
        """
        Need to reinit training_args because of a bug in deepspeed: if not reinit, the deepspeed config will be inconsistent
        with HF config https://github.com/huggingface/transformers/blob/main/src/transformers/training_args.py#L947
        """
        training_args = self._TrainingArguments(
            local_rank=-1, model_path=self._checkpoint_path, fp16=self.fp16
        )
        # Carry over every other previously-configured training argument.
        for key, val in self._training_args.__dict__.items():
            if key not in ("local_rank", "model_path", "fp16"):
                setattr(training_args, key, val)
        self._training_args = training_args
        new_trainer = TrainerForAuto(
            model=self._model_init(),
            args=self._training_args,
            data_collator=self.data_collator,
            compute_metrics=self._compute_metrics_by_dataset_name,
        )
        if self._task in NLG_TASKS:
            # Marks the trainer as seq2seq so predict uses generation.
            setattr(new_trainer, "_is_seq2seq", True)
        return new_trainer
    def predict_proba(self, X, **pred_kwargs):
        """Return raw class scores for X (classification tasks only).

        Extra keyword args are written onto the stored training arguments
        before prediction.
        """
        from datasets import Dataset
        if pred_kwargs:
            for key, val in pred_kwargs.items():
                setattr(self._training_args, key, val)
        assert (
            self._task in CLASSIFICATION
        ), "predict_proba() only for classification tasks."
        X_test, _ = self._preprocess(X, **self._kwargs)
        test_dataset = Dataset.from_pandas(X_test)
        new_trainer = self._init_model_for_predict()
        predictions = new_trainer.predict(test_dataset)
        return predictions.predictions
    def score(self, X_val: DataFrame, y_val: Series, **kwargs):
        """Evaluate the fitted model on a validation set.

        Requires a "metric" entry in kwargs; returns the trainer's
        evaluation dict for the preprocessed validation dataset.
        """
        import transformers
        transformers.logging.set_verbosity_error()
        self._metric = kwargs["metric"]
        eval_dataset, X_val, y_val = self.preprocess_data(X_val, y_val)
        new_trainer = self._init_model_for_predict()
        return new_trainer.evaluate(eval_dataset)
    def predict(self, X, **pred_kwargs):
        """Predict labels/values for X, post-processed per task type.

        Extra keyword args are written onto the stored training arguments.
        Returns argmaxed class ids, flattened regression values, or decoded
        text depending on ``self._task``.
        """
        import transformers
        from datasets import Dataset
        transformers.logging.set_verbosity_error()
        if pred_kwargs:
            for key, val in pred_kwargs.items():
                setattr(self._training_args, key, val)
        X_test, _ = self._preprocess(X, **self._kwargs)
        test_dataset = Dataset.from_pandas(X_test)
        new_trainer = self._init_model_for_predict()
        if self._task not in NLG_TASKS:
            predictions = new_trainer.predict(test_dataset)
        else:
            # Generation tasks use a distinct metric prefix for predict.
            predictions = new_trainer.predict(
                test_dataset,
                metric_key_prefix="predict",
            )
        if self._task == SEQCLASSIFICATION:
            return np.argmax(predictions.predictions, axis=1)
        elif self._task == SEQREGRESSION:
            # Flatten (n, 1) regression outputs into a 1-D array.
            return predictions.predictions.reshape((len(predictions.predictions),))
        elif self._task == TOKENCLASSIFICATION:
            return np.argmax(predictions.predictions, axis=2)
        elif self._task == SUMMARIZATION:
            decoded_preds = self.tokenizer.batch_decode(
                predictions.predictions, skip_special_tokens=True
            )
            return decoded_preds
        elif self._task == MULTICHOICECLASSIFICATION:
            return np.argmax(predictions.predictions, axis=1)
def config2params(self, config: dict) -> dict:
params = super().config2params(config)
params[TransformersEstimator.ITER_HP] = params.get(
TransformersEstimator.ITER_HP, sys.maxsize
)
return params
class TransformersEstimatorModelSelection(TransformersEstimator):
    """TransformersEstimator variant that also searches over the model path."""

    def __init__(self, task="seq-classification", **config):
        super().__init__(task, **config)

    @classmethod
    def search_space(cls, data_size, task, **params):
        """Return the base search space augmented with a model_path choice.

        The same candidate models are used regardless of memory constraints;
        users who hit OOM should narrow the space themselves.
        """
        space = TransformersEstimator.search_space(data_size, task, **params)
        candidate_models = [
            "google/electra-base-discriminator",
            "bert-base-uncased",
            "roberta-base",
            "facebook/muppet-roberta-base",
            "google/electra-small-discriminator",
        ]
        space["model_path"] = {
            "domain": tune.choice(candidate_models),
            "init_value": "facebook/muppet-roberta-base",
        }
        return space
class SKLearnEstimator(BaseEstimator):
    """The base class for tuning scikit-learn estimators."""

    def __init__(self, task="binary", **config):
        super().__init__(task, **config)

    def _preprocess(self, X):
        """Encode categorical features as integer codes for sklearn models."""
        if isinstance(X, DataFrame):
            categorical = X.select_dtypes(include=["category"]).columns
            if not categorical.empty:
                # Work on a copy so the caller's frame is left untouched.
                X = X.copy()
                X[categorical] = X[categorical].apply(lambda series: series.cat.codes)
        elif isinstance(X, np.ndarray) and X.dtype.kind not in "buif":
            # Non-numeric ndarray: go through pandas to encode string columns.
            X = DataFrame(X)
            for col in X.columns:
                if isinstance(X[col][0], str):
                    X[col] = X[col].astype("category").cat.codes
            X = X.to_numpy()
        return X
class LGBMEstimator(BaseEstimator):
    """The class for tuning LGBM, using sklearn API."""
    # Hyperparameter that controls the number of boosting iterations.
    ITER_HP = "n_estimators"
    # Whether per-iteration callbacks are available to enforce the budget.
    HAS_CALLBACK = True
    DEFAULT_ITER = 100
    @classmethod
    def search_space(cls, data_size, **params):
        """Return the LGBM search space sized to the number of training rows."""
        upper = max(5, min(32768, int(data_size[0])))  # upper must be larger than lower
        return {
            "n_estimators": {
                "domain": tune.lograndint(lower=4, upper=upper),
                "init_value": 4,
                "low_cost_init_value": 4,
            },
            "num_leaves": {
                "domain": tune.lograndint(lower=4, upper=upper),
                "init_value": 4,
                "low_cost_init_value": 4,
            },
            "min_child_samples": {
                "domain": tune.lograndint(lower=2, upper=2**7 + 1),
                "init_value": 20,
            },
            "learning_rate": {
                "domain": tune.loguniform(lower=1 / 1024, upper=1.0),
                "init_value": 0.1,
            },
            "log_max_bin": {  # log transformed with base 2
                "domain": tune.lograndint(lower=3, upper=11),
                "init_value": 8,
            },
            "colsample_bytree": {
                "domain": tune.uniform(lower=0.01, upper=1.0),
                "init_value": 1.0,
            },
            "reg_alpha": {
                "domain": tune.loguniform(lower=1 / 1024, upper=1024),
                "init_value": 1 / 1024,
            },
            "reg_lambda": {
                "domain": tune.loguniform(lower=1 / 1024, upper=1024),
                "init_value": 1.0,
            },
        }
    def config2params(self, config: dict) -> dict:
        """Translate a tuning config to LGBM params (log_max_bin -> max_bin)."""
        params = super().config2params(config)
        if "log_max_bin" in params:
            # LGBM expects max_bin = 2**log_max_bin - 1.
            params["max_bin"] = (1 << params.pop("log_max_bin")) - 1
        return params
    @classmethod
    def size(cls, config):
        """Estimate the trained model size in bytes for a given config."""
        num_leaves = int(
            round(
                config.get("num_leaves")
                or config.get("max_leaves")
                or 1 << config.get("max_depth", 16)
            )
        )
        n_estimators = int(round(config["n_estimators"]))
        return (num_leaves * 3 + (num_leaves - 1) * 4 + 1.0) * n_estimators * 8
    def __init__(self, task="binary", **config):
        super().__init__(task, **config)
        if "verbose" not in self.params:
            self.params["verbose"] = -1
        if "regression" == task:
            from lightgbm import LGBMRegressor
            self.estimator_class = LGBMRegressor
        elif "rank" == task:
            from lightgbm import LGBMRanker
            self.estimator_class = LGBMRanker
        else:
            from lightgbm import LGBMClassifier
            self.estimator_class = LGBMClassifier
        # Cached measurements used to extrapolate time/memory per iteration
        # when callbacks are not available.
        self._time_per_iter = None
        self._train_size = 0
        self._mem_per_iter = -1
        self.HAS_CALLBACK = self.HAS_CALLBACK and self._callbacks(0, 0) is not None
    def _preprocess(self, X):
        """Make X consumable by LGBM (densify sparse ints, encode strings)."""
        if (
            not isinstance(X, DataFrame)
            and issparse(X)
            and np.issubdtype(X.dtype, np.integer)
        ):
            X = X.astype(float)
        elif isinstance(X, np.ndarray) and X.dtype.kind not in "buif":
            # numpy array is not of numeric dtype
            X = DataFrame(X)
            for col in X.columns:
                if isinstance(X[col][0], str):
                    X[col] = X[col].astype("category").cat.codes
            X = X.to_numpy()
        return X
    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Train within an optional time budget; returns elapsed seconds.

        Without callback support the budget is enforced by probing the cost
        of 1 and then up-to-4 iterations to estimate per-iteration time and
        memory, then capping n_estimators accordingly; with callbacks the
        training process stops itself via the callback.
        """
        start_time = time.time()
        deadline = start_time + budget if budget else np.inf
        n_iter = self.params.get(self.ITER_HP, self.DEFAULT_ITER)
        trained = False
        if not self.HAS_CALLBACK:
            mem0 = psutil.virtual_memory().available if psutil is not None else 1
            if (
                (
                    not self._time_per_iter
                    or abs(self._train_size - X_train.shape[0]) > 4
                )
                and budget is not None
                or self._mem_per_iter < 0
                and psutil is not None
            ) and n_iter > 1:
                # Probe phase: fit 1 iteration, then min(n_iter, 4), to
                # measure per-iteration time and memory consumption.
                self.params[self.ITER_HP] = 1
                self._t1 = self._fit(X_train, y_train, **kwargs)
                if budget is not None and self._t1 >= budget or n_iter == 1:
                    return self._t1
                mem1 = psutil.virtual_memory().available if psutil is not None else 1
                self._mem1 = mem0 - mem1
                self.params[self.ITER_HP] = min(n_iter, 4)
                self._t2 = self._fit(X_train, y_train, **kwargs)
                mem2 = psutil.virtual_memory().available if psutil is not None else 1
                self._mem2 = max(mem0 - mem2, self._mem1)
                # if self._mem1 <= 0:
                #     self._mem_per_iter = self._mem2 / (self.params[self.ITER_HP] + 1)
                # elif self._mem2 <= 0:
                #     self._mem_per_iter = self._mem1
                # else:
                self._mem_per_iter = min(
                    self._mem1, self._mem2 / self.params[self.ITER_HP]
                )
                # if self._mem_per_iter <= 1 and psutil is not None:
                #     n_iter = self.params[self.ITER_HP]
                self._time_per_iter = (
                    (self._t2 - self._t1) / (self.params[self.ITER_HP] - 1)
                    if self._t2 > self._t1
                    else self._t1
                    if self._t1
                    else 0.001
                )
                self._train_size = X_train.shape[0]
                if (
                    budget is not None
                    and self._t1 + self._t2 >= budget
                    or n_iter == self.params[self.ITER_HP]
                ):
                    # self.params[self.ITER_HP] = n_iter
                    return time.time() - start_time
                trained = True
            # logger.debug(mem0)
            # logger.debug(self._mem_per_iter)
            if n_iter > 1:
                # Cap the iteration count by both remaining time and the
                # estimated memory headroom.
                max_iter = min(
                    n_iter,
                    int(
                        (budget - time.time() + start_time - self._t1)
                        / self._time_per_iter
                        + 1
                    )
                    if budget is not None
                    else n_iter,
                    int((1 - FREE_MEM_RATIO) * mem0 / self._mem_per_iter)
                    if psutil is not None and self._mem_per_iter > 0
                    else n_iter,
                )
                if trained and max_iter <= self.params[self.ITER_HP]:
                    return time.time() - start_time
                # when not trained, train at least one iter
                self.params[self.ITER_HP] = max(max_iter, 1)
        if self.HAS_CALLBACK:
            kwargs_callbacks = kwargs.get("callbacks")
            if kwargs_callbacks:
                callbacks = kwargs_callbacks + self._callbacks(start_time, deadline)
                kwargs.pop("callbacks")
            else:
                callbacks = self._callbacks(start_time, deadline)
            if isinstance(self, XGBoostSklearnEstimator):
                from xgboost import __version__
                if __version__ >= "1.6.0":
                    # since xgboost>=1.6.0, callbacks can't be passed in fit()
                    self.params["callbacks"] = callbacks
                    callbacks = None
            self._fit(
                X_train,
                y_train,
                callbacks=callbacks,
                **kwargs,
            )
            if callbacks is None:
                # for xgboost>=1.6.0, pop callbacks to enable pickle
                callbacks = self.params.pop("callbacks")
                self._model.set_params(callbacks=callbacks[:-1])
            best_iteration = (
                self._model.get_booster().best_iteration
                if isinstance(self, XGBoostSklearnEstimator)
                else self._model.best_iteration_
            )
            if best_iteration is not None:
                self._model.set_params(n_estimators=best_iteration + 1)
        else:
            self._fit(X_train, y_train, **kwargs)
        train_time = time.time() - start_time
        return train_time
    def _callbacks(self, start_time, deadline) -> List[Callable]:
        """Build the budget-enforcing callback list for LGBM fit()."""
        return [partial(self._callback, start_time, deadline)]
    def _callback(self, start_time, deadline, env) -> None:
        """Stop boosting early when the deadline or memory limit is near."""
        from lightgbm.callback import EarlyStopException
        now = time.time()
        if env.iteration == 0:
            self._time_per_iter = now - start_time
        if now + self._time_per_iter > deadline:
            raise EarlyStopException(env.iteration, env.evaluation_result_list)
        if psutil is not None:
            mem = psutil.virtual_memory()
            if mem.available / mem.total < FREE_MEM_RATIO:
                raise EarlyStopException(env.iteration, env.evaluation_result_list)
class XGBoostEstimator(SKLearnEstimator):
    """The class for tuning XGBoost regressor, not using sklearn API."""
    DEFAULT_ITER = 10
    @classmethod
    def search_space(cls, data_size, **params):
        """Return the XGBoost search space sized to the training data."""
        upper = max(5, min(32768, int(data_size[0])))  # upper must be larger than lower
        return {
            "n_estimators": {
                "domain": tune.lograndint(lower=4, upper=upper),
                "init_value": 4,
                "low_cost_init_value": 4,
            },
            "max_leaves": {
                "domain": tune.lograndint(lower=4, upper=upper),
                "init_value": 4,
                "low_cost_init_value": 4,
            },
            "max_depth": {
                "domain": tune.choice([0, 6, 12]),
                "init_value": 0,
            },
            "min_child_weight": {
                "domain": tune.loguniform(lower=0.001, upper=128),
                "init_value": 1.0,
            },
            "learning_rate": {
                "domain": tune.loguniform(lower=1 / 1024, upper=1.0),
                "init_value": 0.1,
            },
            "subsample": {
                "domain": tune.uniform(lower=0.1, upper=1.0),
                "init_value": 1.0,
            },
            "colsample_bylevel": {
                "domain": tune.uniform(lower=0.01, upper=1.0),
                "init_value": 1.0,
            },
            "colsample_bytree": {
                "domain": tune.uniform(lower=0.01, upper=1.0),
                "init_value": 1.0,
            },
            "reg_alpha": {
                "domain": tune.loguniform(lower=1 / 1024, upper=1024),
                "init_value": 1 / 1024,
            },
            "reg_lambda": {
                "domain": tune.loguniform(lower=1 / 1024, upper=1024),
                "init_value": 1.0,
            },
        }
    @classmethod
    def size(cls, config):
        """Estimate model size in bytes (same formula as LGBM)."""
        return LGBMEstimator.size(config)
    @classmethod
    def cost_relative2lgbm(cls):
        """Training cost relative to LGBM."""
        return 1.6
    def config2params(self, config: dict) -> dict:
        """Translate a tuning config to native-XGBoost training params."""
        params = super().config2params(config)
        max_depth = params["max_depth"] = params.get("max_depth", 0)
        if max_depth == 0:
            # Depth 0 means unlimited: grow leaf-wise with histogram trees.
            params["grow_policy"] = params.get("grow_policy", "lossguide")
            params["tree_method"] = params.get("tree_method", "hist")
        # params["booster"] = params.get("booster", "gbtree")
        params["use_label_encoder"] = params.get("use_label_encoder", False)
        if "n_jobs" in config:
            # The native API calls this parameter nthread.
            params["nthread"] = params.pop("n_jobs")
        return params
    def __init__(
        self,
        task="regression",
        **config,
    ):
        super().__init__(task, **config)
        self.params["verbosity"] = 0
    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Train via xgb.train within an optional budget; returns seconds."""
        import xgboost as xgb
        start_time = time.time()
        deadline = start_time + budget if budget else np.inf
        if issparse(X_train):
            if xgb.__version__ < "1.6.0":
                # "auto" fails for sparse input since xgboost 1.6.0
                self.params["tree_method"] = "auto"
        else:
            X_train = self._preprocess(X_train)
        if "sample_weight" in kwargs:
            dtrain = xgb.DMatrix(X_train, label=y_train, weight=kwargs["sample_weight"])
        else:
            dtrain = xgb.DMatrix(X_train, label=y_train)
        objective = self.params.get("objective")
        if isinstance(objective, str):
            obj = None
        else:
            # Custom callable objectives are passed via the obj argument,
            # so remove the params entry to avoid passing it twice.
            obj = objective
            if "objective" in self.params:
                del self.params["objective"]
        _n_estimators = self.params.pop("n_estimators")
        callbacks = XGBoostEstimator._callbacks(start_time, deadline)
        if callbacks:
            self._model = xgb.train(
                self.params,
                dtrain,
                _n_estimators,
                obj=obj,
                callbacks=callbacks,
            )
            self.params["n_estimators"] = self._model.best_iteration + 1
        else:
            self._model = xgb.train(self.params, dtrain, _n_estimators, obj=obj)
            self.params["n_estimators"] = _n_estimators
        # Restore the objective so the estimator can be refit consistently.
        self.params["objective"] = objective
        del dtrain
        train_time = time.time() - start_time
        return train_time
    def predict(self, X, **kwargs):
        """Predict with the trained booster; X is converted to a DMatrix."""
        import xgboost as xgb
        if not issparse(X):
            X = self._preprocess(X)
        dtest = xgb.DMatrix(X)
        return super().predict(dtest)
    @classmethod
    def _callbacks(cls, start_time, deadline):
        """Return xgboost callbacks enforcing the time/memory budget.

        Returns None for xgboost<1.3, which lacks the callback API.
        """
        try:
            from xgboost.callback import TrainingCallback
        except ImportError:  # for xgboost<1.3
            return None
        class ResourceLimit(TrainingCallback):
            def after_iteration(self, model, epoch, evals_log) -> bool:
                # Returning True stops training in xgboost.
                now = time.time()
                if epoch == 0:
                    self._time_per_iter = now - start_time
                if now + self._time_per_iter > deadline:
                    return True
                if psutil is not None:
                    mem = psutil.virtual_memory()
                    if mem.available / mem.total < FREE_MEM_RATIO:
                        return True
                return False
        return [ResourceLimit()]
class XGBoostSklearnEstimator(SKLearnEstimator, LGBMEstimator):
    """The class for tuning XGBoost with unlimited depth, using sklearn API."""

    DEFAULT_ITER = 10

    @classmethod
    def search_space(cls, data_size, **params):
        """Search space of XGBoostEstimator minus the max_depth dimension."""
        space = XGBoostEstimator.search_space(data_size)
        del space["max_depth"]
        return space

    @classmethod
    def cost_relative2lgbm(cls):
        """Training cost relative to LGBM (same as native XGBoost)."""
        return XGBoostEstimator.cost_relative2lgbm()

    def config2params(self, config: dict) -> dict:
        """Translate a tuning config to XGBoost sklearn-API parameters."""
        params = super().config2params(config)
        max_depth = params["max_depth"] = params.get("max_depth", 0)
        if max_depth == 0:
            # Unlimited depth: grow leaf-wise like LightGBM.
            params["grow_policy"] = params.get("grow_policy", "lossguide")
            params["tree_method"] = params.get("tree_method", "hist")
        params["use_label_encoder"] = params.get("use_label_encoder", False)
        return params

    def __init__(
        self,
        task="binary",
        **config,
    ):
        super().__init__(task, **config)
        # XGBoost uses "verbosity" rather than LGBM's "verbose".
        del self.params["verbose"]
        self.params["verbosity"] = 0
        import xgboost as xgb

        self._xgb_version = xgb.__version__
        if "rank" == task:
            self.estimator_class = xgb.XGBRanker
        elif task in CLASSIFICATION:
            self.estimator_class = xgb.XGBClassifier
        else:
            self.estimator_class = xgb.XGBRegressor

    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Apply sparse-input and GPU workarounds, then delegate to LGBMEstimator.fit."""
        if issparse(X_train) and self._xgb_version < "1.6.0":
            # "auto" fails for sparse input since xgboost 1.6.0
            self.params["tree_method"] = "auto"
        if kwargs.get("gpu_per_trial"):
            self.params["tree_method"] = "gpu_hist"
            kwargs.pop("gpu_per_trial")
        return super().fit(X_train, y_train, budget, **kwargs)

    def _callbacks(self, start_time, deadline) -> List[Callable]:
        """Use the native-XGBoost resource-limit callbacks."""
        return XGBoostEstimator._callbacks(start_time, deadline)
class XGBoostLimitDepthEstimator(XGBoostSklearnEstimator):
    """The class for tuning XGBoost with limited depth, using sklearn API."""

    @classmethod
    def search_space(cls, data_size, **params):
        """Depth-limited variant: tune max_depth instead of max_leaves."""
        space = XGBoostEstimator.search_space(data_size)
        del space["max_leaves"]
        # Depth grows with log2 of the row count, capped at 16.
        upper = max(6, int(np.log2(data_size[0])))
        space["max_depth"] = {
            "domain": tune.randint(lower=1, upper=min(upper, 16)),
            "init_value": 6,
            "low_cost_init_value": 1,
        }
        # Depth-limited trees train faster, so start more aggressively.
        space["learning_rate"]["init_value"] = 0.3
        space["n_estimators"]["init_value"] = 10
        return space

    @classmethod
    def cost_relative2lgbm(cls):
        """Training cost relative to LGBM."""
        return 64
class RandomForestEstimator(SKLearnEstimator, LGBMEstimator):
    """The class for tuning Random Forest."""

    HAS_CALLBACK = False
    nrows = 101

    @classmethod
    def search_space(cls, data_size, task, **params):
        """Hyperparameter search space sized to the training data shape."""
        RandomForestEstimator.nrows = int(data_size[0])
        upper = min(2048, RandomForestEstimator.nrows)
        # sqrt(n_features) fraction is the classic classification default.
        init = 1 / np.sqrt(data_size[1]) if task in CLASSIFICATION else 1
        lower = min(0.1, init)
        space = {
            "n_estimators": {
                "domain": tune.lograndint(lower=4, upper=max(5, upper)),
                "init_value": 4,
                "low_cost_init_value": 4,
            },
            "max_features": {
                "domain": tune.loguniform(lower=lower, upper=1.0),
                "init_value": init,
            },
            "max_leaves": {
                "domain": tune.lograndint(
                    lower=4,
                    # at most half the number of rows
                    upper=max(5, min(32768, RandomForestEstimator.nrows >> 1)),
                ),
                "init_value": 4,
                "low_cost_init_value": 4,
            },
        }
        if task in CLASSIFICATION:
            space["criterion"] = {
                "domain": tune.choice(["gini", "entropy"]),
            }
        return space

    @classmethod
    def cost_relative2lgbm(cls):
        """Training cost relative to LGBM."""
        return 2

    def config2params(self, config: dict) -> dict:
        """Map tuning config keys onto sklearn RandomForest parameters."""
        params = super().config2params(config)
        if "max_leaves" in params:
            # sklearn calls this parameter max_leaf_nodes.
            params["max_leaf_nodes"] = params.get(
                "max_leaf_nodes", params.pop("max_leaves")
            )
        if self._task not in CLASSIFICATION and "criterion" in config:
            # criterion only applies to classifiers.
            params.pop("criterion")
        return params

    def __init__(
        self,
        task="binary",
        **params,
    ):
        super().__init__(task, **params)
        self.params["verbose"] = 0
        if task in CLASSIFICATION:
            self.estimator_class = RandomForestClassifier
        else:
            self.estimator_class = RandomForestRegressor
class ExtraTreesEstimator(RandomForestEstimator):
    """The class for tuning Extra Trees."""

    @classmethod
    def cost_relative2lgbm(cls):
        """Training cost relative to LGBM."""
        return 1.9

    def __init__(self, task="binary", **params):
        super().__init__(task, **params)
        # NOTE: this is a substring test, so any task name containing
        # "regression" selects the regressor.
        self.estimator_class = (
            ExtraTreesRegressor if "regression" in task else ExtraTreesClassifier
        )
class LRL1Classifier(SKLearnEstimator):
    """The class for tuning Logistic Regression with L1 regularization."""

    @classmethod
    def search_space(cls, **params):
        """Single-dimension search space over the inverse regularization C."""
        return {
            "C": {
                "domain": tune.loguniform(lower=0.03125, upper=32768.0),
                "init_value": 1.0,
            },
        }

    @classmethod
    def cost_relative2lgbm(cls):
        """Training cost relative to LGBM."""
        return 160

    def config2params(self, config: dict) -> dict:
        """Fill in solver defaults suitable for L1-penalized training."""
        params = super().config2params(config)
        params.setdefault("tol", 0.0001)
        # saga supports the l1 penalty in sklearn.
        params.setdefault("solver", "saga")
        params.setdefault("penalty", "l1")
        return params

    def __init__(self, task="binary", **config):
        super().__init__(task, **config)
        assert task in CLASSIFICATION, "LogisticRegression for classification task only"
        self.estimator_class = LogisticRegression
class LRL2Classifier(SKLearnEstimator):
    """The class for tuning Logistic Regression with L2 regularization."""

    limit_resource = True

    @classmethod
    def search_space(cls, **params):
        """Same single-dimension C search space as the L1 variant."""
        return LRL1Classifier.search_space(**params)

    @classmethod
    def cost_relative2lgbm(cls):
        """Training cost relative to LGBM."""
        return 25

    def config2params(self, config: dict) -> dict:
        """Fill in solver defaults suitable for L2-penalized training."""
        params = super().config2params(config)
        params.setdefault("tol", 0.0001)
        params.setdefault("solver", "lbfgs")
        params.setdefault("penalty", "l2")
        return params

    def __init__(self, task="binary", **config):
        super().__init__(task, **config)
        assert task in CLASSIFICATION, "LogisticRegression for classification task only"
        self.estimator_class = LogisticRegression
class CatBoostEstimator(BaseEstimator):
    """The class for tuning CatBoost."""
    ITER_HP = "n_estimators"
    DEFAULT_ITER = 1000
    @classmethod
    def search_space(cls, data_size, **params):
        """Return the CatBoost search space; patience shrinks for big data."""
        upper = max(min(round(1500000 / data_size[0]), 150), 12)
        return {
            "early_stopping_rounds": {
                "domain": tune.lograndint(lower=10, upper=upper),
                "init_value": 10,
                "low_cost_init_value": 10,
            },
            "learning_rate": {
                "domain": tune.loguniform(lower=0.005, upper=0.2),
                "init_value": 0.1,
            },
            "n_estimators": {
                "domain": 8192,
                "init_value": 8192,
            },
        }
    @classmethod
    def size(cls, config):
        """Estimate model size in bytes assuming 64-leaf trees."""
        n_estimators = config.get("n_estimators", 8192)
        max_leaves = 64
        return (max_leaves * 3 + (max_leaves - 1) * 4 + 1.0) * n_estimators * 8
    @classmethod
    def cost_relative2lgbm(cls):
        """Training cost relative to LGBM."""
        return 15
    def _preprocess(self, X):
        """Normalize categorical data so CatBoost accepts it.

        Float category labels are renamed to their string form; non-numeric
        ndarrays are encoded via pandas category codes.
        """
        if isinstance(X, DataFrame):
            cat_columns = X.select_dtypes(include=["category"]).columns
            if not cat_columns.empty:
                X = X.copy()
                X[cat_columns] = X[cat_columns].apply(
                    lambda x: x.cat.rename_categories(
                        [
                            str(c) if isinstance(c, float) else c
                            for c in x.cat.categories
                        ]
                    )
                )
        elif isinstance(X, np.ndarray) and X.dtype.kind not in "buif":
            # numpy array is not of numeric dtype
            X = DataFrame(X)
            for col in X.columns:
                if isinstance(X[col][0], str):
                    X[col] = X[col].astype("category").cat.codes
            X = X.to_numpy()
        return X
    def config2params(self, config: dict) -> dict:
        """Translate config to CatBoost params (n_jobs -> thread_count)."""
        params = super().config2params(config)
        params["n_estimators"] = params.get("n_estimators", 8192)
        if "n_jobs" in params:
            params["thread_count"] = params.pop("n_jobs")
        return params
    def __init__(
        self,
        task="binary",
        **config,
    ):
        super().__init__(task, **config)
        self.params.update(
            {
                "verbose": config.get("verbose", False),
                "random_seed": config.get("random_seed", 10242048),
            }
        )
        from catboost import CatBoostRegressor
        self.estimator_class = CatBoostRegressor
        if task in CLASSIFICATION:
            from catboost import CatBoostClassifier
            self.estimator_class = CatBoostClassifier
    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Train with an internal eval split; returns elapsed seconds.

        The last 10% (at most 1000 rows) of the data is held out as the
        eval set for early stopping.
        """
        start_time = time.time()
        deadline = start_time + budget if budget else np.inf
        train_dir = f"catboost_{str(start_time)}"
        X_train = self._preprocess(X_train)
        if isinstance(X_train, DataFrame):
            cat_features = list(X_train.select_dtypes(include="category").columns)
        else:
            cat_features = []
        # Split point: keep at least 90% for training, hold out <= 1000 rows.
        n = max(int(len(y_train) * 0.9), len(y_train) - 1000)
        X_tr, y_tr = X_train[:n], y_train[:n]
        if "sample_weight" in kwargs:
            weight = kwargs["sample_weight"]
            if weight is not None:
                # Trim the weights to match the training split.
                kwargs["sample_weight"] = weight[:n]
        else:
            weight = None
        from catboost import Pool, __version__
        model = self.estimator_class(train_dir=train_dir, **self.params)
        if __version__ >= "0.26":
            model.fit(
                X_tr,
                y_tr,
                cat_features=cat_features,
                eval_set=Pool(
                    data=X_train[n:], label=y_train[n:], cat_features=cat_features
                ),
                callbacks=CatBoostEstimator._callbacks(start_time, deadline),
                **kwargs,
            )
        else:
            # catboost<0.26 has no callback support; budget is best-effort.
            model.fit(
                X_tr,
                y_tr,
                cat_features=cat_features,
                eval_set=Pool(
                    data=X_train[n:], label=y_train[n:], cat_features=cat_features
                ),
                **kwargs,
            )
        shutil.rmtree(train_dir, ignore_errors=True)
        if weight is not None:
            # Restore the caller's full-length sample_weight.
            kwargs["sample_weight"] = weight
        self._model = model
        self.params[self.ITER_HP] = self._model.tree_count_
        train_time = time.time() - start_time
        return train_time
    @classmethod
    def _callbacks(cls, start_time, deadline):
        """Return a CatBoost callback that stops on time/memory pressure."""
        class ResourceLimit:
            def after_iteration(self, info) -> bool:
                # In CatBoost, returning False stops training.
                now = time.time()
                if info.iteration == 1:
                    self._time_per_iter = now - start_time
                if now + self._time_per_iter > deadline:
                    return False
                if psutil is not None:
                    mem = psutil.virtual_memory()
                    if mem.available / mem.total < FREE_MEM_RATIO:
                        return False
                return True  # can continue
        return [ResourceLimit()]
class KNeighborsEstimator(BaseEstimator):
    """The class for tuning K-nearest-neighbor models."""

    @classmethod
    def search_space(cls, data_size, **params):
        """Search space over n_neighbors, capped by half the data size."""
        upper = min(512, int(data_size[0] / 2))
        return {
            "n_neighbors": {
                "domain": tune.lograndint(lower=1, upper=max(2, upper)),
                "init_value": 5,
                "low_cost_init_value": 1,
            },
        }

    @classmethod
    def cost_relative2lgbm(cls):
        """Training cost relative to LGBM."""
        return 30

    def config2params(self, config: dict) -> dict:
        """Default to distance-weighted voting."""
        params = super().config2params(config)
        params.setdefault("weights", "distance")
        return params

    def __init__(self, task="binary", **config):
        super().__init__(task, **config)
        if task in CLASSIFICATION:
            from sklearn.neighbors import KNeighborsClassifier

            self.estimator_class = KNeighborsClassifier
        else:
            from sklearn.neighbors import KNeighborsRegressor

            self.estimator_class = KNeighborsRegressor

    def _preprocess(self, X):
        """Drop categorical/string columns; KNN needs numeric features only.

        Raises:
            ValueError: if every column of a DataFrame input is categorical.
        """
        if isinstance(X, DataFrame):
            cat_columns = X.select_dtypes(["category"]).columns
            if X.shape[1] == len(cat_columns):
                raise ValueError("kneighbor requires at least one numeric feature")
            X = X.drop(cat_columns, axis=1)
        elif isinstance(X, np.ndarray) and X.dtype.kind not in "buif":
            # Non-numeric ndarray: find string columns via pandas, drop them.
            X = DataFrame(X)
            drop_cols = [col for col in X.columns if isinstance(X[col][0], str)]
            X = X.drop(drop_cols, axis=1).to_numpy()
        return X
class Prophet(SKLearnEstimator):
    """The class for tuning Prophet."""

    @classmethod
    def search_space(cls, **params):
        """Search space over Prophet's prior scales and seasonality mode."""
        space = {
            "changepoint_prior_scale": {
                "domain": tune.loguniform(lower=0.001, upper=0.05),
                "init_value": 0.05,
                "low_cost_init_value": 0.001,
            },
            "seasonality_prior_scale": {
                "domain": tune.loguniform(lower=0.01, upper=10),
                "init_value": 10,
            },
            "holidays_prior_scale": {
                "domain": tune.loguniform(lower=0.01, upper=10),
                "init_value": 10,
            },
            "seasonality_mode": {
                "domain": tune.choice(["additive", "multiplicative"]),
                "init_value": "multiplicative",
            },
        }
        return space

    def __init__(self, task="ts_forecast", n_jobs=1, **params):
        # n_jobs is accepted for interface compatibility; Prophet does not
        # use it, so it is deliberately not forwarded.
        super().__init__(task, **params)

    def _join(self, X_train, y_train):
        """Join X and y into one frame; y becomes the TS_VALUE_COL column.

        Raises:
            AssertionError: if X_train lacks the timestamp column.
        """
        assert TS_TIMESTAMP_COL in X_train, (
            "Dataframe for training ts_forecast model must have column"
            f' "{TS_TIMESTAMP_COL}" with the dates in X_train.'
        )
        y_train = DataFrame(y_train, columns=[TS_VALUE_COL])
        train_df = X_train.join(y_train)
        return train_df

    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Fit a Prophet model; returns the training time in seconds.

        Every column other than the timestamp and target is added as an
        extra regressor.
        """
        from prophet import Prophet

        current_time = time.time()
        train_df = self._join(X_train, y_train)
        train_df = self._preprocess(train_df)
        cols = list(train_df)
        cols.remove(TS_TIMESTAMP_COL)
        cols.remove(TS_VALUE_COL)
        logging.getLogger("prophet").setLevel(logging.WARNING)
        model = Prophet(**self.params)
        for regressor in cols:
            model.add_regressor(regressor)
        with suppress_stdout_stderr():
            model.fit(train_df)
        train_time = time.time() - current_time
        self._model = model
        return train_time

    def predict(self, X, **kwargs):
        """Forecast yhat for the timestamps (and regressors) in X.

        Raises:
            ValueError: if X is an int (only supported for arima/sarimax).
        """
        if isinstance(X, int):
            raise ValueError(
                "predict() with steps is only supported for arima/sarimax."
                " For Prophet, pass a dataframe with the first column containing"
                " the timestamp values."
            )
        if self._model is not None:
            X = self._preprocess(X)
            forecast = self._model.predict(X)
            return forecast["yhat"]
        else:
            logger.warning(
                "Estimator is not fit yet. Please run fit() before predict()."
            )
            return np.ones(X.shape[0])

    def score(self, X_val: DataFrame, y_val: Series, **kwargs):
        """Score predictions on validation data.

        Returns the metric loss when a `metric` kwarg is supplied,
        otherwise the R² of the predictions.
        """
        from sklearn.metrics import r2_score

        from .ml import metric_loss_score

        y_pred = self.predict(X_val)
        self._metric = kwargs.get("metric", None)
        if self._metric:
            return metric_loss_score(self._metric, y_pred, y_val)
        else:
            # Fix: sklearn's r2_score signature is (y_true, y_pred) and R²
            # is not symmetric, so the ground truth must come first.
            return r2_score(y_val, y_pred)
class ARIMA(Prophet):
    """The class for tuning ARIMA."""
    @classmethod
    def search_space(cls, **params):
        """Search space over the (p, d, q) ARIMA orders."""
        space = {
            "p": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 2,
                "low_cost_init_value": 0,
            },
            "d": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 2,
                "low_cost_init_value": 0,
            },
            "q": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 1,
                "low_cost_init_value": 0,
            },
        }
        return space
    def _join(self, X_train, y_train):
        """Join X and y, then move the timestamp column into the index."""
        train_df = super()._join(X_train, y_train)
        train_df.index = to_datetime(train_df[TS_TIMESTAMP_COL])
        train_df = train_df.drop(TS_TIMESTAMP_COL, axis=1)
        return train_df
    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Fit a statsmodels ARIMA (with exogenous regressors when present).

        Returns the training time in seconds.
        """
        import warnings
        warnings.filterwarnings("ignore")
        from statsmodels.tsa.arima.model import ARIMA as ARIMA_estimator
        current_time = time.time()
        train_df = self._join(X_train, y_train)
        train_df = self._preprocess(train_df)
        regressors = list(train_df)
        regressors.remove(TS_VALUE_COL)
        if regressors:
            model = ARIMA_estimator(
                train_df[[TS_VALUE_COL]],
                exog=train_df[regressors],
                order=(self.params["p"], self.params["d"], self.params["q"]),
                enforce_stationarity=False,
                enforce_invertibility=False,
            )
        else:
            model = ARIMA_estimator(
                train_df,
                order=(self.params["p"], self.params["d"], self.params["q"]),
                enforce_stationarity=False,
                enforce_invertibility=False,
            )
        with suppress_stdout_stderr():
            model = model.fit()
        train_time = time.time() - current_time
        self._model = model
        return train_time
    def predict(self, X, **kwargs):
        """Forecast: X may be an int step count or a timestamped DataFrame."""
        if self._model is not None:
            if isinstance(X, int):
                forecast = self._model.forecast(steps=X)
            elif isinstance(X, DataFrame):
                start = X[TS_TIMESTAMP_COL].iloc[0]
                end = X[TS_TIMESTAMP_COL].iloc[-1]
                if len(X.columns) > 1:
                    # Remaining columns are passed as exogenous regressors.
                    X = self._preprocess(X.drop(columns=TS_TIMESTAMP_COL))
                    regressors = list(X)
                    forecast = self._model.predict(
                        start=start, end=end, exog=X[regressors]
                    )
                else:
                    forecast = self._model.predict(start=start, end=end)
            else:
                raise ValueError(
                    "X needs to be either a pandas Dataframe with dates as the first column"
                    " or an int number of periods for predict()."
                )
            return forecast
        else:
            # Unfitted model: return a constant placeholder forecast.
            return np.ones(X if isinstance(X, int) else X.shape[0])
class SARIMAX(ARIMA):
    """The class for tuning SARIMA."""

    @classmethod
    def search_space(cls, **params):
        """Search space over (p, d, q) and seasonal (P, D, Q, s) orders."""
        space = {
            "p": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 2,
                "low_cost_init_value": 0,
            },
            "d": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 2,
                "low_cost_init_value": 0,
            },
            "q": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 1,
                "low_cost_init_value": 0,
            },
            "P": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 1,
                "low_cost_init_value": 0,
            },
            "D": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 1,
                "low_cost_init_value": 0,
            },
            "Q": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 1,
                "low_cost_init_value": 0,
            },
            "s": {
                "domain": tune.choice([1, 4, 6, 12]),
                "init_value": 12,
            },
        }
        return space

    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Fit a statsmodels SARIMAX model; returns training seconds.

        Uses exogenous regressors when X_train has extra columns.
        """
        import warnings

        warnings.filterwarnings("ignore")
        from statsmodels.tsa.statespace.sarimax import SARIMAX as SARIMAX_estimator

        current_time = time.time()
        train_df = self._join(X_train, y_train)
        train_df = self._preprocess(train_df)
        regressors = list(train_df)
        regressors.remove(TS_VALUE_COL)
        # Fix: statsmodels' SARIMAX takes the seasonal component via
        # `seasonal_order`, not `seasonality_order`; with the old keyword
        # the seasonal (P, D, Q, s) terms were never applied.
        seasonal_order = (
            self.params["P"],
            self.params["D"],
            self.params["Q"],
            self.params["s"],
        )
        if regressors:
            model = SARIMAX_estimator(
                train_df[[TS_VALUE_COL]],
                exog=train_df[regressors],
                order=(self.params["p"], self.params["d"], self.params["q"]),
                seasonal_order=seasonal_order,
                enforce_stationarity=False,
                enforce_invertibility=False,
            )
        else:
            model = SARIMAX_estimator(
                train_df,
                order=(self.params["p"], self.params["d"], self.params["q"]),
                seasonal_order=seasonal_order,
                enforce_stationarity=False,
                enforce_invertibility=False,
            )
        with suppress_stdout_stderr():
            model = model.fit()
        train_time = time.time() - current_time
        self._model = model
        return train_time
class TS_SKLearn(SKLearnEstimator):
"""The class for tuning SKLearn Regressors for time-series forecasting, using hcrystalball"""
base_class = SKLearnEstimator
@classmethod
def search_space(cls, data_size, pred_horizon, **params):
space = cls.base_class.search_space(data_size, **params)
space.update(
{
"optimize_for_horizon": {
"domain": tune.choice([True, False]),
"init_value": False,
"low_cost_init_value": False,
},
"lags": {
"domain": tune.randint(
lower=1, upper=max(2, int(np.sqrt(data_size[0])))
),
"init_value": 3,
},
}
)
return space
def __init__(self, task="ts_forecast", **params):
super().__init__(task, **params)
self.hcrystaball_model = None
self.ts_task = (
"regression" if task in TS_FORECASTREGRESSION else "classification"
)
def transform_X(self, X):
cols = list(X)
if len(cols) == 1:
ds_col = cols[0]
X = DataFrame(index=X[ds_col])
elif len(cols) > 1:
ds_col = cols[0]
exog_cols = cols[1:]
X = X[exog_cols].set_index(X[ds_col])
return X
def _fit(self, X_train, y_train, budget=None, **kwargs):
from hcrystalball.wrappers import get_sklearn_wrapper
X_train = self.transform_X(X_train)
X_train = self._preprocess(X_train)
params = self.params.copy()
lags = params.pop("lags")
optimize_for_horizon = params.pop("optimize_for_horizon")
estimator = self.base_class(task=self.ts_task, **params)
self.hcrystaball_model = get_sklearn_wrapper(estimator.estimator_class)
self.hcrystaball_model.lags = int(lags)
self.hcrystaball_model.fit(X_train, y_train)
if optimize_for_horizon:
# Direct Multi-step Forecast Strategy - fit a seperate model for each horizon
model_list = []
for i in range(1, kwargs["period"] + 1):
(
X_fit,
y_fit,
) = self.hcrystaball_model._transform_data_to_tsmodel_input_format(
X_train, y_train, i
)
self.hcrystaball_model.model.set_params(**estimator.params)
model = self.hcrystaball_model.model.fit(X_fit, y_fit)
model_list.append(model)
self._model = model_list
else:
(
X_fit,
y_fit,
) = self.hcrystaball_model._transform_data_to_tsmodel_input_format(
X_train, y_train, kwargs["period"]
)
self.hcrystaball_model.model.set_params(**estimator.params)
model = self.hcrystaball_model.model.fit(X_fit, y_fit)
self._model = model
def fit(self, X_train, y_train, budget=None, **kwargs):
current_time = time.time()
self._fit(X_train, y_train, budget=budget, **kwargs)
train_time = time.time() - current_time
return train_time
def predict(self, X, **kwargs):
if self._model is not None:
X = self.transform_X(X)
X = self._preprocess(X)
if isinstance(self._model, list):
assert len(self._model) == len(
X
), "Model is optimized for horizon, length of X must be equal to `period`."
preds = []
for i in range(1, len(self._model) + 1):
(
X_pred,
_,
) = self.hcrystaball_model._transform_data_to_tsmodel_input_format(
X.iloc[:i, :]
)
preds.append(self._model[i - 1].predict(X_pred)[-1])
forecast = DataFrame(
data=np.asarray(preds).reshape(-1, 1),
columns=[self.hcrystaball_model.name],
index=X.index,
)
else:
(
X_pred,
_,
) = self.hcrystaball_model._transform_data_to_tsmodel_input_format(X)
forecast = self._model.predict(X_pred)
return forecast
else:
logger.warning(
"Estimator is not fit yet. Please run fit() before predict()."
)
return np.ones(X.shape[0])
class LGBM_TS(TS_SKLearn):
"""The class for tuning LGBM Regressor for time-series forecasting"""
base_class = LGBMEstimator
class XGBoost_TS(TS_SKLearn):
"""The class for tuning XGBoost Regressor for time-series forecasting"""
base_class = XGBoostSklearnEstimator
# catboost regressor is invalid because it has a `name` parameter, making it incompatible with hcrystalball
# class CatBoost_TS_Regressor(TS_Regressor):
# base_class = CatBoostEstimator
class RF_TS(TS_SKLearn):
"""The class for tuning Random Forest Regressor for time-series forecasting"""
base_class = RandomForestEstimator
class ExtraTrees_TS(TS_SKLearn):
"""The class for tuning Extra Trees Regressor for time-series forecasting"""
base_class = ExtraTreesEstimator
class XGBoostLimitDepth_TS(TS_SKLearn):
"""The class for tuning XGBoost Regressor with unlimited depth for time-series forecasting"""
base_class = XGBoostLimitDepthEstimator
class suppress_stdout_stderr(object):
def __init__(self):
# Open a pair of null files
self.null_fds = [os.open(os.devnull, os.O_RDWR) for x in range(2)]
# Save the actual stdout (1) and stderr (2) file descriptors.
self.save_fds = (os.dup(1), os.dup(2))
def __enter__(self):
# Assign the null pointers to stdout and stderr.
os.dup2(self.null_fds[0], 1)
os.dup2(self.null_fds[1], 2)
def __exit__(self, *_):
# Re-assign the real stdout/stderr back to (1) and (2)
os.dup2(self.save_fds[0], 1)
os.dup2(self.save_fds[1], 2)
# Close the null files
os.close(self.null_fds[0])
os.close(self.null_fds[1])
| 36.646787 | 144 | 0.541121 |
from contextlib import contextmanager
from functools import partial
import signal
import os
from typing import Callable, List
import numpy as np
import time
from sklearn.ensemble import RandomForestRegressor, RandomForestClassifier
from sklearn.ensemble import ExtraTreesRegressor, ExtraTreesClassifier
from sklearn.linear_model import LogisticRegression
from sklearn.dummy import DummyClassifier, DummyRegressor
from scipy.sparse import issparse
import logging
import shutil
from pandas import DataFrame, Series, to_datetime
import sys
import math
from . import tune
from .data import (
group_counts,
CLASSIFICATION,
TS_FORECASTREGRESSION,
TS_TIMESTAMP_COL,
TS_VALUE_COL,
SEQCLASSIFICATION,
SEQREGRESSION,
TOKENCLASSIFICATION,
SUMMARIZATION,
NLG_TASKS,
MULTICHOICECLASSIFICATION,
)
try:
import psutil
except ImportError:
psutil = None
try:
import resource
except ImportError:
resource = None
logger = logging.getLogger("flaml.automl")
FREE_MEM_RATIO = 0.2
def TimeoutHandler(sig, frame):
raise TimeoutError(sig, frame)
@contextmanager
def limit_resource(memory_limit, time_limit):
if memory_limit > 0:
soft, hard = resource.getrlimit(resource.RLIMIT_AS)
if soft < 0 and (hard < 0 or memory_limit <= hard) or memory_limit < soft:
try:
resource.setrlimit(resource.RLIMIT_AS, (int(memory_limit), hard))
except ValueError:
pass
main_thread = False
if time_limit is not None:
try:
signal.signal(signal.SIGALRM, TimeoutHandler)
signal.alarm(int(time_limit) or 1)
main_thread = True
except ValueError:
pass
try:
yield
finally:
if main_thread:
signal.alarm(0)
if memory_limit > 0:
resource.setrlimit(resource.RLIMIT_AS, (soft, hard))
class BaseEstimator:
def __init__(self, task="binary", **config):
self._task = task
self.params = self.config2params(config)
self.estimator_class = self._model = None
if "_estimator_type" in config:
self._estimator_type = self.params.pop("_estimator_type")
else:
self._estimator_type = (
"classifier" if task in CLASSIFICATION else "regressor"
)
def get_params(self, deep=False):
params = self.params.copy()
params["task"] = self._task
if hasattr(self, "_estimator_type"):
params["_estimator_type"] = self._estimator_type
return params
@property
def classes_(self):
return self._model.classes_
@property
def n_features_in_(self):
return self._model.n_features_in_
@property
def model(self):
return self._model
@property
def estimator(self):
return self._model
def _preprocess(self, X):
return X
def _fit(self, X_train, y_train, **kwargs):
current_time = time.time()
if "groups" in kwargs:
kwargs = kwargs.copy()
groups = kwargs.pop("groups")
if self._task == "rank":
kwargs["group"] = group_counts(groups)
# groups_val = kwargs.get('groups_val')
# if groups_val is not None:
# kwargs['eval_group'] = [group_counts(groups_val)]
# kwargs['eval_set'] = [
# (kwargs['X_val'], kwargs['y_val'])]
# kwargs['verbose'] = False
# del kwargs['groups_val'], kwargs['X_val'], kwargs['y_val']
X_train = self._preprocess(X_train)
model = self.estimator_class(**self.params)
if logger.level == logging.DEBUG:
# xgboost 1.6 doesn't display all the params in the model str
logger.debug(f"flaml.model - {model} fit started with params {self.params}")
model.fit(X_train, y_train, **kwargs)
if logger.level == logging.DEBUG:
logger.debug(f"flaml.model - {model} fit finished")
train_time = time.time() - current_time
self._model = model
return train_time
def fit(self, X_train, y_train, budget=None, **kwargs):
if (
getattr(self, "limit_resource", None)
and resource is not None
and (budget is not None or psutil is not None)
):
start_time = time.time()
mem = psutil.virtual_memory() if psutil is not None else None
try:
with limit_resource(
mem.available * (1 - FREE_MEM_RATIO)
+ psutil.Process(os.getpid()).memory_info().rss
if mem is not None
else -1,
budget,
):
train_time = self._fit(X_train, y_train, **kwargs)
except (MemoryError, TimeoutError) as e:
logger.warning(f"{e.__class__} {e}")
if self._task in CLASSIFICATION:
model = DummyClassifier()
else:
model = DummyRegressor()
X_train = self._preprocess(X_train)
model.fit(X_train, y_train)
self._model = model
train_time = time.time() - start_time
else:
train_time = self._fit(X_train, y_train, **kwargs)
return train_time
def predict(self, X, **kwargs):
if self._model is not None:
X = self._preprocess(X)
return self._model.predict(X)
else:
logger.warning(
"Estimator is not fit yet. Please run fit() before predict()."
)
return np.ones(X.shape[0])
def predict_proba(self, X, **kwargs):
assert self._task in CLASSIFICATION, "predict_proba() only for classification."
X = self._preprocess(X)
return self._model.predict_proba(X)
def score(self, X_val: DataFrame, y_val: Series, **kwargs):
from .ml import metric_loss_score
from .ml import is_min_metric
if self._model is not None:
if self._task == "rank":
raise NotImplementedError(
"AutoML.score() is not implemented for ranking"
)
else:
X_val = self._preprocess(X_val)
metric = kwargs.get("metric", None)
if metric:
y_pred = self.predict(X_val, **kwargs)
if is_min_metric(metric):
return metric_loss_score(metric, y_pred, y_val)
else:
return 1.0 - metric_loss_score(metric, y_pred, y_val)
else:
return self._model.score(X_val, y_val, **kwargs)
else:
logger.warning(
"Estimator is not fit yet. Please run fit() before predict()."
)
return 0.0
def cleanup(self):
del self._model
self._model = None
@classmethod
def search_space(cls, data_size, task, **params):
return {}
@classmethod
def size(cls, config: dict) -> float:
return 1.0
@classmethod
def cost_relative2lgbm(cls) -> float:
return 1.0
@classmethod
def init(cls):
pass
def config2params(self, config: dict) -> dict:
params = config.copy()
if "FLAML_sample_size" in params:
params.pop("FLAML_sample_size")
return params
class TransformersEstimator(BaseEstimator):
ITER_HP = "global_max_steps"
def __init__(self, task="seq-classification", **config):
super().__init__(task, **config)
import uuid
self.trial_id = str(uuid.uuid1().hex)[:8]
if task not in NLG_TASKS:
from .nlp.huggingface.training_args import (
TrainingArgumentsForAuto as TrainingArguments,
)
else:
from .nlp.huggingface.training_args import (
Seq2SeqTrainingArgumentsForAuto as TrainingArguments,
)
self._TrainingArguments = TrainingArguments
@staticmethod
def _join(X_train, y_train, task):
y_train = DataFrame(y_train, index=X_train.index)
y_train.columns = ["label"] if task != TOKENCLASSIFICATION else ["labels"]
train_df = X_train.join(y_train)
return train_df
@classmethod
def search_space(cls, data_size, task, **params):
search_space_dict = {
"learning_rate": {
"domain": tune.loguniform(lower=1e-6, upper=1e-3),
"init_value": 1e-5,
},
"num_train_epochs": {
"domain": tune.loguniform(lower=0.1, upper=10.0),
"init_value": 3.0,
},
"per_device_train_batch_size": {
"domain": tune.choice([4, 8, 16, 32]),
"init_value": 32,
},
"warmup_ratio": {
"domain": tune.uniform(lower=0.0, upper=0.3),
"init_value": 0.0,
},
"weight_decay": {
"domain": tune.uniform(lower=0.0, upper=0.3),
"init_value": 0.0,
},
"adam_epsilon": {
"domain": tune.loguniform(lower=1e-8, upper=1e-6),
"init_value": 1e-6,
},
"seed": {"domain": tune.choice(list(range(40, 45))), "init_value": 42},
"global_max_steps": {
"domain": sys.maxsize,
"init_value": sys.maxsize,
},
}
return search_space_dict
@property
def checkpoint_freq(self):
return (
int(
min(self._training_args.num_train_epochs, 1)
* len(self._X_train)
/ self._training_args.per_device_train_batch_size
/ self._training_args.ckpt_per_epoch
)
+ 1
)
@property
def fp16(self):
return self._kwargs.get("gpu_per_trial") and self._training_args.fp16
@property
def no_cuda(self):
return not self._kwargs.get("gpu_per_trial")
def _set_training_args(self, **kwargs):
from .nlp.utils import date_str, Counter
for (key, val) in kwargs.items():
assert key not in self.params, (
"Since {} is in the search space, it cannot exist in 'custom_fit_kwargs' at the same time."
"If you need to fix the value of {} to {}, the only way is to add a single-value domain in the search "
"space by adding:\n '{}': {{ 'domain': {} }} to 'custom_hp'. For example:"
'automl_settings["custom_hp"] = {{ "transformer": {{ "model_path": {{ "domain" : '
'"google/electra-small-discriminator" }} }} }}'.format(
key, key, val, key, val
)
)
self._training_args = self._TrainingArguments(**kwargs)
for key, val in self.params.items():
if hasattr(self._training_args, key):
setattr(self._training_args, key, val)
local_dir = os.path.join(
self._training_args.output_dir, "train_{}".format(date_str())
)
if self._use_ray is True:
import ray
self._training_args.output_dir = ray.tune.get_trial_dir()
else:
self._training_args.output_dir = Counter.get_trial_fold_name(
local_dir, self.params, self.trial_id
)
self._training_args.eval_steps = (
self._training_args.logging_steps
) = self._training_args.saving_steps = self.checkpoint_freq
self._training_args.fp16 = self.fp16
self._training_args.no_cuda = self.no_cuda
def _preprocess(self, X, y=None, **kwargs):
from .nlp.utils import tokenize_text, is_a_list_of_str
is_str = str(X.dtypes[0]) in ("string", "str")
is_list_of_str = is_a_list_of_str(X[list(X.keys())[0]].to_list()[0])
if is_str or is_list_of_str:
return tokenize_text(
X=X,
Y=y,
task=self._task,
hf_args=self._training_args,
tokenizer=self.tokenizer,
)
else:
return X, None
def _model_init(self):
from .nlp.utils import load_model
this_model = load_model(
checkpoint_path=self._training_args.model_path,
task=self._task,
num_labels=self.num_labels,
)
return this_model
def preprocess_data(self, X, y):
from datasets import Dataset
if (self._task not in NLG_TASKS) and (self._task != TOKENCLASSIFICATION):
processed_X, _ = self._preprocess(X=X, **self._kwargs)
processed_y = y
else:
processed_X, processed_y = self._preprocess(X=X, y=y, **self._kwargs)
processed_dataset = Dataset.from_pandas(
TransformersEstimator._join(processed_X, processed_y, self._task)
)
return processed_dataset, processed_X, processed_y
@property
def num_labels(self):
from .data import SEQCLASSIFICATION, SEQREGRESSION, TOKENCLASSIFICATION
if self._task == SEQREGRESSION:
return 1
elif self._task == SEQCLASSIFICATION:
return len(set(self._y_train))
elif self._task == TOKENCLASSIFICATION:
return len(set([a for b in self._y_train.tolist() for a in b]))
else:
return None
@property
def tokenizer(self):
from transformers import AutoTokenizer
if self._task == SUMMARIZATION:
return AutoTokenizer.from_pretrained(
pretrained_model_name_or_path=self._training_args.model_path,
cache_dir=None,
use_fast=True,
revision="main",
use_auth_token=None,
)
else:
return AutoTokenizer.from_pretrained(
self._training_args.model_path,
use_fast=True,
add_prefix_space=True
if "roberta" in self._training_args.model_path
else False,
)
@property
def data_collator(self):
from .nlp.huggingface.data_collator import task_to_datacollator_class
return (
task_to_datacollator_class[self._task](
tokenizer=self.tokenizer,
pad_to_multiple_of=8,
)
if self._task in (MULTICHOICECLASSIFICATION, TOKENCLASSIFICATION)
else None
)
def fit(
self,
X_train: DataFrame,
y_train: Series,
budget=None,
X_val=None,
y_val=None,
gpu_per_trial=None,
metric=None,
**kwargs,
):
import transformers
transformers.logging.set_verbosity_error()
from transformers import TrainerCallback
from transformers.trainer_utils import set_seed
from .nlp.huggingface.trainer import TrainerForAuto
try:
from ray.tune import is_session_enabled
self._use_ray = is_session_enabled()
except ImportError:
self._use_ray = False
this_params = self.params
self._kwargs = kwargs
self._X_train, self._y_train = X_train, y_train
self._set_training_args(**kwargs)
train_dataset, self._X_train, self._y_train = self.preprocess_data(
X_train, y_train
)
if X_val is not None:
eval_dataset, self._X_val, self._y_val = self.preprocess_data(X_val, y_val)
else:
eval_dataset, self._X_val, self._y_val = None, None, None
set_seed(self.params.get("seed", self._training_args.seed))
self._metric = metric
class EarlyStoppingCallbackForAuto(TrainerCallback):
def on_train_begin(self, args, state, control, **callback_kwargs):
self.train_begin_time = time.time()
def on_step_begin(self, args, state, control, **callback_kwargs):
self.step_begin_time = time.time()
def on_step_end(self, args, state, control, **callback_kwargs):
if state.global_step == 1:
self.time_per_iter = time.time() - self.step_begin_time
if (
budget
and (
time.time() + self.time_per_iter
> self.train_begin_time + budget
)
or state.global_step >= this_params[TransformersEstimator.ITER_HP]
):
control.should_training_stop = True
control.should_save = True
control.should_evaluate = True
return control
def on_epoch_end(self, args, state, control, **callback_kwargs):
if (
control.should_training_stop
or state.epoch + 1 >= args.num_train_epochs
):
control.should_save = True
control.should_evaluate = True
self._trainer = TrainerForAuto(
args=self._training_args,
model_init=self._model_init,
train_dataset=train_dataset,
eval_dataset=eval_dataset,
tokenizer=self.tokenizer,
data_collator=self.data_collator,
compute_metrics=self._compute_metrics_by_dataset_name,
callbacks=[EarlyStoppingCallbackForAuto],
)
if self._task in NLG_TASKS:
setattr(self._trainer, "_is_seq2seq", True)
if gpu_per_trial is not None:
tmp_cuda_visible_devices = os.environ.get("CUDA_VISIBLE_DEVICES", "")
self._trainer.args._n_gpu = gpu_per_trial
if tmp_cuda_visible_devices.count(",") != math.ceil(gpu_per_trial) - 1:
os.environ["CUDA_VISIBLE_DEVICES"] = ",".join(
[str(x) for x in range(math.ceil(gpu_per_trial))]
)
import time
start_time = time.time()
self._trainer.train()
if gpu_per_trial is not None:
os.environ["CUDA_VISIBLE_DEVICES"] = tmp_cuda_visible_devices
self.params[self.ITER_HP] = self._trainer.state.global_step
self._checkpoint_path = self._select_checkpoint(self._trainer)
self._ckpt_remains = list(self._trainer.ckpt_to_metric.keys())
if hasattr(self._trainer, "intermediate_results"):
self.intermediate_results = [
x[1]
for x in sorted(
self._trainer.intermediate_results.items(), key=lambda x: x[0]
)
]
self._trainer = None
return time.time() - start_time
def _delete_one_ckpt(self, ckpt_location):
if self._use_ray is False:
try:
shutil.rmtree(ckpt_location)
except FileNotFoundError:
logger.warning("checkpoint {} not found".format(ckpt_location))
def cleanup(self):
super().cleanup()
if hasattr(self, "_ckpt_remains"):
for each_ckpt in self._ckpt_remains:
self._delete_one_ckpt(each_ckpt)
def _select_checkpoint(self, trainer):
from transformers.trainer_utils import PREFIX_CHECKPOINT_DIR
if trainer.ckpt_to_metric:
best_ckpt, _ = min(
trainer.ckpt_to_metric.items(), key=lambda x: x[1]["eval_loss"]
)
best_ckpt_global_step = trainer.ckpt_to_global_step[best_ckpt]
for each_ckpt in list(trainer.ckpt_to_metric):
if each_ckpt != best_ckpt:
del trainer.ckpt_to_metric[each_ckpt]
del trainer.ckpt_to_global_step[each_ckpt]
self._delete_one_ckpt(each_ckpt)
else:
best_ckpt_global_step = trainer.state.global_step
best_ckpt = os.path.join(
trainer.args.output_dir,
f"{PREFIX_CHECKPOINT_DIR}-{best_ckpt_global_step}",
)
self.params[self.ITER_HP] = best_ckpt_global_step
logger.debug(trainer.state.global_step)
logger.debug(trainer.ckpt_to_global_step)
return best_ckpt
def _compute_metrics_by_dataset_name(self, eval_pred):
if isinstance(self._metric, str):
from .ml import metric_loss_score
from .nlp.utils import postprocess_text
predictions, labels = eval_pred
if self._task in NLG_TASKS:
if isinstance(predictions, tuple):
predictions = np.argmax(predictions[0], axis=2)
decoded_preds = self.tokenizer.batch_decode(
predictions, skip_special_tokens=True
)
labels = np.where(labels != -100, labels, self.tokenizer.pad_token_id)
decoded_labels = self.tokenizer.batch_decode(
labels, skip_special_tokens=True
)
predictions, labels = postprocess_text(decoded_preds, decoded_labels)
else:
predictions = (
np.squeeze(predictions)
if self._task == SEQREGRESSION
else np.argmax(predictions, axis=2)
if self._task == TOKENCLASSIFICATION
else np.argmax(predictions, axis=1)
)
metric_dict = {
"automl_metric": metric_loss_score(
metric_name=self._metric,
y_predict=predictions,
y_true=labels,
labels=self._training_args.label_list,
)
}
else:
loss, metric_dict = self._metric(
X_test=self._X_val,
y_test=self._y_val,
estimator=self,
labels=None,
X_train=self._X_train,
y_train=self._y_train,
)
metric_dict["automl_metric"] = loss
return metric_dict
def _init_model_for_predict(self):
from .nlp.huggingface.trainer import TrainerForAuto
training_args = self._TrainingArguments(
local_rank=-1, model_path=self._checkpoint_path, fp16=self.fp16
)
for key, val in self._training_args.__dict__.items():
if key not in ("local_rank", "model_path", "fp16"):
setattr(training_args, key, val)
self._training_args = training_args
new_trainer = TrainerForAuto(
model=self._model_init(),
args=self._training_args,
data_collator=self.data_collator,
compute_metrics=self._compute_metrics_by_dataset_name,
)
if self._task in NLG_TASKS:
setattr(new_trainer, "_is_seq2seq", True)
return new_trainer
def predict_proba(self, X, **pred_kwargs):
from datasets import Dataset
if pred_kwargs:
for key, val in pred_kwargs.items():
setattr(self._training_args, key, val)
assert (
self._task in CLASSIFICATION
), "predict_proba() only for classification tasks."
X_test, _ = self._preprocess(X, **self._kwargs)
test_dataset = Dataset.from_pandas(X_test)
new_trainer = self._init_model_for_predict()
predictions = new_trainer.predict(test_dataset)
return predictions.predictions
def score(self, X_val: DataFrame, y_val: Series, **kwargs):
import transformers
transformers.logging.set_verbosity_error()
self._metric = kwargs["metric"]
eval_dataset, X_val, y_val = self.preprocess_data(X_val, y_val)
new_trainer = self._init_model_for_predict()
return new_trainer.evaluate(eval_dataset)
def predict(self, X, **pred_kwargs):
import transformers
from datasets import Dataset
transformers.logging.set_verbosity_error()
if pred_kwargs:
for key, val in pred_kwargs.items():
setattr(self._training_args, key, val)
X_test, _ = self._preprocess(X, **self._kwargs)
test_dataset = Dataset.from_pandas(X_test)
new_trainer = self._init_model_for_predict()
if self._task not in NLG_TASKS:
predictions = new_trainer.predict(test_dataset)
else:
predictions = new_trainer.predict(
test_dataset,
metric_key_prefix="predict",
)
if self._task == SEQCLASSIFICATION:
return np.argmax(predictions.predictions, axis=1)
elif self._task == SEQREGRESSION:
return predictions.predictions.reshape((len(predictions.predictions),))
elif self._task == TOKENCLASSIFICATION:
return np.argmax(predictions.predictions, axis=2)
elif self._task == SUMMARIZATION:
decoded_preds = self.tokenizer.batch_decode(
predictions.predictions, skip_special_tokens=True
)
return decoded_preds
elif self._task == MULTICHOICECLASSIFICATION:
return np.argmax(predictions.predictions, axis=1)
def config2params(self, config: dict) -> dict:
params = super().config2params(config)
params[TransformersEstimator.ITER_HP] = params.get(
TransformersEstimator.ITER_HP, sys.maxsize
)
return params
class TransformersEstimatorModelSelection(TransformersEstimator):
def __init__(self, task="seq-classification", **config):
super().__init__(task, **config)
@classmethod
def search_space(cls, data_size, task, **params):
search_space_dict = TransformersEstimator.search_space(
data_size, task, **params
)
search_space_dict["model_path"] = {
"domain": tune.choice(
[
"google/electra-base-discriminator",
"bert-base-uncased",
"roberta-base",
"facebook/muppet-roberta-base",
"google/electra-small-discriminator",
]
),
"init_value": "facebook/muppet-roberta-base",
}
return search_space_dict
class SKLearnEstimator(BaseEstimator):
def __init__(self, task="binary", **config):
super().__init__(task, **config)
def _preprocess(self, X):
if isinstance(X, DataFrame):
cat_columns = X.select_dtypes(include=["category"]).columns
if not cat_columns.empty:
X = X.copy()
X[cat_columns] = X[cat_columns].apply(lambda x: x.cat.codes)
elif isinstance(X, np.ndarray) and X.dtype.kind not in "buif":
X = DataFrame(X)
for col in X.columns:
if isinstance(X[col][0], str):
X[col] = X[col].astype("category").cat.codes
X = X.to_numpy()
return X
class LGBMEstimator(BaseEstimator):
ITER_HP = "n_estimators"
HAS_CALLBACK = True
DEFAULT_ITER = 100
@classmethod
def search_space(cls, data_size, **params):
upper = max(5, min(32768, int(data_size[0])))
return {
"n_estimators": {
"domain": tune.lograndint(lower=4, upper=upper),
"init_value": 4,
"low_cost_init_value": 4,
},
"num_leaves": {
"domain": tune.lograndint(lower=4, upper=upper),
"init_value": 4,
"low_cost_init_value": 4,
},
"min_child_samples": {
"domain": tune.lograndint(lower=2, upper=2**7 + 1),
"init_value": 20,
},
"learning_rate": {
"domain": tune.loguniform(lower=1 / 1024, upper=1.0),
"init_value": 0.1,
},
"log_max_bin": {
"domain": tune.lograndint(lower=3, upper=11),
"init_value": 8,
},
"colsample_bytree": {
"domain": tune.uniform(lower=0.01, upper=1.0),
"init_value": 1.0,
},
"reg_alpha": {
"domain": tune.loguniform(lower=1 / 1024, upper=1024),
"init_value": 1 / 1024,
},
"reg_lambda": {
"domain": tune.loguniform(lower=1 / 1024, upper=1024),
"init_value": 1.0,
},
}
def config2params(self, config: dict) -> dict:
params = super().config2params(config)
if "log_max_bin" in params:
params["max_bin"] = (1 << params.pop("log_max_bin")) - 1
return params
@classmethod
def size(cls, config):
num_leaves = int(
round(
config.get("num_leaves")
or config.get("max_leaves")
or 1 << config.get("max_depth", 16)
)
)
n_estimators = int(round(config["n_estimators"]))
return (num_leaves * 3 + (num_leaves - 1) * 4 + 1.0) * n_estimators * 8
def __init__(self, task="binary", **config):
super().__init__(task, **config)
if "verbose" not in self.params:
self.params["verbose"] = -1
if "regression" == task:
from lightgbm import LGBMRegressor
self.estimator_class = LGBMRegressor
elif "rank" == task:
from lightgbm import LGBMRanker
self.estimator_class = LGBMRanker
else:
from lightgbm import LGBMClassifier
self.estimator_class = LGBMClassifier
self._time_per_iter = None
self._train_size = 0
self._mem_per_iter = -1
self.HAS_CALLBACK = self.HAS_CALLBACK and self._callbacks(0, 0) is not None
def _preprocess(self, X):
if (
not isinstance(X, DataFrame)
and issparse(X)
and np.issubdtype(X.dtype, np.integer)
):
X = X.astype(float)
elif isinstance(X, np.ndarray) and X.dtype.kind not in "buif":
X = DataFrame(X)
for col in X.columns:
if isinstance(X[col][0], str):
X[col] = X[col].astype("category").cat.codes
X = X.to_numpy()
return X
def fit(self, X_train, y_train, budget=None, **kwargs):
start_time = time.time()
deadline = start_time + budget if budget else np.inf
n_iter = self.params.get(self.ITER_HP, self.DEFAULT_ITER)
trained = False
if not self.HAS_CALLBACK:
mem0 = psutil.virtual_memory().available if psutil is not None else 1
if (
(
not self._time_per_iter
or abs(self._train_size - X_train.shape[0]) > 4
)
and budget is not None
or self._mem_per_iter < 0
and psutil is not None
) and n_iter > 1:
self.params[self.ITER_HP] = 1
self._t1 = self._fit(X_train, y_train, **kwargs)
if budget is not None and self._t1 >= budget or n_iter == 1:
return self._t1
mem1 = psutil.virtual_memory().available if psutil is not None else 1
self._mem1 = mem0 - mem1
self.params[self.ITER_HP] = min(n_iter, 4)
self._t2 = self._fit(X_train, y_train, **kwargs)
mem2 = psutil.virtual_memory().available if psutil is not None else 1
self._mem2 = max(mem0 - mem2, self._mem1)
self._mem_per_iter = min(
self._mem1, self._mem2 / self.params[self.ITER_HP]
)
self._time_per_iter = (
(self._t2 - self._t1) / (self.params[self.ITER_HP] - 1)
if self._t2 > self._t1
else self._t1
if self._t1
else 0.001
)
self._train_size = X_train.shape[0]
if (
budget is not None
and self._t1 + self._t2 >= budget
or n_iter == self.params[self.ITER_HP]
):
return time.time() - start_time
trained = True
if n_iter > 1:
max_iter = min(
n_iter,
int(
(budget - time.time() + start_time - self._t1)
/ self._time_per_iter
+ 1
)
if budget is not None
else n_iter,
int((1 - FREE_MEM_RATIO) * mem0 / self._mem_per_iter)
if psutil is not None and self._mem_per_iter > 0
else n_iter,
)
if trained and max_iter <= self.params[self.ITER_HP]:
return time.time() - start_time
self.params[self.ITER_HP] = max(max_iter, 1)
if self.HAS_CALLBACK:
kwargs_callbacks = kwargs.get("callbacks")
if kwargs_callbacks:
callbacks = kwargs_callbacks + self._callbacks(start_time, deadline)
kwargs.pop("callbacks")
else:
callbacks = self._callbacks(start_time, deadline)
if isinstance(self, XGBoostSklearnEstimator):
from xgboost import __version__
if __version__ >= "1.6.0":
self.params["callbacks"] = callbacks
callbacks = None
self._fit(
X_train,
y_train,
callbacks=callbacks,
**kwargs,
)
if callbacks is None:
# for xgboost>=1.6.0, pop callbacks to enable pickle
callbacks = self.params.pop("callbacks")
self._model.set_params(callbacks=callbacks[:-1])
best_iteration = (
self._model.get_booster().best_iteration
if isinstance(self, XGBoostSklearnEstimator)
else self._model.best_iteration_
)
if best_iteration is not None:
self._model.set_params(n_estimators=best_iteration + 1)
else:
self._fit(X_train, y_train, **kwargs)
train_time = time.time() - start_time
return train_time
def _callbacks(self, start_time, deadline) -> List[Callable]:
return [partial(self._callback, start_time, deadline)]
def _callback(self, start_time, deadline, env) -> None:
from lightgbm.callback import EarlyStopException
now = time.time()
if env.iteration == 0:
self._time_per_iter = now - start_time
if now + self._time_per_iter > deadline:
raise EarlyStopException(env.iteration, env.evaluation_result_list)
if psutil is not None:
mem = psutil.virtual_memory()
if mem.available / mem.total < FREE_MEM_RATIO:
raise EarlyStopException(env.iteration, env.evaluation_result_list)
class XGBoostEstimator(SKLearnEstimator):
    """XGBoost estimator trained via the native ``xgb.train`` API (not the
    sklearn wrapper)."""
    DEFAULT_ITER = 10
    @classmethod
    def search_space(cls, data_size, **params):
        """Hyperparameter search space; integer ranges scale with row count."""
        upper = max(5, min(32768, int(data_size[0]))) # upper must be larger than lower
        return {
            "n_estimators": {
                "domain": tune.lograndint(lower=4, upper=upper),
                "init_value": 4,
                "low_cost_init_value": 4,
            },
            "max_leaves": {
                "domain": tune.lograndint(lower=4, upper=upper),
                "init_value": 4,
                "low_cost_init_value": 4,
            },
            "max_depth": {
                # 0 means unbounded depth (leaf-wise growth is used instead).
                "domain": tune.choice([0, 6, 12]),
                "init_value": 0,
            },
            "min_child_weight": {
                "domain": tune.loguniform(lower=0.001, upper=128),
                "init_value": 1.0,
            },
            "learning_rate": {
                "domain": tune.loguniform(lower=1 / 1024, upper=1.0),
                "init_value": 0.1,
            },
            "subsample": {
                "domain": tune.uniform(lower=0.1, upper=1.0),
                "init_value": 1.0,
            },
            "colsample_bylevel": {
                "domain": tune.uniform(lower=0.01, upper=1.0),
                "init_value": 1.0,
            },
            "colsample_bytree": {
                "domain": tune.uniform(lower=0.01, upper=1.0),
                "init_value": 1.0,
            },
            "reg_alpha": {
                "domain": tune.loguniform(lower=1 / 1024, upper=1024),
                "init_value": 1 / 1024,
            },
            "reg_lambda": {
                "domain": tune.loguniform(lower=1 / 1024, upper=1024),
                "init_value": 1.0,
            },
        }
    @classmethod
    def size(cls, config):
        # Tree ensembles of similar shape: reuse LightGBM's size estimate.
        return LGBMEstimator.size(config)
    @classmethod
    def cost_relative2lgbm(cls):
        # Empirical training-cost ratio relative to LightGBM.
        return 1.6
    def config2params(self, config: dict) -> dict:
        """Map search-space config to native-API XGBoost parameter names."""
        params = super().config2params(config)
        # max_depth == 0 means unbounded depth; grow leaf-wise like LightGBM.
        max_depth = params["max_depth"] = params.get("max_depth", 0)
        if max_depth == 0:
            params["grow_policy"] = params.get("grow_policy", "lossguide")
        params["tree_method"] = params.get("tree_method", "hist")
        # params["booster"] = params.get("booster", "gbtree")
        params["use_label_encoder"] = params.get("use_label_encoder", False)
        if "n_jobs" in config:
            # The native API spells the thread count "nthread", not "n_jobs".
            params["nthread"] = params.pop("n_jobs")
        return params
    def __init__(
        self,
        task="regression",
        **config,
    ):
        super().__init__(task, **config)
        self.params["verbosity"] = 0
    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Train with ``xgb.train``; returns elapsed training time in seconds."""
        import xgboost as xgb
        start_time = time.time()
        deadline = start_time + budget if budget else np.inf
        if issparse(X_train):
            # NOTE(review): lexicographic string compare of versions is
            # fragile (e.g. "1.10.0" < "1.6.0" is True) — confirm intent.
            if xgb.__version__ < "1.6.0":
                # "auto" fails for sparse input since xgboost 1.6.0
                self.params["tree_method"] = "auto"
        else:
            X_train = self._preprocess(X_train)
        if "sample_weight" in kwargs:
            dtrain = xgb.DMatrix(X_train, label=y_train, weight=kwargs["sample_weight"])
        else:
            dtrain = xgb.DMatrix(X_train, label=y_train)
        # A non-string objective is a custom callable passed to xgb.train as
        # ``obj``; it must be removed from the params dict for the call.
        objective = self.params.get("objective")
        if isinstance(objective, str):
            obj = None
        else:
            obj = objective
            if "objective" in self.params:
                del self.params["objective"]
        _n_estimators = self.params.pop("n_estimators")
        callbacks = XGBoostEstimator._callbacks(start_time, deadline)
        if callbacks:
            self._model = xgb.train(
                self.params,
                dtrain,
                _n_estimators,
                obj=obj,
                callbacks=callbacks,
            )
            self.params["n_estimators"] = self._model.best_iteration + 1
        else:
            self._model = xgb.train(self.params, dtrain, _n_estimators, obj=obj)
            self.params["n_estimators"] = _n_estimators
        # Restore popped keys so self.params reflects the full config again.
        self.params["objective"] = objective
        del dtrain
        train_time = time.time() - start_time
        return train_time
    def predict(self, X, **kwargs):
        """Predict by wrapping X in a DMatrix (preprocessing dense input)."""
        import xgboost as xgb
        if not issparse(X):
            X = self._preprocess(X)
        dtest = xgb.DMatrix(X)
        return super().predict(dtest)
    @classmethod
    def _callbacks(cls, start_time, deadline):
        # Build a resource-limit callback list; returns None for xgboost<1.3,
        # which lacks the TrainingCallback interface.
        try:
            from xgboost.callback import TrainingCallback
        except ImportError:  # for xgboost<1.3
            return None
        class ResourceLimit(TrainingCallback):
            # Returning True from after_iteration stops training.
            def after_iteration(self, model, epoch, evals_log) -> bool:
                now = time.time()
                if epoch == 0:
                    # Estimate per-iteration cost from the first round.
                    self._time_per_iter = now - start_time
                if now + self._time_per_iter > deadline:
                    return True
                if psutil is not None:
                    mem = psutil.virtual_memory()
                    if mem.available / mem.total < FREE_MEM_RATIO:
                        return True
                return False
        return [ResourceLimit()]
class XGBoostSklearnEstimator(SKLearnEstimator, LGBMEstimator):
    """XGBoost estimator using the sklearn wrapper API (XGBRegressor etc.)."""
    DEFAULT_ITER = 10
    @classmethod
    def search_space(cls, data_size, **params):
        # Same space as the native-API estimator minus max_depth:
        # this variant always grows leaf-wise.
        space = XGBoostEstimator.search_space(data_size)
        space.pop("max_depth")
        return space
    @classmethod
    def cost_relative2lgbm(cls):
        # Same empirical cost ratio as the native-API XGBoost estimator.
        return XGBoostEstimator.cost_relative2lgbm()
    def config2params(self, config: dict) -> dict:
        """Map search-space config to XGBoost sklearn-wrapper parameters."""
        params = super().config2params(config)
        # max_depth == 0 means unbounded depth; grow leaf-wise like LightGBM.
        max_depth = params["max_depth"] = params.get("max_depth", 0)
        if max_depth == 0:
            params["grow_policy"] = params.get("grow_policy", "lossguide")
        params["tree_method"] = params.get("tree_method", "hist")
        params["use_label_encoder"] = params.get("use_label_encoder", False)
        return params
    def __init__(
        self,
        task="binary",
        **config,
    ):
        super().__init__(task, **config)
        # The xgboost sklearn wrapper takes "verbosity" rather than "verbose".
        del self.params["verbose"]
        self.params["verbosity"] = 0
        import xgboost as xgb
        self.estimator_class = xgb.XGBRegressor
        if "rank" == task:
            self.estimator_class = xgb.XGBRanker
        elif task in CLASSIFICATION:
            self.estimator_class = xgb.XGBClassifier
        self._xgb_version = xgb.__version__
    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Delegate to the LGBMEstimator fit loop after xgboost-specific setup."""
        # NOTE(review): lexicographic string compare of versions is fragile
        # (e.g. "1.10.0" < "1.6.0" is True) — confirm intended range.
        if issparse(X_train) and self._xgb_version < "1.6.0":
            # "auto" fails for sparse input since xgboost 1.6.0
            self.params["tree_method"] = "auto"
        if kwargs.get("gpu_per_trial"):
            self.params["tree_method"] = "gpu_hist"
            kwargs.pop("gpu_per_trial")
        return super().fit(X_train, y_train, budget, **kwargs)
    def _callbacks(self, start_time, deadline) -> List[Callable]:
        # Reuse the native-API resource-limit callback (TrainingCallback based).
        return XGBoostEstimator._callbacks(start_time, deadline)
class XGBoostLimitDepthEstimator(XGBoostSklearnEstimator):
    """XGBoost variant with depth-wise growth: max_depth is tuned and
    max_leaves is removed from the search space."""

    @classmethod
    def search_space(cls, data_size, **params):
        """Derive the space from XGBoostEstimator, swapping leaves for depth."""
        base_space = XGBoostEstimator.search_space(data_size)
        base_space.pop("max_leaves")
        depth_cap = max(6, int(np.log2(data_size[0])))
        base_space["max_depth"] = {
            "domain": tune.randint(lower=1, upper=min(depth_cap, 16)),
            "init_value": 6,
            "low_cost_init_value": 1,
        }
        # Depth-limited trees start from stronger boosting defaults.
        base_space["learning_rate"]["init_value"] = 0.3
        base_space["n_estimators"]["init_value"] = 10
        return base_space

    @classmethod
    def cost_relative2lgbm(cls):
        """Empirical training-cost ratio relative to LightGBM."""
        return 64
class RandomForestEstimator(SKLearnEstimator, LGBMEstimator):
    """Random forest via scikit-learn; no per-iteration callbacks exist,
    so budget-based early stopping is disabled."""

    HAS_CALLBACK = False
    nrows = 101

    @classmethod
    def search_space(cls, data_size, task, **params):
        """Search space; records the row count on the class as a side effect."""
        n_rows = int(data_size[0])
        RandomForestEstimator.nrows = n_rows
        tree_upper = min(2048, n_rows)
        # sqrt(n_features) fraction is the classic default for classification.
        feat_init = 1 / np.sqrt(data_size[1]) if task in CLASSIFICATION else 1
        feat_lower = min(0.1, feat_init)
        space = {
            "n_estimators": {
                "domain": tune.lograndint(lower=4, upper=max(5, tree_upper)),
                "init_value": 4,
                "low_cost_init_value": 4,
            },
            "max_features": {
                "domain": tune.loguniform(lower=feat_lower, upper=1.0),
                "init_value": feat_init,
            },
            "max_leaves": {
                "domain": tune.lograndint(
                    lower=4,
                    upper=max(5, min(32768, n_rows >> 1)),
                ),
                "init_value": 4,
                "low_cost_init_value": 4,
            },
        }
        if task in CLASSIFICATION:
            space["criterion"] = {
                "domain": tune.choice(["gini", "entropy"]),
            }
        return space

    @classmethod
    def cost_relative2lgbm(cls):
        """Empirical training-cost ratio relative to LightGBM."""
        return 2

    def config2params(self, config: dict) -> dict:
        """Map search-space config to sklearn parameter names."""
        params = super().config2params(config)
        if "max_leaves" in params:
            # scikit-learn spells this hyperparameter "max_leaf_nodes".
            params["max_leaf_nodes"] = params.get(
                "max_leaf_nodes", params.pop("max_leaves")
            )
        if self._task not in CLASSIFICATION and "criterion" in config:
            # "criterion" choices here are classification-only; drop it.
            params.pop("criterion")
        return params

    def __init__(self, task="binary", **params):
        super().__init__(task, **params)
        self.params["verbose"] = 0
        if task in CLASSIFICATION:
            self.estimator_class = RandomForestClassifier
        else:
            self.estimator_class = RandomForestRegressor
class ExtraTreesEstimator(RandomForestEstimator):
    """Extremely randomized trees; shares the random-forest search space."""

    @classmethod
    def cost_relative2lgbm(cls):
        """Empirical training-cost ratio relative to LightGBM."""
        return 1.9

    def __init__(self, task="binary", **params):
        super().__init__(task, **params)
        # Substring check mirrors the long-standing behavior: any task name
        # containing "regression" selects the regressor.
        wants_regressor = "regression" in task
        self.estimator_class = (
            ExtraTreesRegressor if wants_regressor else ExtraTreesClassifier
        )
class LRL1Classifier(SKLearnEstimator):
    """Logistic regression with L1 regularization (saga solver)."""

    @classmethod
    def search_space(cls, **params):
        """Only the inverse regularization strength C is tuned."""
        return {
            "C": {
                "domain": tune.loguniform(lower=0.03125, upper=32768.0),
                "init_value": 1.0,
            },
        }

    @classmethod
    def cost_relative2lgbm(cls):
        """Empirical training-cost ratio relative to LightGBM."""
        return 160

    def config2params(self, config: dict) -> dict:
        """Fill in solver/penalty defaults suited to L1 regularization."""
        params = super().config2params(config)
        for key, default in (("tol", 0.0001), ("solver", "saga"), ("penalty", "l1")):
            params[key] = params.get(key, default)
        return params

    def __init__(self, task="binary", **config):
        super().__init__(task, **config)
        assert task in CLASSIFICATION, "LogisticRegression for classification task only"
        self.estimator_class = LogisticRegression
class LRL2Classifier(SKLearnEstimator):
    """Logistic regression with L2 regularization (lbfgs solver)."""

    limit_resource = True

    @classmethod
    def search_space(cls, **params):
        """Same single-parameter (C) space as the L1 variant."""
        return LRL1Classifier.search_space(**params)

    @classmethod
    def cost_relative2lgbm(cls):
        """Empirical training-cost ratio relative to LightGBM."""
        return 25

    def config2params(self, config: dict) -> dict:
        """Fill in solver/penalty defaults suited to L2 regularization."""
        params = super().config2params(config)
        for key, default in (("tol", 0.0001), ("solver", "lbfgs"), ("penalty", "l2")):
            params[key] = params.get(key, default)
        return params

    def __init__(self, task="binary", **config):
        super().__init__(task, **config)
        assert task in CLASSIFICATION, "LogisticRegression for classification task only"
        self.estimator_class = LogisticRegression
class CatBoostEstimator(BaseEstimator):
    """CatBoost estimator with early stopping on a held-out tail split."""
    ITER_HP = "n_estimators"
    DEFAULT_ITER = 1000
    @classmethod
    def search_space(cls, data_size, **params):
        """Tune learning rate and early-stopping patience; tree count is capped."""
        # Patience upper bound shrinks as the dataset grows.
        upper = max(min(round(1500000 / data_size[0]), 150), 12)
        return {
            "early_stopping_rounds": {
                "domain": tune.lograndint(lower=10, upper=upper),
                "init_value": 10,
                "low_cost_init_value": 10,
            },
            "learning_rate": {
                "domain": tune.loguniform(lower=0.005, upper=0.2),
                "init_value": 0.1,
            },
            "n_estimators": {
                # Fixed cap; early stopping decides the effective tree count.
                "domain": 8192,
                "init_value": 8192,
            },
        }
    @classmethod
    def size(cls, config):
        """Rough in-memory model size in bytes for a given config."""
        n_estimators = config.get("n_estimators", 8192)
        max_leaves = 64
        return (max_leaves * 3 + (max_leaves - 1) * 4 + 1.0) * n_estimators * 8
    @classmethod
    def cost_relative2lgbm(cls):
        # Empirical training-cost ratio relative to LightGBM.
        return 15
    def _preprocess(self, X):
        """Make input CatBoost-friendly: stringify float category labels and
        integer-encode string columns of object-dtype ndarrays."""
        if isinstance(X, DataFrame):
            cat_columns = X.select_dtypes(include=["category"]).columns
            if not cat_columns.empty:
                X = X.copy()
                # CatBoost rejects float category values; render them as str.
                X[cat_columns] = X[cat_columns].apply(
                    lambda x: x.cat.rename_categories(
                        [
                            str(c) if isinstance(c, float) else c
                            for c in x.cat.categories
                        ]
                    )
                )
        elif isinstance(X, np.ndarray) and X.dtype.kind not in "buif":
            # numpy array is not of numeric dtype
            X = DataFrame(X)
            for col in X.columns:
                if isinstance(X[col][0], str):
                    X[col] = X[col].astype("category").cat.codes
            X = X.to_numpy()
        return X
    def config2params(self, config: dict) -> dict:
        """Map search-space config to CatBoost parameter names."""
        params = super().config2params(config)
        params["n_estimators"] = params.get("n_estimators", 8192)
        if "n_jobs" in params:
            # CatBoost spells the thread count "thread_count".
            params["thread_count"] = params.pop("n_jobs")
        return params
    def __init__(
        self,
        task="binary",
        **config,
    ):
        super().__init__(task, **config)
        self.params.update(
            {
                "verbose": config.get("verbose", False),
                "random_seed": config.get("random_seed", 10242048),
            }
        )
        from catboost import CatBoostRegressor
        self.estimator_class = CatBoostRegressor
        if task in CLASSIFICATION:
            from catboost import CatBoostClassifier
            self.estimator_class = CatBoostClassifier
    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Fit with the last ~10% of rows (at most 1000) held out for early
        stopping; returns elapsed training time in seconds."""
        start_time = time.time()
        deadline = start_time + budget if budget else np.inf
        # Unique scratch directory per fit; removed after training.
        train_dir = f"catboost_{str(start_time)}"
        X_train = self._preprocess(X_train)
        if isinstance(X_train, DataFrame):
            cat_features = list(X_train.select_dtypes(include="category").columns)
        else:
            cat_features = []
        # Train on the first n rows; the remainder becomes the eval set.
        n = max(int(len(y_train) * 0.9), len(y_train) - 1000)
        X_tr, y_tr = X_train[:n], y_train[:n]
        if "sample_weight" in kwargs:
            weight = kwargs["sample_weight"]
            if weight is not None:
                # Truncate weights to match the training split.
                kwargs["sample_weight"] = weight[:n]
        else:
            weight = None
        from catboost import Pool, __version__
        model = self.estimator_class(train_dir=train_dir, **self.params)
        if __version__ >= "0.26":
            # catboost>=0.26 supports fit-time callbacks for budget limits.
            model.fit(
                X_tr,
                y_tr,
                cat_features=cat_features,
                eval_set=Pool(
                    data=X_train[n:], label=y_train[n:], cat_features=cat_features
                ),
                callbacks=CatBoostEstimator._callbacks(start_time, deadline),
                **kwargs,
            )
        else:
            model.fit(
                X_tr,
                y_tr,
                cat_features=cat_features,
                eval_set=Pool(
                    data=X_train[n:], label=y_train[n:], cat_features=cat_features
                ),
                **kwargs,
            )
        shutil.rmtree(train_dir, ignore_errors=True)
        if weight is not None:
            # Restore the caller's full-length sample_weight.
            kwargs["sample_weight"] = weight
        self._model = model
        self.params[self.ITER_HP] = self._model.tree_count_
        train_time = time.time() - start_time
        return train_time
    @classmethod
    def _callbacks(cls, start_time, deadline):
        class ResourceLimit:
            # CatBoost callback protocol: returning False aborts training.
            def after_iteration(self, info) -> bool:
                now = time.time()
                if info.iteration == 1:
                    # CatBoost iterations are 1-based; estimate per-iter time.
                    self._time_per_iter = now - start_time
                if now + self._time_per_iter > deadline:
                    return False
                if psutil is not None:
                    mem = psutil.virtual_memory()
                    if mem.available / mem.total < FREE_MEM_RATIO:
                        return False
                return True  # can continue
        return [ResourceLimit()]
class KNeighborsEstimator(BaseEstimator):
    """k-nearest-neighbors estimator; categorical features are dropped
    because kNN distances require numeric input."""

    @classmethod
    def search_space(cls, data_size, **params):
        """Tune only n_neighbors, capped at half the row count (max 512)."""
        neighbor_cap = min(512, int(data_size[0] / 2))
        return {
            "n_neighbors": {
                "domain": tune.lograndint(lower=1, upper=max(2, neighbor_cap)),
                "init_value": 5,
                "low_cost_init_value": 1,
            },
        }

    @classmethod
    def cost_relative2lgbm(cls):
        """Empirical training-cost ratio relative to LightGBM."""
        return 30

    def config2params(self, config: dict) -> dict:
        """Default to distance-weighted voting."""
        params = super().config2params(config)
        params["weights"] = params.get("weights", "distance")
        return params

    def __init__(self, task="binary", **config):
        super().__init__(task, **config)
        if task in CLASSIFICATION:
            from sklearn.neighbors import KNeighborsClassifier
            self.estimator_class = KNeighborsClassifier
        else:
            from sklearn.neighbors import KNeighborsRegressor
            self.estimator_class = KNeighborsRegressor

    def _preprocess(self, X):
        """Drop categorical columns; raise if nothing numeric remains."""
        if isinstance(X, DataFrame):
            categorical = X.select_dtypes(["category"]).columns
            if X.shape[1] == len(categorical):
                raise ValueError("kneighbor requires at least one numeric feature")
            X = X.drop(categorical, axis=1)
        elif isinstance(X, np.ndarray) and X.dtype.kind not in "buif":
            # Object-dtype array: go through a DataFrame to locate and drop
            # string-valued columns, then convert back.
            frame = DataFrame(X)
            string_cols = [c for c in frame.columns if isinstance(frame[c][0], str)]
            X = frame.drop(string_cols, axis=1).to_numpy()
        return X
class Prophet(SKLearnEstimator):
    """Prophet time-series forecaster (requires the ``prophet`` package)."""

    @classmethod
    def search_space(cls, **params):
        """Search over Prophet's prior scales and seasonality mode."""
        space = {
            "changepoint_prior_scale": {
                "domain": tune.loguniform(lower=0.001, upper=0.05),
                "init_value": 0.05,
                "low_cost_init_value": 0.001,
            },
            "seasonality_prior_scale": {
                "domain": tune.loguniform(lower=0.01, upper=10),
                "init_value": 10,
            },
            "holidays_prior_scale": {
                "domain": tune.loguniform(lower=0.01, upper=10),
                "init_value": 10,
            },
            "seasonality_mode": {
                "domain": tune.choice(["additive", "multiplicative"]),
                "init_value": "multiplicative",
            },
        }
        return space

    def __init__(self, task="ts_forecast", n_jobs=1, **params):
        super().__init__(task, **params)

    def _join(self, X_train, y_train):
        """Join X and y into the single training dataframe Prophet expects."""
        assert TS_TIMESTAMP_COL in X_train, (
            "Dataframe for training ts_forecast model must have column"
            f' "{TS_TIMESTAMP_COL}" with the dates in X_train.'
        )
        y_train = DataFrame(y_train, columns=[TS_VALUE_COL])
        train_df = X_train.join(y_train)
        return train_df

    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Fit Prophet, treating every non-timestamp/value column as an extra
        regressor; returns elapsed training time in seconds."""
        from prophet import Prophet
        current_time = time.time()
        train_df = self._join(X_train, y_train)
        train_df = self._preprocess(train_df)
        cols = list(train_df)
        cols.remove(TS_TIMESTAMP_COL)
        cols.remove(TS_VALUE_COL)
        # Silence Prophet's verbose INFO logging during fitting.
        logging.getLogger("prophet").setLevel(logging.WARNING)
        model = Prophet(**self.params)
        for regressor in cols:
            model.add_regressor(regressor)
        with suppress_stdout_stderr():
            model.fit(train_df)
        train_time = time.time() - current_time
        self._model = model
        return train_time

    def predict(self, X, **kwargs):
        """Predict from a dataframe whose first column holds the timestamps."""
        if isinstance(X, int):
            raise ValueError(
                "predict() with steps is only supported for arima/sarimax."
                " For Prophet, pass a dataframe with the first column containing"
                " the timestamp values."
            )
        if self._model is not None:
            X = self._preprocess(X)
            forecast = self._model.predict(X)
            return forecast["yhat"]
        else:
            logger.warning(
                "Estimator is not fit yet. Please run fit() before predict()."
            )
            return np.ones(X.shape[0])

    def score(self, X_val: DataFrame, y_val: Series, **kwargs):
        """Score on validation data; defaults to R^2 when no metric is given.

        Bug fix: sklearn's r2_score signature is (y_true, y_pred); the
        arguments were previously swapped, which changes the result because
        R^2 is not symmetric in its arguments.
        """
        from sklearn.metrics import r2_score
        from .ml import metric_loss_score
        y_pred = self.predict(X_val)
        self._metric = kwargs.get("metric", None)
        if self._metric:
            return metric_loss_score(self._metric, y_pred, y_val)
        else:
            return r2_score(y_val, y_pred)
class ARIMA(Prophet):
    """ARIMA forecaster via statsmodels; reuses Prophet's X/y joining."""
    @classmethod
    def search_space(cls, **params):
        """Search over the (p, d, q) order of the ARIMA model."""
        space = {
            "p": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 2,
                "low_cost_init_value": 0,
            },
            "d": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 2,
                "low_cost_init_value": 0,
            },
            "q": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 1,
                "low_cost_init_value": 0,
            },
        }
        return space
    def _join(self, X_train, y_train):
        # statsmodels wants the timestamps as the index, not as a column.
        train_df = super()._join(X_train, y_train)
        train_df.index = to_datetime(train_df[TS_TIMESTAMP_COL])
        train_df = train_df.drop(TS_TIMESTAMP_COL, axis=1)
        return train_df
    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Fit statsmodels ARIMA, passing any extra columns as exog regressors;
        returns elapsed training time in seconds."""
        import warnings
        warnings.filterwarnings("ignore")
        from statsmodels.tsa.arima.model import ARIMA as ARIMA_estimator
        current_time = time.time()
        train_df = self._join(X_train, y_train)
        train_df = self._preprocess(train_df)
        regressors = list(train_df)
        regressors.remove(TS_VALUE_COL)
        if regressors:
            model = ARIMA_estimator(
                train_df[[TS_VALUE_COL]],
                exog=train_df[regressors],
                order=(self.params["p"], self.params["d"], self.params["q"]),
                enforce_stationarity=False,
                enforce_invertibility=False,
            )
        else:
            model = ARIMA_estimator(
                train_df,
                order=(self.params["p"], self.params["d"], self.params["q"]),
                enforce_stationarity=False,
                enforce_invertibility=False,
            )
        with suppress_stdout_stderr():
            model = model.fit()
        train_time = time.time() - current_time
        self._model = model
        return train_time
    def predict(self, X, **kwargs):
        """Forecast either a number of steps (int X) or a timestamp range
        taken from the first column of a dataframe X."""
        if self._model is not None:
            if isinstance(X, int):
                forecast = self._model.forecast(steps=X)
            elif isinstance(X, DataFrame):
                start = X[TS_TIMESTAMP_COL].iloc[0]
                end = X[TS_TIMESTAMP_COL].iloc[-1]
                if len(X.columns) > 1:
                    # Remaining columns are exogenous regressors.
                    X = self._preprocess(X.drop(columns=TS_TIMESTAMP_COL))
                    regressors = list(X)
                    forecast = self._model.predict(
                        start=start, end=end, exog=X[regressors]
                    )
                else:
                    forecast = self._model.predict(start=start, end=end)
            else:
                raise ValueError(
                    "X needs to be either a pandas Dataframe with dates as the first column"
                    " or an int number of periods for predict()."
                )
            return forecast
        else:
            # Not fitted: return a constant placeholder of matching length.
            return np.ones(X if isinstance(X, int) else X.shape[0])
class SARIMAX(ARIMA):
    """Seasonal ARIMA (SARIMAX) forecaster via statsmodels."""

    @classmethod
    def search_space(cls, **params):
        """Search over the (p, d, q) order and the seasonal (P, D, Q, s) order."""
        space = {
            "p": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 2,
                "low_cost_init_value": 0,
            },
            "d": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 2,
                "low_cost_init_value": 0,
            },
            "q": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 1,
                "low_cost_init_value": 0,
            },
            "P": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 1,
                "low_cost_init_value": 0,
            },
            "D": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 1,
                "low_cost_init_value": 0,
            },
            "Q": {
                "domain": tune.qrandint(lower=0, upper=10, q=1),
                "init_value": 1,
                "low_cost_init_value": 0,
            },
            "s": {
                # Seasonal periodicity (e.g. 12 for monthly data with a yearly cycle).
                "domain": tune.choice([1, 4, 6, 12]),
                "init_value": 12,
            },
        }
        return space

    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Fit statsmodels SARIMAX, passing extra columns as exog regressors;
        returns elapsed training time in seconds.

        Bug fix: the statsmodels keyword is ``seasonal_order``;
        ``seasonality_order`` is not a SARIMAX constructor parameter.
        """
        import warnings
        warnings.filterwarnings("ignore")
        from statsmodels.tsa.statespace.sarimax import SARIMAX as SARIMAX_estimator
        current_time = time.time()
        train_df = self._join(X_train, y_train)
        train_df = self._preprocess(train_df)
        regressors = list(train_df)
        regressors.remove(TS_VALUE_COL)
        if regressors:
            model = SARIMAX_estimator(
                train_df[[TS_VALUE_COL]],
                exog=train_df[regressors],
                order=(self.params["p"], self.params["d"], self.params["q"]),
                seasonal_order=(
                    self.params["P"],
                    self.params["D"],
                    self.params["Q"],
                    self.params["s"],
                ),
                enforce_stationarity=False,
                enforce_invertibility=False,
            )
        else:
            model = SARIMAX_estimator(
                train_df,
                order=(self.params["p"], self.params["d"], self.params["q"]),
                seasonal_order=(
                    self.params["P"],
                    self.params["D"],
                    self.params["Q"],
                    self.params["s"],
                ),
                enforce_stationarity=False,
                enforce_invertibility=False,
            )
        with suppress_stdout_stderr():
            model = model.fit()
        train_time = time.time() - current_time
        self._model = model
        return train_time
class TS_SKLearn(SKLearnEstimator):
    """Time-series forecaster that wraps a tabular sklearn-style estimator
    through hcrystalball's lag-feature wrapper."""
    base_class = SKLearnEstimator
    @classmethod
    def search_space(cls, data_size, pred_horizon, **params):
        """Base estimator's space plus lag count and per-horizon fitting flag."""
        space = cls.base_class.search_space(data_size, **params)
        space.update(
            {
                "optimize_for_horizon": {
                    "domain": tune.choice([True, False]),
                    "init_value": False,
                    "low_cost_init_value": False,
                },
                "lags": {
                    "domain": tune.randint(
                        lower=1, upper=max(2, int(np.sqrt(data_size[0])))
                    ),
                    "init_value": 3,
                },
            }
        )
        return space
    def __init__(self, task="ts_forecast", **params):
        super().__init__(task, **params)
        self.hcrystaball_model = None
        # Map the ts task onto the underlying tabular task type.
        self.ts_task = (
            "regression" if task in TS_FORECASTREGRESSION else "classification"
        )
    def transform_X(self, X):
        """Re-index X by its first (timestamp) column; remaining columns are
        kept as exogenous features."""
        cols = list(X)
        if len(cols) == 1:
            ds_col = cols[0]
            X = DataFrame(index=X[ds_col])
        elif len(cols) > 1:
            ds_col = cols[0]
            exog_cols = cols[1:]
            X = X[exog_cols].set_index(X[ds_col])
        return X
    def _fit(self, X_train, y_train, budget=None, **kwargs):
        """Build the hcrystalball wrapper and fit the underlying model(s)."""
        from hcrystalball.wrappers import get_sklearn_wrapper
        X_train = self.transform_X(X_train)
        X_train = self._preprocess(X_train)
        params = self.params.copy()
        lags = params.pop("lags")
        optimize_for_horizon = params.pop("optimize_for_horizon")
        estimator = self.base_class(task=self.ts_task, **params)
        self.hcrystaball_model = get_sklearn_wrapper(estimator.estimator_class)
        self.hcrystaball_model.lags = int(lags)
        self.hcrystaball_model.fit(X_train, y_train)
        if optimize_for_horizon:
            # Direct Multi-step Forecast Strategy - fit a seperate model for each horizon
            model_list = []
            for i in range(1, kwargs["period"] + 1):
                (
                    X_fit,
                    y_fit,
                ) = self.hcrystaball_model._transform_data_to_tsmodel_input_format(
                    X_train, y_train, i
                )
                self.hcrystaball_model.model.set_params(**estimator.params)
                model = self.hcrystaball_model.model.fit(X_fit, y_fit)
                model_list.append(model)
            self._model = model_list
        else:
            (
                X_fit,
                y_fit,
            ) = self.hcrystaball_model._transform_data_to_tsmodel_input_format(
                X_train, y_train, kwargs["period"]
            )
            self.hcrystaball_model.model.set_params(**estimator.params)
            model = self.hcrystaball_model.model.fit(X_fit, y_fit)
            self._model = model
    def fit(self, X_train, y_train, budget=None, **kwargs):
        """Fit and return elapsed training time in seconds."""
        current_time = time.time()
        self._fit(X_train, y_train, budget=budget, **kwargs)
        train_time = time.time() - current_time
        return train_time
    def predict(self, X, **kwargs):
        """Forecast for the timestamps in X; when fitted per-horizon, the i-th
        model predicts the i-th step."""
        if self._model is not None:
            X = self.transform_X(X)
            X = self._preprocess(X)
            if isinstance(self._model, list):
                assert len(self._model) == len(
                    X
                ), "Model is optimized for horizon, length of X must be equal to `period`."
                preds = []
                for i in range(1, len(self._model) + 1):
                    (
                        X_pred,
                        _,
                    ) = self.hcrystaball_model._transform_data_to_tsmodel_input_format(
                        X.iloc[:i, :]
                    )
                    # Each horizon-specific model contributes its last prediction.
                    preds.append(self._model[i - 1].predict(X_pred)[-1])
                forecast = DataFrame(
                    data=np.asarray(preds).reshape(-1, 1),
                    columns=[self.hcrystaball_model.name],
                    index=X.index,
                )
            else:
                (
                    X_pred,
                    _,
                ) = self.hcrystaball_model._transform_data_to_tsmodel_input_format(X)
                forecast = self._model.predict(X_pred)
            return forecast
        else:
            logger.warning(
                "Estimator is not fit yet. Please run fit() before predict()."
            )
            return np.ones(X.shape[0])
class LGBM_TS(TS_SKLearn):
    """Time-series forecaster backed by LightGBM."""
    base_class = LGBMEstimator
class XGBoost_TS(TS_SKLearn):
    """Time-series forecaster backed by XGBoost (sklearn wrapper)."""
    base_class = XGBoostSklearnEstimator
# catboost regressor is invalid because it has a `name` parameter, making it incompatible with hcrystalball
# class CatBoost_TS_Regressor(TS_Regressor):
# base_class = CatBoostEstimator
class RF_TS(TS_SKLearn):
    """Time-series forecaster backed by a random forest."""
    base_class = RandomForestEstimator
class ExtraTrees_TS(TS_SKLearn):
    """Time-series forecaster backed by extremely randomized trees."""
    base_class = ExtraTreesEstimator
class XGBoostLimitDepth_TS(TS_SKLearn):
    """Time-series forecaster backed by depth-limited XGBoost."""
    base_class = XGBoostLimitDepthEstimator
class suppress_stdout_stderr(object):
    """Context manager that silences stdout/stderr at the file-descriptor
    level, so output from C extensions is suppressed too."""

    def __init__(self):
        # One null descriptor each for stdout and stderr.
        self.null_fds = [os.open(os.devnull, os.O_RDWR) for _ in range(2)]
        # Duplicates of the real stdout (1) and stderr (2) for restoration.
        self.save_fds = (os.dup(1), os.dup(2))

    def __enter__(self):
        # Point descriptors 1 and 2 at the null device.
        for null_fd, std_fd in zip(self.null_fds, (1, 2)):
            os.dup2(null_fd, std_fd)

    def __exit__(self, *_):
        # Restore the saved descriptors, then release the null ones.
        for saved_fd, std_fd in zip(self.save_fds, (1, 2)):
            os.dup2(saved_fd, std_fd)
        for null_fd in self.null_fds:
            os.close(null_fd)
| true | true |
f7318945e70a68298c48631812fa81322c407bc9 | 12,916 | py | Python | env/lib/python3.6/site-packages/nibabel/tests/test_image_load_save.py | Raniac/NEURO-LEARN | 3c3acc55de8ba741e673063378e6cbaf10b64c7a | [
"Apache-2.0"
] | 8 | 2019-05-29T09:38:30.000Z | 2021-01-20T03:36:59.000Z | env/lib/python3.6/site-packages/nibabel/tests/test_image_load_save.py | Raniac/neurolearn_dev | 3c3acc55de8ba741e673063378e6cbaf10b64c7a | [
"Apache-2.0"
] | 12 | 2021-03-09T03:01:16.000Z | 2022-03-11T23:59:36.000Z | env/lib/python3.6/site-packages/nibabel/tests/test_image_load_save.py | Raniac/NEURO-LEARN | 3c3acc55de8ba741e673063378e6cbaf10b64c7a | [
"Apache-2.0"
] | 1 | 2020-07-17T12:49:49.000Z | 2020-07-17T12:49:49.000Z | # emacs: -*- mode: python-mode; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
#
# See COPYING file distributed along with the NiBabel package for the
# copyright and license terms.
#
### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
''' Tests for loader function '''
from __future__ import division, print_function, absolute_import
from io import BytesIO
import shutil
from os.path import dirname, join as pjoin
from tempfile import mkdtemp
import numpy as np
from .. import analyze as ana
from .. import spm99analyze as spm99
from .. import spm2analyze as spm2
from .. import nifti1 as ni1
from .. import loadsave as nils
from .. import (Nifti1Image, Nifti1Header, Nifti1Pair, Nifti2Image, Nifti2Pair,
Minc1Image, Minc2Image, Spm2AnalyzeImage, Spm99AnalyzeImage,
AnalyzeImage, MGHImage, all_image_classes)
from ..tmpdirs import InTemporaryDirectory
from ..volumeutils import native_code, swapped_code
from ..optpkg import optional_package
from ..spatialimages import SpatialImage
from numpy.testing import assert_array_equal, assert_array_almost_equal
from nose.tools import assert_true, assert_equal, assert_raises
_, have_scipy, _ = optional_package('scipy') # No scipy=>no SPM-format writing
DATA_PATH = pjoin(dirname(__file__), 'data')
MGH_DATA_PATH = pjoin(dirname(__file__), '..', 'freesurfer', 'tests', 'data')
def round_trip(img):
    """Serialize a single-file NIfTI image to an in-memory buffer and reload it."""
    buffer = BytesIO()
    img.file_map['image'].fileobj = buffer
    img.to_file_map()
    return Nifti1Image.from_file_map(img.file_map)
def test_conversion_spatialimages():
    # Every read-write, makeable spatial image class should convert to every
    # other makeable class while preserving data and affine.
    shape = (2, 4, 6)
    affine = np.diag([1, 2, 3, 1])
    rw_classes = [klass for klass in all_image_classes
                  if klass.rw and issubclass(klass, SpatialImage)]
    for dtype in (np.float32, np.int16):
        data = np.arange(np.prod(shape), dtype=dtype).reshape(shape)
        for src_class in rw_classes:
            if not src_class.makeable:
                continue
            img = src_class(data, affine)
            img.set_data_dtype(dtype)
            for dst_class in rw_classes:
                if not dst_class.makeable:
                    continue
                converted = dst_class.from_image(img)
                assert_array_equal(converted.get_data(), data)
                assert_array_equal(converted.affine, affine)
def test_save_load_endian():
    """Round-trip images whose headers are native- or swapped-endian."""
    shape = (2, 4, 6)
    affine = np.diag([1, 2, 3, 1])
    data = np.arange(np.prod(shape), dtype='f4').reshape(shape)
    # Native endian image
    img = Nifti1Image(data, affine)
    assert_equal(img.header.endianness, native_code)
    img2 = round_trip(img)
    assert_equal(img2.header.endianness, native_code)
    assert_array_equal(img2.get_data(), data)
    # byte swapped endian image
    bs_hdr = img.header.as_byteswapped()
    bs_img = Nifti1Image(data, affine, bs_hdr)
    assert_equal(bs_img.header.endianness, swapped_code)
    # of course the data is the same because it's not written to disk
    assert_array_equal(bs_img.get_data(), data)
    # Check converting to another image
    cbs_img = AnalyzeImage.from_image(bs_img)
    # this will make the header native by doing the header conversion
    cbs_hdr = cbs_img.header
    assert_equal(cbs_hdr.endianness, native_code)
    # and the byte order follows it back into another image
    cbs_img2 = Nifti1Image.from_image(cbs_img)
    cbs_hdr2 = cbs_img2.header
    assert_equal(cbs_hdr2.endianness, native_code)
    # Try byteswapped round trip
    bs_img2 = round_trip(bs_img)
    bs_data2 = bs_img2.get_data()
    # now the data dtype was swapped endian, so the read data is too
    assert_equal(bs_data2.dtype.byteorder, swapped_code)
    assert_equal(bs_img2.header.endianness, swapped_code)
    assert_array_equal(bs_data2, data)
    # Now mix up byteswapped data and non-byteswapped header
    mixed_img = Nifti1Image(bs_data2, affine)
    assert_equal(mixed_img.header.endianness, native_code)
    m_img2 = round_trip(mixed_img)
    assert_equal(m_img2.header.endianness, native_code)
    assert_array_equal(m_img2.get_data(), data)
def test_save_load():
    """Save/load an image to disk as NIfTI and (with scipy) SPM Analyze."""
    shape = (2, 4, 6)
    npt = np.float32
    data = np.arange(np.prod(shape), dtype=npt).reshape(shape)
    affine = np.diag([1, 2, 3, 1])
    affine[:3, 3] = [3, 2, 1]
    img = ni1.Nifti1Image(data, affine)
    img.set_data_dtype(npt)
    with InTemporaryDirectory() as pth:
        nifn = 'an_image.nii'
        sifn = 'another_image.img'
        ni1.save(img, nifn)
        re_img = nils.load(nifn)
        assert_true(isinstance(re_img, ni1.Nifti1Image))
        assert_array_equal(re_img.get_data(), data)
        assert_array_equal(re_img.affine, affine)
        # These and subsequent del statements are to prevent confusing
        # windows errors when trying to open files or delete the
        # temporary directory.
        del re_img
        if have_scipy:  # skip we we cannot read .mat files
            spm2.save(img, sifn)
            re_img2 = nils.load(sifn)
            assert_true(isinstance(re_img2, spm2.Spm2AnalyzeImage))
            assert_array_equal(re_img2.get_data(), data)
            assert_array_equal(re_img2.affine, affine)
            del re_img2
            spm99.save(img, sifn)
            re_img3 = nils.load(sifn)
            assert_true(isinstance(re_img3,
                                   spm99.Spm99AnalyzeImage))
            assert_array_equal(re_img3.get_data(), data)
            assert_array_equal(re_img3.affine, affine)
            # Saving the loaded SPM image as NIfTI should still round-trip.
            ni1.save(re_img3, nifn)
            del re_img3
            re_img = nils.load(nifn)
            assert_true(isinstance(re_img, ni1.Nifti1Image))
            assert_array_equal(re_img.get_data(), data)
            assert_array_equal(re_img.affine, affine)
            del re_img
def test_two_to_one():
    """Check magic string and vox_offset when converting pair <-> single file."""
    # test going from two to one file in save
    shape = (2, 4, 6)
    npt = np.float32
    data = np.arange(np.prod(shape), dtype=npt).reshape(shape)
    affine = np.diag([1, 2, 3, 1])
    affine[:3, 3] = [3, 2, 1]
    # single file format
    img = ni1.Nifti1Image(data, affine)
    assert_equal(img.header['magic'], b'n+1')
    str_io = BytesIO()
    img.file_map['image'].fileobj = str_io
    # check that the single format vox offset stays at zero
    img.to_file_map()
    assert_equal(img.header['magic'], b'n+1')
    assert_equal(img.header['vox_offset'], 0)
    # make a new pair image, with the single image header
    pimg = ni1.Nifti1Pair(data, affine, img.header)
    isio = BytesIO()
    hsio = BytesIO()
    pimg.file_map['image'].fileobj = isio
    pimg.file_map['header'].fileobj = hsio
    pimg.to_file_map()
    # the offset stays at zero (but is 352 on disk)
    assert_equal(pimg.header['magic'], b'ni1')
    assert_equal(pimg.header['vox_offset'], 0)
    assert_array_equal(pimg.get_data(), data)
    # same for from_image, going from single image to pair format
    ana_img = ana.AnalyzeImage.from_image(img)
    assert_equal(ana_img.header['vox_offset'], 0)
    # back to the single image, save it again to a stringio
    str_io = BytesIO()
    img.file_map['image'].fileobj = str_io
    img.to_file_map()
    assert_equal(img.header['vox_offset'], 0)
    aimg = ana.AnalyzeImage.from_image(img)
    assert_equal(aimg.header['vox_offset'], 0)
    aimg = spm99.Spm99AnalyzeImage.from_image(img)
    assert_equal(aimg.header['vox_offset'], 0)
    aimg = spm2.Spm2AnalyzeImage.from_image(img)
    assert_equal(aimg.header['vox_offset'], 0)
    nfimg = ni1.Nifti1Pair.from_image(img)
    assert_equal(nfimg.header['vox_offset'], 0)
    # now set the vox offset directly
    hdr = nfimg.header
    hdr['vox_offset'] = 16
    assert_equal(nfimg.header['vox_offset'], 16)
    # check it gets properly set by the nifti single image
    nfimg = ni1.Nifti1Image.from_image(img)
    assert_equal(nfimg.header['vox_offset'], 0)
def test_negative_load_save():
    # Negative float values must survive the scaled round trip through an
    # int16 on-disk dtype.
    shape = (1, 2, 5)
    data = np.arange(10).reshape(shape) - 10.0
    hdr = ni1.Nifti1Header()
    hdr.set_data_dtype(np.int16)
    img = Nifti1Image(data, np.eye(4), hdr)
    stream = BytesIO()
    img.file_map['image'].fileobj = stream
    img.to_file_map()
    stream.seek(0)
    reloaded = Nifti1Image.from_file_map(img.file_map)
    assert_array_almost_equal(reloaded.get_data(), data, 4)
def test_filename_save():
    # This is to test the logic in the load and save routines, relating
    # extensions to filetypes
    # Tuples of (class to save, extension to save with, class loaded back)
    inklass_ext_loadklasses = (
        (Nifti1Image, '.nii', Nifti1Image),
        (Nifti2Image, '.nii', Nifti2Image),
        (Nifti1Pair, '.nii', Nifti1Image),
        (Nifti2Pair, '.nii', Nifti2Image),
        (Nifti1Image, '.img', Nifti1Pair),
        (Nifti2Image, '.img', Nifti2Pair),
        (Nifti1Pair, '.img', Nifti1Pair),
        (Nifti2Pair, '.img', Nifti2Pair),
        (Nifti1Image, '.hdr', Nifti1Pair),
        (Nifti2Image, '.hdr', Nifti2Pair),
        (Nifti1Pair, '.hdr', Nifti1Pair),
        (Nifti2Pair, '.hdr', Nifti2Pair),
        (Minc1Image, '.nii', Nifti1Image),
        (Minc1Image, '.img', Nifti1Pair),
        (Spm2AnalyzeImage, '.nii', Nifti1Image),
        (Spm2AnalyzeImage, '.img', Spm2AnalyzeImage),
        (Spm99AnalyzeImage, '.nii', Nifti1Image),
        (Spm99AnalyzeImage, '.img', Spm2AnalyzeImage),
        (AnalyzeImage, '.nii', Nifti1Image),
        (AnalyzeImage, '.img', Spm2AnalyzeImage),
    )
    shape = (2, 4, 6)
    affine = np.diag([1, 2, 3, 1])
    data = np.arange(np.prod(shape), dtype='f4').reshape(shape)
    for inklass, out_ext, loadklass in inklass_ext_loadklasses:
        # We can't load an SPM analyze type without scipy. These types have
        # a 'mat' file (the type we can't load), so skip them in that case.
        if not have_scipy and ('mat', '.mat') in loadklass.files_types:
            continue
        img = inklass(data, affine)
        # Create the temp directory *before* entering the try block: the
        # previous version created it inside, so a failing mkdtemp() would
        # have made the finally clause raise NameError on an unbound `pth`.
        pth = mkdtemp()
        try:
            fname = pjoin(pth, 'image' + out_ext)
            nils.save(img, fname)
            rt_img = nils.load(fname)
            assert_array_almost_equal(rt_img.get_data(), data)
            assert_true(type(rt_img) is loadklass)
            # delete image to allow file close. Otherwise windows
            # raises an error when trying to delete the directory
            del rt_img
        finally:
            shutil.rmtree(pth)
def test_analyze_detection():
    # Test detection of Analyze, Nifti1 and Nifti2
    # Algorithm is as described in loadsave:which_analyze_type
    # Helper: run detection on a header's raw binary block
    def wat(hdr):
        return nils.which_analyze_type(hdr.binaryblock)
    # An all-zero 348-byte block is not recognized at all
    n1_hdr = Nifti1Header(b'\0' * 348, check=False)
    assert_equal(wat(n1_hdr), None)
    # sizeof_hdr == 540 signals nifti2, in either byte order
    n1_hdr['sizeof_hdr'] = 540
    assert_equal(wat(n1_hdr), 'nifti2')
    assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti2')
    # sizeof_hdr == 348 with no nifti magic -> plain analyze
    n1_hdr['sizeof_hdr'] = 348
    assert_equal(wat(n1_hdr), 'analyze')
    assert_equal(wat(n1_hdr.as_byteswapped()), 'analyze')
    # Either nifti1 magic string ('n+1' single-file, 'ni1' pair) wins
    n1_hdr['magic'] = b'n+1'
    assert_equal(wat(n1_hdr), 'nifti1')
    assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti1')
    n1_hdr['magic'] = b'ni1'
    assert_equal(wat(n1_hdr), 'nifti1')
    assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti1')
    # Doesn't matter what magic is if it's not a nifti1 magic
    n1_hdr['magic'] = b'ni2'
    assert_equal(wat(n1_hdr), 'analyze')
    # Neither a known sizeof_hdr nor a magic -> not detected
    n1_hdr['sizeof_hdr'] = 0
    n1_hdr['magic'] = b''
    assert_equal(wat(n1_hdr), None)
    # A nifti1 magic is detected even with a bad sizeof_hdr, and the
    # magic may be set as str as well as bytes
    n1_hdr['magic'] = 'n+1'
    assert_equal(wat(n1_hdr), 'nifti1')
    n1_hdr['magic'] = 'ni1'
    assert_equal(wat(n1_hdr), 'nifti1')
def test_guessed_image_type():
    # Test whether we can guess the image type from example files.
    # Each example filename should map to its expected image class.
    expected_types = (
        ('example4d.nii.gz', Nifti1Image),
        ('nifti1.hdr', Nifti1Pair),
        ('example_nifti2.nii.gz', Nifti2Image),
        ('nifti2.hdr', Nifti2Pair),
        ('tiny.mnc', Minc1Image),
        ('small.mnc', Minc2Image),
        ('test.mgz', MGHImage),
        ('analyze.hdr', Spm2AnalyzeImage),
    )
    for fname, image_klass in expected_types:
        assert_equal(nils.guessed_image_type(pjoin(DATA_PATH, fname)),
                     image_klass)
def test_fail_save():
    # Saving an image whose array dtype cannot be represented on disk
    # should raise rather than silently writing bad data.
    with InTemporaryDirectory():
        arr = np.ones((10, 10, 10), dtype=np.float16)
        img = SpatialImage(arr, np.eye(4, dtype=np.float32))
        # Fails because float16 is not supported.
        with assert_raises(AttributeError):
            nils.save(img, 'foo.nii.gz')
        del img
| 38.440476 | 79 | 0.650743 |
)
assert_true(isinstance(re_img2, spm2.Spm2AnalyzeImage))
assert_array_equal(re_img2.get_data(), data)
assert_array_equal(re_img2.affine, affine)
del re_img2
spm99.save(img, sifn)
re_img3 = nils.load(sifn)
assert_true(isinstance(re_img3,
spm99.Spm99AnalyzeImage))
assert_array_equal(re_img3.get_data(), data)
assert_array_equal(re_img3.affine, affine)
ni1.save(re_img3, nifn)
del re_img3
re_img = nils.load(nifn)
assert_true(isinstance(re_img, ni1.Nifti1Image))
assert_array_equal(re_img.get_data(), data)
assert_array_equal(re_img.affine, affine)
del re_img
def test_two_to_one():
    # test going from two to one file in save
    shape = (2, 4, 6)
    npt = np.float32
    data = np.arange(np.prod(shape), dtype=npt).reshape(shape)
    affine = np.diag([1, 2, 3, 1])
    affine[:3, 3] = [3, 2, 1]
    # single file format: magic 'n+1' marks a one-file nifti
    img = ni1.Nifti1Image(data, affine)
    assert_equal(img.header['magic'], b'n+1')
    str_io = BytesIO()
    img.file_map['image'].fileobj = str_io
    # check that the single format vox offset stays at zero
    img.to_file_map()
    assert_equal(img.header['magic'], b'n+1')
    assert_equal(img.header['vox_offset'], 0)
    # make a new pair image, with the single image header
    pimg = ni1.Nifti1Pair(data, affine, img.header)
    isio = BytesIO()
    hsio = BytesIO()
    pimg.file_map['image'].fileobj = isio
    pimg.file_map['header'].fileobj = hsio
    pimg.to_file_map()
    # the offset stays at zero (but is 352 on disk); pair magic is 'ni1'
    assert_equal(pimg.header['magic'], b'ni1')
    assert_equal(pimg.header['vox_offset'], 0)
    assert_array_equal(pimg.get_data(), data)
    # same for from_image, going from single image to pair format
    ana_img = ana.AnalyzeImage.from_image(img)
    assert_equal(ana_img.header['vox_offset'], 0)
    # back to the single image, save it again to a stringio
    str_io = BytesIO()
    img.file_map['image'].fileobj = str_io
    img.to_file_map()
    assert_equal(img.header['vox_offset'], 0)
    # vox_offset stays 0 after conversion to each Analyze-family class
    aimg = ana.AnalyzeImage.from_image(img)
    assert_equal(aimg.header['vox_offset'], 0)
    aimg = spm99.Spm99AnalyzeImage.from_image(img)
    assert_equal(aimg.header['vox_offset'], 0)
    aimg = spm2.Spm2AnalyzeImage.from_image(img)
    assert_equal(aimg.header['vox_offset'], 0)
    nfimg = ni1.Nifti1Pair.from_image(img)
    assert_equal(nfimg.header['vox_offset'], 0)
    # now set the vox offset directly
    hdr = nfimg.header
    hdr['vox_offset'] = 16
    assert_equal(nfimg.header['vox_offset'], 16)
    # check it gets properly set (back to 0) by the nifti single image
    nfimg = ni1.Nifti1Image.from_image(img)
    assert_equal(nfimg.header['vox_offset'], 0)
def test_negative_load_save():
    # Round-trip negative floating point data through an int16 on-disk
    # dtype; the data should come back equal to ~4 decimal places.
    shape = (1, 2, 5)
    data = np.arange(10).reshape(shape) - 10.0
    affine = np.eye(4)
    hdr = ni1.Nifti1Header()
    hdr.set_data_dtype(np.int16)
    img = Nifti1Image(data, affine, hdr)
    # Write into an in-memory file, rewind, and read back
    str_io = BytesIO()
    img.file_map['image'].fileobj = str_io
    img.to_file_map()
    str_io.seek(0)
    re_img = Nifti1Image.from_file_map(img.file_map)
    assert_array_almost_equal(re_img.get_data(), data, 4)
def test_filename_save():
    # This is to test the logic in the load and save routines, relating
    # extensions to filetypes
    # Tuples of class, ext, loadedclass
    inklass_ext_loadklasses = (
        (Nifti1Image, '.nii', Nifti1Image),
        (Nifti2Image, '.nii', Nifti2Image),
        (Nifti1Pair, '.nii', Nifti1Image),
        (Nifti2Pair, '.nii', Nifti2Image),
        (Nifti1Image, '.img', Nifti1Pair),
        (Nifti2Image, '.img', Nifti2Pair),
        (Nifti1Pair, '.img', Nifti1Pair),
        (Nifti2Pair, '.img', Nifti2Pair),
        (Nifti1Image, '.hdr', Nifti1Pair),
        (Nifti2Image, '.hdr', Nifti2Pair),
        (Nifti1Pair, '.hdr', Nifti1Pair),
        (Nifti2Pair, '.hdr', Nifti2Pair),
        (Minc1Image, '.nii', Nifti1Image),
        (Minc1Image, '.img', Nifti1Pair),
        (Spm2AnalyzeImage, '.nii', Nifti1Image),
        (Spm2AnalyzeImage, '.img', Spm2AnalyzeImage),
        (Spm99AnalyzeImage, '.nii', Nifti1Image),
        (Spm99AnalyzeImage, '.img', Spm2AnalyzeImage),
        (AnalyzeImage, '.nii', Nifti1Image),
        (AnalyzeImage, '.img', Spm2AnalyzeImage),
    )
    shape = (2, 4, 6)
    affine = np.diag([1, 2, 3, 1])
    data = np.arange(np.prod(shape), dtype='f4').reshape(shape)
    for inklass, out_ext, loadklass in inklass_ext_loadklasses:
        if not have_scipy:
            # We can't load a SPM analyze type without scipy. These types have
            # a 'mat' file (the type we can't load), so skip them here.
            if ('mat', '.mat') in loadklass.files_types:
                continue
        img = inklass(data, affine)
        try:
            pth = mkdtemp()
            fname = pjoin(pth, 'image' + out_ext)
            nils.save(img, fname)
            rt_img = nils.load(fname)
            assert_array_almost_equal(rt_img.get_data(), data)
            assert_true(type(rt_img) is loadklass)
            # delete image to allow file close. Otherwise windows
            # raises an error when trying to delete the directory
            del rt_img
        finally:
            shutil.rmtree(pth)
def test_analyze_detection():
    # Test detection of Analyze, Nifti1 and Nifti2
    # Algorithm is as described in loadsave:which_analyze_type
    # Helper: run detection on a header's raw binary block
    def wat(hdr):
        return nils.which_analyze_type(hdr.binaryblock)
    # An all-zero 348-byte block is not recognized at all
    n1_hdr = Nifti1Header(b'\0' * 348, check=False)
    assert_equal(wat(n1_hdr), None)
    # sizeof_hdr == 540 signals nifti2, in either byte order
    n1_hdr['sizeof_hdr'] = 540
    assert_equal(wat(n1_hdr), 'nifti2')
    assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti2')
    # sizeof_hdr == 348 with no nifti magic -> plain analyze
    n1_hdr['sizeof_hdr'] = 348
    assert_equal(wat(n1_hdr), 'analyze')
    assert_equal(wat(n1_hdr.as_byteswapped()), 'analyze')
    # Either nifti1 magic string ('n+1' single-file, 'ni1' pair) wins
    n1_hdr['magic'] = b'n+1'
    assert_equal(wat(n1_hdr), 'nifti1')
    assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti1')
    n1_hdr['magic'] = b'ni1'
    assert_equal(wat(n1_hdr), 'nifti1')
    assert_equal(wat(n1_hdr.as_byteswapped()), 'nifti1')
    # Doesn't matter what magic is if it's not a nifti1 magic
    n1_hdr['magic'] = b'ni2'
    assert_equal(wat(n1_hdr), 'analyze')
    # Neither a known sizeof_hdr nor a magic -> not detected
    n1_hdr['sizeof_hdr'] = 0
    n1_hdr['magic'] = b''
    assert_equal(wat(n1_hdr), None)
    # A nifti1 magic is detected even with a bad sizeof_hdr, and the
    # magic may be set as str as well as bytes
    n1_hdr['magic'] = 'n+1'
    assert_equal(wat(n1_hdr), 'nifti1')
    n1_hdr['magic'] = 'ni1'
    assert_equal(wat(n1_hdr), 'nifti1')
def test_guessed_image_type():
    # Test whether we can guess the image type from example files
    # (each example filename, including gzipped ones, should map to the
    # expected image class)
    assert_equal(nils.guessed_image_type(
        pjoin(DATA_PATH, 'example4d.nii.gz')),
        Nifti1Image)
    assert_equal(nils.guessed_image_type(
        pjoin(DATA_PATH, 'nifti1.hdr')),
        Nifti1Pair)
    assert_equal(nils.guessed_image_type(
        pjoin(DATA_PATH, 'example_nifti2.nii.gz')),
        Nifti2Image)
    assert_equal(nils.guessed_image_type(
        pjoin(DATA_PATH, 'nifti2.hdr')),
        Nifti2Pair)
    assert_equal(nils.guessed_image_type(
        pjoin(DATA_PATH, 'tiny.mnc')),
        Minc1Image)
    assert_equal(nils.guessed_image_type(
        pjoin(DATA_PATH, 'small.mnc')),
        Minc2Image)
    assert_equal(nils.guessed_image_type(
        pjoin(DATA_PATH, 'test.mgz')),
        MGHImage)
    assert_equal(nils.guessed_image_type(
        pjoin(DATA_PATH, 'analyze.hdr')),
        Spm2AnalyzeImage)
def test_fail_save():
    # Saving an image whose array dtype has no on-disk representation
    # should raise rather than silently writing bad data.
    with InTemporaryDirectory():
        dataobj = np.ones((10, 10, 10), dtype=np.float16)
        affine = np.eye(4, dtype=np.float32)
        img = SpatialImage(dataobj, affine)
        # Fails because float16 is not supported.
        with assert_raises(AttributeError):
            nils.save(img, 'foo.nii.gz')
        # Drop the reference before leaving the temporary directory
        # (Windows file-lock workaround, as in test_filename_save)
        del img
| true | true |
f7318a7c9b9961d14143aebdcada5b709c5812c1 | 2,945 | py | Python | tests/integration/qbs/test_qbs_submission_data.py | nealedj/eq-survey-runner | b8e6cddae6068f6c8fd60e21d31d58aaa79bbb34 | [
"MIT"
] | null | null | null | tests/integration/qbs/test_qbs_submission_data.py | nealedj/eq-survey-runner | b8e6cddae6068f6c8fd60e21d31d58aaa79bbb34 | [
"MIT"
] | 1 | 2018-11-05T12:00:51.000Z | 2018-11-05T12:00:51.000Z | tests/integration/qbs/test_qbs_submission_data.py | nealedj/eq-survey-runner | b8e6cddae6068f6c8fd60e21d31d58aaa79bbb34 | [
"MIT"
] | null | null | null | from tests.integration.integration_test_case import IntegrationTestCase
class TestQbsSubmissionData(IntegrationTestCase):
    """Integration test for the Quarterly Business Survey (survey id 139).

    Walks the survey end-to-end and checks the submitted JSON payload.
    """
    def test_submission_data_2_0001(self):
        # Run the full flow for eq_id '2', form type '0001'
        self.submission_data('2', '0001')
    def submission_data(self, eq_id, form_type_id):
        """Complete the survey and compare the dumped submission JSON."""
        # 'dumper' role — used by the dumpSubmission() call below
        self.launchSurvey(eq_id, form_type_id, roles=['dumper'])
        # We are on the introduction page
        self.assertInPage('>Start survey<')
        self.assertInPage('Quarterly Business Survey')
        # We proceed to the questionnaire
        self.post(action='start_questionnaire')
        # We are in the Questionnaire
        self.assertInPage('>Quarterly Business Survey</')
        self.assertInPage('what was the number of employees for Integration Tests?')
        self.assertInPage('>Save and continue<')
        # When I submit answers
        self.post(post_data={'number-of-employees-total': '10'})
        self.post(post_data={'number-of-employees-male-more-30-hours': '1',
                             'number-of-employees-male-less-30-hours': '2',
                             'number-of-employees-female-more-30-hours': '3',
                             'number-of-employees-female-less-30-hours': '4'})
        # There are no validation errors (we're on the summary screen)
        self.assertInUrl('summary')
        self.assertInPage('>Quarterly Business Survey</')
        self.assertInPage('>Check your answers and submit<')
        self.assertInPage('You can check your answers below')
        self.assertInPage('>Submit answers<')
        # And the JSON response contains the data I submitted
        actual = self.dumpSubmission()
        # Fields like tx_id / timestamps vary per run, so copy them from
        # the actual payload rather than pinning fixed values.
        expected = {
            'submission': {
                'origin': 'uk.gov.ons.edc.eq',
                'started_at': actual['submission']['started_at'],
                'submitted_at': actual['submission']['submitted_at'],
                'case_id': actual['submission']['case_id'],
                'collection': {
                    'exercise_sid': '789',
                    'period': '201604',
                    'instrument_id': '0001'
                },
                'survey_id': '139',
                'flushed': False,
                'tx_id': actual['submission']['tx_id'],
                'data': {
                    '50': '10',
                    '51': '1',
                    '52': '2',
                    '53': '3',
                    '54': '4'
                },
                'type': 'uk.gov.ons.edc.eq:surveyresponse',
                'version': '0.0.1',
                'metadata': {
                    'ref_period_end_date': '2016-04-30',
                    'ref_period_start_date': '2016-04-01',
                    'ru_ref': '123456789012A',
                    'user_id': 'integration-test'
                }
            }
        }
        # Enable full dictionary diffs on test failure
        self.maxDiff = None
        self.assertDictEqual(actual, expected)
| 38.75 | 84 | 0.529372 | from tests.integration.integration_test_case import IntegrationTestCase
class TestQbsSubmissionData(IntegrationTestCase):
    """Integration test for the Quarterly Business Survey (survey id 139).

    Walks the survey end-to-end and checks the submitted JSON payload.
    """
    def test_submission_data_2_0001(self):
        # Run the full flow for eq_id '2', form type '0001'
        self.submission_data('2', '0001')
    def submission_data(self, eq_id, form_type_id):
        """Complete the survey and compare the dumped submission JSON."""
        # 'dumper' role — used by the dumpSubmission() call below
        self.launchSurvey(eq_id, form_type_id, roles=['dumper'])
        # We are on the introduction page
        self.assertInPage('>Start survey<')
        self.assertInPage('Quarterly Business Survey')
        # We proceed to the questionnaire
        self.post(action='start_questionnaire')
        # We are in the Questionnaire
        self.assertInPage('>Quarterly Business Survey</')
        self.assertInPage('what was the number of employees for Integration Tests?')
        self.assertInPage('>Save and continue<')
        # When I submit answers
        self.post(post_data={'number-of-employees-total': '10'})
        self.post(post_data={'number-of-employees-male-more-30-hours': '1',
                             'number-of-employees-male-less-30-hours': '2',
                             'number-of-employees-female-more-30-hours': '3',
                             'number-of-employees-female-less-30-hours': '4'})
        # There are no validation errors (we're on the summary screen)
        self.assertInUrl('summary')
        self.assertInPage('>Quarterly Business Survey</')
        self.assertInPage('>Check your answers and submit<')
        self.assertInPage('You can check your answers below')
        self.assertInPage('>Submit answers<')
        # And the JSON response contains the data I submitted
        actual = self.dumpSubmission()
        # Fields like tx_id / timestamps vary per run, so copy them from
        # the actual payload rather than pinning fixed values.
        expected = {
            'submission': {
                'origin': 'uk.gov.ons.edc.eq',
                'started_at': actual['submission']['started_at'],
                'submitted_at': actual['submission']['submitted_at'],
                'case_id': actual['submission']['case_id'],
                'collection': {
                    'exercise_sid': '789',
                    'period': '201604',
                    'instrument_id': '0001'
                },
                'survey_id': '139',
                'flushed': False,
                'tx_id': actual['submission']['tx_id'],
                'data': {
                    '50': '10',
                    '51': '1',
                    '52': '2',
                    '53': '3',
                    '54': '4'
                },
                'type': 'uk.gov.ons.edc.eq:surveyresponse',
                'version': '0.0.1',
                'metadata': {
                    'ref_period_end_date': '2016-04-30',
                    'ref_period_start_date': '2016-04-01',
                    'ru_ref': '123456789012A',
                    'user_id': 'integration-test'
                }
            }
        }
        # Enable full dictionary diffs on test failure
        self.maxDiff = None
        self.assertDictEqual(actual, expected)
| true | true |
f7318bf8ab84bb950ae4d28f761bd4399b07f385 | 46,765 | py | Python | src/transformers/configuration_utils.py | elusenji/transformers | af14c61973effd8b8077ac61b3f24bdd4a632f25 | [
"Apache-2.0"
] | 3 | 2022-01-15T08:06:07.000Z | 2022-03-10T07:13:18.000Z | src/transformers/configuration_utils.py | arron1227/transformers | b18dfd95e1f60ae65a959a7b255fc06522170d1b | [
"Apache-2.0"
] | null | null | null | src/transformers/configuration_utils.py | arron1227/transformers | b18dfd95e1f60ae65a959a7b255fc06522170d1b | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors and The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Configuration base class and utilities."""
import copy
import json
import os
import re
import warnings
from typing import Any, Dict, List, Optional, Tuple, Union
from packaging import version
from requests import HTTPError
from . import __version__
from .dynamic_module_utils import custom_object_save
from .utils import (
CONFIG_NAME,
HUGGINGFACE_CO_RESOLVE_ENDPOINT,
EntryNotFoundError,
PushToHubMixin,
RepositoryNotFoundError,
RevisionNotFoundError,
cached_path,
copy_func,
hf_bucket_url,
is_offline_mode,
is_remote_url,
is_torch_available,
logging,
)
logger = logging.get_logger(__name__)
_re_configuration_file = re.compile(r"config\.(.*)\.json")
class PretrainedConfig(PushToHubMixin):
r"""
Base class for all configuration classes. Handles a few parameters common to all models' configurations as well as
methods for loading/downloading/saving configurations.
<Tip>
A configuration file can be loaded and saved to disk. Loading the configuration file and using this file to
initialize a model does **not** load the model weights. It only affects the model's configuration.
</Tip>
Class attributes (overridden by derived classes):
- **model_type** (`str`) -- An identifier for the model type, serialized into the JSON file, and used to recreate
the correct object in [`~transformers.AutoConfig`].
- **is_composition** (`bool`) -- Whether the config class is composed of multiple sub-configs. In this case the
config has to be initialized from two or more configs of type [`~transformers.PretrainedConfig`] like:
[`~transformers.EncoderDecoderConfig`] or [`~RagConfig`].
- **keys_to_ignore_at_inference** (`List[str]`) -- A list of keys to ignore by default when looking at dictionary
outputs of the model during inference.
- **attribute_map** (`Dict[str, str]`) -- A dict that maps model specific attribute names to the standardized
naming of attributes.
Common attributes (present in all subclasses):
- **vocab_size** (`int`) -- The number of tokens in the vocabulary, which is also the first dimension of the
embeddings matrix (this attribute may be missing for models that don't have a text modality like ViT).
- **hidden_size** (`int`) -- The hidden size of the model.
- **num_attention_heads** (`int`) -- The number of attention heads used in the multi-head attention layers of the
model.
- **num_hidden_layers** (`int`) -- The number of blocks in the model.
Arg:
name_or_path (`str`, *optional*, defaults to `""`):
Store the string that was passed to [`PreTrainedModel.from_pretrained`] or
[`TFPreTrainedModel.from_pretrained`] as `pretrained_model_name_or_path` if the configuration was created
with such a method.
output_hidden_states (`bool`, *optional*, defaults to `False`):
Whether or not the model should return all hidden-states.
output_attentions (`bool`, *optional*, defaults to `False`):
            Whether or not the model should return all attentions.
return_dict (`bool`, *optional*, defaults to `True`):
Whether or not the model should return a [`~transformers.utils.ModelOutput`] instead of a plain tuple.
is_encoder_decoder (`bool`, *optional*, defaults to `False`):
Whether the model is used as an encoder/decoder or not.
is_decoder (`bool`, *optional*, defaults to `False`):
Whether the model is used as decoder or not (in which case it's used as an encoder).
        cross_attention_hidden_size (`int`, *optional*):
The hidden size of the cross-attention layer in case the model is used as a decoder in an encoder-decoder
setting and the cross-attention hidden dimension differs from `self.config.hidden_size`.
add_cross_attention (`bool`, *optional*, defaults to `False`):
Whether cross-attention layers should be added to the model. Note, this option is only relevant for models
that can be used as decoder models within the [`EncoderDecoderModel`] class, which consists of all models
in `AUTO_MODELS_FOR_CAUSAL_LM`.
tie_encoder_decoder (`bool`, *optional*, defaults to `False`):
Whether all encoder weights should be tied to their equivalent decoder weights. This requires the encoder
and decoder model to have the exact same parameter names.
prune_heads (`Dict[int, List[int]]`, *optional*, defaults to `{}`):
Pruned heads of the model. The keys are the selected layer indices and the associated values, the list of
heads to prune in said layer.
For instance `{1: [0, 2], 2: [2, 3]}` will prune heads 0 and 2 on layer 1 and heads 2 and 3 on layer 2.
chunk_size_feed_forward (`int`, *optional*, defaults to `0`):
The chunk size of all feed forward layers in the residual attention blocks. A chunk size of `0` means that
the feed forward layer is not chunked. A chunk size of n means that the feed forward layer processes `n` <
sequence_length embeddings at a time. For more information on feed forward chunking, see [How does Feed
Forward Chunking work?](../glossary.html#feed-forward-chunking).
> Parameters for sequence generation
max_length (`int`, *optional*, defaults to 20):
Maximum length that will be used by default in the `generate` method of the model.
        min_length (`int`, *optional*, defaults to 0):
Minimum length that will be used by default in the `generate` method of the model.
do_sample (`bool`, *optional*, defaults to `False`):
Flag that will be used by default in the `generate` method of the model. Whether or not to use sampling ;
use greedy decoding otherwise.
early_stopping (`bool`, *optional*, defaults to `False`):
Flag that will be used by default in the `generate` method of the model. Whether to stop the beam search
when at least `num_beams` sentences are finished per batch or not.
num_beams (`int`, *optional*, defaults to 1):
Number of beams for beam search that will be used by default in the `generate` method of the model. 1 means
no beam search.
num_beam_groups (`int`, *optional*, defaults to 1):
Number of groups to divide `num_beams` into in order to ensure diversity among different groups of beams
that will be used by default in the `generate` method of the model. 1 means no group beam search.
diversity_penalty (`float`, *optional*, defaults to 0.0):
Value to control diversity for group beam search. that will be used by default in the `generate` method of
the model. 0 means no diversity penalty. The higher the penalty, the more diverse are the outputs.
temperature (`float`, *optional*, defaults to 1):
The value used to module the next token probabilities that will be used by default in the `generate` method
of the model. Must be strictly positive.
top_k (`int`, *optional*, defaults to 50):
Number of highest probability vocabulary tokens to keep for top-k-filtering that will be used by default in
the `generate` method of the model.
top_p (`float`, *optional*, defaults to 1):
Value that will be used by default in the `generate` method of the model for `top_p`. If set to float < 1,
only the most probable tokens with probabilities that add up to `top_p` or higher are kept for generation.
repetition_penalty (`float`, *optional*, defaults to 1):
Parameter for repetition penalty that will be used by default in the `generate` method of the model. 1.0
means no penalty.
length_penalty (`float`, *optional*, defaults to 1):
Exponential penalty to the length that will be used by default in the `generate` method of the model.
no_repeat_ngram_size (`int`, *optional*, defaults to 0) -- Value that will be used by default in the
`generate` method of the model for `no_repeat_ngram_size`. If set to int > 0, all ngrams of that size can
only occur once.
encoder_no_repeat_ngram_size (`int`, *optional*, defaults to 0) -- Value that will be used by
default in the `generate` method of the model for `encoder_no_repeat_ngram_size`. If set to int > 0, all
ngrams of that size that occur in the `encoder_input_ids` cannot occur in the `decoder_input_ids`.
bad_words_ids (`List[int]`, *optional*):
List of token ids that are not allowed to be generated that will be used by default in the `generate`
method of the model. In order to get the tokens of the words that should not appear in the generated text,
use `tokenizer.encode(bad_word, add_prefix_space=True)`.
num_return_sequences (`int`, *optional*, defaults to 1):
Number of independently computed returned sequences for each element in the batch that will be used by
default in the `generate` method of the model.
output_scores (`bool`, *optional*, defaults to `False`):
Whether the model should return the logits when used for generation.
return_dict_in_generate (`bool`, *optional*, defaults to `False`):
Whether the model should return a [`~transformers.utils.ModelOutput`] instead of a `torch.LongTensor`.
forced_bos_token_id (`int`, *optional*):
The id of the token to force as the first generated token after the `decoder_start_token_id`. Useful for
multilingual models like [mBART](../model_doc/mbart) where the first generated token needs to be the target
language token.
forced_eos_token_id (`int`, *optional*):
The id of the token to force as the last generated token when `max_length` is reached.
remove_invalid_values (`bool`, *optional*):
Whether to remove possible _nan_ and _inf_ outputs of the model to prevent the generation method to crash.
Note that using `remove_invalid_values` can slow down generation.
> Parameters for fine-tuning tasks
architectures (`List[str]`, *optional*):
Model architectures that can be used with the model pretrained weights.
finetuning_task (`str`, *optional*):
Name of the task used to fine-tune the model. This can be used when converting from an original (TensorFlow
or PyTorch) checkpoint.
id2label (`Dict[int, str]`, *optional*):
A map from index (for instance prediction index, or target index) to label.
label2id (`Dict[str, int]`, *optional*): A map from label to index for the model.
num_labels (`int`, *optional*):
Number of labels to use in the last layer added to the model, typically for a classification task.
task_specific_params (`Dict[str, Any]`, *optional*):
Additional keyword arguments to store for the current task.
problem_type (`str`, *optional*):
Problem type for `XxxForSequenceClassification` models. Can be one of `"regression"`,
`"single_label_classification"` or `"multi_label_classification"`.
> Parameters linked to the tokenizer
tokenizer_class (`str`, *optional*):
The name of the associated tokenizer class to use (if none is set, will use the tokenizer associated to the
model by default).
prefix (`str`, *optional*):
A specific prompt that should be added at the beginning of each text before calling the model.
bos_token_id (`int`, *optional*): The id of the _beginning-of-stream_ token.
pad_token_id (`int`, *optional*): The id of the _padding_ token.
eos_token_id (`int`, *optional*): The id of the _end-of-stream_ token.
decoder_start_token_id (`int`, *optional*):
If an encoder-decoder model starts decoding with a different token than _bos_, the id of that token.
sep_token_id (`int`, *optional*): The id of the _separation_ token.
> PyTorch specific parameters
torchscript (`bool`, *optional*, defaults to `False`):
Whether or not the model should be used with Torchscript.
tie_word_embeddings (`bool`, *optional*, defaults to `True`):
Whether the model's input and output word embeddings should be tied. Note that this is only relevant if the
model has a output word embedding layer.
torch_dtype (`str`, *optional*):
The `dtype` of the weights. This attribute can be used to initialize the model to a non-default `dtype`
(which is normally `float32`) and thus allow for optimal storage allocation. For example, if the saved
model is `float16`, ideally we want to load it back using the minimal amount of memory needed to load
`float16` weights. Since the config object is stored in plain text, this attribute contains just the
floating type string without the `torch.` prefix. For example, for `torch.float16` ``torch_dtype` is the
`"float16"` string.
This attribute is currently not being used during model loading time, but this may change in the future
versions. But we can already start preparing for the future by saving the dtype with save_pretrained.
> TensorFlow specific parameters
use_bfloat16 (`bool`, *optional*, defaults to `False`):
Whether or not the model should use BFloat16 scalars (only used by some TensorFlow models).
"""
model_type: str = ""
is_composition: bool = False
attribute_map: Dict[str, str] = {}
_auto_class: Optional[str] = None
    def __setattr__(self, key, value):
        # Route writes through `attribute_map` so model-specific aliases
        # set the canonical attribute instead. `super().__getattribute__`
        # is used to fetch the map so this override is not re-entered.
        if key in super().__getattribute__("attribute_map"):
            key = super().__getattribute__("attribute_map")[key]
        super().__setattr__(key, value)
    def __getattribute__(self, key):
        # Mirror of __setattr__: translate aliased names to their canonical
        # form before lookup. The `key != "attribute_map"` guard prevents
        # infinite recursion when fetching the map itself.
        if key != "attribute_map" and key in super().__getattribute__("attribute_map"):
            key = super().__getattribute__("attribute_map")[key]
        return super().__getattribute__(key)
def __init__(self, **kwargs):
# Attributes with defaults
self.return_dict = kwargs.pop("return_dict", True)
self.output_hidden_states = kwargs.pop("output_hidden_states", False)
self.output_attentions = kwargs.pop("output_attentions", False)
self.torchscript = kwargs.pop("torchscript", False) # Only used by PyTorch models
self.torch_dtype = kwargs.pop("torch_dtype", None) # Only used by PyTorch models
self.use_bfloat16 = kwargs.pop("use_bfloat16", False)
self.pruned_heads = kwargs.pop("pruned_heads", {})
self.tie_word_embeddings = kwargs.pop(
"tie_word_embeddings", True
) # Whether input and output word embeddings should be tied for all MLM, LM and Seq2Seq models.
# Is decoder is used in encoder-decoder models to differentiate encoder from decoder
self.is_encoder_decoder = kwargs.pop("is_encoder_decoder", False)
self.is_decoder = kwargs.pop("is_decoder", False)
self.cross_attention_hidden_size = kwargs.pop("cross_attention_hidden_size", None)
self.add_cross_attention = kwargs.pop("add_cross_attention", False)
self.tie_encoder_decoder = kwargs.pop("tie_encoder_decoder", False)
# Parameters for sequence generation
self.max_length = kwargs.pop("max_length", 20)
self.min_length = kwargs.pop("min_length", 0)
self.do_sample = kwargs.pop("do_sample", False)
self.early_stopping = kwargs.pop("early_stopping", False)
self.num_beams = kwargs.pop("num_beams", 1)
self.num_beam_groups = kwargs.pop("num_beam_groups", 1)
self.diversity_penalty = kwargs.pop("diversity_penalty", 0.0)
self.temperature = kwargs.pop("temperature", 1.0)
self.top_k = kwargs.pop("top_k", 50)
self.top_p = kwargs.pop("top_p", 1.0)
self.typical_p = kwargs.pop("typical_p", 1.0)
self.repetition_penalty = kwargs.pop("repetition_penalty", 1.0)
self.length_penalty = kwargs.pop("length_penalty", 1.0)
self.no_repeat_ngram_size = kwargs.pop("no_repeat_ngram_size", 0)
self.encoder_no_repeat_ngram_size = kwargs.pop("encoder_no_repeat_ngram_size", 0)
self.bad_words_ids = kwargs.pop("bad_words_ids", None)
self.num_return_sequences = kwargs.pop("num_return_sequences", 1)
self.chunk_size_feed_forward = kwargs.pop("chunk_size_feed_forward", 0)
self.output_scores = kwargs.pop("output_scores", False)
self.return_dict_in_generate = kwargs.pop("return_dict_in_generate", False)
self.forced_bos_token_id = kwargs.pop("forced_bos_token_id", None)
self.forced_eos_token_id = kwargs.pop("forced_eos_token_id", None)
self.remove_invalid_values = kwargs.pop("remove_invalid_values", False)
self.exponential_decay_length_penalty = kwargs.pop("exponential_decay_length_penalty", None)
# Fine-tuning task arguments
self.architectures = kwargs.pop("architectures", None)
self.finetuning_task = kwargs.pop("finetuning_task", None)
self.id2label = kwargs.pop("id2label", None)
self.label2id = kwargs.pop("label2id", None)
if self.id2label is not None:
kwargs.pop("num_labels", None)
self.id2label = dict((int(key), value) for key, value in self.id2label.items())
# Keys are always strings in JSON so convert ids to int here.
else:
self.num_labels = kwargs.pop("num_labels", 2)
if self.torch_dtype is not None and isinstance(self.torch_dtype, str):
# we will start using self.torch_dtype in v5, but to be consistent with
# from_pretrained's torch_dtype arg convert it to an actual torch.dtype object
if is_torch_available():
import torch
self.torch_dtype = getattr(torch, self.torch_dtype)
# Tokenizer arguments TODO: eventually tokenizer and models should share the same config
self.tokenizer_class = kwargs.pop("tokenizer_class", None)
self.prefix = kwargs.pop("prefix", None)
self.bos_token_id = kwargs.pop("bos_token_id", None)
self.pad_token_id = kwargs.pop("pad_token_id", None)
self.eos_token_id = kwargs.pop("eos_token_id", None)
self.sep_token_id = kwargs.pop("sep_token_id", None)
self.decoder_start_token_id = kwargs.pop("decoder_start_token_id", None)
# task specific arguments
self.task_specific_params = kwargs.pop("task_specific_params", None)
# regression / multi-label classification
self.problem_type = kwargs.pop("problem_type", None)
allowed_problem_types = ("regression", "single_label_classification", "multi_label_classification")
if self.problem_type is not None and self.problem_type not in allowed_problem_types:
raise ValueError(
f"The config parameter `problem_type` was not understood: received {self.problem_type} "
"but only 'regression', 'single_label_classification' and 'multi_label_classification' are valid."
)
# TPU arguments
if kwargs.pop("xla_device", None) is not None:
logger.warning(
"The `xla_device` argument has been deprecated in v4.4.0 of Transformers. It is ignored and you can "
"safely remove it from your `config.json` file."
)
# Name or path to the pretrained checkpoint
self._name_or_path = str(kwargs.pop("name_or_path", ""))
# Drop the transformers version info
self.transformers_version = kwargs.pop("transformers_version", None)
# Deal with gradient checkpointing
if kwargs.get("gradient_checkpointing", False):
warnings.warn(
"Passing `gradient_checkpointing` to a config initialization is deprecated and will be removed in v5 "
"Transformers. Using `model.gradient_checkpointing_enable()` instead, or if you are using the "
"`Trainer` API, pass `gradient_checkpointing=True` in your `TrainingArguments`."
)
# Additional attributes without default values
for key, value in kwargs.items():
try:
setattr(self, key, value)
except AttributeError as err:
logger.error(f"Can't set {key} with value {value} for {self}")
raise err
@property
def name_or_path(self) -> str:
return getattr(self, "_name_or_path", None)
@name_or_path.setter
def name_or_path(self, value):
self._name_or_path = str(value) # Make sure that name_or_path is a string (for JSON encoding)
@property
def use_return_dict(self) -> bool:
"""
`bool`: Whether or not return [`~utils.ModelOutput`] instead of tuples.
"""
# If torchscript is set, force `return_dict=False` to avoid jit errors
return self.return_dict and not self.torchscript
@property
def num_labels(self) -> int:
"""
`int`: The number of labels for classification models.
"""
return len(self.id2label)
@num_labels.setter
def num_labels(self, num_labels: int):
if not hasattr(self, "id2label") or self.id2label is None or len(self.id2label) != num_labels:
self.id2label = {i: f"LABEL_{i}" for i in range(num_labels)}
self.label2id = dict(zip(self.id2label.values(), self.id2label.keys()))
    def save_pretrained(self, save_directory: Union[str, os.PathLike], push_to_hub: bool = False, **kwargs):
        """
        Save a configuration object to the directory `save_directory`, so that it can be re-loaded using the
        [`~PretrainedConfig.from_pretrained`] class method.
        Args:
            save_directory (`str` or `os.PathLike`):
                Directory where the configuration JSON file will be saved (will be created if it does not exist).
            push_to_hub (`bool`, *optional*, defaults to `False`):
                Whether or not to push your model to the Hugging Face model hub after saving it.
            <Tip warning={true}>
            Using `push_to_hub=True` will synchronize the repository you are pushing to with `save_directory`,
            which requires `save_directory` to be a local clone of the repo you are pushing to if it's an existing
            folder. Pass along `temp_dir=True` to use a temporary directory instead.
            </Tip>
            kwargs:
                Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
        """
        if os.path.isfile(save_directory):
            raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file")
        if push_to_hub:
            commit_message = kwargs.pop("commit_message", None)
            # Create or clone the target repo up-front so the commit after saving can be pushed below.
            repo = self._create_or_get_repo(save_directory, **kwargs)
        os.makedirs(save_directory, exist_ok=True)
        # If we have a custom config, we copy the file defining it in the folder and set the attributes so it can be
        # loaded from the Hub.
        if self._auto_class is not None:
            custom_object_save(self, save_directory, config=self)
        # If we save using the predefined names, we can load using `from_pretrained`
        output_config_file = os.path.join(save_directory, CONFIG_NAME)
        self.to_json_file(output_config_file, use_diff=True)
        logger.info(f"Configuration saved in {output_config_file}")
        if push_to_hub:
            url = self._push_to_hub(repo, commit_message=commit_message)
            logger.info(f"Configuration pushed to the hub in this commit: {url}")
@classmethod
def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> "PretrainedConfig":
r"""
Instantiate a [`PretrainedConfig`] (or a derived class) from a pretrained model configuration.
Args:
pretrained_model_name_or_path (`str` or `os.PathLike`):
This can be either:
- a string, the *model id* of a pretrained model configuration hosted inside a model repo on
huggingface.co. Valid model ids can be located at the root-level, like `bert-base-uncased`, or
namespaced under a user or organization name, like `dbmdz/bert-base-german-cased`.
- a path to a *directory* containing a configuration file saved using the
[`~PretrainedConfig.save_pretrained`] method, e.g., `./my_model_directory/`.
- a path or url to a saved configuration JSON *file*, e.g., `./my_model_directory/configuration.json`.
cache_dir (`str` or `os.PathLike`, *optional*):
Path to a directory in which a downloaded pretrained model configuration should be cached if the
standard cache should not be used.
force_download (`bool`, *optional*, defaults to `False`):
Whether or not to force to (re-)download the configuration files and override the cached versions if
they exist.
resume_download (`bool`, *optional*, defaults to `False`):
Whether or not to delete incompletely received file. Attempts to resume the download if such a file
exists.
proxies (`Dict[str, str]`, *optional*):
A dictionary of proxy servers to use by protocol or endpoint, e.g., `{'http': 'foo.bar:3128',
'http://hostname': 'foo.bar:4012'}.` The proxies are used on each request.
use_auth_token (`str` or *bool*, *optional*):
The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
when running `transformers-cli login` (stored in `~/.huggingface`).
revision (`str`, *optional*, defaults to `"main"`):
The specific model version to use. It can be a branch name, a tag name, or a commit id, since we use a
git-based system for storing models and other artifacts on huggingface.co, so `revision` can be any
identifier allowed by git.
return_unused_kwargs (`bool`, *optional*, defaults to `False`):
If `False`, then this function returns just the final configuration object.
If `True`, then this functions returns a `Tuple(config, unused_kwargs)` where *unused_kwargs* is a
dictionary consisting of the key/value pairs whose keys are not configuration attributes: i.e., the
part of `kwargs` which has not been used to update `config` and is otherwise ignored.
kwargs (`Dict[str, Any]`, *optional*):
The values in kwargs of any keys which are configuration attributes will be used to override the loaded
values. Behavior concerning key/value pairs whose keys are *not* configuration attributes is controlled
by the `return_unused_kwargs` keyword parameter.
<Tip>
Passing `use_auth_token=True` is required when you want to use a private model.
</Tip>
Returns:
[`PretrainedConfig`]: The configuration object instantiated from this pretrained model.
Examples:
```python
# We can't instantiate directly the base class *PretrainedConfig* so let's show the examples on a
# derived class: BertConfig
config = BertConfig.from_pretrained(
"bert-base-uncased"
) # Download configuration from huggingface.co and cache.
config = BertConfig.from_pretrained(
"./test/saved_model/"
) # E.g. config (or model) was saved using *save_pretrained('./test/saved_model/')*
config = BertConfig.from_pretrained("./test/saved_model/my_configuration.json")
config = BertConfig.from_pretrained("bert-base-uncased", output_attentions=True, foo=False)
assert config.output_attentions == True
config, unused_kwargs = BertConfig.from_pretrained(
"bert-base-uncased", output_attentions=True, foo=False, return_unused_kwargs=True
)
assert config.output_attentions == True
assert unused_kwargs == {"foo": False}
```"""
config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type:
logger.warning(
f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
)
return cls.from_dict(config_dict, **kwargs)
@classmethod
def get_config_dict(
cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
"""
From a `pretrained_model_name_or_path`, resolve to a dictionary of parameters, to be used for instantiating a
[`PretrainedConfig`] using `from_dict`.
Parameters:
pretrained_model_name_or_path (`str` or `os.PathLike`):
The identifier of the pre-trained checkpoint from which we want the dictionary of parameters.
Returns:
`Tuple[Dict, Dict]`: The dictionary(ies) that will be used to instantiate the configuration object.
"""
original_kwargs = copy.deepcopy(kwargs)
# Get config dict associated with the base config file
config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
# That config file may point us toward another config file to use.
if "configuration_files" in config_dict:
configuration_file = get_configuration_file(config_dict["configuration_files"])
config_dict, kwargs = cls._get_config_dict(
pretrained_model_name_or_path, _configuration_file=configuration_file, **original_kwargs
)
return config_dict, kwargs
@classmethod
def _get_config_dict(
cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
cache_dir = kwargs.pop("cache_dir", None)
force_download = kwargs.pop("force_download", False)
resume_download = kwargs.pop("resume_download", False)
proxies = kwargs.pop("proxies", None)
use_auth_token = kwargs.pop("use_auth_token", None)
local_files_only = kwargs.pop("local_files_only", False)
revision = kwargs.pop("revision", None)
from_pipeline = kwargs.pop("_from_pipeline", None)
from_auto_class = kwargs.pop("_from_auto", False)
user_agent = {"file_type": "config", "from_auto_class": from_auto_class}
if from_pipeline is not None:
user_agent["using_pipeline"] = from_pipeline
if is_offline_mode() and not local_files_only:
logger.info("Offline mode: forcing local_files_only=True")
local_files_only = True
pretrained_model_name_or_path = str(pretrained_model_name_or_path)
if os.path.isfile(pretrained_model_name_or_path) or is_remote_url(pretrained_model_name_or_path):
config_file = pretrained_model_name_or_path
else:
configuration_file = kwargs.pop("_configuration_file", CONFIG_NAME)
if os.path.isdir(pretrained_model_name_or_path):
config_file = os.path.join(pretrained_model_name_or_path, configuration_file)
else:
config_file = hf_bucket_url(
pretrained_model_name_or_path, filename=configuration_file, revision=revision, mirror=None
)
try:
# Load from URL or cache if already cached
resolved_config_file = cached_path(
config_file,
cache_dir=cache_dir,
force_download=force_download,
proxies=proxies,
resume_download=resume_download,
local_files_only=local_files_only,
use_auth_token=use_auth_token,
user_agent=user_agent,
)
except RepositoryNotFoundError:
raise EnvironmentError(
f"{pretrained_model_name_or_path} is not a local folder and is not a valid model identifier listed on "
"'https://huggingface.co/models'\nIf this is a private repository, make sure to pass a token having "
"permission to this repo with `use_auth_token` or log in with `huggingface-cli login` and pass "
"`use_auth_token=True`."
)
except RevisionNotFoundError:
raise EnvironmentError(
f"{revision} is not a valid git identifier (branch name, tag name or commit id) that exists for this "
f"model name. Check the model page at 'https://huggingface.co/{pretrained_model_name_or_path}' for "
"available revisions."
)
except EntryNotFoundError:
raise EnvironmentError(
f"{pretrained_model_name_or_path} does not appear to have a file named {configuration_file}."
)
except HTTPError as err:
raise EnvironmentError(
f"There was a specific connection error when trying to load {pretrained_model_name_or_path}:\n{err}"
)
except ValueError:
raise EnvironmentError(
f"We couldn't connect to '{HUGGINGFACE_CO_RESOLVE_ENDPOINT}' to load this model, couldn't find it in the cached "
f"files and it looks like {pretrained_model_name_or_path} is not the path to a directory containing a "
"{configuration_file} file.\nCheckout your internet connection or see how to run the library in "
"offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'."
)
except EnvironmentError:
raise EnvironmentError(
f"Can't load config for '{pretrained_model_name_or_path}'. If you were trying to load it from "
"'https://huggingface.co/models', make sure you don't have a local directory with the same name. "
f"Otherwise, make sure '{pretrained_model_name_or_path}' is the correct path to a directory "
f"containing a {configuration_file} file"
)
try:
# Load config dict
config_dict = cls._dict_from_json_file(resolved_config_file)
except (json.JSONDecodeError, UnicodeDecodeError):
raise EnvironmentError(
f"It looks like the config file at '{resolved_config_file}' is not a valid JSON file."
)
if resolved_config_file == config_file:
logger.info(f"loading configuration file {config_file}")
else:
logger.info(f"loading configuration file {config_file} from cache at {resolved_config_file}")
return config_dict, kwargs
@classmethod
def from_dict(cls, config_dict: Dict[str, Any], **kwargs) -> "PretrainedConfig":
"""
Instantiates a [`PretrainedConfig`] from a Python dictionary of parameters.
Args:
config_dict (`Dict[str, Any]`):
Dictionary that will be used to instantiate the configuration object. Such a dictionary can be
retrieved from a pretrained checkpoint by leveraging the [`~PretrainedConfig.get_config_dict`] method.
kwargs (`Dict[str, Any]`):
Additional parameters from which to initialize the configuration object.
Returns:
[`PretrainedConfig`]: The configuration object instantiated from those parameters.
"""
return_unused_kwargs = kwargs.pop("return_unused_kwargs", False)
config = cls(**config_dict)
if hasattr(config, "pruned_heads"):
config.pruned_heads = dict((int(key), value) for key, value in config.pruned_heads.items())
# Update config with kwargs if needed
to_remove = []
for key, value in kwargs.items():
if hasattr(config, key):
setattr(config, key, value)
if key != "torch_dtype":
to_remove.append(key)
for key in to_remove:
kwargs.pop(key, None)
logger.info(f"Model config {config}")
if return_unused_kwargs:
return config, kwargs
else:
return config
@classmethod
def from_json_file(cls, json_file: Union[str, os.PathLike]) -> "PretrainedConfig":
"""
Instantiates a [`PretrainedConfig`] from the path to a JSON file of parameters.
Args:
json_file (`str` or `os.PathLike`):
Path to the JSON file containing the parameters.
Returns:
[`PretrainedConfig`]: The configuration object instantiated from that JSON file.
"""
config_dict = cls._dict_from_json_file(json_file)
return cls(**config_dict)
@classmethod
def _dict_from_json_file(cls, json_file: Union[str, os.PathLike]):
with open(json_file, "r", encoding="utf-8") as reader:
text = reader.read()
return json.loads(text)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return f"{self.__class__.__name__} {self.to_json_string()}"
    def to_diff_dict(self) -> Dict[str, Any]:
        """
        Removes all attributes from config which correspond to the default config attributes for better readability and
        serializes to a Python dictionary.
        Returns:
            `Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance,
        """
        config_dict = self.to_dict()
        # get the default config dict
        default_config_dict = PretrainedConfig().to_dict()
        # get class specific config dict
        class_config_dict = self.__class__().to_dict() if not self.is_composition else {}
        serializable_config_dict = {}
        # only serialize values that differ from the default config
        for key, value in config_dict.items():
            # Keep a key when any of the following holds:
            #   - it is not a base-config attribute at all (i.e. it is model-specific),
            #   - it is the "transformers_version" stamp (always kept),
            #   - its value differs from the base PretrainedConfig default,
            #   - its value differs from this config class's own default.
            if (
                key not in default_config_dict
                or key == "transformers_version"
                or value != default_config_dict[key]
                or (key in class_config_dict and value != class_config_dict[key])
            ):
                serializable_config_dict[key] = value
        # Convert any torch.dtype values to plain strings so the result is JSON-serializable.
        self.dict_torch_dtype_to_str(serializable_config_dict)
        return serializable_config_dict
def to_dict(self) -> Dict[str, Any]:
"""
Serializes this instance to a Python dictionary.
Returns:
`Dict[str, Any]`: Dictionary of all the attributes that make up this configuration instance.
"""
output = copy.deepcopy(self.__dict__)
if hasattr(self.__class__, "model_type"):
output["model_type"] = self.__class__.model_type
if "_auto_class" in output:
del output["_auto_class"]
# Transformers version when serializing the model
output["transformers_version"] = __version__
self.dict_torch_dtype_to_str(output)
return output
def to_json_string(self, use_diff: bool = True) -> str:
"""
Serializes this instance to a JSON string.
Args:
use_diff (`bool`, *optional*, defaults to `True`):
If set to `True`, only the difference between the config instance and the default `PretrainedConfig()`
is serialized to JSON string.
Returns:
`str`: String containing all the attributes that make up this configuration instance in JSON format.
"""
if use_diff is True:
config_dict = self.to_diff_dict()
else:
config_dict = self.to_dict()
return json.dumps(config_dict, indent=2, sort_keys=True) + "\n"
def to_json_file(self, json_file_path: Union[str, os.PathLike], use_diff: bool = True):
"""
Save this instance to a JSON file.
Args:
json_file_path (`str` or `os.PathLike`):
Path to the JSON file in which this configuration instance's parameters will be saved.
use_diff (`bool`, *optional*, defaults to `True`):
If set to `True`, only the difference between the config instance and the default `PretrainedConfig()`
is serialized to JSON file.
"""
with open(json_file_path, "w", encoding="utf-8") as writer:
writer.write(self.to_json_string(use_diff=use_diff))
def update(self, config_dict: Dict[str, Any]):
"""
Updates attributes of this class with attributes from `config_dict`.
Args:
config_dict (`Dict[str, Any]`): Dictionary of attributes that should be updated for this class.
"""
for key, value in config_dict.items():
setattr(self, key, value)
def update_from_string(self, update_str: str):
"""
Updates attributes of this class with attributes from `update_str`.
The expected format is ints, floats and strings as is, and for booleans use `true` or `false`. For example:
"n_embd=10,resid_pdrop=0.2,scale_attn_weights=false,summary_type=cls_index"
The keys to change have to already exist in the config object.
Args:
update_str (`str`): String with attributes that should be updated for this class.
"""
d = dict(x.split("=") for x in update_str.split(","))
for k, v in d.items():
if not hasattr(self, k):
raise ValueError(f"key {k} isn't in the original config dict")
old_v = getattr(self, k)
if isinstance(old_v, bool):
if v.lower() in ["true", "1", "y", "yes"]:
v = True
elif v.lower() in ["false", "0", "n", "no"]:
v = False
else:
raise ValueError(f"can't derive true or false from {v} (key {k})")
elif isinstance(old_v, int):
v = int(v)
elif isinstance(old_v, float):
v = float(v)
elif not isinstance(old_v, str):
raise ValueError(
f"You can only update int, float, bool or string values in the config, got {v} for key {k}"
)
setattr(self, k, v)
def dict_torch_dtype_to_str(self, d: Dict[str, Any]) -> None:
"""
Checks whether the passed dictionary and its nested dicts have a *torch_dtype* key and if it's not None,
converts torch.dtype to a string of just the type. For example, `torch.float32` get converted into *"float32"*
string, which can then be stored in the json format.
"""
if d.get("torch_dtype", None) is not None and not isinstance(d["torch_dtype"], str):
d["torch_dtype"] = str(d["torch_dtype"]).split(".")[1]
for value in d.values():
if isinstance(value, dict):
self.dict_torch_dtype_to_str(value)
@classmethod
def register_for_auto_class(cls, auto_class="AutoConfig"):
"""
Register this class with a given auto class. This should only be used for custom configurations as the ones in
the library are already mapped with `AutoConfig`.
<Tip warning={true}>
This API is experimental and may have some slight breaking changes in the next releases.
</Tip>
Args:
auto_class (`str` or `type`, *optional*, defaults to `"AutoConfig"`):
The auto class to register this new configuration with.
"""
if not isinstance(auto_class, str):
auto_class = auto_class.__name__
import transformers.models.auto as auto_module
if not hasattr(auto_module, auto_class):
raise ValueError(f"{auto_class} is not a valid auto class.")
cls._auto_class = auto_class
def get_configuration_file(configuration_files: List[str]) -> str:
    """
    Get the configuration file to use for this version of transformers.
    Args:
        configuration_files (`List[str]`): The list of available configuration files.
    Returns:
        `str`: The configuration file to use.
    """
    configuration_files_map = {}
    for file_name in configuration_files:
        search = _re_configuration_file.search(file_name)
        if search is not None:
            v = search.groups()[0]
            configuration_files_map[v] = file_name
    # Sort numerically with `version.parse`: a plain string sort would order "4.10.0"
    # before "4.2.0" and make the early `break` below pick the wrong (older) file.
    available_versions = sorted(configuration_files_map.keys(), key=version.parse)
    # Defaults to FULL_CONFIGURATION_FILE and then try to look at some newer versions.
    configuration_file = CONFIG_NAME
    transformers_version = version.parse(__version__)
    for v in available_versions:
        if version.parse(v) <= transformers_version:
            configuration_file = configuration_files_map[v]
        else:
            # No point going further since the versions are sorted.
            break
    return configuration_file
# `push_to_hub` is inherited from PushToHubMixin with a templated docstring; rebind a copy here so
# the placeholders can be filled in for configuration objects without affecting other mixin users.
PretrainedConfig.push_to_hub = copy_func(PretrainedConfig.push_to_hub)
PretrainedConfig.push_to_hub.__doc__ = PretrainedConfig.push_to_hub.__doc__.format(
    object="config", object_class="AutoConfig", object_files="configuration file"
)
| 50.284946 | 129 | 0.653459 |
import copy
import json
import os
import re
import warnings
from typing import Any, Dict, List, Optional, Tuple, Union
from packaging import version
from requests import HTTPError
from . import __version__
from .dynamic_module_utils import custom_object_save
from .utils import (
CONFIG_NAME,
HUGGINGFACE_CO_RESOLVE_ENDPOINT,
EntryNotFoundError,
PushToHubMixin,
RepositoryNotFoundError,
RevisionNotFoundError,
cached_path,
copy_func,
hf_bucket_url,
is_offline_mode,
is_remote_url,
is_torch_available,
logging,
)
logger = logging.get_logger(__name__)
_re_configuration_file = re.compile(r"config\.(.*)\.json")
class PretrainedConfig(PushToHubMixin):
    """
    Base class for model configuration objects: stores common parameters and provides the
    loading/saving/serialization utilities defined below, plus hub upload via `PushToHubMixin`.
    """
    # Identifier of the model architecture, overridden by each concrete config subclass.
    model_type: str = ""
    # True for configs composed of several sub-configs (they have no single set of defaults).
    is_composition: bool = False
    # Maps alias attribute names to their canonical storage names; consulted by
    # __setattr__/__getattribute__ below.
    attribute_map: Dict[str, str] = {}
    # Name of the auto class this config was registered with via `register_for_auto_class`.
    _auto_class: Optional[str] = None
    def __setattr__(self, key, value):
        # Redirect aliased attribute names to their canonical names via the class-level
        # `attribute_map` (accessed through super() to avoid recursing into our own override).
        if key in super().__getattribute__("attribute_map"):
            key = super().__getattribute__("attribute_map")[key]
        super().__setattr__(key, value)
    def __getattribute__(self, key):
        # The `key != "attribute_map"` guard prevents infinite recursion when looking up the
        # map itself; aliased names are redirected exactly like in __setattr__.
        if key != "attribute_map" and key in super().__getattribute__("attribute_map"):
            key = super().__getattribute__("attribute_map")[key]
        return super().__getattribute__(key)
    def __init__(self, **kwargs):
        """
        Populate the common configuration attributes from `kwargs`, falling back to library-wide
        defaults: output/runtime flags, text-generation parameters, fine-tuning label maps,
        tokenizer token ids and task-specific settings. Any unrecognized key is set as an
        attribute as-is (errors during that are logged and re-raised).
        """
        # Output and runtime behavior flags
        self.return_dict = kwargs.pop("return_dict", True)
        self.output_hidden_states = kwargs.pop("output_hidden_states", False)
        self.output_attentions = kwargs.pop("output_attentions", False)
        self.torchscript = kwargs.pop("torchscript", False)
        self.torch_dtype = kwargs.pop("torch_dtype", None)
        self.use_bfloat16 = kwargs.pop("use_bfloat16", False)
        self.pruned_heads = kwargs.pop("pruned_heads", {})
        self.tie_word_embeddings = kwargs.pop(
            "tie_word_embeddings", True
        )
        # Encoder-decoder / cross-attention structure
        self.is_encoder_decoder = kwargs.pop("is_encoder_decoder", False)
        self.is_decoder = kwargs.pop("is_decoder", False)
        self.cross_attention_hidden_size = kwargs.pop("cross_attention_hidden_size", None)
        self.add_cross_attention = kwargs.pop("add_cross_attention", False)
        self.tie_encoder_decoder = kwargs.pop("tie_encoder_decoder", False)
        # Text-generation parameters
        self.max_length = kwargs.pop("max_length", 20)
        self.min_length = kwargs.pop("min_length", 0)
        self.do_sample = kwargs.pop("do_sample", False)
        self.early_stopping = kwargs.pop("early_stopping", False)
        self.num_beams = kwargs.pop("num_beams", 1)
        self.num_beam_groups = kwargs.pop("num_beam_groups", 1)
        self.diversity_penalty = kwargs.pop("diversity_penalty", 0.0)
        self.temperature = kwargs.pop("temperature", 1.0)
        self.top_k = kwargs.pop("top_k", 50)
        self.top_p = kwargs.pop("top_p", 1.0)
        self.typical_p = kwargs.pop("typical_p", 1.0)
        self.repetition_penalty = kwargs.pop("repetition_penalty", 1.0)
        self.length_penalty = kwargs.pop("length_penalty", 1.0)
        self.no_repeat_ngram_size = kwargs.pop("no_repeat_ngram_size", 0)
        self.encoder_no_repeat_ngram_size = kwargs.pop("encoder_no_repeat_ngram_size", 0)
        self.bad_words_ids = kwargs.pop("bad_words_ids", None)
        self.num_return_sequences = kwargs.pop("num_return_sequences", 1)
        self.chunk_size_feed_forward = kwargs.pop("chunk_size_feed_forward", 0)
        self.output_scores = kwargs.pop("output_scores", False)
        self.return_dict_in_generate = kwargs.pop("return_dict_in_generate", False)
        self.forced_bos_token_id = kwargs.pop("forced_bos_token_id", None)
        self.forced_eos_token_id = kwargs.pop("forced_eos_token_id", None)
        self.remove_invalid_values = kwargs.pop("remove_invalid_values", False)
        self.exponential_decay_length_penalty = kwargs.pop("exponential_decay_length_penalty", None)
        # Fine-tuning task arguments
        self.architectures = kwargs.pop("architectures", None)
        self.finetuning_task = kwargs.pop("finetuning_task", None)
        self.id2label = kwargs.pop("id2label", None)
        self.label2id = kwargs.pop("label2id", None)
        if self.id2label is not None:
            # An explicit id2label wins over num_labels; JSON keys are strings, so convert ids to int.
            kwargs.pop("num_labels", None)
            self.id2label = dict((int(key), value) for key, value in self.id2label.items())
        else:
            self.num_labels = kwargs.pop("num_labels", 2)
        if self.torch_dtype is not None and isinstance(self.torch_dtype, str):
            # Convert a string like "float32" into the actual torch.dtype object when torch is present.
            if is_torch_available():
                import torch
                self.torch_dtype = getattr(torch, self.torch_dtype)
        # Tokenizer arguments TODO: eventually tokenizer and models should share the same config
        self.tokenizer_class = kwargs.pop("tokenizer_class", None)
        self.prefix = kwargs.pop("prefix", None)
        self.bos_token_id = kwargs.pop("bos_token_id", None)
        self.pad_token_id = kwargs.pop("pad_token_id", None)
        self.eos_token_id = kwargs.pop("eos_token_id", None)
        self.sep_token_id = kwargs.pop("sep_token_id", None)
        self.decoder_start_token_id = kwargs.pop("decoder_start_token_id", None)
        # task specific arguments
        self.task_specific_params = kwargs.pop("task_specific_params", None)
        # regression / multi-label classification
        self.problem_type = kwargs.pop("problem_type", None)
        allowed_problem_types = ("regression", "single_label_classification", "multi_label_classification")
        if self.problem_type is not None and self.problem_type not in allowed_problem_types:
            raise ValueError(
                f"The config parameter `problem_type` was not understood: received {self.problem_type} "
                "but only 'regression', 'single_label_classification' and 'multi_label_classification' are valid."
            )
        # TPU arguments (deprecated; accepted only so old config.json files keep loading)
        if kwargs.pop("xla_device", None) is not None:
            logger.warning(
                "The `xla_device` argument has been deprecated in v4.4.0 of Transformers. It is ignored and you can "
                "safely remove it from your `config.json` file."
            )
        # Name or path to the pretrained checkpoint
        self._name_or_path = str(kwargs.pop("name_or_path", ""))
        # Drop the transformers version info
        self.transformers_version = kwargs.pop("transformers_version", None)
        # Deal with gradient checkpointing
        if kwargs.get("gradient_checkpointing", False):
            warnings.warn(
                "Passing `gradient_checkpointing` to a config initialization is deprecated and will be removed in v5 "
                "Transformers. Using `model.gradient_checkpointing_enable()` instead, or if you are using the "
                "`Trainer` API, pass `gradient_checkpointing=True` in your `TrainingArguments`."
            )
        # Additional attributes without default values
        for key, value in kwargs.items():
            try:
                setattr(self, key, value)
            except AttributeError as err:
                logger.error(f"Can't set {key} with value {value} for {self}")
                raise err
    @property
    def name_or_path(self) -> str:
        """`str`: The model name or checkpoint path this configuration was created from (may be unset)."""
        return getattr(self, "_name_or_path", None)

    @name_or_path.setter
    def name_or_path(self, value):
        self._name_or_path = str(value)  # Make sure that name_or_path is a string (for JSON encoding)
    @property
    def use_return_dict(self) -> bool:
        """`bool`: Whether or not return [`~utils.ModelOutput`] instead of tuples."""
        # If torchscript is set, force `return_dict=False` to avoid jit errors
        return self.return_dict and not self.torchscript
    @property
    def num_labels(self) -> int:
        """`int`: The number of labels for classification models."""
        return len(self.id2label)

    @num_labels.setter
    def num_labels(self, num_labels: int):
        # Only rebuild the default LABEL_i maps when none exist or the size changed,
        # so explicitly provided id2label/label2id mappings are preserved.
        if not hasattr(self, "id2label") or self.id2label is None or len(self.id2label) != num_labels:
            self.id2label = {i: f"LABEL_{i}" for i in range(num_labels)}
            self.label2id = dict(zip(self.id2label.values(), self.id2label.keys()))
    def save_pretrained(self, save_directory: Union[str, os.PathLike], push_to_hub: bool = False, **kwargs):
        """
        Save a configuration object to the directory `save_directory`, so that it can be re-loaded using the
        [`~PretrainedConfig.from_pretrained`] class method.
        Args:
            save_directory (`str` or `os.PathLike`):
                Directory where the configuration JSON file will be saved (created if it does not exist).
            push_to_hub (`bool`, *optional*, defaults to `False`):
                Whether or not to push the configuration to the Hugging Face model hub after saving it.
            kwargs:
                Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method.
        """
        if os.path.isfile(save_directory):
            raise AssertionError(f"Provided path ({save_directory}) should be a directory, not a file")
        if push_to_hub:
            commit_message = kwargs.pop("commit_message", None)
            # Create or clone the target repo up-front so the commit after saving can be pushed below.
            repo = self._create_or_get_repo(save_directory, **kwargs)
        os.makedirs(save_directory, exist_ok=True)
        # If we have a custom config, copy the file defining it into the folder so it can be loaded from the Hub.
        if self._auto_class is not None:
            custom_object_save(self, save_directory, config=self)
        # Saving under the predefined name allows loading back with `from_pretrained`.
        output_config_file = os.path.join(save_directory, CONFIG_NAME)
        self.to_json_file(output_config_file, use_diff=True)
        logger.info(f"Configuration saved in {output_config_file}")
        if push_to_hub:
            url = self._push_to_hub(repo, commit_message=commit_message)
            logger.info(f"Configuration pushed to the hub in this commit: {url}")
    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> "PretrainedConfig":
        """
        Instantiate a [`PretrainedConfig`] (or a derived class) from a pretrained model configuration.
        Args:
            pretrained_model_name_or_path (`str` or `os.PathLike`):
                A model id on huggingface.co, a directory saved with [`~PretrainedConfig.save_pretrained`],
                or a path/url to a configuration JSON file.
            kwargs (`Dict[str, Any]`, *optional*):
                Download options and/or configuration attribute overrides, forwarded to `get_config_dict`
                and `from_dict`.
        Returns:
            [`PretrainedConfig`]: The configuration object instantiated from this pretrained model.
        """
        config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
        # Warn when the checkpoint's declared model_type does not match this config class.
        if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type:
            logger.warning(
                f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
                f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
            )
        return cls.from_dict(config_dict, **kwargs)
    @classmethod
    def get_config_dict(
        cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs
    ) -> Tuple[Dict[str, Any], Dict[str, Any]]:
        """
        From a `pretrained_model_name_or_path`, resolve to a dictionary of parameters, to be used for instantiating a
        [`PretrainedConfig`] using `from_dict`.
        Returns:
            `Tuple[Dict, Dict]`: The configuration dictionary and the kwargs that were not consumed.
        """
        # `_get_config_dict` consumes kwargs, so keep a pristine copy in case a second resolution is needed.
        original_kwargs = copy.deepcopy(kwargs)
        # Get config dict associated with the base config file
        config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
        # That config file may point us toward another (version-specific) config file to use.
        if "configuration_files" in config_dict:
            configuration_file = get_configuration_file(config_dict["configuration_files"])
            config_dict, kwargs = cls._get_config_dict(
                pretrained_model_name_or_path, _configuration_file=configuration_file, **original_kwargs
            )
        return config_dict, kwargs
@classmethod
def _get_config_dict(
    cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """Locate/download one configuration file and return `(config_dict, unused_kwargs)`.

    Consumes the download-related kwargs (cache_dir, proxies, revision, ...),
    resolves `pretrained_model_name_or_path` to a local file, a directory entry
    or a hub URL, and raises `EnvironmentError` with a targeted message for each
    failure mode (missing repo/revision/file, connection problems, invalid JSON).

    BUG FIX: the `ValueError` message below was missing an `f` prefix on one
    piece, so users literally saw "{configuration_file}" in the error text.
    """
    cache_dir = kwargs.pop("cache_dir", None)
    force_download = kwargs.pop("force_download", False)
    resume_download = kwargs.pop("resume_download", False)
    proxies = kwargs.pop("proxies", None)
    use_auth_token = kwargs.pop("use_auth_token", None)
    local_files_only = kwargs.pop("local_files_only", False)
    revision = kwargs.pop("revision", None)
    from_pipeline = kwargs.pop("_from_pipeline", None)
    from_auto_class = kwargs.pop("_from_auto", False)

    # Telemetry attached to the HTTP request.
    user_agent = {"file_type": "config", "from_auto_class": from_auto_class}
    if from_pipeline is not None:
        user_agent["using_pipeline"] = from_pipeline

    if is_offline_mode() and not local_files_only:
        logger.info("Offline mode: forcing local_files_only=True")
        local_files_only = True

    pretrained_model_name_or_path = str(pretrained_model_name_or_path)
    if os.path.isfile(pretrained_model_name_or_path) or is_remote_url(pretrained_model_name_or_path):
        # Direct path or URL to a config file.
        config_file = pretrained_model_name_or_path
    else:
        configuration_file = kwargs.pop("_configuration_file", CONFIG_NAME)

        if os.path.isdir(pretrained_model_name_or_path):
            config_file = os.path.join(pretrained_model_name_or_path, configuration_file)
        else:
            config_file = hf_bucket_url(
                pretrained_model_name_or_path, filename=configuration_file, revision=revision, mirror=None
            )

    try:
        # Load from URL or cache if already cached.
        resolved_config_file = cached_path(
            config_file,
            cache_dir=cache_dir,
            force_download=force_download,
            proxies=proxies,
            resume_download=resume_download,
            local_files_only=local_files_only,
            use_auth_token=use_auth_token,
            user_agent=user_agent,
        )
    except RepositoryNotFoundError:
        raise EnvironmentError(
            f"{pretrained_model_name_or_path} is not a local folder and is not a valid model identifier listed on "
            "'https://huggingface.co/models'\nIf this is a private repository, make sure to pass a token having "
            "permission to this repo with `use_auth_token` or log in with `huggingface-cli login` and pass "
            "`use_auth_token=True`."
        )
    except RevisionNotFoundError:
        raise EnvironmentError(
            f"{revision} is not a valid git identifier (branch name, tag name or commit id) that exists for this "
            f"model name. Check the model page at 'https://huggingface.co/{pretrained_model_name_or_path}' for "
            "available revisions."
        )
    except EntryNotFoundError:
        # NOTE(review): `configuration_file` is only bound in the non-file/non-URL
        # branch above; these handlers assume hub-style resolution — confirm.
        raise EnvironmentError(
            f"{pretrained_model_name_or_path} does not appear to have a file named {configuration_file}."
        )
    except HTTPError as err:
        raise EnvironmentError(
            f"There was a specific connection error when trying to load {pretrained_model_name_or_path}:\n{err}"
        )
    except ValueError:
        raise EnvironmentError(
            f"We couldn't connect to '{HUGGINGFACE_CO_RESOLVE_ENDPOINT}' to load this model, couldn't find it in the cached "
            f"files and it looks like {pretrained_model_name_or_path} is not the path to a directory containing a "
            f"{configuration_file} file.\nCheckout your internet connection or see how to run the library in "
            "offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'."
        )
    except EnvironmentError:
        raise EnvironmentError(
            f"Can't load config for '{pretrained_model_name_or_path}'. If you were trying to load it from "
            "'https://huggingface.co/models', make sure you don't have a local directory with the same name. "
            f"Otherwise, make sure '{pretrained_model_name_or_path}' is the correct path to a directory "
            f"containing a {configuration_file} file"
        )

    try:
        config_dict = cls._dict_from_json_file(resolved_config_file)
    except (json.JSONDecodeError, UnicodeDecodeError):
        raise EnvironmentError(
            f"It looks like the config file at '{resolved_config_file}' is not a valid JSON file."
        )

    if resolved_config_file == config_file:
        logger.info(f"loading configuration file {config_file}")
    else:
        logger.info(f"loading configuration file {config_file} from cache at {resolved_config_file}")

    return config_dict, kwargs
@classmethod
def from_dict(cls, config_dict: Dict[str, Any], **kwargs) -> "PretrainedConfig":
    """Instantiate a config from a parameter dict, overlaying matching kwargs.

    With `return_unused_kwargs=True` also returns the kwargs that did not name
    a config attribute (so callers can forward them elsewhere).
    """
    return_unused_kwargs = kwargs.pop("return_unused_kwargs", False)

    config = cls(**config_dict)

    if hasattr(config, "pruned_heads"):
        # JSON serializes dict keys as strings; layer indices must be ints.
        config.pruned_heads = dict((int(key), value) for key, value in config.pruned_heads.items())

    # Overlay kwargs that name existing attributes.  `torch_dtype` is applied
    # but deliberately kept in kwargs so model loading can still see it.
    consumed = []
    for key, value in kwargs.items():
        if hasattr(config, key):
            setattr(config, key, value)
            if key != "torch_dtype":
                consumed.append(key)
    for key in consumed:
        kwargs.pop(key, None)

    logger.info(f"Model config {config}")
    if return_unused_kwargs:
        return config, kwargs
    else:
        return config
@classmethod
def from_json_file(cls, json_file: Union[str, os.PathLike]) -> "PretrainedConfig":
    """Build a configuration instance from the parameters stored in a JSON file."""
    return cls(**cls._dict_from_json_file(json_file))
@classmethod
def _dict_from_json_file(cls, json_file: Union[str, os.PathLike]):
    """Read `json_file` as UTF-8 and return its parsed JSON contents."""
    with open(json_file, "r", encoding="utf-8") as reader:
        return json.loads(reader.read())
def __eq__(self, other):
    """Two configs are equal when all their attributes are equal.

    Robustness fix: comparing against a plain value without a ``__dict__``
    (``config == 5``, ``config == None``) used to raise ``AttributeError``;
    returning ``NotImplemented`` lets Python fall back to the other operand
    (and ultimately to ``False``), which is backward compatible for all
    previously-working comparisons.
    """
    if not hasattr(other, "__dict__"):
        return NotImplemented
    return self.__dict__ == other.__dict__
def __repr__(self):
    """Class name followed by the JSON serialization (diff against defaults)."""
    return f"{type(self).__name__} {self.to_json_string()}"
def to_diff_dict(self) -> Dict[str, Any]:
    """Serialize only the attributes that differ from the defaults.

    A key is dropped when it exists in the base `PretrainedConfig` defaults,
    is not `transformers_version`, matches the base default, and does not
    differ from this class's own default.  (Same predicate as before, stated
    via De Morgan.)
    """
    full = self.to_dict()
    base_defaults = PretrainedConfig().to_dict()
    # Composition configs have no meaningful class-level defaults.
    class_defaults = self.__class__().to_dict() if not self.is_composition else {}

    diff = {}
    for key, value in full.items():
        is_default = (
            key in base_defaults
            and key != "transformers_version"
            and value == base_defaults[key]
            and not (key in class_defaults and value != class_defaults[key])
        )
        if not is_default:
            diff[key] = value

    self.dict_torch_dtype_to_str(diff)
    return diff
def to_dict(self) -> Dict[str, Any]:
    """Serialize every attribute of this instance to a plain dict."""
    serialized = copy.deepcopy(self.__dict__)

    # `model_type` lives on the class, not the instance.
    if hasattr(self.__class__, "model_type"):
        serialized["model_type"] = self.__class__.model_type

    # Internal bookkeeping that must not leak into saved files.
    serialized.pop("_auto_class", None)

    serialized["transformers_version"] = __version__

    # torch dtypes are not JSON-serializable; store e.g. "float32".
    self.dict_torch_dtype_to_str(serialized)
    return serialized
def to_json_string(self, use_diff: bool = True) -> str:
    """Serialize this config to a sorted, indented JSON string ending in a newline.

    `use_diff` must be literally True (not merely truthy) to serialize only
    the attributes that differ from the defaults.
    """
    payload = self.to_diff_dict() if use_diff is True else self.to_dict()
    return json.dumps(payload, indent=2, sort_keys=True) + "\n"
def to_json_file(self, json_file_path: Union[str, os.PathLike], use_diff: bool = True):
    """Write this config's JSON serialization to `json_file_path` (UTF-8)."""
    content = self.to_json_string(use_diff=use_diff)
    with open(json_file_path, "w", encoding="utf-8") as fh:
        fh.write(content)
def update(self, config_dict: Dict[str, Any]):
    """Set one attribute on this instance per (key, value) pair in `config_dict`."""
    for attr_name, attr_value in config_dict.items():
        setattr(self, attr_name, attr_value)
def update_from_string(self, update_str: str):
    """Update attributes from a ``"k1=v1,k2=v2"`` string.

    Each value is coerced to the type of the existing attribute
    (bool, int, float or str).  Raises ``ValueError`` for unknown keys,
    un-parseable booleans, or attributes of any other type.

    BUG FIX: values may now themselves contain '=' (e.g. "pattern=a=b");
    the previous unbounded split crashed on such inputs.
    """
    d = dict(kv.split("=", 1) for kv in update_str.split(","))
    for k, v in d.items():
        if not hasattr(self, k):
            raise ValueError(f"key {k} isn't in the original config dict")

        old_v = getattr(self, k)
        # bool check must come first: bool is a subclass of int.
        if isinstance(old_v, bool):
            if v.lower() in ["true", "1", "y", "yes"]:
                v = True
            elif v.lower() in ["false", "0", "n", "no"]:
                v = False
            else:
                raise ValueError(f"can't derive true or false from {v} (key {k})")
        elif isinstance(old_v, int):
            v = int(v)
        elif isinstance(old_v, float):
            v = float(v)
        elif not isinstance(old_v, str):
            raise ValueError(
                f"You can only update int, float, bool or string values in the config, got {v} for key {k}"
            )

        setattr(self, k, v)
def dict_torch_dtype_to_str(self, d: Dict[str, Any]) -> None:
    """In place, convert any non-string `torch_dtype` entry to its short name.

    `str(torch.float32)` is "torch.float32"; only "float32" is stored.
    Recurses into nested dict values.
    """
    dtype = d.get("torch_dtype", None)
    if dtype is not None and not isinstance(dtype, str):
        d["torch_dtype"] = str(dtype).split(".")[1]
    for nested in d.values():
        if isinstance(nested, dict):
            self.dict_torch_dtype_to_str(nested)
@classmethod
def register_for_auto_class(cls, auto_class="AutoConfig"):
    """Register this config class with an auto class (default "AutoConfig").

    `auto_class` may be the class itself or its name; it must exist in
    `transformers.models.auto`.  Setting `_auto_class` makes
    `save_pretrained` also serialize the custom config's defining module
    (via `custom_object_save`).
    """
    if not isinstance(auto_class, str):
        auto_class = auto_class.__name__
    # Imported lazily to avoid a circular import at module load time.
    import transformers.models.auto as auto_module
    if not hasattr(auto_module, auto_class):
        raise ValueError(f"{auto_class} is not a valid auto class.")
    cls._auto_class = auto_class
def get_configuration_file(configuration_files: List[str]) -> str:
    """Pick the configuration file matching the installed transformers version.

    Files named per `_re_configuration_file` carry a version; the newest file
    whose version is <= the installed version wins, falling back to the plain
    `CONFIG_NAME` when none qualifies.

    BUG FIX: versions are now sorted numerically via `version.parse`; the old
    plain `sorted()` compared strings lexicographically ("4.10.0" < "4.2.0"),
    which could pick the wrong file or break out of the loop too early.
    """
    configuration_files_map = {}
    for file_name in configuration_files:
        search = _re_configuration_file.search(file_name)
        if search is not None:
            v = search.groups()[0]
            configuration_files_map[v] = file_name
    available_versions = sorted(configuration_files_map.keys(), key=version.parse)

    # Defaults to the plain config file; versioned files override it in order.
    configuration_file = CONFIG_NAME
    transformers_version = version.parse(__version__)
    for v in available_versions:
        if version.parse(v) <= transformers_version:
            configuration_file = configuration_files_map[v]
        else:
            # Ascending order: everything after this is also too new.
            break

    return configuration_file
# Re-bind `push_to_hub` to a fresh function object before formatting its
# docstring — presumably so the `.format()` below does not mutate the
# docstring shared with the mixin that defines it (confirm `copy_func`).
PretrainedConfig.push_to_hub = copy_func(PretrainedConfig.push_to_hub)
PretrainedConfig.push_to_hub.__doc__ = PretrainedConfig.push_to_hub.__doc__.format(
    object="config", object_class="AutoConfig", object_files="configuration file"
)
| true | true |
f7318c2b9fdaa9f536243ae29bdfe008ac0eb2a0 | 22,340 | py | Python | addons/io_scene_gltf2/blender/exp/gltf2_blender_extract.py | inaber0420/glTF-Modo-IO | 57f99aee4e9b6177d25b465b87d731b54a625532 | [
"Apache-2.0"
] | 1,084 | 2018-07-14T07:09:50.000Z | 2022-03-30T16:34:05.000Z | addons/io_scene_gltf2/blender/exp/gltf2_blender_extract.py | inaber0420/glTF-Modo-IO | 57f99aee4e9b6177d25b465b87d731b54a625532 | [
"Apache-2.0"
] | 1,375 | 2018-07-13T22:09:24.000Z | 2022-03-31T00:36:36.000Z | addons/io_scene_gltf2/blender/exp/gltf2_blender_extract.py | inaber0420/glTF-Modo-IO | 57f99aee4e9b6177d25b465b87d731b54a625532 | [
"Apache-2.0"
] | 235 | 2018-07-13T22:04:28.000Z | 2022-03-30T09:15:53.000Z | # Copyright 2018-2021 The glTF-Blender-IO authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from mathutils import Vector
from . import gltf2_blender_export_keys
from ...io.com.gltf2_io_debug import print_console
from io_scene_gltf2.blender.exp import gltf2_blender_gather_skins
def __gather_skin_attributes(attributes, blender_idxs, vert_bones, num_joint_sets):
    """Fill attributes['JOINTS_n'] / ['WEIGHTS_n'] for the given vertex indices.

    One joint set carries 4 influences; verts with fewer influences are padded
    with (joint 0, weight 0.0).  Shared by the triangle, loose-edge and
    loose-point primitive paths (this code was previously triplicated inline).
    """
    joints = [[] for _ in range(num_joint_sets)]
    weights = [[] for _ in range(num_joint_sets)]
    for vi in blender_idxs:
        bones = vert_bones[vi]
        for j in range(0, 4 * num_joint_sets):
            if j < len(bones):
                joint, weight = bones[j]
            else:
                joint, weight = 0, 0.0
            joints[j // 4].append(joint)
            weights[j // 4].append(weight)
    for i, (js, ws) in enumerate(zip(joints, weights)):
        attributes['JOINTS_%d' % i] = js
        attributes['WEIGHTS_%d' % i] = ws


def extract_primitives(glTF, blender_mesh, library, blender_object, blender_vertex_groups, modifiers, export_settings):
    """Extract primitives from a mesh."""
    print_console('INFO', 'Extracting primitive: ' + blender_mesh.name)

    use_normals = export_settings[gltf2_blender_export_keys.NORMALS]
    if use_normals:
        blender_mesh.calc_normals_split()

    use_tangents = False
    if use_normals and export_settings[gltf2_blender_export_keys.TANGENTS]:
        if blender_mesh.uv_layers.active and len(blender_mesh.uv_layers) > 0:
            try:
                blender_mesh.calc_tangents()
                use_tangents = True
            except Exception:
                print_console('WARNING', 'Could not calculate tangents. Please try to triangulate the mesh first.')

    tex_coord_max = 0
    if export_settings[gltf2_blender_export_keys.TEX_COORDS]:
        if blender_mesh.uv_layers.active:
            tex_coord_max = len(blender_mesh.uv_layers)

    color_max = 0
    if export_settings[gltf2_blender_export_keys.COLORS]:
        color_max = len(blender_mesh.vertex_colors)

    armature = None
    skin = None
    if blender_vertex_groups and export_settings[gltf2_blender_export_keys.SKINS]:
        if modifiers is not None:
            modifiers_dict = {m.type: m for m in modifiers}
            if "ARMATURE" in modifiers_dict:
                modifier = modifiers_dict["ARMATURE"]
                armature = modifier.object

        # Skin must be ignored if the object is parented to a bone of the armature
        # (This creates an infinite recursive error)
        # So ignoring skin in that case
        is_child_of_arma = (
            armature and
            blender_object and
            blender_object.parent_type == "BONE" and
            blender_object.parent.name == armature.name
        )
        if is_child_of_arma:
            armature = None

        if armature:
            skin = gltf2_blender_gather_skins.gather_skin(armature, export_settings)
            if not skin:
                armature = None

    use_morph_normals = use_normals and export_settings[gltf2_blender_export_keys.MORPH_NORMAL]
    use_morph_tangents = use_morph_normals and use_tangents and export_settings[gltf2_blender_export_keys.MORPH_TANGENT]

    key_blocks = []
    if blender_mesh.shape_keys and export_settings[gltf2_blender_export_keys.MORPH]:
        key_blocks = [
            key_block
            for key_block in blender_mesh.shape_keys.key_blocks
            if not (key_block == key_block.relative_key or key_block.mute)
        ]

    use_materials = export_settings[gltf2_blender_export_keys.MATERIALS]

    # Fetch vert positions and bone data (joint,weights)
    locs, morph_locs = __get_positions(blender_mesh, key_blocks, armature, blender_object, export_settings)
    if skin:
        vert_bones, num_joint_sets = __get_bone_data(blender_mesh, skin, blender_vertex_groups)

    # In Blender there is both per-vert data, like position, and also per-loop
    # (loop=corner-of-poly) data, like normals or UVs. glTF only has per-vert
    # data, so we need to split Blender verts up into potentially-multiple glTF
    # verts.
    #
    # First, we'll collect a "dot" for every loop: a struct that stores all the
    # attributes at that loop, namely the vertex index (which determines all
    # per-vert data), and all the per-loop data like UVs, etc.
    #
    # Each unique dot will become one unique glTF vert.

    # List all fields the dot struct needs.
    dot_fields = [('vertex_index', np.uint32)]
    if use_normals:
        dot_fields += [('nx', np.float32), ('ny', np.float32), ('nz', np.float32)]
    if use_tangents:
        dot_fields += [('tx', np.float32), ('ty', np.float32), ('tz', np.float32), ('tw', np.float32)]
    for uv_i in range(tex_coord_max):
        dot_fields += [('uv%dx' % uv_i, np.float32), ('uv%dy' % uv_i, np.float32)]
    for col_i in range(color_max):
        dot_fields += [
            ('color%dr' % col_i, np.float32),
            ('color%dg' % col_i, np.float32),
            ('color%db' % col_i, np.float32),
            ('color%da' % col_i, np.float32),
        ]
    if use_morph_normals:
        for morph_i, _ in enumerate(key_blocks):
            dot_fields += [
                ('morph%dnx' % morph_i, np.float32),
                ('morph%dny' % morph_i, np.float32),
                ('morph%dnz' % morph_i, np.float32),
            ]

    dots = np.empty(len(blender_mesh.loops), dtype=np.dtype(dot_fields))

    vidxs = np.empty(len(blender_mesh.loops))
    blender_mesh.loops.foreach_get('vertex_index', vidxs)
    dots['vertex_index'] = vidxs
    del vidxs

    if use_normals:
        kbs = key_blocks if use_morph_normals else []
        normals, morph_normals = __get_normals(
            blender_mesh, kbs, armature, blender_object, export_settings
        )
        dots['nx'] = normals[:, 0]
        dots['ny'] = normals[:, 1]
        dots['nz'] = normals[:, 2]
        del normals
        for morph_i, ns in enumerate(morph_normals):
            dots['morph%dnx' % morph_i] = ns[:, 0]
            dots['morph%dny' % morph_i] = ns[:, 1]
            dots['morph%dnz' % morph_i] = ns[:, 2]
        del morph_normals

    if use_tangents:
        tangents = __get_tangents(blender_mesh, armature, blender_object, export_settings)
        dots['tx'] = tangents[:, 0]
        dots['ty'] = tangents[:, 1]
        dots['tz'] = tangents[:, 2]
        del tangents
        signs = __get_bitangent_signs(blender_mesh, armature, blender_object, export_settings)
        dots['tw'] = signs
        del signs

    for uv_i in range(tex_coord_max):
        uvs = __get_uvs(blender_mesh, uv_i)
        dots['uv%dx' % uv_i] = uvs[:, 0]
        dots['uv%dy' % uv_i] = uvs[:, 1]
        del uvs

    for col_i in range(color_max):
        colors = __get_colors(blender_mesh, col_i)
        dots['color%dr' % col_i] = colors[:, 0]
        dots['color%dg' % col_i] = colors[:, 1]
        dots['color%db' % col_i] = colors[:, 2]
        dots['color%da' % col_i] = colors[:, 3]
        del colors

    # Calculate triangles and sort them into primitives.

    blender_mesh.calc_loop_triangles()
    loop_indices = np.empty(len(blender_mesh.loop_triangles) * 3, dtype=np.uint32)
    blender_mesh.loop_triangles.foreach_get('loops', loop_indices)

    prim_indices = {}  # maps material index to TRIANGLES-style indices into dots

    if use_materials == "NONE":  # Only for None. For placeholder and export, keep primitives
        # Put all vertices into one primitive
        prim_indices[-1] = loop_indices

    else:
        # Bucket by material index.

        tri_material_idxs = np.empty(len(blender_mesh.loop_triangles), dtype=np.uint32)
        blender_mesh.loop_triangles.foreach_get('material_index', tri_material_idxs)
        loop_material_idxs = np.repeat(tri_material_idxs, 3)  # material index for every loop
        unique_material_idxs = np.unique(tri_material_idxs)
        del tri_material_idxs

        for material_idx in unique_material_idxs:
            prim_indices[material_idx] = loop_indices[loop_material_idxs == material_idx]

    # Create all the primitives.

    primitives = []

    for material_idx, dot_indices in prim_indices.items():
        # Extract just dots used by this primitive, deduplicate them, and
        # calculate indices into this deduplicated list.
        prim_dots = dots[dot_indices]
        prim_dots, indices = np.unique(prim_dots, return_inverse=True)

        if len(prim_dots) == 0:
            continue

        # Now just move all the data for prim_dots into attribute arrays

        attributes = {}

        blender_idxs = prim_dots['vertex_index']

        attributes['POSITION'] = locs[blender_idxs]

        for morph_i, vs in enumerate(morph_locs):
            attributes['MORPH_POSITION_%d' % morph_i] = vs[blender_idxs]

        if use_normals:
            normals = np.empty((len(prim_dots), 3), dtype=np.float32)
            normals[:, 0] = prim_dots['nx']
            normals[:, 1] = prim_dots['ny']
            normals[:, 2] = prim_dots['nz']
            attributes['NORMAL'] = normals

        if use_tangents:
            tangents = np.empty((len(prim_dots), 4), dtype=np.float32)
            tangents[:, 0] = prim_dots['tx']
            tangents[:, 1] = prim_dots['ty']
            tangents[:, 2] = prim_dots['tz']
            tangents[:, 3] = prim_dots['tw']
            attributes['TANGENT'] = tangents

        if use_morph_normals:
            for morph_i, _ in enumerate(key_blocks):
                ns = np.empty((len(prim_dots), 3), dtype=np.float32)
                ns[:, 0] = prim_dots['morph%dnx' % morph_i]
                ns[:, 1] = prim_dots['morph%dny' % morph_i]
                ns[:, 2] = prim_dots['morph%dnz' % morph_i]
                attributes['MORPH_NORMAL_%d' % morph_i] = ns

                if use_morph_tangents:
                    attributes['MORPH_TANGENT_%d' % morph_i] = __calc_morph_tangents(normals, ns, tangents)

        for tex_coord_i in range(tex_coord_max):
            uvs = np.empty((len(prim_dots), 2), dtype=np.float32)
            uvs[:, 0] = prim_dots['uv%dx' % tex_coord_i]
            uvs[:, 1] = prim_dots['uv%dy' % tex_coord_i]
            attributes['TEXCOORD_%d' % tex_coord_i] = uvs

        for color_i in range(color_max):
            colors = np.empty((len(prim_dots), 4), dtype=np.float32)
            colors[:, 0] = prim_dots['color%dr' % color_i]
            colors[:, 1] = prim_dots['color%dg' % color_i]
            colors[:, 2] = prim_dots['color%db' % color_i]
            colors[:, 3] = prim_dots['color%da' % color_i]
            attributes['COLOR_%d' % color_i] = colors

        if skin:
            __gather_skin_attributes(attributes, blender_idxs, vert_bones, num_joint_sets)

        primitives.append({
            'attributes': attributes,
            'indices': indices,
            'material': material_idx,
        })

    if export_settings['gltf_loose_edges']:
        # Find loose edges
        loose_edges = [e for e in blender_mesh.edges if e.is_loose]
        blender_idxs = [vi for e in loose_edges for vi in e.vertices]

        if blender_idxs:
            # Export one glTF vert per unique Blender vert in a loose edge
            blender_idxs = np.array(blender_idxs, dtype=np.uint32)
            blender_idxs, indices = np.unique(blender_idxs, return_inverse=True)

            attributes = {}

            attributes['POSITION'] = locs[blender_idxs]

            for morph_i, vs in enumerate(morph_locs):
                attributes['MORPH_POSITION_%d' % morph_i] = vs[blender_idxs]

            if skin:
                __gather_skin_attributes(attributes, blender_idxs, vert_bones, num_joint_sets)

            primitives.append({
                'attributes': attributes,
                'indices': indices,
                'mode': 1,  # LINES
                'material': 0,
            })

    if export_settings['gltf_loose_points']:
        # Find loose points
        verts_in_edge = set(vi for e in blender_mesh.edges for vi in e.vertices)
        blender_idxs = [
            vi for vi, _ in enumerate(blender_mesh.vertices)
            if vi not in verts_in_edge
        ]

        if blender_idxs:
            blender_idxs = np.array(blender_idxs, dtype=np.uint32)

            attributes = {}

            attributes['POSITION'] = locs[blender_idxs]

            for morph_i, vs in enumerate(morph_locs):
                attributes['MORPH_POSITION_%d' % morph_i] = vs[blender_idxs]

            if skin:
                __gather_skin_attributes(attributes, blender_idxs, vert_bones, num_joint_sets)

            primitives.append({
                'attributes': attributes,
                'mode': 0,  # POINTS
                'material': 0,
            })

    print_console('INFO', 'Primitives created: %d' % len(primitives))

    return primitives
def __get_positions(blender_mesh, key_blocks, armature, blender_object, export_settings):
    """Get per-vertex positions and per-shape-key morph position deltas.

    Returns (locs, morph_locs): locs is (num_verts, 3); each morph_locs entry
    is the delta against locs (glTF stores deltas in morph targets).
    """
    locs = np.empty(len(blender_mesh.vertices) * 3, dtype=np.float32)
    # With shape keys, the basis (relative) key supplies the base positions.
    source = key_blocks[0].relative_key.data if key_blocks else blender_mesh.vertices
    source.foreach_get('co', locs)
    locs = locs.reshape(len(blender_mesh.vertices), 3)

    morph_locs = []
    for key_block in key_blocks:
        vs = np.empty(len(blender_mesh.vertices) * 3, dtype=np.float32)
        key_block.data.foreach_get('co', vs)
        vs = vs.reshape(len(blender_mesh.vertices), 3)
        morph_locs.append(vs)

    # Transform for skinning
    if armature and blender_object:
        # BUG FIX (dead code removed): the old code first computed
        # armature.matrix_world @ (armature.matrix_world.inverted_safe() @
        # blender_object.matrix_world) and then immediately overwrote it.
        # That product simplifies to blender_object.matrix_world anyway,
        # which is the value that was actually used.
        loc_transform = blender_object.matrix_world
        locs[:] = __apply_mat_to_all(loc_transform, locs)
        for vs in morph_locs:
            vs[:] = __apply_mat_to_all(loc_transform, vs)

    # glTF stores deltas in morph targets
    for vs in morph_locs:
        vs -= locs

    if export_settings[gltf2_blender_export_keys.YUP]:
        __zup2yup(locs)
        for vs in morph_locs:
            __zup2yup(vs)

    return locs, morph_locs
def __get_normals(blender_mesh, key_blocks, armature, blender_object, export_settings):
    """Get normal for each loop."""
    if key_blocks:
        # With shape keys, take split normals from the basis (relative) key so
        # the morph normals below are deltas against the same base.
        normals = key_blocks[0].relative_key.normals_split_get()
        normals = np.array(normals, dtype=np.float32)
    else:
        normals = np.empty(len(blender_mesh.loops) * 3, dtype=np.float32)
        blender_mesh.calc_normals_split()
        blender_mesh.loops.foreach_get('normal', normals)
    normals = normals.reshape(len(blender_mesh.loops), 3)

    morph_normals = []
    for key_block in key_blocks:
        ns = np.array(key_block.normals_split_get(), dtype=np.float32)
        ns = ns.reshape(len(blender_mesh.loops), 3)
        morph_normals.append(ns)

    # Transform for skinning
    if armature and blender_object:
        # Normals transform with the inverse-transpose of the linear part of
        # the object->armature matrix, then into armature (world) orientation.
        apply_matrix = (armature.matrix_world.inverted_safe() @ blender_object.matrix_world)
        apply_matrix = apply_matrix.to_3x3().inverted_safe().transposed()
        normal_transform = armature.matrix_world.to_3x3() @ apply_matrix

        # In-place update so the arrays referenced by `morph_normals` stay valid.
        normals[:] = __apply_mat_to_all(normal_transform, normals)
        __normalize_vecs(normals)
        for ns in morph_normals:
            ns[:] = __apply_mat_to_all(normal_transform, ns)
            __normalize_vecs(ns)

    for ns in [normals, *morph_normals]:
        # Replace zero normals with the unit UP vector.
        # Seems to happen sometimes with degenerate tris?
        is_zero = ~ns.any(axis=1)
        ns[is_zero, 2] = 1

    # glTF stores deltas in morph targets
    for ns in morph_normals:
        ns -= normals

    if export_settings[gltf2_blender_export_keys.YUP]:
        __zup2yup(normals)
        for ns in morph_normals:
            __zup2yup(ns)

    return normals, morph_normals
def __get_tangents(blender_mesh, armature, blender_object, export_settings):
    """Get an array of the tangent for each loop."""
    loop_count = len(blender_mesh.loops)
    tangents = np.empty(loop_count * 3, dtype=np.float32)
    blender_mesh.loops.foreach_get('tangent', tangents)
    tangents = tangents.reshape(loop_count, 3)

    # Skinned meshes are baked into armature space: rotate the tangents along,
    # using only the rotation part of the object->armature matrix.
    if armature and blender_object:
        apply_matrix = armature.matrix_world.inverted_safe() @ blender_object.matrix_world
        tangents = __apply_mat_to_all(apply_matrix.to_quaternion().to_matrix(), tangents)
        __normalize_vecs(tangents)

    if export_settings[gltf2_blender_export_keys.YUP]:
        __zup2yup(tangents)

    return tangents
def __get_bitangent_signs(blender_mesh, armature, blender_object, export_settings):
    """Get the per-loop bitangent sign (+1/-1), i.e. the glTF tangent w component."""
    signs = np.empty(len(blender_mesh.loops), dtype=np.float32)
    blender_mesh.loops.foreach_get('bitangent_sign', signs)

    if armature and blender_object:
        # Bitangent signs should flip when handedness changes
        # TODO: confirm
        apply_matrix = armature.matrix_world.inverted_safe() @ blender_object.matrix_world
        handedness_flipped = apply_matrix.to_quaternion().to_matrix().determinant() < 0
        if handedness_flipped:
            signs *= -1

    # No change for Zup -> Yup
    return signs
def __calc_morph_tangents(normals, morph_normal_deltas, tangents):
    """Rotate each base tangent by the base->morphed normal rotation; return deltas."""
    # TODO: check if this works
    count = len(normals)
    morph_tangent_deltas = np.empty((count, 3), dtype=np.float32)
    for i in range(count):
        base_normal = Vector(normals[i])
        morphed_normal = base_normal + Vector(morph_normal_deltas[i])  # convert back to non-delta
        base_tangent = Vector(tangents[i, :3])
        rotated_tangent = Vector(base_tangent)
        rotated_tangent.rotate(morphed_normal.rotation_difference(base_normal))
        morph_tangent_deltas[i] = rotated_tangent - base_tangent  # back to delta
    return morph_tangent_deltas
def __get_uvs(blender_mesh, uv_i):
    """Get a (num_loops, 2) float32 UV array for layer `uv_i`, in glTF UV space."""
    layer = blender_mesh.uv_layers[uv_i]
    num_loops = len(blender_mesh.loops)
    uvs = np.empty(num_loops * 2, dtype=np.float32)
    layer.data.foreach_get('uv', uvs)
    uvs = uvs.reshape(num_loops, 2)

    # Blender UV space -> glTF UV space: (u, v) -> (u, 1 - v)
    uvs[:, 1] = 1.0 - uvs[:, 1]

    return uvs
def __get_colors(blender_mesh, color_i):
    """Get a (num_loops, 4) float32 RGBA array for vertex-color layer `color_i`,
    with RGB converted from Blender's sRGB encoding to linear (as glTF expects).
    Alpha is passed through unchanged.
    """
    layer = blender_mesh.vertex_colors[color_i]
    colors = np.empty(len(blender_mesh.loops) * 4, dtype=np.float32)
    layer.data.foreach_get('color', colors)
    colors = colors.reshape(len(blender_mesh.loops), 4)

    # sRGB -> Linear, applied in place on the RGB view (alpha column excluded).
    rgb = colors[:, :-1]
    not_small = rgb >= 0.04045
    # Linear segment of the sRGB EOTF; negative inputs clamp to 0.
    small_result = np.where(rgb < 0.0, 0.0, rgb * (1.0 / 12.92))
    # Power segment; `where=` skips the small entries (their slots hold
    # uninitialized values, but the np.where below discards them).
    large_result = np.power((rgb + 0.055) * (1.0 / 1.055), 2.4, where=not_small)
    rgb[:] = np.where(not_small, large_result, small_result)

    return colors
def __get_bone_data(blender_mesh, skin, blender_vertex_groups):
    """Collect skinning data per vertex.

    Returns (vert_bones, num_joint_sets): vert_bones holds a weight-descending
    list of (joint_index, weight) pairs per vertex; num_joint_sets is the
    number of 4-influence JOINTS_n/WEIGHTS_n sets needed.
    """
    joint_name_to_index = {joint.name: index for index, joint in enumerate(skin.joints)}
    # Vertex-group index -> glTF joint index (None when the group is not a bone).
    group_to_joint = [joint_name_to_index.get(g.name) for g in blender_vertex_groups]

    vert_bones = []
    max_num_influences = 0

    for vertex in blender_mesh.vertices:
        bones = []
        for group_element in vertex.groups:
            weight = group_element.weight
            if weight <= 0.0:
                continue
            try:
                joint = group_to_joint[group_element.group]
            except Exception:
                continue
            if joint is None:
                continue
            bones.append((joint, weight))
        bones.sort(key=lambda pair: pair[1], reverse=True)
        if not bones:
            bones = ((0, 1.0),)  # HACK for verts with zero weight (#308)
        vert_bones.append(bones)
        max_num_influences = max(max_num_influences, len(bones))

    # How many joint sets do we need? 1 set = 4 influences
    num_joint_sets = (max_num_influences + 3) // 4

    return vert_bones, num_joint_sets
def __zup2yup(array):
    """In-place convert Blender Z-up to glTF Y-up: (x, y, z) -> (x, z, -y)."""
    array[:, [1, 2]] = array[:, [2, 1]]  # swap the y and z columns
    array[:, 2] *= -1  # negate what used to be y
def __apply_mat_to_all(matrix, vectors):
    """Given matrix m and vectors [v1,v2,...], computes [m@v1,m@v2,...]"""
    is_affine = len(matrix) == 4
    # Linear part (upper-left 3x3 when the matrix is 4x4).
    linear = matrix.to_3x3() if is_affine else matrix
    out = vectors @ np.array(linear.transposed())
    # Translation part only exists for 4x4 matrices.
    if is_affine:
        out = out + np.array(matrix.translation)
    return out
def __normalize_vecs(vectors):
    """Normalize each row of `vectors` in place; zero-length rows are left as-is."""
    lengths = np.linalg.norm(vectors, axis=1, keepdims=True)
    np.divide(vectors, lengths, out=vectors, where=lengths != 0)
| 37.35786 | 120 | 0.616025 |
import numpy as np
from mathutils import Vector
from . import gltf2_blender_export_keys
from ...io.com.gltf2_io_debug import print_console
from io_scene_gltf2.blender.exp import gltf2_blender_gather_skins
def extract_primitives(glTF, blender_mesh, library, blender_object, blender_vertex_groups, modifiers, export_settings):
    """Extract glTF primitives (attribute arrays + indices, bucketed per
    material) from a Blender mesh.

    Returns a list of dicts with keys 'attributes', 'indices' (triangles),
    optional 'mode' (1 = LINES for loose edges, 0 = POINTS for loose points)
    and 'material'. Which attributes are emitted is driven by the flags in
    `export_settings`.
    """
    print_console('INFO', 'Extracting primitive: ' + blender_mesh.name)
    use_normals = export_settings[gltf2_blender_export_keys.NORMALS]
    if use_normals:
        blender_mesh.calc_normals_split()
    use_tangents = False
    if use_normals and export_settings[gltf2_blender_export_keys.TANGENTS]:
        if blender_mesh.uv_layers.active and len(blender_mesh.uv_layers) > 0:
            try:
                blender_mesh.calc_tangents()
                use_tangents = True
            except Exception:
                print_console('WARNING', 'Could not calculate tangents. Please try to triangulate the mesh first.')
    tex_coord_max = 0
    if export_settings[gltf2_blender_export_keys.TEX_COORDS]:
        if blender_mesh.uv_layers.active:
            tex_coord_max = len(blender_mesh.uv_layers)
    color_max = 0
    if export_settings[gltf2_blender_export_keys.COLORS]:
        color_max = len(blender_mesh.vertex_colors)
    armature = None
    skin = None
    if blender_vertex_groups and export_settings[gltf2_blender_export_keys.SKINS]:
        if modifiers is not None:
            modifiers_dict = {m.type: m for m in modifiers}
            if "ARMATURE" in modifiers_dict:
                modifier = modifiers_dict["ARMATURE"]
                armature = modifier.object
        # Skinning is dropped when the object is parented to a bone of its own
        # armature (that combination would recurse).
        is_child_of_arma = (
            armature and
            blender_object and
            blender_object.parent_type == "BONE" and
            blender_object.parent.name == armature.name
        )
        if is_child_of_arma:
            armature = None
        if armature:
            skin = gltf2_blender_gather_skins.gather_skin(armature, export_settings)
            if not skin:
                armature = None
    use_morph_normals = use_normals and export_settings[gltf2_blender_export_keys.MORPH_NORMAL]
    use_morph_tangents = use_morph_normals and use_tangents and export_settings[gltf2_blender_export_keys.MORPH_TANGENT]
    key_blocks = []
    if blender_mesh.shape_keys and export_settings[gltf2_blender_export_keys.MORPH]:
        key_blocks = [
            key_block
            for key_block in blender_mesh.shape_keys.key_blocks
            if not (key_block == key_block.relative_key or key_block.mute)
        ]
    use_materials = export_settings[gltf2_blender_export_keys.MATERIALS]
    locs, morph_locs = __get_positions(blender_mesh, key_blocks, armature, blender_object, export_settings)
    if skin:
        vert_bones, num_joint_sets = __get_bone_data(blender_mesh, skin, blender_vertex_groups)
    # Collect a "dot" for every loop (loop = corner of a polygon): a struct
    # that stores all the attributes at that loop, namely the vertex index
    # (which determines all per-vert data), and all the per-loop data like
    # UVs, etc.
    #
    # Each unique dot will become one unique glTF vert.
    # List all fields the dot struct needs.
    dot_fields = [('vertex_index', np.uint32)]
    if use_normals:
        dot_fields += [('nx', np.float32), ('ny', np.float32), ('nz', np.float32)]
    if use_tangents:
        dot_fields += [('tx', np.float32), ('ty', np.float32), ('tz', np.float32), ('tw', np.float32)]
    for uv_i in range(tex_coord_max):
        dot_fields += [('uv%dx' % uv_i, np.float32), ('uv%dy' % uv_i, np.float32)]
    for col_i in range(color_max):
        dot_fields += [
            ('color%dr' % col_i, np.float32),
            ('color%dg' % col_i, np.float32),
            ('color%db' % col_i, np.float32),
            ('color%da' % col_i, np.float32),
        ]
    if use_morph_normals:
        for morph_i, _ in enumerate(key_blocks):
            dot_fields += [
                ('morph%dnx' % morph_i, np.float32),
                ('morph%dny' % morph_i, np.float32),
                ('morph%dnz' % morph_i, np.float32),
            ]
    dots = np.empty(len(blender_mesh.loops), dtype=np.dtype(dot_fields))
    vidxs = np.empty(len(blender_mesh.loops))
    blender_mesh.loops.foreach_get('vertex_index', vidxs)
    dots['vertex_index'] = vidxs
    del vidxs
    if use_normals:
        kbs = key_blocks if use_morph_normals else []
        normals, morph_normals = __get_normals(
            blender_mesh, kbs, armature, blender_object, export_settings
        )
        dots['nx'] = normals[:, 0]
        dots['ny'] = normals[:, 1]
        dots['nz'] = normals[:, 2]
        del normals
        for morph_i, ns in enumerate(morph_normals):
            dots['morph%dnx' % morph_i] = ns[:, 0]
            dots['morph%dny' % morph_i] = ns[:, 1]
            dots['morph%dnz' % morph_i] = ns[:, 2]
        del morph_normals
    if use_tangents:
        tangents = __get_tangents(blender_mesh, armature, blender_object, export_settings)
        dots['tx'] = tangents[:, 0]
        dots['ty'] = tangents[:, 1]
        dots['tz'] = tangents[:, 2]
        del tangents
        signs = __get_bitangent_signs(blender_mesh, armature, blender_object, export_settings)
        dots['tw'] = signs
        del signs
    for uv_i in range(tex_coord_max):
        uvs = __get_uvs(blender_mesh, uv_i)
        dots['uv%dx' % uv_i] = uvs[:, 0]
        dots['uv%dy' % uv_i] = uvs[:, 1]
        del uvs
    for col_i in range(color_max):
        colors = __get_colors(blender_mesh, col_i)
        dots['color%dr' % col_i] = colors[:, 0]
        dots['color%dg' % col_i] = colors[:, 1]
        dots['color%db' % col_i] = colors[:, 2]
        dots['color%da' % col_i] = colors[:, 3]
        del colors
    # Calculate triangles and sort them into primitives.
    blender_mesh.calc_loop_triangles()
    loop_indices = np.empty(len(blender_mesh.loop_triangles) * 3, dtype=np.uint32)
    blender_mesh.loop_triangles.foreach_get('loops', loop_indices)
    prim_indices = {} # maps material index to TRIANGLES-style indices into dots
    if use_materials == "NONE": # Only for None. For placeholder and export, keep primitives
        # Put all vertices into one primitive
        prim_indices[-1] = loop_indices
    else:
        # Bucket by material index.
        tri_material_idxs = np.empty(len(blender_mesh.loop_triangles), dtype=np.uint32)
        blender_mesh.loop_triangles.foreach_get('material_index', tri_material_idxs)
        loop_material_idxs = np.repeat(tri_material_idxs, 3) # material index for every loop
        unique_material_idxs = np.unique(tri_material_idxs)
        del tri_material_idxs
        for material_idx in unique_material_idxs:
            prim_indices[material_idx] = loop_indices[loop_material_idxs == material_idx]
    # Create all the primitives.
    primitives = []
    for material_idx, dot_indices in prim_indices.items():
        # Extract just dots used by this primitive, deduplicate them, and
        # calculate indices into this deduplicated list.
        prim_dots = dots[dot_indices]
        prim_dots, indices = np.unique(prim_dots, return_inverse=True)
        if len(prim_dots) == 0:
            continue
        # Now just move all the data for prim_dots into attribute arrays
        attributes = {}
        blender_idxs = prim_dots['vertex_index']
        attributes['POSITION'] = locs[blender_idxs]
        for morph_i, vs in enumerate(morph_locs):
            attributes['MORPH_POSITION_%d' % morph_i] = vs[blender_idxs]
        if use_normals:
            normals = np.empty((len(prim_dots), 3), dtype=np.float32)
            normals[:, 0] = prim_dots['nx']
            normals[:, 1] = prim_dots['ny']
            normals[:, 2] = prim_dots['nz']
            attributes['NORMAL'] = normals
        if use_tangents:
            tangents = np.empty((len(prim_dots), 4), dtype=np.float32)
            tangents[:, 0] = prim_dots['tx']
            tangents[:, 1] = prim_dots['ty']
            tangents[:, 2] = prim_dots['tz']
            tangents[:, 3] = prim_dots['tw']
            attributes['TANGENT'] = tangents
        if use_morph_normals:
            for morph_i, _ in enumerate(key_blocks):
                ns = np.empty((len(prim_dots), 3), dtype=np.float32)
                ns[:, 0] = prim_dots['morph%dnx' % morph_i]
                ns[:, 1] = prim_dots['morph%dny' % morph_i]
                ns[:, 2] = prim_dots['morph%dnz' % morph_i]
                attributes['MORPH_NORMAL_%d' % morph_i] = ns
                if use_morph_tangents:
                    attributes['MORPH_TANGENT_%d' % morph_i] = __calc_morph_tangents(normals, ns, tangents)
        for tex_coord_i in range(tex_coord_max):
            uvs = np.empty((len(prim_dots), 2), dtype=np.float32)
            uvs[:, 0] = prim_dots['uv%dx' % tex_coord_i]
            uvs[:, 1] = prim_dots['uv%dy' % tex_coord_i]
            attributes['TEXCOORD_%d' % tex_coord_i] = uvs
        for color_i in range(color_max):
            colors = np.empty((len(prim_dots), 4), dtype=np.float32)
            colors[:, 0] = prim_dots['color%dr' % color_i]
            colors[:, 1] = prim_dots['color%dg' % color_i]
            colors[:, 2] = prim_dots['color%db' % color_i]
            colors[:, 3] = prim_dots['color%da' % color_i]
            attributes['COLOR_%d' % color_i] = colors
        if skin:
            # Spread each vertex's influences over num_joint_sets vec4 pairs,
            # zero-padding when a vertex has fewer than 4*num_joint_sets bones.
            joints = [[] for _ in range(num_joint_sets)]
            weights = [[] for _ in range(num_joint_sets)]
            for vi in blender_idxs:
                bones = vert_bones[vi]
                for j in range(0, 4 * num_joint_sets):
                    if j < len(bones):
                        joint, weight = bones[j]
                    else:
                        joint, weight = 0, 0.0
                    joints[j//4].append(joint)
                    weights[j//4].append(weight)
            for i, (js, ws) in enumerate(zip(joints, weights)):
                attributes['JOINTS_%d' % i] = js
                attributes['WEIGHTS_%d' % i] = ws
        primitives.append({
            'attributes': attributes,
            'indices': indices,
            'material': material_idx,
        })
    if export_settings['gltf_loose_edges']:
        # Find loose edges
        loose_edges = [e for e in blender_mesh.edges if e.is_loose]
        blender_idxs = [vi for e in loose_edges for vi in e.vertices]
        if blender_idxs:
            # Export one glTF vert per unique Blender vert in a loose edge
            blender_idxs = np.array(blender_idxs, dtype=np.uint32)
            blender_idxs, indices = np.unique(blender_idxs, return_inverse=True)
            attributes = {}
            attributes['POSITION'] = locs[blender_idxs]
            for morph_i, vs in enumerate(morph_locs):
                attributes['MORPH_POSITION_%d' % morph_i] = vs[blender_idxs]
            if skin:
                joints = [[] for _ in range(num_joint_sets)]
                weights = [[] for _ in range(num_joint_sets)]
                for vi in blender_idxs:
                    bones = vert_bones[vi]
                    for j in range(0, 4 * num_joint_sets):
                        if j < len(bones):
                            joint, weight = bones[j]
                        else:
                            joint, weight = 0, 0.0
                        joints[j//4].append(joint)
                        weights[j//4].append(weight)
                for i, (js, ws) in enumerate(zip(joints, weights)):
                    attributes['JOINTS_%d' % i] = js
                    attributes['WEIGHTS_%d' % i] = ws
            primitives.append({
                'attributes': attributes,
                'indices': indices,
                'mode': 1, # LINES
                'material': 0,
            })
    if export_settings['gltf_loose_points']:
        # Find loose points
        verts_in_edge = set(vi for e in blender_mesh.edges for vi in e.vertices)
        blender_idxs = [
            vi for vi, _ in enumerate(blender_mesh.vertices)
            if vi not in verts_in_edge
        ]
        if blender_idxs:
            blender_idxs = np.array(blender_idxs, dtype=np.uint32)
            attributes = {}
            attributes['POSITION'] = locs[blender_idxs]
            for morph_i, vs in enumerate(morph_locs):
                attributes['MORPH_POSITION_%d' % morph_i] = vs[blender_idxs]
            if skin:
                joints = [[] for _ in range(num_joint_sets)]
                weights = [[] for _ in range(num_joint_sets)]
                for vi in blender_idxs:
                    bones = vert_bones[vi]
                    for j in range(0, 4 * num_joint_sets):
                        if j < len(bones):
                            joint, weight = bones[j]
                        else:
                            joint, weight = 0, 0.0
                        joints[j//4].append(joint)
                        weights[j//4].append(weight)
                for i, (js, ws) in enumerate(zip(joints, weights)):
                    attributes['JOINTS_%d' % i] = js
                    attributes['WEIGHTS_%d' % i] = ws
            primitives.append({
                'attributes': attributes,
                'mode': 0, # POINTS
                'material': 0,
            })
    print_console('INFO', 'Primitives created: %d' % len(primitives))
    return primitives
def __get_positions(blender_mesh, key_blocks, armature, blender_object, export_settings):
    """Return (locs, morph_locs): per-vertex positions and, for each shape
    key, the per-vertex position *deltas* relative to locs (glTF stores
    morph targets as deltas). Coordinates are converted to Y-up when the
    YUP export setting is on.
    """
    locs = np.empty(len(blender_mesh.vertices) * 3, dtype=np.float32)
    # Base positions come from the relative key when shape keys exist.
    source = key_blocks[0].relative_key.data if key_blocks else blender_mesh.vertices
    source.foreach_get('co', locs)
    locs = locs.reshape(len(blender_mesh.vertices), 3)
    morph_locs = []
    for key_block in key_blocks:
        vs = np.empty(len(blender_mesh.vertices) * 3, dtype=np.float32)
        key_block.data.foreach_get('co', vs)
        vs = vs.reshape(len(blender_mesh.vertices), 3)
        morph_locs.append(vs)
    # Transform for skinning
    if armature and blender_object:
        apply_matrix = armature.matrix_world.inverted_safe() @ blender_object.matrix_world
        loc_transform = armature.matrix_world @ apply_matrix
        # NOTE(review): the assignment above is immediately overwritten here,
        # making `apply_matrix` dead code in this copy — confirm against the
        # upstream exporter whether one of the two lines should be removed.
        loc_transform = blender_object.matrix_world
        locs[:] = __apply_mat_to_all(loc_transform, locs)
        for vs in morph_locs:
            vs[:] = __apply_mat_to_all(loc_transform, vs)
    # glTF stores deltas in morph targets
    for vs in morph_locs:
        vs -= locs
    if export_settings[gltf2_blender_export_keys.YUP]:
        __zup2yup(locs)
        for vs in morph_locs:
            __zup2yup(vs)
    return locs, morph_locs
def __get_normals(blender_mesh, key_blocks, armature, blender_object, export_settings):
    """Return (normals, morph_normals): per-loop split normals and, for each
    shape key, per-loop normal *deltas* relative to the base normals.
    Zero normals are replaced by the unit +Z (up) vector before the delta
    computation; coordinates are converted to Y-up when requested.
    """
    if key_blocks:
        normals = key_blocks[0].relative_key.normals_split_get()
        normals = np.array(normals, dtype=np.float32)
    else:
        normals = np.empty(len(blender_mesh.loops) * 3, dtype=np.float32)
        blender_mesh.calc_normals_split()
        blender_mesh.loops.foreach_get('normal', normals)
    normals = normals.reshape(len(blender_mesh.loops), 3)
    morph_normals = []
    for key_block in key_blocks:
        ns = np.array(key_block.normals_split_get(), dtype=np.float32)
        ns = ns.reshape(len(blender_mesh.loops), 3)
        morph_normals.append(ns)
    # Transform for skinning: normals use the inverse-transpose of the
    # linear part of the armature-relative transform.
    if armature and blender_object:
        apply_matrix = (armature.matrix_world.inverted_safe() @ blender_object.matrix_world)
        apply_matrix = apply_matrix.to_3x3().inverted_safe().transposed()
        normal_transform = armature.matrix_world.to_3x3() @ apply_matrix
        normals[:] = __apply_mat_to_all(normal_transform, normals)
        __normalize_vecs(normals)
        for ns in morph_normals:
            ns[:] = __apply_mat_to_all(normal_transform, ns)
            __normalize_vecs(ns)
    for ns in [normals, *morph_normals]:
        # Replace zero normals with the unit UP vector.
        # Seems to happen sometimes with degenerate tris?
        is_zero = ~ns.any(axis=1)
        ns[is_zero, 2] = 1
    # glTF stores deltas in morph targets
    for ns in morph_normals:
        ns -= normals
    if export_settings[gltf2_blender_export_keys.YUP]:
        __zup2yup(normals)
        for ns in morph_normals:
            __zup2yup(ns)
    return normals, morph_normals
def __get_tangents(blender_mesh, armature, blender_object, export_settings):
    """Return the per-loop tangent vectors (xyz only; the bitangent sign is
    produced separately by __get_bitangent_signs). Converted to Y-up when
    the YUP export setting is on.
    """
    tangents = np.empty(len(blender_mesh.loops) * 3, dtype=np.float32)
    blender_mesh.loops.foreach_get('tangent', tangents)
    tangents = tangents.reshape(len(blender_mesh.loops), 3)
    # Transform for skinning: only the rotation part of the armature-relative
    # transform is applied (scale/shear must not distort tangents).
    if armature and blender_object:
        apply_matrix = armature.matrix_world.inverted_safe() @ blender_object.matrix_world
        tangent_transform = apply_matrix.to_quaternion().to_matrix()
        tangents = __apply_mat_to_all(tangent_transform, tangents)
        __normalize_vecs(tangents)
    if export_settings[gltf2_blender_export_keys.YUP]:
        __zup2yup(tangents)
    return tangents
def __get_bitangent_signs(blender_mesh, armature, blender_object, export_settings):
    """Return the per-loop bitangent signs (the w component of the glTF
    TANGENT attribute), flipped when the skinning transform changes
    handedness.
    """
    signs = np.empty(len(blender_mesh.loops), dtype=np.float32)
    blender_mesh.loops.foreach_get('bitangent_sign', signs)
    # Transform for skinning
    if armature and blender_object:
        # Bitangent signs should flip when handedness changes
        # TODO: confirm
        apply_matrix = armature.matrix_world.inverted_safe() @ blender_object.matrix_world
        tangent_transform = apply_matrix.to_quaternion().to_matrix()
        flipped = tangent_transform.determinant() < 0
        if flipped:
            signs *= -1
    # No change for Zup -> Yup
    return signs
def __calc_morph_tangents(normals, morph_normal_deltas, tangents):
    """Derive morph-target tangent deltas by rotating each base tangent with
    the rotation that carries the base normal onto the morphed normal.

    `tangents` rows are vec4 (xyz + sign); only xyz is used. Returns an
    (N, 3) array of tangent deltas.
    """
    # TODO: check if this works
    morph_tangent_deltas = np.empty((len(normals), 3), dtype=np.float32)
    for i in range(len(normals)):
        n = Vector(normals[i])
        morph_n = n + Vector(morph_normal_deltas[i]) # convert back to non-delta
        t = Vector(tangents[i, :3])
        rotation = morph_n.rotation_difference(n)
        t_morph = Vector(t)
        t_morph.rotate(rotation)
        morph_tangent_deltas[i] = t_morph - t # back to delta
    return morph_tangent_deltas
def __get_uvs(blender_mesh, uv_i):
    """Return the per-loop UV coordinates of UV layer `uv_i`, converted from
    Blender's bottom-left-origin UV space to glTF's top-left origin.
    """
    layer = blender_mesh.uv_layers[uv_i]
    uvs = np.empty(len(blender_mesh.loops) * 2, dtype=np.float32)
    layer.data.foreach_get('uv', uvs)
    uvs = uvs.reshape(len(blender_mesh.loops), 2)
    # Blender UV space -> glTF UV space
    # u,v -> u,1-v
    uvs[:, 1] *= -1
    uvs[:, 1] += 1
    return uvs
def __get_colors(blender_mesh, color_i):
    """Return the per-loop RGBA colors of vertex-color layer `color_i`, with
    the RGB channels decoded from sRGB to linear in place (alpha untouched).
    The constants (0.04045, 12.92, 1.055, 2.4) are the standard sRGB EOTF.
    """
    layer = blender_mesh.vertex_colors[color_i]
    colors = np.empty(len(blender_mesh.loops) * 4, dtype=np.float32)
    layer.data.foreach_get('color', colors)
    colors = colors.reshape(len(blender_mesh.loops), 4)
    # sRGB -> Linear
    rgb = colors[:, :-1]
    not_small = rgb >= 0.04045
    # Linear segment; negative inputs are clamped to 0.
    small_result = np.where(rgb < 0.0, 0.0, rgb * (1.0 / 12.92))
    # Power segment is only evaluated where not_small holds; the other
    # entries of large_result stay uninitialized and are masked out below.
    large_result = np.power((rgb + 0.055) * (1.0 / 1.055), 2.4, where=not_small)
    rgb[:] = np.where(not_small, large_result, small_result)
    return colors
def __get_bone_data(blender_mesh, skin, blender_vertex_groups):
    """Gather (joint, weight) influences per vertex.

    Returns (vert_bones, num_joint_sets): vert_bones[i] holds the vertex's
    influences sorted by descending weight; num_joint_sets is the number of
    four-influence JOINTS/WEIGHTS sets required to hold the heaviest vertex.
    """
    joint_index_by_name = {joint.name: idx for idx, joint in enumerate(skin.joints)}
    # Vertex-group index -> joint index (None when the group is not a joint).
    group_to_joint = [joint_index_by_name.get(group.name) for group in blender_vertex_groups]
    vert_bones = []
    max_num_influences = 0
    for vertex in blender_mesh.vertices:
        influences = []
        for elem in vertex.groups:
            if elem.weight <= 0.0:
                continue
            try:
                joint = group_to_joint[elem.group]
            except Exception:
                continue
            if joint is not None:
                influences.append((joint, elem.weight))
        influences.sort(key=lambda pair: pair[1], reverse=True)
        if not influences:
            influences = ((0, 1.0),) # HACK for verts with zero weight (#308)
        vert_bones.append(influences)
        max_num_influences = max(max_num_influences, len(influences))
    # One joint set carries four influences.
    num_joint_sets = (max_num_influences + 3) // 4
    return vert_bones, num_joint_sets
def __zup2yup(array):
    """Convert +Z-up coordinates to +Y-up in place on an (N, 3) array."""
    # x,y,z -> x,z,-y
    array[:, [1,2]] = array[:, [2,1]] # x,z,y
    array[:, 2] *= -1 # x,z,-y
def __apply_mat_to_all(matrix, vectors):
    """Given matrix m and vectors [v1, v2, ...], compute [m@v1, m@v2, ...]."""
    # Linear part: a 4x4 matrix is reduced to its 3x3 block first; the
    # transpose makes the row-vector product equal to m @ v per row.
    m = matrix.to_3x3() if len(matrix) == 4 else matrix
    res = np.matmul(vectors, np.array(m.transposed()))
    # Translation part (only present for a 4x4 matrix)
    if len(matrix) == 4:
        res += np.array(matrix.translation)
    return res
def __normalize_vecs(vectors):
    """Normalize every row vector in place; zero-norm rows are left as-is."""
    lengths = np.linalg.norm(vectors, axis=1, keepdims=True)
    nonzero = lengths != 0
    np.divide(vectors, lengths, out=vectors, where=nonzero)
| true | true |
f7318c381216a7a0a5e30f0ca35e0f3c326d7ead | 184 | py | Python | beecrowd exercises/beecrowd-1145.py | pachecosamuel/Python-Exercises | de542536dd1a2bc0ad27e81824713cda8ad34054 | [
"MIT"
] | null | null | null | beecrowd exercises/beecrowd-1145.py | pachecosamuel/Python-Exercises | de542536dd1a2bc0ad27e81824713cda8ad34054 | [
"MIT"
] | null | null | null | beecrowd exercises/beecrowd-1145.py | pachecosamuel/Python-Exercises | de542536dd1a2bc0ad27e81824713cda8ad34054 | [
"MIT"
] | null | null | null | n1,n2 = list(map(int,input().split()))
cont = 1
for i in range(1,(int((n2/n1))+1)):
r = ""
for y in range(n1):
r += str(cont) + " "
cont += 1
print(r[:-1])
# Beecrowd 1145: read X (row width) and Y (upper limit), then print the
# numbers 1..Y in rows of X space-separated values.
n1,n2 = list(map(int,input().split()))
cont = 1
# One output line per group of n1 consecutive numbers: int(n2 / n1) groups.
for i in range(1,(int((n2/n1))+1)):
    r = ""
    for y in range(n1):
        r += str(cont) + " "
        cont += 1
    print(r[:-1]) # drop the trailing space
| true | true |
f7318c8d58133c98fa8052706364e5c9c4e4f7bb | 116 | py | Python | ComStream/tf_idf/__init__.py | alimpfard/ComStream | 1bfde3a01ba0b996b4c41e0e9112089618789469 | [
"MIT"
] | null | null | null | ComStream/tf_idf/__init__.py | alimpfard/ComStream | 1bfde3a01ba0b996b4c41e0e9112089618789469 | [
"MIT"
] | null | null | null | ComStream/tf_idf/__init__.py | alimpfard/ComStream | 1bfde3a01ba0b996b4c41e0e9112089618789469 | [
"MIT"
] | null | null | null | from . import Agent
from . import Coordinator
from . import DataManager
from . import DataPoint
from . import Utils
| 19.333333 | 25 | 0.784483 | from . import Agent
from . import Coordinator
from . import DataManager
from . import DataPoint
from . import Utils
| true | true |
f7318c9eae78d9ecd4097a5f77c09245d4f5b3b0 | 14,635 | py | Python | appmetrics/statistics.py | avalente/appmetrics | 366fc7e1ca897e49a2227cbfa43bfa02a47f1acc | [
"Apache-2.0"
] | 60 | 2015-01-19T05:32:32.000Z | 2021-06-08T07:35:02.000Z | appmetrics/statistics.py | alexKeleon/appmetrics | 366fc7e1ca897e49a2227cbfa43bfa02a47f1acc | [
"Apache-2.0"
] | 6 | 2015-03-02T19:25:02.000Z | 2021-03-27T17:26:07.000Z | appmetrics/statistics.py | alexKeleon/appmetrics | 366fc7e1ca897e49a2227cbfa43bfa02a47f1acc | [
"Apache-2.0"
] | 12 | 2015-01-19T05:04:14.000Z | 2020-09-08T07:49:54.000Z | ## Module statistics.py
##
## Copyright (c) 2014 Antonio Valente <y3sman@gmail.com>
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
"""
Statistics module.
The basic functions are stolen from python 3.4 stdlib
"""
from __future__ import division
import collections
import math
import operator
import functools
from fractions import Fraction
from decimal import Decimal
from .exceptions import StatisticsError
from .py3comp import xrange, iteritems
def isfinite(n):
    """Return True if n is neither an infinity nor a NaN, False otherwise.

    Note that 0.0 is considered finite. Backported from Python 3.
    """
    non_finite = math.isinf(n) or math.isnan(n)
    return not non_finite
def sum(data, start=0):
"""sum(data [, start]) -> value
Return a high-precision sum of the given numeric data. If optional
argument ``start`` is given, it is added to the total. If ``data`` is
empty, ``start`` (defaulting to 0) is returned.
"""
n, d = exact_ratio(start)
T = type(start)
partials = {d: n} # map {denominator: sum of numerators}
# Micro-optimizations.
coerce_types_ = coerce_types
exact_ratio_ = exact_ratio
partials_get = partials.get
# Add numerators for each denominator, and track the "current" type.
for x in data:
T = coerce_types_(T, type(x))
n, d = exact_ratio_(x)
partials[d] = partials_get(d, 0) + n
if None in partials:
assert issubclass(T, (float, Decimal))
assert not isfinite(partials[None])
return T(partials[None])
total = Fraction()
for d, n in sorted(partials.items()):
total += Fraction(n, d)
if issubclass(T, int):
assert total.denominator == 1
return T(total.numerator)
if issubclass(T, Decimal):
return T(total.numerator) / total.denominator
return T(total)
def exact_ratio(x):
    """Convert Real number x exactly to a (numerator, denominator) pair.

    x is expected to be an int, Fraction, Decimal or float; infinities
    and NaNs come back as (x, None).
    """
    try:
        try:
            return x.numerator, x.denominator  # int, Fraction
        except AttributeError:
            pass
        try:
            return x.as_integer_ratio()  # float
        except AttributeError:
            pass
        try:
            return decimal_to_ratio(x)  # Decimal
        except AttributeError:
            msg = "can't convert type '{}' to numerator/denominator"
            raise TypeError(msg.format(type(x).__name__))
    except (OverflowError, ValueError):
        # INF or NAN
        return (x, None)
# FIXME This is faster than Fraction.from_decimal, but still too slow.
def decimal_to_ratio(d):
    """Convert Decimal d to an exact integer ratio (numerator, denominator).

    Raises ValueError for infinities and NaNs (their tuple exponent is one
    of the symbolic markers 'F', 'n', 'N').
    """
    sign, digits, exp = d.as_tuple()
    if exp in ('F', 'n', 'N'): # INF, NAN, sNAN
        assert not d.is_finite()
        raise ValueError
    # Fold the digit tuple into a single integer numerator.
    num = functools.reduce(lambda acc, digit: acc * 10 + digit, digits, 0)
    if sign:
        num = -num
    return (num, 10 ** -exp)
def coerce_types(T1, T2):
    """Coerce types T1 and T2 to a common type.

    int is absorbed by any other numeric type; float dominates everything;
    Fraction and Decimal do not mix (TypeError). Subclasses trump their
    parent class, and two subclasses of the same base coerce to the second.
    """
    # Fast paths: identical types, or int absorbed by the other type.
    if T1 is T2 or T2 is int:
        return T1
    if T1 is int:
        return T2
    # A subclass wins over its parent.
    if issubclass(T2, T1):
        return T2
    if issubclass(T1, T2):
        return T1
    # float dominates any remaining combination.
    if issubclass(T2, float):
        return T2
    if issubclass(T1, float):
        return T1
    # Siblings of the same base: the second one wins.
    if T1.__base__ is T2.__base__:
        return T2
    raise TypeError('cannot coerce types %r and %r' % (T1, T2))
def counts(data):
    """Return the sorted (value, frequency) pairs tied for highest frequency.

    Raises TypeError when data is None; returns an empty list for empty data.
    """
    if data is None:
        raise TypeError('None is not iterable')
    table = collections.Counter(data).most_common()
    if not table:
        return table
    # Keep only the leading entries that share the maximal frequency.
    top_freq = table[0][1]
    for idx in range(1, len(table)):
        if table[idx][1] != top_freq:
            return table[:idx]
    return table
# === Measures of central tendency (averages) ===
def mean(data):
    """Return the sample arithmetic mean of data.

    Raises StatisticsError when data is empty.
    """
    if iter(data) is data:
        data = list(data)
    count = len(data)
    if not count:
        raise StatisticsError('mean requires at least one data point')
    return sum(data) / count
# FIXME: investigate ways to calculate medians without sorting? Quickselect?
def median(data):
    """Return the median (middle value) of numeric data.

    For an odd number of points the middle point is returned; for an even
    number the two middle values are averaged. Raises StatisticsError for
    empty data.
    """
    ordered = sorted(data)
    size = len(ordered)
    if size == 0:
        raise StatisticsError("no median for empty data")
    mid = size // 2
    if size % 2:
        return ordered[mid]
    return (ordered[mid - 1] + ordered[mid]) / 2
def median_low(data):
    """Return the low median of numeric data.

    The middle value for an odd count; the smaller of the two middle values
    for an even count. Raises StatisticsError for empty data.
    """
    ordered = sorted(data)
    size = len(ordered)
    if not size:
        raise StatisticsError("no median for empty data")
    mid = size // 2
    return ordered[mid] if size % 2 else ordered[mid - 1]
def median_high(data):
    """Return the high median of data.

    The middle value for an odd count; the larger of the two middle values
    for an even count. Raises StatisticsError for empty data.
    """
    ordered = sorted(data)
    if not ordered:
        raise StatisticsError("no median for empty data")
    return ordered[len(ordered) // 2]
def mode(data):
    """Return the single most common data point.

    Raises StatisticsError when the data is empty, or when several values
    are tied for most common (no unique mode).
    """
    table = counts(data)
    if not table:
        raise StatisticsError('no mode for empty data')
    if len(table) > 1:
        raise StatisticsError(
            'no unique mode; found %d equally common values' % len(table)
        )
    return table[0][0]
# === Measures of spread ===
# See http://mathworld.wolfram.com/Variance.html
# http://mathworld.wolfram.com/SampleVariance.html
# http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
#
# Under no circumstances use the so-called "computational formula for
# variance", as that is only suitable for hand calculations with a small
# amount of low-precision data. It has terrible numeric properties.
#
# See a comparison of three computational methods here:
# http://www.johndcook.com/blog/2008/09/26/comparing-three-methods-of-computing-standard-deviation/
def _ss(data, c=None):
    """Return the sum of square deviations of sequence data around c.

    When c is None the mean of data is used. Passing an arbitrary c gives
    deviations from that value instead — use with care.
    """
    if c is None:
        c = mean(data)
    deviations = [x - c for x in data]
    total = sum(dev * dev for dev in deviations)
    # The deviations should sum to exactly zero mathematically; subtract the
    # square of any rounding residue to compensate.
    total -= sum(deviations) ** 2 / len(data)
    assert not total < 0, 'negative sum of square deviations: %f' % total
    return total
def variance(data, xbar=None):
    """Return the sample variance of data (n - 1 in the denominator).

    data must contain at least two real-valued points. xbar, when given,
    should be the mean of data; it is not validated, so a wrong value leads
    to a wrong result. Supports Decimal and Fraction inputs. Use pvariance
    when the data represent the entire population.
    """
    if iter(data) is data:
        data = list(data)
    count = len(data)
    if count < 2:
        raise StatisticsError('variance requires at least two data points')
    return _ss(data, xbar) / (count - 1)
def pvariance(data, mu=None):
    """Return the population variance of data (n in the denominator).

    data must contain at least one real-valued point. mu, when given,
    should be the mean of data; it is not validated. Supports Decimal and
    Fraction inputs. Use variance when the data is only a sample.
    """
    if iter(data) is data:
        data = list(data)
    count = len(data)
    if count < 1:
        raise StatisticsError('pvariance requires at least one data point')
    return _ss(data, mu) / count
def stdev(data, xbar=None):
    """Return the square root of the sample variance.

    See ``variance`` for arguments and other details. Decimal results use
    Decimal.sqrt(); everything else falls back to math.sqrt.
    """
    var = variance(data, xbar)
    try:
        sqrt = var.sqrt
    except AttributeError:
        return math.sqrt(var)
    return sqrt()
def pstdev(data, mu=None):
    """Return the square root of the population variance.

    See ``pvariance`` for arguments and other details. Decimal results use
    Decimal.sqrt(); everything else falls back to math.sqrt.
    """
    var = pvariance(data, mu)
    try:
        sqrt = var.sqrt
    except AttributeError:
        return math.sqrt(var)
    return sqrt()
def geometric_mean(data):
    """Return the geometric mean of data.

    To tolerate non-positive entries, zeros are replaced by e and negative
    values by 1.0 before the product is taken, and the absolute value of
    the product is used. Raises StatisticsError for empty data.
    """
    if not data:
        raise StatisticsError('geometric_mean requires at least one data point')
    adjusted = []
    for x in data:
        if x > 0:
            adjusted.append(x)
        elif x == 0:
            adjusted.append(math.e)
        else:
            adjusted.append(1.0)
    product = functools.reduce(operator.mul, adjusted)
    return math.pow(math.fabs(product), 1.0 / len(adjusted))
def harmonic_mean(data):
    """Return the harmonic mean of data.

    Zero entries contribute nothing to the reciprocal sum; when every entry
    is zero the function returns 0.0 rather than dividing by zero. Raises
    StatisticsError for empty data.
    """
    if not data:
        raise StatisticsError('harmonic_mean requires at least one data point')
    divisor = sum(1.0 / x if x else 0.0 for x in data)
    if not divisor:
        return 0.0
    return len(data) / divisor
def skewness(data):
    """Return the skewness of the data's distribution.

    Returns 0.0 when the standard deviation is zero (constant data).
    Raises StatisticsError for empty data.
    """
    if not data:
        raise StatisticsError('skewness requires at least one data point')
    size = len(data)
    cubed_sd = stdev(data) ** 3
    if not cubed_sd:
        return 0.0
    mn = mean(data)
    return sum((x - mn) ** 3 / cubed_sd for x in data) / size
def kurtosis(data):
    """Return the excess kurtosis of the data's distribution (the -3 term
    makes a normal distribution score 0).

    Returns 0.0 when the standard deviation is zero. Raises StatisticsError
    for empty data.
    """
    if not data:
        raise StatisticsError('kurtosis requires at least one data point')
    size = len(data)
    quartic_sd = stdev(data) ** 4
    if not quartic_sd:
        return 0.0
    mn = mean(data)
    return sum((x - mn) ** 4 / quartic_sd for x in data) / size - 3
def percentile(data, n):
    """Return the n-th percentile of the given data.

    The data is assumed to be already sorted. Raises StatisticsError when
    the requested percentile falls outside the sample.
    """
    size = len(data)
    # Nearest-rank position: percentile fraction times the size, shifted
    # back by half a point.
    idx = (n / 100.0) * size - 0.5
    if idx < 0 or idx > size:
        raise StatisticsError("Too few data points ({}) for {}th percentile".format(size, n))
    return data[int(idx)]
def get_histogram(data):
    """Return the histogram of the data as a sorted list of
    (bin_upper_bound, count) pairs.

    The data is assumed to be already sorted; at least two points are
    required (StatisticsError otherwise). Bin bounds come from
    get_histogram_bins.
    """
    count = len(data)
    if count < 2:
        raise StatisticsError('Too few data points ({}) for get_histogram'.format(count))
    bins = get_histogram_bins(data[0], data[-1], stdev(data), count)
    histogram = {bin_: 0 for bin_ in bins}
    # Each value lands in the first bin whose upper bound covers it.
    for value in data:
        for bin_ in bins:
            if value <= bin_:
                histogram[bin_] += 1
                break
    return sorted(iteritems(histogram))
def get_histogram_bins(min_, max_, std, count):
    """Return the upper bounds of the optimal histogram bins for the given
    range, standard deviation and sample count."""
    width = _get_bin_width(std, count)
    count = int(round((max_ - min_) / width) + 1)
    if not count:
        return [min_]
    return [i * width + min_ for i in xrange(1, count + 1)]
def _get_bin_width(stdev, count):
    """Return the histogram's optimal bin width based on Sturges' rule,
    with a minimum width of 1.

    http://www.jstor.org/pss/2965501
    """
    width = int(round((3.5 * stdev) / (count ** (1.0 / 3))))
    return width or 1
| 28.362403 | 99 | 0.624804 | partials[d] = partials_get(d, 0) + n
if None in partials:
assert issubclass(T, (float, Decimal))
assert not isfinite(partials[None])
return T(partials[None])
total = Fraction()
for d, n in sorted(partials.items()):
total += Fraction(n, d)
if issubclass(T, int):
assert total.denominator == 1
return T(total.numerator)
if issubclass(T, Decimal):
return T(total.numerator) / total.denominator
return T(total)
def exact_ratio(x):
    """Return the exact (numerator, denominator) ratio of *x*.

    Tries, in order: Fraction-like ``.numerator``/``.denominator``,
    float-like ``.as_integer_ratio()``, then Decimal support via
    ``decimal_to_ratio()``.  Types providing none of these raise
    TypeError.  Non-finite values are reported as ``(x, None)``.
    """
    try:
        try:
            return x.numerator, x.denominator
        except AttributeError:
            try:
                return x.as_integer_ratio()
            except AttributeError:
                try:
                    return decimal_to_ratio(x)
                except AttributeError:
                    msg = "can't convert type '{}' to numerator/denominator"
                    raise TypeError(msg.format(type(x).__name__))
    except (OverflowError, ValueError):
        # INF or NAN
        return (x, None)
# FIXME This is faster than Fraction.from_decimal, but still too slow.
def decimal_to_ratio(d):
    """Return the exact integer ratio (num, den) for a finite Decimal *d*.

    Non-finite values (infinity or any NaN) raise ValueError.
    """
    sign, digits, exponent = d.as_tuple()
    if exponent in ('F', 'n', 'N'):  # INF, NAN, sNAN
        assert not d.is_finite()
        raise ValueError
    numerator = 0
    for digit in digits:
        numerator = numerator * 10 + digit
    if sign:
        numerator = -numerator
    return (numerator, 10 ** -exponent)
def coerce_types(T1, T2):
    """Return the type that values of T1 and T2 should be coerced to.

    ``int`` always defers to the other type; subclasses beat their base
    class; floats beat everything else; siblings of the same base give
    priority to T2.  Incompatible types raise TypeError.
    """
    # Get the common/fast cases out of the way first.
    if T1 is T2:
        return T1
    if T1 is int:
        return T2
    if T2 is int:
        return T1
    # Subclasses trump their parent class.
    if issubclass(T2, T1):
        return T2
    if issubclass(T1, T2):
        return T1
    # Floats trump everything else.
    if issubclass(T2, float):
        return T2
    if issubclass(T1, float):
        return T1
    # Subclasses of the same base class give priority to the second.
    if T1.__base__ is T2.__base__:
        return T2
    # Otherwise, just give up.
    raise TypeError('cannot coerce types %r and %r' % (T1, T2))
def counts(data):
    """Return the (value, frequency) pairs sharing the highest frequency.

    Raises TypeError when *data* is None; an empty iterable yields [].
    """
    if data is None:
        raise TypeError('None is not iterable')
    table = collections.Counter(data).most_common()
    if not table:
        return table
    # most_common() sorts by descending frequency, so the modal entries
    # form a prefix of the table.
    maxfreq = table[0][1]
    return [pair for pair in table if pair[1] == maxfreq]
# === Measures of central tendency (averages) ===
def mean(data):
    """Return the arithmetic mean of *data*.

    One-shot iterators are materialised first so the data can be both
    measured and summed.  Empty input raises StatisticsError.
    """
    values = list(data) if iter(data) is data else data
    if not values:
        raise StatisticsError('mean requires at least one data point')
    return sum(values) / len(values)
# FIXME: investigate ways to calculate medians without sorting? Quickselect?
def median(data):
    """Return the median of *data*: the middle value after sorting,
    or the mean of the two middle values when the count is even.
    """
    ordered = sorted(data)
    size = len(ordered)
    if size == 0:
        raise StatisticsError("no median for empty data")
    mid = size // 2
    if size % 2:
        return ordered[mid]
    return (ordered[mid - 1] + ordered[mid]) / 2
def median_low(data):
    """Return the low median: for an even count, the smaller of the
    two middle values; otherwise the middle value itself.
    """
    ordered = sorted(data)
    size = len(ordered)
    if size == 0:
        raise StatisticsError("no median for empty data")
    if size % 2:
        return ordered[size // 2]
    return ordered[size // 2 - 1]
def median_high(data):
    """Return the high median: for an even count, the larger of the
    two middle values; otherwise the middle value itself.
    """
    ordered = sorted(data)
    if not ordered:
        raise StatisticsError("no median for empty data")
    return ordered[len(ordered) // 2]
def mode(data):
    """Return the single most common value in *data*.

    Raises StatisticsError when the data are empty or when several
    values are tied for most common.
    """
    # Generate a table of sorted (value, frequency) pairs.
    table = counts(data)
    if len(table) == 1:
        return table[0][0]
    if table:
        raise StatisticsError(
            'no unique mode; found %d equally common values' % len(table)
        )
    raise StatisticsError('no mode for empty data')
# === Measures of spread ===
# See http://mathworld.wolfram.com/Variance.html
# http://mathworld.wolfram.com/SampleVariance.html
# http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance
#
# Under no circumstances use the so-called "computational formula for
# variance", as that is only suitable for hand calculations with a small
# amount of low-precision data. It has terrible numeric properties.
#
# See a comparison of three computational methods here:
# http://www.johndcook.com/blog/2008/09/26/comparing-three-methods-of-computing-standard-deviation/
def _ss(data, c=None):
if c is None:
c = mean(data)
ss = sum((x - c) ** 2 for x in data)
# The following sum should mathematically equal zero, but due to rounding
# error may not.
ss -= sum((x - c) for x in data) ** 2 / len(data)
assert not ss < 0, 'negative sum of square deviations: %f' % ss
return ss
def variance(data, xbar=None):
    """Return the sample variance of *data* (n - 1 denominator).

    *xbar* may supply a precomputed mean.  One-shot iterators are
    materialised first; fewer than two points raise StatisticsError.
    """
    if iter(data) is data:
        data = list(data)
    n = len(data)
    if n < 2:
        raise StatisticsError('variance requires at least two data points')
    return _ss(data, xbar) / (n - 1)
def pvariance(data, mu=None):
    """Return the population variance of *data* (n denominator).

    *mu* may supply a precomputed mean.  One-shot iterators are
    materialised first; empty input raises StatisticsError.
    """
    if iter(data) is data:
        data = list(data)
    n = len(data)
    if n < 1:
        raise StatisticsError('pvariance requires at least one data point')
    return _ss(data, mu) / n
def stdev(data, xbar=None):
    """Return the sample standard deviation (square root of the sample
    variance).  *xbar* optionally supplies a precomputed mean.
    """
    var = variance(data, xbar)
    try:
        # Types such as Decimal provide their own exact sqrt().
        return var.sqrt()
    except AttributeError:
        return math.sqrt(var)
def pstdev(data, mu=None):
    """Return the population standard deviation (square root of the
    population variance).  *mu* optionally supplies a precomputed mean.
    """
    var = pvariance(data, mu)
    try:
        # Types such as Decimal provide their own exact sqrt().
        return var.sqrt()
    except AttributeError:
        return math.sqrt(var)
def geometric_mean(data):
    """Return the geometric mean of *data*.

    To keep the product meaningful, zeros are replaced by e and
    negative values by 1.0 before multiplying (original behaviour).
    """
    if not data:
        raise StatisticsError('geometric_mean requires at least one data point')
    # in order to support negative or null values
    adjusted = []
    for x in data:
        if x > 0:
            adjusted.append(x)
        elif x == 0:
            adjusted.append(math.e)
        else:
            adjusted.append(1.0)
    product = functools.reduce(operator.mul, adjusted)
    return math.pow(math.fabs(product), 1.0 / len(adjusted))
def harmonic_mean(data):
    """Return the harmonic mean of *data*.

    Zero values contribute nothing to the reciprocal sum; if that sum
    ends up zero the result is 0.0.  Empty input raises StatisticsError.
    """
    if not data:
        raise StatisticsError('harmonic_mean requires at least one data point')
    divisor = sum((1.0 / x if x else 0.0) for x in data)
    if not divisor:
        return 0.0
    return len(data) / divisor
def skewness(data):
    """Return the skewness of the data's distribution.

    Symmetric data yield 0.0; so does a zero standard deviation.
    Empty input raises StatisticsError.
    """
    if not data:
        raise StatisticsError('skewness requires at least one data point')
    n = len(data)
    denom = stdev(data) ** 3
    if not denom:
        return 0.0
    center = mean(data)
    return sum((x - center) ** 3 / denom for x in data) / n
def kurtosis(data):
if not data:
raise StatisticsError('kurtosis requires at least one data point')
size = len(data)
sd = stdev(data) ** 4
if not sd:
return 0.0
mn = mean(data)
return sum(map(lambda x: ((x - mn) ** 4 / sd), data)) / size - 3
def percentile(data, n):
size = len(data)
idx = (n / 100.0) * size - 0.5
if idx < 0 or idx > size:
raise StatisticsError("Too few data points ({}) for {}th percentile".format(size, n))
return data[int(idx)]
def get_histogram(data):
count = len(data)
if count < 2:
raise StatisticsError('Too few data points ({}) for get_histogram'.format(count))
min_ = data[0]
max_ = data[-1]
std = stdev(data)
bins = get_histogram_bins(min_, max_, std, count)
res = {x: 0 for x in bins}
for value in data:
for bin_ in bins:
if value <= bin_:
res[bin_] += 1
break
return sorted(iteritems(res))
def get_histogram_bins(min_, max_, std, count):
width = _get_bin_width(std, count)
count = int(round((max_ - min_) / width) + 1)
if count:
bins = [i * width + min_ for i in xrange(1, count + 1)]
else:
bins = [min_]
return bins
def _get_bin_width(stdev, count):
w = int(round((3.5 * stdev) / (count ** (1.0 / 3))))
if w:
return w
else:
return 1
| true | true |
f7318cbf69b4247d05d82297d5c1cf2c37526fa4 | 6,137 | py | Python | lavaplayer/websocket.py | xArty4/lavaplayer | 1fae7c74e5e3c1eaf45b06fc05fc5fb16a2e4d3c | [
"MIT"
] | null | null | null | lavaplayer/websocket.py | xArty4/lavaplayer | 1fae7c74e5e3c1eaf45b06fc05fc5fb16a2e4d3c | [
"MIT"
] | null | null | null | lavaplayer/websocket.py | xArty4/lavaplayer | 1fae7c74e5e3c1eaf45b06fc05fc5fb16a2e4d3c | [
"MIT"
] | null | null | null | import asyncio
import aiohttp
import logging
from lavaplayer.exceptions import NodeError
from .objects import (
Info,
PlayerUpdateEvent,
TrackStartEvent,
TrackEndEvent,
TrackExceptionEvent,
TrackStuckEvent,
WebSocketClosedEvent,
)
from .emitter import Emitter
import typing as t
if t.TYPE_CHECKING:
from .client import LavalinkClient
_LOGGER = logging.getLogger("lavaplayer.ws")
class WS:
    """Websocket connection to a Lavalink node.

    Handles connecting and reconnecting, and dispatches every incoming
    Lavalink payload to the client's event emitter.
    """

    def __init__(
        self,
        client: "LavalinkClient",
        host: str,
        port: int,
        is_ssl: bool = False,
    ) -> None:
        # Underlying aiohttp websocket; set once _connect() succeeds.
        self.ws = None
        self.ws_url = f"{'wss' if is_ssl else 'ws'}://{host}:{port}"
        self.client = client
        self._headers = client._headers
        self._loop = client._loop
        self.emitter: Emitter = client.event_manager
        # True once a handshake has succeeded at least once.
        self.is_connect: bool = False

    async def _connect(self):
        """Open the websocket and consume messages until the socket closes.

        Connection errors trigger a retry every 10 seconds; auth failures
        (401/403) and other handshake errors abort without retrying.
        """
        async with aiohttp.ClientSession(headers=self._headers, loop=self._loop) as session:
            self.session = session
            try:
                self.ws = await self.session.ws_connect(self.ws_url)
                # NOTE(review): `session` was created by the `async with`
                # just above, so it cannot be None here -- this branch
                # looks unreachable; confirm whether `self.ws` was meant.
                if session is None:
                    await self.check_connection()
            except (aiohttp.ClientConnectorError, aiohttp.WSServerHandshakeError, aiohttp.ServerDisconnectedError) as error:
                if isinstance(error, aiohttp.ClientConnectorError):
                    _LOGGER.error(f"Could not connect to websocket: {error}")
                    _LOGGER.warning("Reconnecting to websocket after 10 seconds")
                    await asyncio.sleep(10)
                    await self._connect()
                    return
                elif isinstance(error, aiohttp.WSServerHandshakeError):
                    if error.status in (403, 401):  # Unauthorized or Forbidden
                        _LOGGER.warning("Password authentication failed - closing websocket")
                        return
                    _LOGGER.warning("Please check your websocket port - closing websocket")
                elif isinstance(error, aiohttp.ServerDisconnectedError):
                    _LOGGER.error(f"Could not connect to websocket: {error}")
                    _LOGGER.warning("Reconnecting to websocket after 10 seconds")
                    await asyncio.sleep(10)
                    await self._connect()
                    return
            _LOGGER.info("Connected to websocket")
            self.is_connect = True
            # Pump incoming frames until the connection drops.
            async for msg in self.ws:
                if msg.type == aiohttp.WSMsgType.TEXT:
                    await self.callback(msg.json())
                elif msg.type == aiohttp.WSMsgType.CLOSED:
                    _LOGGER.error("Websocket closed")
                    break
                elif msg.type == aiohttp.WSMsgType.ERROR:
                    _LOGGER.error(msg.data)
                    break

    async def check_connection(self):
        """Reconnect loop used when the socket is in a bad state.

        NOTE(review): the loop condition is also True while the socket is
        OPEN (`not self.ws.closed`), so it holds in nearly every state --
        this looks inverted and could reconnect endlessly; confirm intent.
        """
        while self.ws.closed is None or not self.ws.closed or not self.is_connected:
            _LOGGER.warning("Websocket closed unexpectedly - reconnecting in 10 seconds")
            if self.client.nodes:
                self.client.nodes.clear()
            await asyncio.sleep(10)
            await self._connect()

    async def callback(self, payload: dict):
        """Dispatch one decoded Lavalink payload to the event emitter."""
        if payload["op"] == "stats":
            # Node statistics snapshot.
            self.client.info = Info(
                playing_players=payload["playingPlayers"],
                memory_used=payload["memory"]["used"],
                memory_free=payload["memory"]["free"],
                players=payload["players"],
                uptime=payload["uptime"]
            )
        elif payload["op"] == "playerUpdate":
            data = PlayerUpdateEvent(
                guild_id=payload["guildId"],
                time=payload["state"]["time"],
                position=payload["state"].get("position"),
                connected=payload["state"]["connected"],
            )
            self.emitter.emit("playerUpdate", data)
        elif payload["op"] == "event":
            # Track lifecycle events; all carry an encoded track string.
            if not payload.get("track"):
                return
            track = await self.client._decodetrack(payload["track"])
            guild_id = int(payload["guildId"])
            try:
                node = await self.client.get_guild_node(guild_id)
            except NodeError:
                node = None
            if payload["type"] == "TrackStartEvent":
                self.emitter.emit("TrackStartEvent", TrackStartEvent(track, guild_id))
            elif payload["type"] == "TrackEndEvent":
                self.emitter.emit("TrackEndEvent", TrackEndEvent(track, guild_id, payload["reason"]))
                # Advance the queue: repeat the current track or play the next.
                if not node:
                    return
                if not node.queue:
                    return
                if node.repeat:
                    await self.client.play(guild_id, track, node.queue[0].requester, True)
                    return
                del node.queue[0]
                await self.client.set_guild_node(guild_id, node)
                if len(node.queue) != 0:
                    await self.client.play(guild_id, node.queue[0], node.queue[0].requester, True)
            elif payload["type"] == "TrackExceptionEvent":
                self.emitter.emit("TrackExceptionEvent", TrackExceptionEvent(track, guild_id, payload["exception"], payload["message"], payload["severity"], payload["cause"]))
            elif payload["type"] == "TrackStuckEvent":
                self.emitter.emit("TrackStuckEvent", TrackStuckEvent(track, guild_id, payload["thresholdMs"]))
            elif payload["type"] == "WebSocketClosedEvent":
                self.emitter.emit("WebSocketClosedEvent", WebSocketClosedEvent(track, guild_id, payload["code"], payload["reason"], payload["byRemote"]))

    @property
    def is_connected(self) -> bool:
        """True while the handshake succeeded and the socket is still open."""
        return self.is_connect and self.ws.closed is False

    async def send(self, payload):  # only dict
        """Send a JSON payload; attempt a reconnect first if disconnected."""
        if not self.is_connected:
            _LOGGER.error("Not connected to websocket")
            await self.check_connection()
            return
        await self.ws.send_json(payload)
| 40.111111 | 175 | 0.570311 | import asyncio
import aiohttp
import logging
from lavaplayer.exceptions import NodeError
from .objects import (
Info,
PlayerUpdateEvent,
TrackStartEvent,
TrackEndEvent,
TrackExceptionEvent,
TrackStuckEvent,
WebSocketClosedEvent,
)
from .emitter import Emitter
import typing as t
if t.TYPE_CHECKING:
from .client import LavalinkClient
_LOGGER = logging.getLogger("lavaplayer.ws")
class WS:
def __init__(
self,
client: "LavalinkClient",
host: str,
port: int,
is_ssl: bool = False,
) -> None:
self.ws = None
self.ws_url = f"{'wss' if is_ssl else 'ws'}://{host}:{port}"
self.client = client
self._headers = client._headers
self._loop = client._loop
self.emitter: Emitter = client.event_manager
self.is_connect: bool = False
async def _connect(self):
async with aiohttp.ClientSession(headers=self._headers, loop=self._loop) as session:
self.session = session
try:
self.ws = await self.session.ws_connect(self.ws_url)
if session is None:
await self.check_connection()
except (aiohttp.ClientConnectorError, aiohttp.WSServerHandshakeError, aiohttp.ServerDisconnectedError) as error:
if isinstance(error, aiohttp.ClientConnectorError):
_LOGGER.error(f"Could not connect to websocket: {error}")
_LOGGER.warning("Reconnecting to websocket after 10 seconds")
await asyncio.sleep(10)
await self._connect()
return
elif isinstance(error, aiohttp.WSServerHandshakeError):
if error.status in (403, 401):
_LOGGER.warning("Password authentication failed - closing websocket")
return
_LOGGER.warning("Please check your websocket port - closing websocket")
elif isinstance(error, aiohttp.ServerDisconnectedError):
_LOGGER.error(f"Could not connect to websocket: {error}")
_LOGGER.warning("Reconnecting to websocket after 10 seconds")
await asyncio.sleep(10)
await self._connect()
return
_LOGGER.info("Connected to websocket")
self.is_connect = True
async for msg in self.ws:
if msg.type == aiohttp.WSMsgType.TEXT:
await self.callback(msg.json())
elif msg.type == aiohttp.WSMsgType.CLOSED:
_LOGGER.error("Websocket closed")
break
elif msg.type == aiohttp.WSMsgType.ERROR:
_LOGGER.error(msg.data)
break
async def check_connection(self):
while self.ws.closed is None or not self.ws.closed or not self.is_connected:
_LOGGER.warning("Websocket closed unexpectedly - reconnecting in 10 seconds")
if self.client.nodes:
self.client.nodes.clear()
await asyncio.sleep(10)
await self._connect()
async def callback(self, payload: dict):
if payload["op"] == "stats":
self.client.info = Info(
playing_players=payload["playingPlayers"],
memory_used=payload["memory"]["used"],
memory_free=payload["memory"]["free"],
players=payload["players"],
uptime=payload["uptime"]
)
elif payload["op"] == "playerUpdate":
data = PlayerUpdateEvent(
guild_id=payload["guildId"],
time=payload["state"]["time"],
position=payload["state"].get("position"),
connected=payload["state"]["connected"],
)
self.emitter.emit("playerUpdate", data)
elif payload["op"] == "event":
if not payload.get("track"):
return
track = await self.client._decodetrack(payload["track"])
guild_id = int(payload["guildId"])
try:
node = await self.client.get_guild_node(guild_id)
except NodeError:
node = None
if payload["type"] == "TrackStartEvent":
self.emitter.emit("TrackStartEvent", TrackStartEvent(track, guild_id))
elif payload["type"] == "TrackEndEvent":
self.emitter.emit("TrackEndEvent", TrackEndEvent(track, guild_id, payload["reason"]))
if not node:
return
if not node.queue:
return
if node.repeat:
await self.client.play(guild_id, track, node.queue[0].requester, True)
return
del node.queue[0]
await self.client.set_guild_node(guild_id, node)
if len(node.queue) != 0:
await self.client.play(guild_id, node.queue[0], node.queue[0].requester, True)
elif payload["type"] == "TrackExceptionEvent":
self.emitter.emit("TrackExceptionEvent", TrackExceptionEvent(track, guild_id, payload["exception"], payload["message"], payload["severity"], payload["cause"]))
elif payload["type"] == "TrackStuckEvent":
self.emitter.emit("TrackStuckEvent", TrackStuckEvent(track, guild_id, payload["thresholdMs"]))
elif payload["type"] == "WebSocketClosedEvent":
self.emitter.emit("WebSocketClosedEvent", WebSocketClosedEvent(track, guild_id, payload["code"], payload["reason"], payload["byRemote"]))
@property
def is_connected(self) -> bool:
return self.is_connect and self.ws.closed is False
async def send(self, payload):
if not self.is_connected:
_LOGGER.error("Not connected to websocket")
await self.check_connection()
return
await self.ws.send_json(payload)
| true | true |
f7318d4e874e77b247071f0e8f618b2b791be9d7 | 629 | py | Python | skl2onnx/operator_converters/id_op.py | xiaowuhu/sklearn-onnx | e85674a67a0a043e19c2ffe181e5d31eca8ce40b | [
"Apache-2.0"
] | 323 | 2018-12-18T20:23:19.000Z | 2022-03-25T09:47:31.000Z | skl2onnx/operator_converters/id_op.py | xiaowuhu/sklearn-onnx | e85674a67a0a043e19c2ffe181e5d31eca8ce40b | [
"Apache-2.0"
] | 408 | 2019-01-02T12:16:10.000Z | 2022-03-21T14:01:28.000Z | skl2onnx/operator_converters/id_op.py | xiaowuhu/sklearn-onnx | e85674a67a0a043e19c2ffe181e5d31eca8ce40b | [
"Apache-2.0"
] | 70 | 2018-12-20T19:36:07.000Z | 2022-03-14T06:41:36.000Z | # SPDX-License-Identifier: Apache-2.0
from ..common._apply_operation import apply_identity
from ..common._registration import register_converter
from ..common._topology import Scope, Operator
from ..common._container import ModelComponentContainer
def convert_sklearn_identity(scope: Scope, operator: Operator,
                             container: ModelComponentContainer):
    """Convert a scikit-learn identity operator to an ONNX Identity node."""
    input_name = operator.inputs[0].full_name
    output_name = operator.outputs[0].full_name
    node_name = scope.get_unique_operator_name('CIdentity')
    apply_identity(scope, input_name, output_name, container,
                   operator_name=node_name)


register_converter('SklearnIdentity', convert_sklearn_identity)
| 33.105263 | 66 | 0.761526 |
from ..common._apply_operation import apply_identity
from ..common._registration import register_converter
from ..common._topology import Scope, Operator
from ..common._container import ModelComponentContainer
def convert_sklearn_identity(scope: Scope, operator: Operator,
container: ModelComponentContainer):
apply_identity(
scope, operator.inputs[0].full_name,
operator.outputs[0].full_name, container,
operator_name=scope.get_unique_operator_name('CIdentity'))
register_converter('SklearnIdentity', convert_sklearn_identity)
| true | true |
f7318d62eadf54fe70b5a57ab76350ff07135c26 | 177 | py | Python | tests/cases/core/layer3_pubsub/rpc/timeout/bad_call.py | AaronJGaut/pyspoke | 37bbe8b42c5abe129a3736b255c8a2ee17a4fb59 | [
"MIT"
] | null | null | null | tests/cases/core/layer3_pubsub/rpc/timeout/bad_call.py | AaronJGaut/pyspoke | 37bbe8b42c5abe129a3736b255c8a2ee17a4fb59 | [
"MIT"
] | null | null | null | tests/cases/core/layer3_pubsub/rpc/timeout/bad_call.py | AaronJGaut/pyspoke | 37bbe8b42c5abe129a3736b255c8a2ee17a4fb59 | [
"MIT"
] | null | null | null | import spoke
# Expect spoke.call() on a nonexistent topic to time out within 2 seconds.
try:
    spoke.call("junk", None, timeout=2)
except TimeoutError:
    print("Got expected TimeoutError")
else:
    # NOTE(review): TestFailure is not defined or imported in this file's
    # visible code -- presumably injected by the test harness; verify.
    raise TestFailure("Didn't get a TimeoutError")
| 17.7 | 50 | 0.711864 | import spoke
try:
spoke.call("junk", None, timeout=2)
except TimeoutError:
print("Got expected TimeoutError")
else:
raise TestFailure("Didn't get a TimeoutError")
| true | true |
f7318e0ce7bdc535165b2bde29c259ad0a74e64c | 9,150 | py | Python | tests/providers/test_internet.py | shirakia/faker | 4eb4ef24f5edbcbadd38f941025b671f5b6ebe60 | [
"MIT"
] | 1 | 2019-01-16T14:02:54.000Z | 2019-01-16T14:02:54.000Z | tests/providers/test_internet.py | shirakia/faker | 4eb4ef24f5edbcbadd38f941025b671f5b6ebe60 | [
"MIT"
] | null | null | null | tests/providers/test_internet.py | shirakia/faker | 4eb4ef24f5edbcbadd38f941025b671f5b6ebe60 | [
"MIT"
] | 1 | 2019-11-07T03:33:43.000Z | 2019-11-07T03:33:43.000Z | # coding=utf-8
from __future__ import unicode_literals
from itertools import cycle
import unittest
import mock
import pytest
import six
from email_validator import validate_email
from faker import Faker
from faker.providers.person.ja_JP import Provider as JaProvider
from faker.utils import text
class TestInternetProvider(unittest.TestCase):
    """Tests for the internet provider in the default locale."""

    def setUp(self):
        self.factory = Faker()

    def test_email(self):
        # The given domain must be used verbatim after the '@'.
        email = self.factory.email(domain='example.com')
        assert email.split('@')[1] == 'example.com'

    @mock.patch(
        'faker.providers.internet.Provider.image_placeholder_services',
        {'https://dummyimage.com/{width}x{height}'},
    )
    def test_image_url(self):
        # With the service list patched, the URL pattern is deterministic.
        my_width = 500
        my_height = 1024
        url = self.factory.image_url(my_width, my_height)
        assert 'https://dummyimage.com/{}x{}'.format(my_width, my_height) == url
        url = self.factory.image_url()
        assert 'https://dummyimage.com/' in url

    def test_hostname(self):
        # levels=1 yields exactly three dot-separated parts.
        hostname_1_level = self.factory.hostname(levels=1)
        hostname_parts = hostname_1_level.split(".")
        assert hostname_1_level
        self.assertIsInstance(hostname_1_level, six.string_types)
        assert len(hostname_parts) == 3
        hostname_0_level = self.factory.hostname(levels=0)
        assert hostname_0_level
        self.assertIsInstance(hostname_0_level, six.string_types)
class TestInternetProviderUrl(unittest.TestCase):
    """Tests for internet URL generation."""

    def setUp(self):
        self.factory = Faker()

    @staticmethod
    def is_correct_scheme(url, schemes):
        # True when the URL starts with any of the given schemes.
        return any(url.startswith('{}://'.format(scheme)) for scheme in schemes)

    def test_url_default_schemes(self):
        for _ in range(100):
            url = self.factory.url()
            assert self.is_correct_scheme(url, ['http', 'https'])

    def test_url_custom_schemes(self):
        schemes_sets = [
            ['usb'],
            ['ftp', 'file'],
            ['usb', 'telnet', 'http'],
        ]
        # Cycle through the scheme sets across 100 generated URLs.
        for _, schemes in zip(range(100), cycle(schemes_sets)):
            url = self.factory.url(schemes=schemes)
            assert self.is_correct_scheme(url, schemes)

    def test_url_empty_schemes_list_generate_schemeless_urls(self):
        # An empty scheme list must produce URLs like "://host/...".
        for _ in range(100):
            url = self.factory.url(schemes=[])
            assert not url.startswith('http')
            assert url.startswith('://')
class TestJaJP(unittest.TestCase):
    """Tests for the internet provider in the ja_JP locale."""

    def setUp(self):
        self.factory = Faker('ja')

    def test_internet(self):
        names = JaProvider.last_romanized_names

        # Domain words are slugified romanized last names.
        domain_word = self.factory.domain_word()
        self.assertIsInstance(domain_word, six.string_types)
        assert any(domain_word == text.slugify(name) for name in names)

        domain_name = self.factory.domain_name()
        deep_domain_name = self.factory.domain_name(3)
        self.assertIsInstance(domain_name, six.string_types)
        self.assertIsInstance(deep_domain_name, six.string_types)
        assert deep_domain_name.count('.') == 3
        # Negative subdomain depth is rejected.
        with pytest.raises(ValueError):
            self.factory.domain_name(-1)

        user_name = self.factory.user_name()
        self.assertIsInstance(user_name, six.string_types)

        tld = self.factory.tld()
        self.assertIsInstance(tld, six.string_types)
class TestZhCN(unittest.TestCase):
    """Tests for the internet provider in the zh_CN locale."""

    def setUp(self):
        self.factory = Faker(locale='zh_CN')

    def test_email(self):
        # Generated addresses must be syntactically valid.
        email = self.factory.email()
        validate_email(email, check_deliverability=False)

    def test_domain_word(self):
        domain_word = self.factory.domain_word()
        assert len(domain_word) > 1

    @mock.patch(
        'faker.providers.internet.Provider.tld',
        lambda x: 'cn',
    )
    def test_domain_name(self):
        # With the TLD pinned to 'cn', the structure is predictable.
        domain_name_1_level = self.factory.domain_name(levels=1)
        domain_parts = domain_name_1_level.split(".")
        assert len(domain_parts) == 2
        assert domain_parts[-1] == 'cn'
        domain_name_2_level = self.factory.domain_name(levels=2)
        domain_parts = domain_name_2_level.split(".")
        assert len(domain_parts) == 3
        assert domain_parts[-1] == 'cn'
        # Second-level label must be a known .cn category or region code.
        assert domain_parts[1] in ['ac', 'com', 'edu', 'gov', 'mil',
                                   'net', 'org', 'ah', 'bj', 'cq',
                                   'fj', 'gd', 'gs', 'gz', 'gx', 'ha',
                                   'hb', 'he', 'hi', 'hk', 'hl', 'hn',
                                   'jl', 'js', 'jx', 'ln', 'mo', 'nm',
                                   'nx', 'qh', 'sc', 'sd', 'sh', 'sn',
                                   'sx', 'tj', 'xj', 'xz', 'yn', 'zj']
class TestZhTW(unittest.TestCase):
    """Tests for the internet provider in the zh_TW locale."""

    def setUp(self):
        self.factory = Faker(locale='zh_TW')

    def test_email(self):
        # Generated addresses must be syntactically valid.
        email = self.factory.email()
        validate_email(email, check_deliverability=False)
class TestHuHU(unittest.TestCase):
    """Tests for the internet module in the hu_HU locale."""

    def setUp(self):
        self.factory = Faker('hu_HU')

    def test_internet(self):
        # Each generator must return a string for this locale.
        domain_name = self.factory.domain_name()
        self.assertIsInstance(domain_name, six.string_types)
        tld = self.factory.tld()
        self.assertIsInstance(tld, six.string_types)
        email = self.factory.email()
        self.assertIsInstance(email, six.string_types)
class TestPlPL(unittest.TestCase):
    """Tests for the internet provider in the pl_PL locale."""

    def setUp(self):
        self.factory = Faker('pl_PL')
        self.provider = self.factory.provider('faker.providers.internet')

    def test_free_email_domain(self):
        # The generated domain must come from the provider's known list.
        domain = self.factory.free_email_domain()
        assert domain in self.provider.free_email_domains

    def test_tld(self):
        tld = self.factory.tld()
        assert tld in self.provider.tlds
class TestNlNl(unittest.TestCase):
    """Tests ASCII transliteration of accented nl_NL user names in emails."""

    def setUp(self):
        self.factory = Faker('nl_NL')
        self.provider = self.factory.provider('faker.providers.internet')

    @mock.patch(
        'faker.providers.internet.Provider.user_name',
        lambda x: 'fabiënné',
    )
    def test_ascii_safe_email(self):
        # The accented name must be transliterated to plain ASCII.
        email = self.factory.ascii_safe_email()
        validate_email(email, check_deliverability=False)
        assert email.split('@')[0] == 'fabienne'

    @mock.patch(
        'faker.providers.internet.Provider.user_name',
        lambda x: 'fabiënné',
    )
    def test_ascii_free_email(self):
        email = self.factory.ascii_free_email()
        validate_email(email, check_deliverability=False)
        assert email.split('@')[0] == 'fabienne'

    @mock.patch(
        'faker.providers.internet.Provider.user_name',
        lambda x: 'fabiënné',
    )
    def test_ascii_company_email(self):
        email = self.factory.ascii_company_email()
        validate_email(email, check_deliverability=False)
        assert email.split('@')[0] == 'fabienne'
class TestArAa(unittest.TestCase):
    """Tests ASCII transliteration of Arabic user names in emails."""

    def setUp(self):
        self.factory = Faker('ar_AA')
        self.provider = self.factory.provider('faker.providers.internet')

    @mock.patch(
        'faker.providers.internet.Provider.user_name',
        lambda x: 'اصيل',
    )
    def test_ascii_safe_email(self):
        # The Arabic name must be transliterated to plain ASCII.
        email = self.factory.ascii_safe_email()
        validate_email(email, check_deliverability=False)
        assert email.split('@')[0] == 'asyl'

    @mock.patch(
        'faker.providers.internet.Provider.user_name',
        lambda x: 'اصيل',
    )
    def test_ascii_free_email(self):
        email = self.factory.ascii_free_email()
        validate_email(email, check_deliverability=False)
        assert email.split('@')[0] == 'asyl'

    @mock.patch(
        'faker.providers.internet.Provider.user_name',
        lambda x: 'اصيل',
    )
    def test_ascii_company_email(self):
        email = self.factory.ascii_company_email()
        validate_email(email, check_deliverability=False)
        assert email.split('@')[0] == 'asyl'
class TestPtBR(unittest.TestCase):
    """Tests ASCII transliteration of accented pt_BR user names in emails."""

    def setUp(self):
        self.factory = Faker('pt_BR')
        self.provider = self.factory.provider('faker.providers.internet')

    @mock.patch(
        'faker.providers.internet.Provider.user_name',
        lambda x: 'VitóriaMagalhães',
    )
    def test_ascii_safe_email(self):
        # Accented characters must be transliterated and lower-cased.
        email = self.factory.ascii_safe_email()
        validate_email(email, check_deliverability=False)
        assert email.split('@')[0] == 'vitoriamagalhaes'

    @mock.patch(
        'faker.providers.internet.Provider.user_name',
        lambda x: 'JoãoSimões',
    )
    def test_ascii_free_email(self):
        email = self.factory.ascii_free_email()
        validate_email(email, check_deliverability=False)
        assert email.split('@')[0] == 'joaosimoes'

    @mock.patch(
        'faker.providers.internet.Provider.user_name',
        lambda x: 'AndréCauã',
    )
    def test_ascii_company_email(self):
        email = self.factory.ascii_company_email()
        validate_email(email, check_deliverability=False)
        assert email.split('@')[0] == 'andrecaua'
| 31.6609 | 80 | 0.628962 |
from __future__ import unicode_literals
from itertools import cycle
import unittest
import mock
import pytest
import six
from email_validator import validate_email
from faker import Faker
from faker.providers.person.ja_JP import Provider as JaProvider
from faker.utils import text
class TestInternetProvider(unittest.TestCase):
def setUp(self):
self.factory = Faker()
def test_email(self):
email = self.factory.email(domain='example.com')
assert email.split('@')[1] == 'example.com'
@mock.patch(
'faker.providers.internet.Provider.image_placeholder_services',
{'https://dummyimage.com/{width}x{height}'},
)
def test_image_url(self):
my_width = 500
my_height = 1024
url = self.factory.image_url(my_width, my_height)
assert 'https://dummyimage.com/{}x{}'.format(my_width, my_height) == url
url = self.factory.image_url()
assert 'https://dummyimage.com/' in url
def test_hostname(self):
hostname_1_level = self.factory.hostname(levels=1)
hostname_parts = hostname_1_level.split(".")
assert hostname_1_level
self.assertIsInstance(hostname_1_level, six.string_types)
assert len(hostname_parts) == 3
hostname_0_level = self.factory.hostname(levels=0)
assert hostname_0_level
self.assertIsInstance(hostname_0_level, six.string_types)
class TestInternetProviderUrl(unittest.TestCase):
def setUp(self):
self.factory = Faker()
@staticmethod
def is_correct_scheme(url, schemes):
return any(url.startswith('{}://'.format(scheme)) for scheme in schemes)
def test_url_default_schemes(self):
for _ in range(100):
url = self.factory.url()
assert self.is_correct_scheme(url, ['http', 'https'])
def test_url_custom_schemes(self):
schemes_sets = [
['usb'],
['ftp', 'file'],
['usb', 'telnet', 'http'],
]
for _, schemes in zip(range(100), cycle(schemes_sets)):
url = self.factory.url(schemes=schemes)
assert self.is_correct_scheme(url, schemes)
def test_url_empty_schemes_list_generate_schemeless_urls(self):
for _ in range(100):
url = self.factory.url(schemes=[])
assert not url.startswith('http')
assert url.startswith('://')
class TestJaJP(unittest.TestCase):
def setUp(self):
self.factory = Faker('ja')
def test_internet(self):
names = JaProvider.last_romanized_names
domain_word = self.factory.domain_word()
self.assertIsInstance(domain_word, six.string_types)
assert any(domain_word == text.slugify(name) for name in names)
domain_name = self.factory.domain_name()
deep_domain_name = self.factory.domain_name(3)
self.assertIsInstance(domain_name, six.string_types)
self.assertIsInstance(deep_domain_name, six.string_types)
assert deep_domain_name.count('.') == 3
with pytest.raises(ValueError):
self.factory.domain_name(-1)
user_name = self.factory.user_name()
self.assertIsInstance(user_name, six.string_types)
tld = self.factory.tld()
self.assertIsInstance(tld, six.string_types)
class TestZhCN(unittest.TestCase):
def setUp(self):
self.factory = Faker(locale='zh_CN')
def test_email(self):
email = self.factory.email()
validate_email(email, check_deliverability=False)
def test_domain_word(self):
domain_word = self.factory.domain_word()
assert len(domain_word) > 1
@mock.patch(
'faker.providers.internet.Provider.tld',
lambda x: 'cn',
)
def test_domain_name(self):
domain_name_1_level = self.factory.domain_name(levels=1)
domain_parts = domain_name_1_level.split(".")
assert len(domain_parts) == 2
assert domain_parts[-1] == 'cn'
domain_name_2_level = self.factory.domain_name(levels=2)
domain_parts = domain_name_2_level.split(".")
assert len(domain_parts) == 3
assert domain_parts[-1] == 'cn'
assert domain_parts[1] in ['ac', 'com', 'edu', 'gov', 'mil',
'net', 'org', 'ah', 'bj', 'cq',
'fj', 'gd', 'gs', 'gz', 'gx', 'ha',
'hb', 'he', 'hi', 'hk', 'hl', 'hn',
'jl', 'js', 'jx', 'ln', 'mo', 'nm',
'nx', 'qh', 'sc', 'sd', 'sh', 'sn',
'sx', 'tj', 'xj', 'xz', 'yn', 'zj']
class TestZhTW(unittest.TestCase):
def setUp(self):
self.factory = Faker(locale='zh_TW')
def test_email(self):
email = self.factory.email()
validate_email(email, check_deliverability=False)
class TestHuHU(unittest.TestCase):
def setUp(self):
self.factory = Faker('hu_HU')
def test_internet(self):
domain_name = self.factory.domain_name()
self.assertIsInstance(domain_name, six.string_types)
tld = self.factory.tld()
self.assertIsInstance(tld, six.string_types)
email = self.factory.email()
self.assertIsInstance(email, six.string_types)
class TestPlPL(unittest.TestCase):
def setUp(self):
self.factory = Faker('pl_PL')
self.provider = self.factory.provider('faker.providers.internet')
def test_free_email_domain(self):
domain = self.factory.free_email_domain()
assert domain in self.provider.free_email_domains
def test_tld(self):
tld = self.factory.tld()
assert tld in self.provider.tlds
class TestNlNl(unittest.TestCase):
def setUp(self):
self.factory = Faker('nl_NL')
self.provider = self.factory.provider('faker.providers.internet')
@mock.patch(
'faker.providers.internet.Provider.user_name',
lambda x: 'fabiënné',
)
def test_ascii_safe_email(self):
email = self.factory.ascii_safe_email()
validate_email(email, check_deliverability=False)
assert email.split('@')[0] == 'fabienne'
@mock.patch(
'faker.providers.internet.Provider.user_name',
lambda x: 'fabiënné',
)
def test_ascii_free_email(self):
email = self.factory.ascii_free_email()
validate_email(email, check_deliverability=False)
assert email.split('@')[0] == 'fabienne'
@mock.patch(
'faker.providers.internet.Provider.user_name',
lambda x: 'fabiënné',
)
def test_ascii_company_email(self):
email = self.factory.ascii_company_email()
validate_email(email, check_deliverability=False)
assert email.split('@')[0] == 'fabienne'
class TestArAa(unittest.TestCase):
    """ASCII e-mail generation for the ar_AA locale.

    The user name is pinned to an Arabic value so the tests can verify
    that every ascii_* address generator transliterates it to ASCII.
    """

    def setUp(self):
        self.factory = Faker('ar_AA')
        self.provider = self.factory.provider('faker.providers.internet')

    def _check_ascii_email(self, generate):
        # Pin the user-name source, generate an address, then confirm it
        # is valid and its local part was transliterated.
        with mock.patch(
            'faker.providers.internet.Provider.user_name',
            lambda x: 'اصيل',
        ):
            email = generate()
        validate_email(email, check_deliverability=False)
        assert email.split('@')[0] == 'asyl'

    def test_ascii_safe_email(self):
        self._check_ascii_email(self.factory.ascii_safe_email)

    def test_ascii_free_email(self):
        self._check_ascii_email(self.factory.ascii_free_email)

    def test_ascii_company_email(self):
        self._check_ascii_email(self.factory.ascii_company_email)
class TestPtBR(unittest.TestCase):
    """ASCII e-mail generation for the pt_BR locale."""

    def setUp(self):
        self.factory = Faker('pt_BR')
        self.provider = self.factory.provider('faker.providers.internet')

    def _check_ascii_email(self, generate, pinned_name, expected_local):
        # Pin the user-name source to a value with diacritics, then check
        # the generated address is valid and fully transliterated.
        with mock.patch(
            'faker.providers.internet.Provider.user_name',
            lambda x, _name=pinned_name: _name,
        ):
            email = generate()
        validate_email(email, check_deliverability=False)
        assert email.split('@')[0] == expected_local

    def test_ascii_safe_email(self):
        self._check_ascii_email(
            self.factory.ascii_safe_email, 'VitóriaMagalhães', 'vitoriamagalhaes')

    def test_ascii_free_email(self):
        self._check_ascii_email(
            self.factory.ascii_free_email, 'JoãoSimões', 'joaosimoes')

    def test_ascii_company_email(self):
        self._check_ascii_email(
            self.factory.ascii_company_email, 'AndréCauã', 'andrecaua')
| true | true |
f7318f9b6f7017b7889c28a18c24f5841345d651 | 2,324 | py | Python | project-chat/chatApp/migrations/0001_initial.py | Torkvamedo/smx | a5aef4f430f56ac67100c505902f55e18fba5978 | [
"Unlicense"
] | null | null | null | project-chat/chatApp/migrations/0001_initial.py | Torkvamedo/smx | a5aef4f430f56ac67100c505902f55e18fba5978 | [
"Unlicense"
] | null | null | null | project-chat/chatApp/migrations/0001_initial.py | Torkvamedo/smx | a5aef4f430f56ac67100c505902f55e18fba5978 | [
"Unlicense"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-17 19:23
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial auto-generated schema for the chat app.

    Creates the Ban, ChatUser, Message and Role tables, then wires up the
    remaining foreign keys once both sides of each relation exist.
    """

    # First migration of the app, so there is nothing to depend on.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Ban',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # auto_now: refreshed every time the row is saved.
                ('date', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='ChatUser',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                # Login doubles as the unique account identifier.
                ('login', models.CharField(max_length=30, unique=True)),
                # NOTE(review): password stored as a plain CharField --
                # presumably hashed elsewhere; confirm before production use.
                ('password', models.CharField(max_length=30)),
            ],
        ),
        migrations.CreateModel(
            name='Message',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.CharField(max_length=1024)),
                ('date', models.DateTimeField(auto_now=True)),
                # Distinct related_names keep the two ChatUser FKs apart.
                ('receiver_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='receiver', to='chatApp.ChatUser')),
                ('sender_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sender', to='chatApp.ChatUser')),
            ],
        ),
        migrations.CreateModel(
            name='Role',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('role_name', models.CharField(max_length=20)),
            ],
        ),
        # FKs added after model creation because Role/ChatUser are created
        # earlier in this same migration.
        migrations.AddField(
            model_name='chatuser',
            name='role_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='chatApp.Role'),
        ),
        migrations.AddField(
            model_name='ban',
            name='user_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='chatApp.ChatUser'),
        ),
    ]
| 38.098361 | 144 | 0.577883 |
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial auto-generated schema for the chat app.

    Creates the Ban, ChatUser, Message and Role tables, then wires up the
    remaining foreign keys once both sides of each relation exist.
    """

    # First migration of the app, so there is nothing to depend on.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Ban',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # auto_now: refreshed every time the row is saved.
                ('date', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='ChatUser',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                # Login doubles as the unique account identifier.
                ('login', models.CharField(max_length=30, unique=True)),
                # NOTE(review): password stored as a plain CharField --
                # presumably hashed elsewhere; confirm before production use.
                ('password', models.CharField(max_length=30)),
            ],
        ),
        migrations.CreateModel(
            name='Message',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('body', models.CharField(max_length=1024)),
                ('date', models.DateTimeField(auto_now=True)),
                # Distinct related_names keep the two ChatUser FKs apart.
                ('receiver_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='receiver', to='chatApp.ChatUser')),
                ('sender_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sender', to='chatApp.ChatUser')),
            ],
        ),
        migrations.CreateModel(
            name='Role',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('role_name', models.CharField(max_length=20)),
            ],
        ),
        # FKs added after model creation because Role/ChatUser are created
        # earlier in this same migration.
        migrations.AddField(
            model_name='chatuser',
            name='role_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='chatApp.Role'),
        ),
        migrations.AddField(
            model_name='ban',
            name='user_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='chatApp.ChatUser'),
        ),
    ]
| true | true |
f731902b159345554e51c2ec9585cd8075fabbd5 | 846 | py | Python | setup.py | kenlowrie/avscript | 7e3550adc46a01785ea89cdcd9ddaa5ac35bdbdd | [
"Apache-2.0"
] | 1 | 2021-09-30T19:29:30.000Z | 2021-09-30T19:29:30.000Z | setup.py | kenlowrie/avscript | 7e3550adc46a01785ea89cdcd9ddaa5ac35bdbdd | [
"Apache-2.0"
] | 7 | 2018-07-16T22:52:55.000Z | 2020-05-20T23:48:36.000Z | setup.py | kenlowrie/avscript | 7e3550adc46a01785ea89cdcd9ddaa5ac35bdbdd | [
"Apache-2.0"
] | 1 | 2018-05-24T22:58:44.000Z | 2018-05-24T22:58:44.000Z | from setuptools import setup
from sys import version_info
# Package metadata and console entry points for the A/V script parser.
setup(name='avscript',
      version='0.3.8',
      description='Audio/Visual Script Parser',
      url='https://github.com/kenlowrie/avscript',
      author='Ken Lowrie',
      author_email='ken@kenlowrie.com',
      license='Apache',
      packages=['avscript', 'avscript.avs'],
      install_requires=['kenl380.pylib'],
      entry_points = {
          # Each tool is also installed with the running interpreter's
          # major version appended (e.g. "avscript3") so multiple Python
          # versions can coexist on one machine.
          'console_scripts': ['avscript=avscript.avscript_md:av_parse_file',
                              'avscript{}=avscript.avscript_md:av_parse_file'.format(version_info.major),
                              'mkavscript=avscript.mkavscript_md:mkavscript_md',
                              'mkavscript{}=avscript.mkavscript_md:mkavscript_md'.format(version_info.major),
                              ],
          },
      include_package_data=True,
      zip_safe=False)
| 38.454545 | 107 | 0.630024 | from setuptools import setup
from sys import version_info
setup(name='avscript',
version='0.3.8',
description='Audio/Visual Script Parser',
url='https://github.com/kenlowrie/avscript',
author='Ken Lowrie',
author_email='ken@kenlowrie.com',
license='Apache',
packages=['avscript', 'avscript.avs'],
install_requires=['kenl380.pylib'],
entry_points = {
'console_scripts': ['avscript=avscript.avscript_md:av_parse_file',
'avscript{}=avscript.avscript_md:av_parse_file'.format(version_info.major),
'mkavscript=avscript.mkavscript_md:mkavscript_md',
'mkavscript{}=avscript.mkavscript_md:mkavscript_md'.format(version_info.major),
],
},
include_package_data=True,
zip_safe=False)
| true | true |
f73190671897a2b17be262536826df2b282c304f | 24,050 | py | Python | test/galaxy_selenium/navigates_galaxy.py | Galaxyinternship/Galaxy | 204be086a8c16d6684584cefa9053ed7c86a1784 | [
"CC-BY-3.0"
] | null | null | null | test/galaxy_selenium/navigates_galaxy.py | Galaxyinternship/Galaxy | 204be086a8c16d6684584cefa9053ed7c86a1784 | [
"CC-BY-3.0"
] | null | null | null | test/galaxy_selenium/navigates_galaxy.py | Galaxyinternship/Galaxy | 204be086a8c16d6684584cefa9053ed7c86a1784 | [
"CC-BY-3.0"
] | null | null | null | """A mixing that extends a HasDriver class with Galaxy-specific utilities.
Implementer must provide a self.build_url method to target Galaxy.
"""
from __future__ import print_function
import contextlib
import random
import string
import time
from functools import partial, wraps
import requests
import yaml
from .data import NAVIGATION_DATA
from .has_driver import exception_indicates_stale_element, HasDriver
from . import sizzle
# Test case data
DEFAULT_PASSWORD = '123456'
class NullTourCallback(object):
    """No-op callback used when a tour is run without per-step hooks."""

    def handle_step(self, step, step_index):
        """Ignore the step; subclasses may override to observe progress."""
        return None
def retry_call_during_transitions(f, attempts=5, sleep=.1):
    """Call ``f`` and retry while the UI is mid-transition.

    Only exceptions recognised by ``exception_indicates_stale_element``
    are retried (up to ``attempts`` retries, sleeping ``sleep`` seconds
    between tries); anything else is re-raised immediately.

    :returns: whatever ``f`` returns on the successful call.
    """
    previous_attempts = 0
    while True:
        try:
            return f()
        except Exception as e:
            if previous_attempts > attempts:
                raise
            if not exception_indicates_stale_element(e):
                raise
            time.sleep(sleep)
            previous_attempts += 1


def retry_during_transitions(f, attempts=5, sleep=.1):
    """Decorator form of :func:`retry_call_during_transitions`.

    Fix: the wrapper now *returns* the wrapped call's result; previously
    the return value was discarded and every decorated function yielded
    ``None``.
    """
    @wraps(f)
    def _retry(*args, **kwds):
        return retry_call_during_transitions(partial(f, *args, **kwds), attempts=attempts, sleep=sleep)
    return _retry
class NavigatesGalaxy(HasDriver):
default_password = DEFAULT_PASSWORD
def get(self, url=""):
full_url = self.build_url(url)
return self.driver.get(full_url)
@property
def navigation_data(self):
return NAVIGATION_DATA
    def home(self):
        """Load the Galaxy root page and wait until both the masthead and
        the current-history panel have rendered."""
        self.get()
        self.wait_for_selector_visible("#masthead")
        self.wait_for_selector_visible("#current-history-panel")
def switch_to_main_panel(self):
self.driver.switch_to.frame(self.navigation_data["selectors"]["frames"]["main"])
@contextlib.contextmanager
def main_panel(self):
try:
self.switch_to_main_panel()
yield
finally:
self.driver.switch_to.default_content
def api_get(self, endpoint, data={}, raw=False):
full_url = self.build_url("api/" + endpoint, for_selenium=False)
response = requests.get(full_url, data=data, cookies=self.selenium_to_requests_cookies())
if raw:
return response
else:
return response.json()
def get_galaxy_session(self):
for cookie in self.driver.get_cookies():
if cookie["name"] == "galaxysession":
return cookie["value"]
def selenium_to_requests_cookies(self):
return {
'galaxysession': self.get_galaxy_session()
}
def history_panel_name_element(self):
name_selector = self.test_data["historyPanel"]["selectors"]["history"]["name"]
return self.wait_for_selector(name_selector)
def current_history(self):
history = self.api_get("histories")[0]
return history
def current_history_id(self):
return self.current_history()["id"]
def current_history_contents(self):
current_history_id = self.current_history_id()
history_contents = self.api_get("histories/%s/contents" % current_history_id)
return history_contents
def latest_history_item(self):
history_contents = self.current_history_contents()
assert len(history_contents) > 0
return history_contents[-1]
def wait_for_history(self, timeout=30, assert_ok=True):
def history_becomes_terminal(driver):
current_history_id = self.current_history_id()
state = self.api_get("histories/%s" % current_history_id)["state"]
if state not in ["running", "queued", "new", "ready"]:
return state
else:
return None
final_state = self.wait(timeout).until(history_becomes_terminal)
if assert_ok:
assert final_state == "ok", final_state
return final_state
def history_panel_wait_for_hid_ok(self, hid, timeout=60):
self.history_panel_wait_for_hid_state(hid, 'ok', timeout=timeout)
def history_panel_wait_for_hid_visible(self, hid, timeout=60):
current_history_id = self.current_history_id()
def history_has_hid(driver):
contents = self.api_get("histories/%s/contents" % current_history_id)
return any([d for d in contents if d["hid"] == hid])
self.wait(timeout).until(history_has_hid)
contents = self.api_get("histories/%s/contents" % current_history_id)
history_item = [d for d in contents if d["hid"] == hid][0]
history_item_selector = "#%s-%s" % (history_item["history_content_type"], history_item["id"])
self.wait_for_selector_visible(history_item_selector)
return history_item_selector
def history_panel_wait_for_hid_hidden(self, hid, timeout=60):
current_history_id = self.current_history_id()
contents = self.api_get("histories/%s/contents" % current_history_id)
history_item = [d for d in contents if d["hid"] == hid][0]
history_item_selector = "#%s-%s" % (history_item["history_content_type"], history_item["id"])
self.wait_for_selector_absent(history_item_selector)
return history_item_selector
def history_panel_wait_for_hid_state(self, hid, state, timeout=60):
history_item_selector = self.history_panel_wait_for_hid_visible(hid, timeout=timeout)
history_item_selector_state = "%s.state-%s" % (history_item_selector, state)
try:
self.wait_for_selector_visible(history_item_selector_state)
except self.TimeoutException as e:
history_item = self.driver.find_element_by_css_selector(history_item_selector)
current_state = "UNKNOWN"
classes = history_item.get_attribute("class").split(" ")
for clazz in classes:
if clazz.startswith("state-"):
current_state = clazz[len("state-"):]
template = "Failed waiting on history item %d state to change to [%s] current state [%s]. "
message = template % (hid, state, current_state)
raise self.prepend_timeout_message(e, message)
def get_logged_in_user(self):
return self.api_get("users/current")
def is_logged_in(self):
return "email" in self.get_logged_in_user()
    def _get_random_name(self, prefix=None, suffix=None, len=10):
        """Return ``prefix`` + ``len`` random lowercase/digit characters + ``suffix``."""
        # NOTE(review): ``len`` shadows the builtin; kept as-is because
        # renaming it would break callers that pass it by keyword.
        return '%s%s%s' % (
            prefix or '',
            ''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(len)),
            suffix or '',
        )
def _get_random_email(self, username=None, domain=None):
username = username or 'test'
domain = domain or 'test.test'
return self._get_random_name(prefix=username, suffix="@" + domain)
def submit_login(self, email, password=None):
if password is None:
password = self.default_password
login_info = {
'login': email,
'password': password,
}
self.click_masthead_user()
self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["login"])
with self.main_panel():
form = self.wait_for_selector(self.navigation_data["selectors"]["loginPage"]["form"])
self.fill(form, login_info)
self.click_submit(form)
def register(self, email=None, password=None, username=None, confirm=None, assert_valid=True):
if email is None:
email = self._get_random_email()
if password is None:
password = self.default_password
if confirm is None:
confirm = password
if username is None:
username = email.split("@")[0]
self.home()
self.click_masthead_user()
self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["register"])
with self.main_panel():
register_form_id = self.navigation_data["selectors"]["registrationPage"]["form"]
form = self.wait_for_id(register_form_id)
self.fill(form, dict(
email=email,
password=password,
username=username,
confirm=confirm
))
self.click_xpath(self.navigation_data["selectors"]["registrationPage"]["submit_xpath"])
if assert_valid:
self.home()
self.click_masthead_user()
user_email_element = self.wait_for_xpath_visible(self.navigation_data["selectors"]["masthead"]["userMenu"]["userEmail_xpath"])
text = user_email_element.text
assert email in text
assert self.get_logged_in_user()["email"] == email
# Hide masthead menu click
self.click_center()
def click_center(self):
action_chains = self.action_chains()
center_element = self.driver.find_element_by_css_selector("#center")
action_chains.move_to_element(center_element).click().perform()
def perform_upload(self, test_path, ext=None, genome=None, ext_all=None, genome_all=None):
self.home()
upload_button = self.wait_for_selector_clickable(".upload-button")
upload_button.click()
if ext_all is not None:
self.wait_for_selector_visible('.upload-footer-extension')
self.select2_set_value(".upload-footer-extension", ext_all)
if genome_all is not None:
self.wait_for_selector_visible('.upload-footer-genome')
self.select2_set_value(".upload-footer-genome", genome_all)
local_upload_button = self.wait_for_selector_clickable("button#btn-local")
local_upload_button.click()
file_upload = self.wait_for_selector('input[type="file"]')
file_upload.send_keys(test_path)
if ext is not None:
self.wait_for_selector_visible('.upload-extension')
self.select2_set_value(".upload-extension", ext)
if genome is not None:
self.wait_for_selector_visible('.upload-genome')
self.select2_set_value(".upload-genome", genome)
start_button = self.wait_for_selector_clickable("button#btn-start")
start_button.click()
close_button = self.wait_for_selector_clickable("button#btn-close")
close_button.click()
def workflow_index_open(self):
self.home()
self.click_masthead_workflow()
def workflow_index_table_elements(self):
self.wait_for_selector_visible(".manage-table tbody")
table_elements = self.driver.find_elements_by_css_selector(".manage-table tbody > tr")
# drop header
return table_elements[1:]
def workflow_index_click_option(self, option_title, workflow_index=0):
table_elements = self.workflow_index_table_elements()
workflow_row = table_elements[workflow_index]
workflow_button = workflow_row.find_element_by_css_selector(".menubutton")
workflow_button.click()
menu_element = self.wait_for_selector_visible(".popmenu-wrapper .dropdown-menu")
menu_options = menu_element.find_elements_by_css_selector("li a")
found_option = False
for menu_option in menu_options:
if option_title in menu_option.text:
menu_option.click()
found_option = True
break
if not found_option:
raise AssertionError("Failed to find workflow action option with title [%s]" % option_title)
def workflow_run_submit(self):
button = self.wait_for_selector(".ui-form-header button")
button.click()
def tool_open(self, tool_id):
link_element = self.wait_for_selector('a[href$="tool_runner?tool_id=%s"]' % tool_id)
link_element.click()
def tool_parameter_div(self, expanded_parameter_id):
return self.wait_for_selector("div.ui-form-element[tour_id$='%s']" % expanded_parameter_id)
def tool_set_value(self, expanded_parameter_id, value, expected_type=None, test_data_resolver=None):
div_element = self.tool_parameter_div(expanded_parameter_id)
assert div_element
if expected_type == "data":
div_selector = "div.ui-form-element[tour_id$='%s']" % expanded_parameter_id
self.select2_set_value(div_selector, value)
else:
input_element = div_element.find_element_by_css_selector("input")
# Clear default value
input_element.clear()
input_element.send_keys(value)
def tool_execute(self):
execute_button = self.wait_for_selector("button#execute")
execute_button.click()
def click_masthead_user(self):
self.click_xpath(self.navigation_data["selectors"]["masthead"]["user"])
def click_masthead_workflow(self):
self.click_xpath(self.navigation_data["selectors"]["masthead"]["workflow"])
def click_button_new_workflow(self):
self.click_selector(self.navigation_data["selectors"]["workflows"]["new_button"])
def click_history_options(self):
history_options_button_selector = self.test_data["historyOptions"]["selectors"]["button"]
history_options_element = self.wait_for_selector(history_options_button_selector)
assert history_options_element.is_displayed()
history_options_button_icon_selector = self.test_data["historyOptions"]["selectors"]["buttonIcon"]
history_options_button_icon_element = self.wait_for_selector(history_options_button_icon_selector)
assert history_options_button_icon_element.is_displayed()
history_options_element.click()
def click_history_option(self, option_label):
# Open menu
self.click_history_options()
# Click labelled option
menu_selector = self.history_options_menu_selector()
menu_element = self.wait_for_selector(menu_selector)
menu_selection_element = menu_element.find_element_by_xpath('//ul[@id="history-options-button-menu"]/li/a[text()[contains(.,"%s")]]' % option_label)
menu_selection_element.click()
def history_options_menu_selector(self):
menu_selector = self.test_data["historyOptions"]["selectors"]["menu"]
return menu_selector
@retry_during_transitions
def history_panel_refresh_click(self):
refresh_item = self.wait_for_selector_clickable("#history-refresh-button")
refresh_item.click()
def history_panel_multi_operations_selector(self):
return self.test_data["historyPanel"]["selectors"]["history"]["multiOperationsIcon"]
def history_panel_multi_operations_show(self):
operations_selector = self.history_panel_multi_operations_selector()
operations_element = self.wait_for_selector_clickable(operations_selector)
operations_element.click()
@retry_during_transitions
def history_panel_muli_operation_select_hid(self, hid):
item_selector = self.history_panel_item_selector(hid, wait=True)
operation_radio_selector = "%s .selector" % item_selector
element = self.wait_for_selector_clickable(operation_radio_selector)
element.click()
def history_panel_multi_operation_action_selector(self):
return self.test_data["historyPanel"]["selectors"]["history"]["multiOperationsActionBtn"]
def history_panel_multi_operation_action_click(self, action):
time.sleep(5)
button_element = self.wait_for_selector_clickable(self.history_panel_multi_operation_action_selector())
button_element.click()
menu_element = self.wait_for_selector_visible(".list-action-menu.open")
action_element = menu_element.find_element_by_link_text(action)
action_element.click()
def history_panel_item_selector(self, hid, wait=False):
current_history_id = self.current_history_id()
contents = self.api_get("histories/%s/contents" % current_history_id)
try:
history_item = [d for d in contents if d["hid"] == hid][0]
except IndexError:
raise Exception("Could not find history item with hid [%s] in contents [%s]" % (hid, contents))
history_item_selector = "#%s-%s" % (history_item["history_content_type"], history_item["id"])
if wait:
self.wait_for_selector_visible(history_item_selector)
return history_item_selector
def modal_body_selector(self):
return ".modal-body"
def history_panel_item_body_selector(self, hid, wait=False):
selector = "%s %s" % (self.history_panel_item_selector(hid), self.test_data["historyPanel"]["selectors"]["hda"]["body"])
if wait:
self.wait_for_selector_visible(selector)
return selector
def hda_div_selector(self, hda_id):
return "#dataset-%s" % hda_id
def hda_body_selector(self, hda_id):
return "%s %s" % (self.hda_div_selector(hda_id), self.test_data["historyPanel"]["selectors"]["hda"]["body"])
def hda_click_primary_action_button(self, hid, button_key):
self.history_panel_click_item_title(hid=hid, wait=True)
body_selector = self.history_panel_item_body_selector(hid=hid, wait=True)
buttons_selector = body_selector + " " + self.test_data["historyPanel"]["selectors"]["hda"]["primaryActionButtons"]
self.wait_for_selector_visible(buttons_selector)
button_def = self.test_data["historyPanel"]["hdaPrimaryActionButtons"][button_key]
button_selector = button_def["selector"]
button_item = self.wait_for_selector_visible("%s %s" % (buttons_selector, button_selector))
return button_item.click()
def history_panel_click_item_title(self, **kwds):
if "hda_id" in kwds:
item_selector = self.hda_div_selector(kwds["hda_id"])
else:
item_selector = self.history_panel_item_selector(kwds["hid"])
title_selector = "%s .title" % item_selector
title_element = self.wait_for_selector(title_selector)
title_element.click()
if kwds.get("wait", False):
# Find a better way to wait for transition
time.sleep(.5)
def click_hda_title(self, hda_id, wait=False):
# TODO: Replace with calls to history_panel_click_item_title.
return self.history_panel_click_item_title(hda_id=hda_id, wait=wait)
def collection_builder_set_name(self, name):
name_element = self.wait_for_selector_visible("input.collection-name")
name_element.send_keys(name)
def collection_builder_hide_originals(self):
hide_element = self.wait_for_selector_clickable("input.hide-originals")
hide_element.click()
def collection_builder_create(self):
create_element = self.wait_for_selector_clickable("button.create-collection")
create_element.click()
def logout_if_needed(self):
if self.is_logged_in():
self.home()
self.click_masthead_user()
self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["logout"])
self.click_label('go to the home page')
assert not self.is_logged_in()
def run_tour(self, path, skip_steps=[], sleep_on_steps={}, tour_callback=None):
if tour_callback is None:
tour_callback = NullTourCallback()
self.home()
with open(path, "r") as f:
tour_dict = yaml.load(f)
steps = tour_dict["steps"]
for i, step in enumerate(steps):
title = step.get("title", None)
skip = False
if skip_steps:
for skip_step in skip_steps:
if title == skip_step:
skip = True
if title in sleep_on_steps:
time.sleep(sleep_on_steps[title])
if skip:
continue
self.run_tour_step(step, i, tour_callback)
def tour_wait_for_clickable_element(self, selector):
wait = self.wait()
element = wait.until(sizzle.sizzle_selector_clickable(selector))
return element
def tour_wait_for_element_present(self, selector):
wait = self.wait()
element = wait.until(sizzle.sizzle_presence_of_selector(selector))
return element
def get_tooltip_text(self, element, sleep=0, click_away=True):
tooltip_selector = self.test_data["selectors"]["tooltipBalloon"]
self.wait_for_selector_absent(tooltip_selector)
action_chains = self.action_chains()
action_chains.move_to_element(element)
action_chains.perform()
if sleep > 0:
time.sleep(sleep)
tooltip_element = self.wait_for_selector_visible(tooltip_selector)
text = tooltip_element.text
if click_away:
self.click_center()
return text
def assert_tooltip_text(self, element, expected, sleep=0, click_away=True):
text = self.get_tooltip_text(element, sleep=sleep, click_away=click_away)
assert text == expected, "Tooltip text [%s] was not expected text [%s]." % (text, expected)
def assert_error_message(self, contains=None):
return self._assert_message("error", contains=contains)
def assert_warning_message(self, contains=None):
return self._assert_message("warning", contains=contains)
def _assert_message(self, type, contains=None):
element = self.wait_for_selector(self.test_data["selectors"]["messages"][type])
assert element, "No error message found, one expected."
if contains is not None:
assert contains in element.text
def assert_no_error_message(self):
self.assert_selector_absent(self.test_data["selectors"]["messages"]["error"])
def run_tour_step(self, step, step_index, tour_callback):
preclick = step.get("preclick", [])
for preclick_selector in preclick:
print("(Pre)Clicking %s" % preclick_selector)
element = self.tour_wait_for_clickable_element(preclick_selector)
element.click()
element_str = step.get("element", None)
if element_str is not None:
print("Waiting for element %s" % element_str)
element = self.tour_wait_for_element_present(element_str)
assert element is not None
textinsert = step.get("textinsert", None)
if textinsert is not None:
element.send_keys(textinsert)
tour_callback.handle_step(step, step_index)
postclick = step.get("postclick", [])
for postclick_selector in postclick:
print("(Post)Clicking %s" % postclick_selector)
element = self.tour_wait_for_clickable_element(postclick_selector)
element.click()
    def select2_set_value(self, container_selector, value, with_click=True):
        """Type ``value`` into the select2 widget under ``container_selector``
        and commit the matching suggestion.

        Two commit strategies exist:
        * ``with_click=True``: click the first filtered suggestion; this
          works in all situations, including the tool form.
        * ``with_click=False``: press Enter in the text box; for unknown
          reasons this does not work on the tool form.
        """
        container_elem = self.wait_for_selector(container_selector)
        text_element = container_elem.find_element_by_css_selector("input[type='text']")
        text_element.send_keys(value)
        # Wait for the select2 suggestion dropdown to appear.
        drop_elem = self.wait_for_selector_visible("#select2-drop")
        # HACK: a short pause is needed before committing - at least for
        # the send_enter path.
        time.sleep(.5)
        if not with_click:
            # Commit the current suggestion via the keyboard.
            self.send_enter(text_element)
        else:
            # Commit by clicking the first suggestion in the dropdown.
            select_elem = drop_elem.find_elements_by_css_selector(".select2-result-label")[0]
            action_chains = self.action_chains()
            action_chains.move_to_element(select_elem).click().perform()
        self.wait_for_selector_absent_or_hidden("#select2-drop")
| 40.083333 | 156 | 0.669023 | from __future__ import print_function
import contextlib
import random
import string
import time
from functools import partial, wraps
import requests
import yaml
from .data import NAVIGATION_DATA
from .has_driver import exception_indicates_stale_element, HasDriver
from . import sizzle
DEFAULT_PASSWORD = '123456'
class NullTourCallback(object):
    """No-op callback used when a tour is run without per-step hooks."""

    def handle_step(self, step, step_index):
        """Ignore the step; subclasses may override to observe progress."""
        return None
def retry_call_during_transitions(f, attempts=5, sleep=.1):
    """Call ``f`` and retry while the UI is mid-transition.

    Only exceptions recognised by ``exception_indicates_stale_element``
    are retried (up to ``attempts`` retries, sleeping ``sleep`` seconds
    between tries); anything else is re-raised immediately.

    :returns: whatever ``f`` returns on the successful call.
    """
    previous_attempts = 0
    while True:
        try:
            return f()
        except Exception as e:
            if previous_attempts > attempts:
                raise
            if not exception_indicates_stale_element(e):
                raise
            time.sleep(sleep)
            previous_attempts += 1


def retry_during_transitions(f, attempts=5, sleep=.1):
    """Decorator form of :func:`retry_call_during_transitions`.

    Fix: the wrapper now *returns* the wrapped call's result; previously
    the return value was discarded and every decorated function yielded
    ``None``.
    """
    @wraps(f)
    def _retry(*args, **kwds):
        return retry_call_during_transitions(partial(f, *args, **kwds), attempts=attempts, sleep=sleep)
    return _retry
class NavigatesGalaxy(HasDriver):
default_password = DEFAULT_PASSWORD
def get(self, url=""):
full_url = self.build_url(url)
return self.driver.get(full_url)
@property
def navigation_data(self):
return NAVIGATION_DATA
def home(self):
self.get()
self.wait_for_selector_visible("#masthead")
self.wait_for_selector_visible("#current-history-panel")
def switch_to_main_panel(self):
self.driver.switch_to.frame(self.navigation_data["selectors"]["frames"]["main"])
@contextlib.contextmanager
def main_panel(self):
try:
self.switch_to_main_panel()
yield
finally:
self.driver.switch_to.default_content
def api_get(self, endpoint, data={}, raw=False):
full_url = self.build_url("api/" + endpoint, for_selenium=False)
response = requests.get(full_url, data=data, cookies=self.selenium_to_requests_cookies())
if raw:
return response
else:
return response.json()
def get_galaxy_session(self):
for cookie in self.driver.get_cookies():
if cookie["name"] == "galaxysession":
return cookie["value"]
def selenium_to_requests_cookies(self):
return {
'galaxysession': self.get_galaxy_session()
}
def history_panel_name_element(self):
name_selector = self.test_data["historyPanel"]["selectors"]["history"]["name"]
return self.wait_for_selector(name_selector)
def current_history(self):
history = self.api_get("histories")[0]
return history
def current_history_id(self):
return self.current_history()["id"]
def current_history_contents(self):
current_history_id = self.current_history_id()
history_contents = self.api_get("histories/%s/contents" % current_history_id)
return history_contents
def latest_history_item(self):
history_contents = self.current_history_contents()
assert len(history_contents) > 0
return history_contents[-1]
def wait_for_history(self, timeout=30, assert_ok=True):
def history_becomes_terminal(driver):
current_history_id = self.current_history_id()
state = self.api_get("histories/%s" % current_history_id)["state"]
if state not in ["running", "queued", "new", "ready"]:
return state
else:
return None
final_state = self.wait(timeout).until(history_becomes_terminal)
if assert_ok:
assert final_state == "ok", final_state
return final_state
def history_panel_wait_for_hid_ok(self, hid, timeout=60):
    """Wait until the history item with ``hid`` reaches the "ok" state."""
    self.history_panel_wait_for_hid_state(hid, 'ok', timeout=timeout)

def history_panel_wait_for_hid_visible(self, hid, timeout=60):
    """Wait until the item with ``hid`` exists and is visible in the panel.

    Returns the CSS selector for the item's element.
    """
    current_history_id = self.current_history_id()

    def history_has_hid(driver):
        contents = self.api_get("histories/%s/contents" % current_history_id)
        # Generator expression: no need to build a throwaway list for any().
        return any(d for d in contents if d["hid"] == hid)

    self.wait(timeout).until(history_has_hid)
    contents = self.api_get("histories/%s/contents" % current_history_id)
    history_item = [d for d in contents if d["hid"] == hid][0]
    history_item_selector = "#%s-%s" % (history_item["history_content_type"], history_item["id"])
    self.wait_for_selector_visible(history_item_selector)
    return history_item_selector

def history_panel_wait_for_hid_hidden(self, hid, timeout=60):
    """Wait for the history item with ``hid`` to be absent from the panel.

    NOTE(review): ``timeout`` is accepted but never forwarded to the wait,
    and the item must still appear in the API contents or the [0] lookup
    below raises IndexError - confirm intended semantics with callers.
    """
    current_history_id = self.current_history_id()
    contents = self.api_get("histories/%s/contents" % current_history_id)
    history_item = [d for d in contents if d["hid"] == hid][0]
    history_item_selector = "#%s-%s" % (history_item["history_content_type"], history_item["id"])
    self.wait_for_selector_absent(history_item_selector)
    return history_item_selector

def history_panel_wait_for_hid_state(self, hid, state, timeout=60):
    """Wait for item ``hid`` to be visible and carry CSS class ``state-<state>``.

    On timeout, re-raises with a message that includes the state the item is
    actually in, which makes failures much easier to diagnose.
    """
    history_item_selector = self.history_panel_wait_for_hid_visible(hid, timeout=timeout)
    history_item_selector_state = "%s.state-%s" % (history_item_selector, state)
    try:
        self.wait_for_selector_visible(history_item_selector_state)
    except self.TimeoutException as e:
        history_item = self.driver.find_element_by_css_selector(history_item_selector)
        current_state = "UNKNOWN"
        for clazz in history_item.get_attribute("class").split(" "):
            if clazz.startswith("state-"):
                current_state = clazz[len("state-"):]
        template = "Failed waiting on history item %d state to change to [%s] current state [%s]. "
        message = template % (hid, state, current_state)
        raise self.prepend_timeout_message(e, message)
def get_logged_in_user(self):
return self.api_get("users/current")
def is_logged_in(self):
return "email" in self.get_logged_in_user()
def _get_random_name(self, prefix=None, suffix=None, len=10):
return '%s%s%s' % (
prefix or '',
''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(len)),
suffix or '',
)
def _get_random_email(self, username=None, domain=None):
username = username or 'test'
domain = domain or 'test.test'
return self._get_random_name(prefix=username, suffix="@" + domain)
def submit_login(self, email, password=None):
if password is None:
password = self.default_password
login_info = {
'login': email,
'password': password,
}
self.click_masthead_user()
self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["login"])
with self.main_panel():
form = self.wait_for_selector(self.navigation_data["selectors"]["loginPage"]["form"])
self.fill(form, login_info)
self.click_submit(form)
def register(self, email=None, password=None, username=None, confirm=None, assert_valid=True):
if email is None:
email = self._get_random_email()
if password is None:
password = self.default_password
if confirm is None:
confirm = password
if username is None:
username = email.split("@")[0]
self.home()
self.click_masthead_user()
self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["register"])
with self.main_panel():
register_form_id = self.navigation_data["selectors"]["registrationPage"]["form"]
form = self.wait_for_id(register_form_id)
self.fill(form, dict(
email=email,
password=password,
username=username,
confirm=confirm
))
self.click_xpath(self.navigation_data["selectors"]["registrationPage"]["submit_xpath"])
if assert_valid:
self.home()
self.click_masthead_user()
user_email_element = self.wait_for_xpath_visible(self.navigation_data["selectors"]["masthead"]["userMenu"]["userEmail_xpath"])
text = user_email_element.text
assert email in text
assert self.get_logged_in_user()["email"] == email
self.click_center()
def click_center(self):
action_chains = self.action_chains()
center_element = self.driver.find_element_by_css_selector("#center")
action_chains.move_to_element(center_element).click().perform()
def perform_upload(self, test_path, ext=None, genome=None, ext_all=None, genome_all=None):
self.home()
upload_button = self.wait_for_selector_clickable(".upload-button")
upload_button.click()
if ext_all is not None:
self.wait_for_selector_visible('.upload-footer-extension')
self.select2_set_value(".upload-footer-extension", ext_all)
if genome_all is not None:
self.wait_for_selector_visible('.upload-footer-genome')
self.select2_set_value(".upload-footer-genome", genome_all)
local_upload_button = self.wait_for_selector_clickable("button#btn-local")
local_upload_button.click()
file_upload = self.wait_for_selector('input[type="file"]')
file_upload.send_keys(test_path)
if ext is not None:
self.wait_for_selector_visible('.upload-extension')
self.select2_set_value(".upload-extension", ext)
if genome is not None:
self.wait_for_selector_visible('.upload-genome')
self.select2_set_value(".upload-genome", genome)
start_button = self.wait_for_selector_clickable("button#btn-start")
start_button.click()
close_button = self.wait_for_selector_clickable("button#btn-close")
close_button.click()
def workflow_index_open(self):
    """Navigate to the workflow index page."""
    self.home()
    self.click_masthead_workflow()

def workflow_index_table_elements(self):
    """Return the workflow table rows, excluding the header row."""
    self.wait_for_selector_visible(".manage-table tbody")
    rows = self.driver.find_elements_by_css_selector(".manage-table tbody > tr")
    return rows[1:]

def workflow_index_click_option(self, option_title, workflow_index=0):
    """Open a workflow row's menu and click the entry containing ``option_title``."""
    workflow_row = self.workflow_index_table_elements()[workflow_index]
    workflow_row.find_element_by_css_selector(".menubutton").click()
    menu_element = self.wait_for_selector_visible(".popmenu-wrapper .dropdown-menu")
    for menu_option in menu_element.find_elements_by_css_selector("li a"):
        if option_title in menu_option.text:
            menu_option.click()
            break
    else:
        # for/else: no menu entry matched.
        raise AssertionError("Failed to find workflow action option with title [%s]" % option_title)

def workflow_run_submit(self):
    """Submit the workflow run form."""
    self.wait_for_selector(".ui-form-header button").click()

def tool_open(self, tool_id):
    """Click the tool panel link for ``tool_id``."""
    self.wait_for_selector('a[href$="tool_runner?tool_id=%s"]' % tool_id).click()

def tool_parameter_div(self, expanded_parameter_id):
    """Return the tool form element div addressed by its tour_id suffix."""
    return self.wait_for_selector("div.ui-form-element[tour_id$='%s']" % expanded_parameter_id)

def tool_set_value(self, expanded_parameter_id, value, expected_type=None, test_data_resolver=None):
    """Set a tool form parameter; "data" parameters go through select2."""
    div_element = self.tool_parameter_div(expanded_parameter_id)
    assert div_element
    if expected_type == "data":
        div_selector = "div.ui-form-element[tour_id$='%s']" % expanded_parameter_id
        self.select2_set_value(div_selector, value)
    else:
        input_element = div_element.find_element_by_css_selector("input")
        input_element.clear()
        input_element.send_keys(value)

def tool_execute(self):
    """Click the tool execute button."""
    self.wait_for_selector("button#execute").click()

def click_masthead_user(self):
    self.click_xpath(self.navigation_data["selectors"]["masthead"]["user"])

def click_masthead_workflow(self):
    self.click_xpath(self.navigation_data["selectors"]["masthead"]["workflow"])

def click_button_new_workflow(self):
    self.click_selector(self.navigation_data["selectors"]["workflows"]["new_button"])

def click_history_options(self):
    """Click the history options (gear) button, asserting it is visible first."""
    button = self.wait_for_selector(self.test_data["historyOptions"]["selectors"]["button"])
    assert button.is_displayed()
    icon = self.wait_for_selector(self.test_data["historyOptions"]["selectors"]["buttonIcon"])
    assert icon.is_displayed()
    button.click()

def click_history_option(self, option_label):
    """Open the history options menu and click the entry containing ``option_label``."""
    self.click_history_options()
    menu_element = self.wait_for_selector(self.history_options_menu_selector())
    menu_selection_element = menu_element.find_element_by_xpath('//ul[@id="history-options-button-menu"]/li/a[text()[contains(.,"%s")]]' % option_label)
    menu_selection_element.click()

def history_options_menu_selector(self):
    return self.test_data["historyOptions"]["selectors"]["menu"]

@retry_during_transitions
def history_panel_refresh_click(self):
    self.wait_for_selector_clickable("#history-refresh-button").click()
def history_panel_multi_operations_selector(self):
    return self.test_data["historyPanel"]["selectors"]["history"]["multiOperationsIcon"]

def history_panel_multi_operations_show(self):
    """Reveal the history multi-operations (checkbox) controls."""
    selector = self.history_panel_multi_operations_selector()
    self.wait_for_selector_clickable(selector).click()

@retry_during_transitions
def history_panel_muli_operation_select_hid(self, hid):
    # (sic) "muli" spelling retained - callers depend on this name.
    item_selector = self.history_panel_item_selector(hid, wait=True)
    self.wait_for_selector_clickable("%s .selector" % item_selector).click()

def history_panel_multi_operation_action_selector(self):
    return self.test_data["historyPanel"]["selectors"]["history"]["multiOperationsActionBtn"]

def history_panel_multi_operation_action_click(self, action):
    # Sleep preserved from the original; the panel needs time to settle
    # before the action button is truly clickable.
    time.sleep(5)
    self.wait_for_selector_clickable(self.history_panel_multi_operation_action_selector()).click()
    menu_element = self.wait_for_selector_visible(".list-action-menu.open")
    menu_element.find_element_by_link_text(action).click()

def history_panel_item_selector(self, hid, wait=False):
    """Return the CSS selector for history item ``hid``; optionally wait for it."""
    current_history_id = self.current_history_id()
    contents = self.api_get("histories/%s/contents" % current_history_id)
    try:
        history_item = [d for d in contents if d["hid"] == hid][0]
    except IndexError:
        raise Exception("Could not find history item with hid [%s] in contents [%s]" % (hid, contents))
    history_item_selector = "#%s-%s" % (history_item["history_content_type"], history_item["id"])
    if wait:
        self.wait_for_selector_visible(history_item_selector)
    return history_item_selector

def modal_body_selector(self):
    return ".modal-body"

def history_panel_item_body_selector(self, hid, wait=False):
    selector = "%s %s" % (self.history_panel_item_selector(hid), self.test_data["historyPanel"]["selectors"]["hda"]["body"])
    if wait:
        self.wait_for_selector_visible(selector)
    return selector

def hda_div_selector(self, hda_id):
    return "#dataset-%s" % hda_id

def hda_body_selector(self, hda_id):
    return "%s %s" % (self.hda_div_selector(hda_id), self.test_data["historyPanel"]["selectors"]["hda"]["body"])

def hda_click_primary_action_button(self, hid, button_key):
    """Expand history item ``hid`` and click one of its primary action buttons."""
    self.history_panel_click_item_title(hid=hid, wait=True)
    body_selector = self.history_panel_item_body_selector(hid=hid, wait=True)
    buttons_selector = body_selector + " " + self.test_data["historyPanel"]["selectors"]["hda"]["primaryActionButtons"]
    self.wait_for_selector_visible(buttons_selector)
    button_def = self.test_data["historyPanel"]["hdaPrimaryActionButtons"][button_key]
    button_item = self.wait_for_selector_visible("%s %s" % (buttons_selector, button_def["selector"]))
    return button_item.click()

def history_panel_click_item_title(self, **kwds):
    """Click the title of a history item, addressed by ``hda_id`` or ``hid``."""
    if "hda_id" in kwds:
        item_selector = self.hda_div_selector(kwds["hda_id"])
    else:
        item_selector = self.history_panel_item_selector(kwds["hid"])
    self.wait_for_selector("%s .title" % item_selector).click()
    if kwds.get("wait", False):
        # Give the item body a moment to expand.
        time.sleep(.5)

def click_hda_title(self, hda_id, wait=False):
    return self.history_panel_click_item_title(hda_id=hda_id, wait=wait)

def collection_builder_set_name(self, name):
    self.wait_for_selector_visible("input.collection-name").send_keys(name)

def collection_builder_hide_originals(self):
    self.wait_for_selector_clickable("input.hide-originals").click()

def collection_builder_create(self):
    self.wait_for_selector_clickable("button.create-collection").click()

def logout_if_needed(self):
    """Log out via the masthead menu if a user is currently logged in."""
    if self.is_logged_in():
        self.home()
        self.click_masthead_user()
        self.click_label(self.navigation_data["labels"]["masthead"]["userMenu"]["logout"])
        self.click_label('go to the home page')
        assert not self.is_logged_in()
def run_tour(self, path, skip_steps=[], sleep_on_steps={}, tour_callback=None):
if tour_callback is None:
tour_callback = NullTourCallback()
self.home()
with open(path, "r") as f:
tour_dict = yaml.load(f)
steps = tour_dict["steps"]
for i, step in enumerate(steps):
title = step.get("title", None)
skip = False
if skip_steps:
for skip_step in skip_steps:
if title == skip_step:
skip = True
if title in sleep_on_steps:
time.sleep(sleep_on_steps[title])
if skip:
continue
self.run_tour_step(step, i, tour_callback)
def tour_wait_for_clickable_element(self, selector):
wait = self.wait()
element = wait.until(sizzle.sizzle_selector_clickable(selector))
return element
def tour_wait_for_element_present(self, selector):
wait = self.wait()
element = wait.until(sizzle.sizzle_presence_of_selector(selector))
return element
def get_tooltip_text(self, element, sleep=0, click_away=True):
tooltip_selector = self.test_data["selectors"]["tooltipBalloon"]
self.wait_for_selector_absent(tooltip_selector)
action_chains = self.action_chains()
action_chains.move_to_element(element)
action_chains.perform()
if sleep > 0:
time.sleep(sleep)
tooltip_element = self.wait_for_selector_visible(tooltip_selector)
text = tooltip_element.text
if click_away:
self.click_center()
return text
def assert_tooltip_text(self, element, expected, sleep=0, click_away=True):
text = self.get_tooltip_text(element, sleep=sleep, click_away=click_away)
assert text == expected, "Tooltip text [%s] was not expected text [%s]." % (text, expected)
def assert_error_message(self, contains=None):
return self._assert_message("error", contains=contains)
def assert_warning_message(self, contains=None):
return self._assert_message("warning", contains=contains)
def _assert_message(self, type, contains=None):
element = self.wait_for_selector(self.test_data["selectors"]["messages"][type])
assert element, "No error message found, one expected."
if contains is not None:
assert contains in element.text
def assert_no_error_message(self):
self.assert_selector_absent(self.test_data["selectors"]["messages"]["error"])
def run_tour_step(self, step, step_index, tour_callback):
preclick = step.get("preclick", [])
for preclick_selector in preclick:
print("(Pre)Clicking %s" % preclick_selector)
element = self.tour_wait_for_clickable_element(preclick_selector)
element.click()
element_str = step.get("element", None)
if element_str is not None:
print("Waiting for element %s" % element_str)
element = self.tour_wait_for_element_present(element_str)
assert element is not None
textinsert = step.get("textinsert", None)
if textinsert is not None:
element.send_keys(textinsert)
tour_callback.handle_step(step, step_index)
postclick = step.get("postclick", [])
for postclick_selector in postclick:
print("(Post)Clicking %s" % postclick_selector)
element = self.tour_wait_for_clickable_element(postclick_selector)
element.click()
def select2_set_value(self, container_selector, value, with_click=True):
container_elem = self.wait_for_selector(container_selector)
text_element = container_elem.find_element_by_css_selector("input[type='text']")
text_element.send_keys(value)
# Wait for select2 options to load and then click to add this one.
drop_elem = self.wait_for_selector_visible("#select2-drop")
# Sleep seems to be needed - at least for send_enter.
time.sleep(.5)
if not with_click:
# Wait for select2 options to load and then click to add this one.
self.send_enter(text_element)
else:
select_elem = drop_elem.find_elements_by_css_selector(".select2-result-label")[0]
action_chains = self.action_chains()
action_chains.move_to_element(select_elem).click().perform()
self.wait_for_selector_absent_or_hidden("#select2-drop")
| true | true |
f7319258f8ed765fde12cec23303820fd0ad703d | 14,348 | py | Python | Lib/warnings.py | adamwen829/cpython | 0f1c7c760c6b2804f5d05cae9ca045d1fdf3d667 | [
"PSF-2.0"
] | 2 | 2017-05-05T02:07:59.000Z | 2017-08-18T09:24:48.000Z | Lib/warnings.py | adamwen829/cpython | 0f1c7c760c6b2804f5d05cae9ca045d1fdf3d667 | [
"PSF-2.0"
] | null | null | null | Lib/warnings.py | adamwen829/cpython | 0f1c7c760c6b2804f5d05cae9ca045d1fdf3d667 | [
"PSF-2.0"
] | 3 | 2016-04-21T07:58:27.000Z | 2016-05-06T21:34:44.000Z | """Python part of the warnings subsystem."""
import sys
__all__ = ["warn", "warn_explicit", "showwarning",
"formatwarning", "filterwarnings", "simplefilter",
"resetwarnings", "catch_warnings"]
def showwarning(message, category, filename, lineno, file=None, line=None):
    """Hook to write a warning to a file; replace if you like."""
    if file is None:
        file = sys.stderr
    try:
        file.write(formatwarning(message, category, filename, lineno, line))
    except OSError:
        # The stream (probably stderr) is invalid - this warning gets lost.
        pass

def formatwarning(message, category, filename, lineno, line=None):
    """Function to format a warning the standard way."""
    import linecache
    formatted = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message)
    line = linecache.getline(filename, lineno) if line is None else line
    if line:
        formatted += " %s\n" % line.strip()
    return formatted
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
                   append=False):
    """Insert an entry into the list of warnings filters (at the front).

    'action' -- one of "error", "ignore", "always", "default", "module",
                or "once"
    'message' -- a regex that the warning message must match
    'category' -- a class that the warning must be a subclass of
    'module' -- a regex that the module name must match
    'lineno' -- an integer line number, 0 matches all warnings
    'append' -- if true, append to the list of filters
    """
    import re
    assert action in ("error", "ignore", "always", "default", "module",
                      "once"), "invalid action: %r" % (action,)
    assert isinstance(message, str), "message must be a string"
    assert isinstance(category, type), "category must be a class"
    assert issubclass(category, Warning), "category must be a Warning subclass"
    assert isinstance(module, str), "module must be a string"
    assert isinstance(lineno, int) and lineno >= 0, \
        "lineno must be an int >= 0"
    new_filter = (action, re.compile(message, re.I), category,
                  re.compile(module), lineno)
    if append:
        filters.append(new_filter)
    else:
        filters.insert(0, new_filter)
    _filters_mutated()

def simplefilter(action, category=Warning, lineno=0, append=False):
    """Insert a simple entry into the list of warnings filters (at the front).

    A simple filter matches all modules and messages.
    'action' -- one of "error", "ignore", "always", "default", "module",
                or "once"
    'category' -- a class that the warning must be a subclass of
    'lineno' -- an integer line number, 0 matches all warnings
    'append' -- if true, append to the list of filters
    """
    assert action in ("error", "ignore", "always", "default", "module",
                      "once"), "invalid action: %r" % (action,)
    assert isinstance(lineno, int) and lineno >= 0, \
        "lineno must be an int >= 0"
    new_filter = (action, None, category, None, lineno)
    if append:
        filters.append(new_filter)
    else:
        filters.insert(0, new_filter)
    _filters_mutated()

def resetwarnings():
    """Clear the list of warning filters, so that no filters are active."""
    del filters[:]
    _filters_mutated()
class _OptionError(Exception):
"""Exception used by option processing helpers."""
pass
# Helper to process -W options passed via sys.warnoptions
def _processoptions(args):
for arg in args:
try:
_setoption(arg)
except _OptionError as msg:
print("Invalid -W option ignored:", msg, file=sys.stderr)
# Helper for _processoptions()
def _setoption(arg):
    """Parse one "action:message:category:module:lineno" option and install it."""
    import re
    parts = arg.split(':')
    if len(parts) > 5:
        raise _OptionError("too many fields (max 5): %r" % (arg,))
    while len(parts) < 5:
        parts.append('')
    action, message, category, module, lineno = [field.strip()
                                                 for field in parts]
    action = _getaction(action)
    message = re.escape(message)
    category = _getcategory(category)
    module = re.escape(module)
    if module:
        # Anchor so the module regex matches the whole module name.
        module += '$'
    if lineno:
        try:
            lineno = int(lineno)
            if lineno < 0:
                raise ValueError
        except (ValueError, OverflowError):
            raise _OptionError("invalid lineno %r" % (lineno,))
    else:
        lineno = 0
    filterwarnings(action, message, category, module, lineno)
# Helper for _setoption()
def _getaction(action):
if not action:
return "default"
if action == "all": return "always" # Alias
for a in ('default', 'always', 'ignore', 'module', 'once', 'error'):
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
# Helper for _setoption()
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
# Code typically replaced by _warnings
def warn(message, category=None, stacklevel=1):
    """Issue a warning, or maybe ignore it or raise an exception."""
    # A Warning instance overrides any category argument.
    if isinstance(message, Warning):
        category = message.__class__
    if category is None:
        category = UserWarning
    if not (isinstance(category, type) and issubclass(category, Warning)):
        raise TypeError("category must be a Warning subclass, "
                        "not '{:s}'".format(type(category).__name__))
    # Locate the frame the warning should be attributed to.
    try:
        caller = sys._getframe(stacklevel)
    except ValueError:
        globals = sys.__dict__
        lineno = 1
    else:
        globals = caller.f_globals
        lineno = caller.f_lineno
    if '__name__' in globals:
        module = globals['__name__']
    else:
        module = "<string>"
    filename = globals.get('__file__')
    if filename:
        fnl = filename.lower()
        if fnl.endswith((".pyc", ".pyo")):
            filename = filename[:-1]
    else:
        if module == "__main__":
            try:
                filename = sys.argv[0]
            except AttributeError:
                # embedded interpreters don't have sys.argv, see bug #839151
                filename = '__main__'
        if not filename:
            filename = module
    registry = globals.setdefault("__warningregistry__", {})
    warn_explicit(message, category, filename, lineno, module, registry,
                  globals)
def warn_explicit(message, category, filename, lineno,
                  module=None, registry=None, module_globals=None):
    """Low-level warning machinery: match the filters and dispatch the warning."""
    lineno = int(lineno)
    if module is None:
        module = filename or "<unknown>"
        if module[-3:].lower() == ".py":
            module = module[:-3]  # XXX What about leading pathname?
    if registry is None:
        registry = {}
    if registry.get('version', 0) != _filters_version:
        # Filters changed since this registry was populated; start over.
        registry.clear()
        registry['version'] = _filters_version
    if isinstance(message, Warning):
        text = str(message)
        category = message.__class__
    else:
        text = message
        message = category(message)
    key = (text, category, lineno)
    # Quick test for common case
    if registry.get(key):
        return
    # Search the filters for the first match; for/else handles "no match".
    for item in filters:
        action, msg, cat, mod, ln = item
        if ((msg is None or msg.match(text)) and
            issubclass(category, cat) and
            (mod is None or mod.match(module)) and
            (ln == 0 or lineno == ln)):
            break
    else:
        action = defaultaction
    # Early exit actions
    if action == "ignore":
        registry[key] = 1
        return
    # Prime the linecache for formatting, in case the
    # "file" is actually in a zipfile or something.
    import linecache
    linecache.getlines(filename, module_globals)
    if action == "error":
        raise message
    # Other actions
    if action == "once":
        registry[key] = 1
        oncekey = (text, category)
        if onceregistry.get(oncekey):
            return
        onceregistry[oncekey] = 1
    elif action == "always":
        pass
    elif action == "module":
        registry[key] = 1
        altkey = (text, category, 0)
        if registry.get(altkey):
            return
        registry[altkey] = 1
    elif action == "default":
        registry[key] = 1
    else:
        # Unrecognized actions are errors
        raise RuntimeError(
            "Unrecognized action (%r) in warnings.filters:\n %s" %
            (action, item))
    if not callable(showwarning):
        raise TypeError("warnings.showwarning() must be set to a "
                        "function or method")
    # Print message and context
    showwarning(message, category, filename, lineno)
class WarningMessage(object):
    """Holds the result of a single showwarning() call."""

    _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
                        "line")

    def __init__(self, message, category, filename, lineno, file=None,
                 line=None):
        # Snapshot every argument onto the instance by name.
        local_values = locals()
        for attr in self._WARNING_DETAILS:
            setattr(self, attr, local_values[attr])
        self._category_name = category.__name__ if category else None

    def __str__(self):
        return ("{message : %r, category : %r, filename : %r, lineno : %s, "
                "line : %r}" % (self.message, self._category_name,
                                self.filename, self.lineno, self.line))
class catch_warnings(object):
    """A context manager that copies and restores the warnings filter upon
    exiting the context.

    The 'record' argument specifies whether warnings should be captured by a
    custom implementation of warnings.showwarning() and be appended to a list
    returned by the context manager. Otherwise None is returned by the context
    manager. The objects appended to the list are arguments whose attributes
    mirror the arguments to showwarning().

    The 'module' argument is to specify an alternative module to the module
    named 'warnings' and imported under that name. This argument is only useful
    when testing the warnings module itself.
    """

    def __init__(self, *, record=False, module=None):
        """Specify whether to record warnings and if an alternative module
        should be used other than sys.modules['warnings'].

        For compatibility with Python 3.0, please consider all arguments to be
        keyword-only.
        """
        self._record = record
        self._module = sys.modules['warnings'] if module is None else module
        self._entered = False

    def __repr__(self):
        args = []
        if self._record:
            args.append("record=True")
        if self._module is not sys.modules['warnings']:
            args.append("module=%r" % self._module)
        return "%s(%s)" % (type(self).__name__, ", ".join(args))

    def __enter__(self):
        if self._entered:
            raise RuntimeError("Cannot enter %r twice" % self)
        self._entered = True
        # Save and shallow-copy the filter list so mutations are reversible.
        self._filters = self._module.filters
        self._module.filters = self._filters[:]
        self._module._filters_mutated()
        self._showwarning = self._module.showwarning
        if not self._record:
            return None
        log = []

        def showwarning(*args, **kwargs):
            log.append(WarningMessage(*args, **kwargs))

        self._module.showwarning = showwarning
        return log

    def __exit__(self, *exc_info):
        if not self._entered:
            raise RuntimeError("Cannot exit %r without entering first" % self)
        self._module.filters = self._filters
        self._module._filters_mutated()
        self._module.showwarning = self._showwarning
# filters contains a sequence of filter 5-tuples
# The components of the 5-tuple are:
# - an action: error, ignore, always, default, module, or once
# - a compiled regex that must match the warning message
# - a class representing the warning category
# - a compiled regex that must match the module that is being warned
# - a line number for the line being warning, or 0 to mean any line
# If either of the compiled regexes is None, match anything.
_warnings_defaults = False
try:
    from _warnings import (filters, _defaultaction, _onceregistry,
                           warn, warn_explicit, _filters_mutated)
    defaultaction = _defaultaction
    onceregistry = _onceregistry
    _warnings_defaults = True
except ImportError:
    # Pure-Python fallback state when the C accelerator is unavailable.
    filters = []
    defaultaction = "default"
    onceregistry = {}

    _filters_version = 1

    def _filters_mutated():
        global _filters_version
        _filters_version += 1


# Module initialization
_processoptions(sys.warnoptions)
if not _warnings_defaults:
    # Hide warnings that are mostly of interest to interpreter developers.
    for cls in (ImportWarning, PendingDeprecationWarning, DeprecationWarning):
        simplefilter("ignore", category=cls)
    bytes_warning = sys.flags.bytes_warning
    if bytes_warning > 1:
        bytes_action = "error"
    elif bytes_warning:
        bytes_action = "default"
    else:
        bytes_action = "ignore"
    simplefilter(bytes_action, category=BytesWarning, append=1)
    # resource usage warnings are enabled by default in pydebug mode
    if hasattr(sys, 'gettotalrefcount'):
        resource_action = "always"
    else:
        resource_action = "ignore"
    simplefilter(resource_action, category=ResourceWarning, append=1)

del _warnings_defaults
| 34.995122 | 79 | 0.617299 |
import sys
__all__ = ["warn", "warn_explicit", "showwarning",
"formatwarning", "filterwarnings", "simplefilter",
"resetwarnings", "catch_warnings"]
def showwarning(message, category, filename, lineno, file=None, line=None):
    """Hook to write a warning to a file; replace if you like."""
    if file is None:
        file = sys.stderr
    try:
        file.write(formatwarning(message, category, filename, lineno, line))
    except OSError:
        # The stream (probably stderr) is invalid - this warning gets lost.
        pass

def formatwarning(message, category, filename, lineno, line=None):
    """Function to format a warning the standard way."""
    import linecache
    formatted = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message)
    line = linecache.getline(filename, lineno) if line is None else line
    if line:
        formatted += " %s\n" % line.strip()
    return formatted
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
                   append=False):
    """Insert an entry into the list of warnings filters (at the front).

    A filter is a 5-tuple (action, message regex, category, module regex,
    lineno); ``append`` controls front vs. back insertion.
    """
    import re
    assert action in ("error", "ignore", "always", "default", "module",
                      "once"), "invalid action: %r" % (action,)
    assert isinstance(message, str), "message must be a string"
    assert isinstance(category, type), "category must be a class"
    assert issubclass(category, Warning), "category must be a Warning subclass"
    assert isinstance(module, str), "module must be a string"
    assert isinstance(lineno, int) and lineno >= 0, \
        "lineno must be an int >= 0"
    new_filter = (action, re.compile(message, re.I), category,
                  re.compile(module), lineno)
    if append:
        filters.append(new_filter)
    else:
        filters.insert(0, new_filter)
    _filters_mutated()

def simplefilter(action, category=Warning, lineno=0, append=False):
    """Insert a simple entry (matches all modules and messages) into the
    warnings filter list.
    """
    assert action in ("error", "ignore", "always", "default", "module",
                      "once"), "invalid action: %r" % (action,)
    assert isinstance(lineno, int) and lineno >= 0, \
        "lineno must be an int >= 0"
    new_filter = (action, None, category, None, lineno)
    if append:
        filters.append(new_filter)
    else:
        filters.insert(0, new_filter)
    _filters_mutated()

def resetwarnings():
    """Clear the list of warning filters, so that no filters are active."""
    del filters[:]
    _filters_mutated()
class _OptionError(Exception):
pass
def _processoptions(args):
for arg in args:
try:
_setoption(arg)
except _OptionError as msg:
print("Invalid -W option ignored:", msg, file=sys.stderr)
def _setoption(arg):
    """Parse one "action:message:category:module:lineno" option and install it."""
    import re
    parts = arg.split(':')
    if len(parts) > 5:
        raise _OptionError("too many fields (max 5): %r" % (arg,))
    while len(parts) < 5:
        parts.append('')
    action, message, category, module, lineno = [field.strip()
                                                 for field in parts]
    action = _getaction(action)
    message = re.escape(message)
    category = _getcategory(category)
    module = re.escape(module)
    if module:
        # Anchor so the module regex matches the whole module name.
        module += '$'
    if lineno:
        try:
            lineno = int(lineno)
            if lineno < 0:
                raise ValueError
        except (ValueError, OverflowError):
            raise _OptionError("invalid lineno %r" % (lineno,))
    else:
        lineno = 0
    filterwarnings(action, message, category, module, lineno)
def _getaction(action):
if not action:
return "default"
if action == "all": return "always"
for a in ('default', 'always', 'ignore', 'module', 'once', 'error'):
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
def warn(message, category=None, stacklevel=1):
    """Issue a warning, or maybe ignore it or raise an exception."""
    # A Warning instance overrides any category argument.
    if isinstance(message, Warning):
        category = message.__class__
    if category is None:
        category = UserWarning
    if not (isinstance(category, type) and issubclass(category, Warning)):
        raise TypeError("category must be a Warning subclass, "
                        "not '{:s}'".format(type(category).__name__))
    # Locate the frame the warning should be attributed to.
    try:
        caller = sys._getframe(stacklevel)
    except ValueError:
        globals = sys.__dict__
        lineno = 1
    else:
        globals = caller.f_globals
        lineno = caller.f_lineno
    if '__name__' in globals:
        module = globals['__name__']
    else:
        module = "<string>"
    filename = globals.get('__file__')
    if filename:
        fnl = filename.lower()
        if fnl.endswith((".pyc", ".pyo")):
            filename = filename[:-1]
    else:
        if module == "__main__":
            try:
                filename = sys.argv[0]
            except AttributeError:
                # embedded interpreters don't have sys.argv
                filename = '__main__'
        if not filename:
            filename = module
    registry = globals.setdefault("__warningregistry__", {})
    warn_explicit(message, category, filename, lineno, module, registry,
                  globals)
def warn_explicit(message, category, filename, lineno,
                  module=None, registry=None, module_globals=None):
    """Low-level warnings interface: all context is supplied explicitly.

    Checks the per-module `registry` for duplicates, finds the first
    matching entry in the global filter list, and performs the selected
    action ("error" raises, "ignore" suppresses, otherwise the warning is
    shown via showwarning(), possibly after deduplication).
    """
    lineno = int(lineno)
    if module is None:
        module = filename or "<unknown>"
        if module[-3:].lower() == ".py":
            module = module[:-3] # XXX What about leading pathname?
    if registry is None:
        registry = {}
    # A registry written under an older filter configuration is stale.
    if registry.get('version', 0) != _filters_version:
        registry.clear()
        registry['version'] = _filters_version
    if isinstance(message, Warning):
        text = str(message)
        category = message.__class__
    else:
        text = message
        message = category(message)
    key = (text, category, lineno)
    # Quick test for common case
    if registry.get(key):
        return
    # Search the filters; first match wins, otherwise fall back to the
    # default action.
    for item in filters:
        action, msg, cat, mod, ln = item
        if ((msg is None or msg.match(text)) and
            issubclass(category, cat) and
            (mod is None or mod.match(module)) and
            (ln == 0 or lineno == ln)):
            break
    else:
        action = defaultaction
    # Early exit actions
    if action == "ignore":
        registry[key] = 1
        return
    # Prime the linecache for formatting, in case the
    # "file" is actually in a zipfile or something.
    import linecache
    linecache.getlines(filename, module_globals)
    if action == "error":
        raise message
    # Other actions
    if action == "once":
        # Deduplicate globally across all modules.
        registry[key] = 1
        oncekey = (text, category)
        if onceregistry.get(oncekey):
            return
        onceregistry[oncekey] = 1
    elif action == "always":
        pass
    elif action == "module":
        # Deduplicate per module, ignoring the line number.
        registry[key] = 1
        altkey = (text, category, 0)
        if registry.get(altkey):
            return
        registry[altkey] = 1
    elif action == "default":
        registry[key] = 1
    else:
        # Unrecognized actions are errors
        raise RuntimeError(
            "Unrecognized action (%r) in warnings.filters:\n %s" %
            (action, item))
    if not callable(showwarning):
        raise TypeError("warnings.showwarning() must be set to a "
                        "function or method")
    # Print message and context
    showwarning(message, category, filename, lineno)
class WarningMessage(object):
    """Container for the arguments of one emitted warning."""
    _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
                        "line")
    def __init__(self, message, category, filename, lineno, file=None,
                 line=None):
        # Store each argument under its own name; _WARNING_DETAILS keeps
        # the canonical attribute list for consumers.
        self.message = message
        self.category = category
        self.filename = filename
        self.lineno = lineno
        self.file = file
        self.line = line
        self._category_name = category.__name__ if category else None
    def __str__(self):
        return ("{message : %r, category : %r, filename : %r, lineno : %s, "
                "line : %r}" % (self.message, self._category_name,
                                self.filename, self.lineno, self.line))
class catch_warnings(object):
    """Context manager that saves and restores the warnings filter state.

    On exit the filter list and showwarning() of the target warnings
    module are restored.  With record=True, __enter__ returns a list that
    collects WarningMessage objects instead of displaying warnings.
    """
    def __init__(self, *, record=False, module=None):
        # `module` is mainly useful for testing the warnings module itself;
        # by default the live sys.modules['warnings'] is patched.
        self._record = record
        self._module = sys.modules['warnings'] if module is None else module
        self._entered = False
    def __repr__(self):
        args = []
        if self._record:
            args.append("record=True")
        if self._module is not sys.modules['warnings']:
            args.append("module=%r" % self._module)
        name = type(self).__name__
        return "%s(%s)" % (name, ", ".join(args))
    def __enter__(self):
        # Not reentrant: each instance may be entered at most once at a time.
        if self._entered:
            raise RuntimeError("Cannot enter %r twice" % self)
        self._entered = True
        # Save the filter list and install a copy, so in-context changes
        # can be rolled back wholesale on exit.
        self._filters = self._module.filters
        self._module.filters = self._filters[:]
        self._module._filters_mutated()
        self._showwarning = self._module.showwarning
        if self._record:
            log = []
            def showwarning(*args, **kwargs):
                log.append(WarningMessage(*args, **kwargs))
            self._module.showwarning = showwarning
            return log
        else:
            return None
    def __exit__(self, *exc_info):
        if not self._entered:
            raise RuntimeError("Cannot exit %r without entering first" % self)
        # Restore the saved filter list and showwarning hook.
        self._module.filters = self._filters
        self._module._filters_mutated()
        self._module.showwarning = self._showwarning
# filters contains a sequence of filter 5-tuples
# The components of the 5-tuple are:
# - an action: error, ignore, always, default, module, or once
# - a compiled regex that must match the warning message
# - a class representing the warning category
# - a compiled regex that must match the module that is being warned
# - a line number for the line being warned, or 0 to mean any line
# If either of the compiled regexs is None, match anything.
_warnings_defaults = False
try:
    # Prefer the C-accelerated implementation when available: it supplies
    # the shared filter state plus fast warn()/warn_explicit().
    from _warnings import (filters, _defaultaction, _onceregistry,
                           warn, warn_explicit, _filters_mutated)
    defaultaction = _defaultaction
    onceregistry = _onceregistry
    _warnings_defaults = True
except ImportError:
    # Pure-Python fallback state.
    filters = []
    defaultaction = "default"
    onceregistry = {}
    _filters_version = 1
    def _filters_mutated():
        global _filters_version
        _filters_version += 1
# Module initialization
_processoptions(sys.warnoptions)
if not _warnings_defaults:
    # Install the default filters for the pure-Python implementation.
    silence = [ImportWarning, PendingDeprecationWarning]
    silence.append(DeprecationWarning)
    for cls in silence:
        simplefilter("ignore", category=cls)
    bytes_warning = sys.flags.bytes_warning
    if bytes_warning > 1:
        bytes_action = "error"
    elif bytes_warning:
        bytes_action = "default"
    else:
        bytes_action = "ignore"
    simplefilter(bytes_action, category=BytesWarning, append=1)
    # resource usage warnings are enabled by default in pydebug mode
    if hasattr(sys, 'gettotalrefcount'):
        resource_action = "always"
    else:
        resource_action = "ignore"
    simplefilter(resource_action, category=ResourceWarning, append=1)
del _warnings_defaults
| true | true |
f731926d323d02ff6850d4318c6001330ae3775c | 737 | py | Python | benchmarks/__init__.py | cedric05/dothttp | ae68f2c3dbb8f1053a0feeab0ac829aa5d3a2a6e | [
"Apache-2.0"
] | 10 | 2021-02-09T17:21:49.000Z | 2021-12-05T19:10:53.000Z | benchmarks/__init__.py | cedric05/dothttp | ae68f2c3dbb8f1053a0feeab0ac829aa5d3a2a6e | [
"Apache-2.0"
] | 60 | 2021-02-07T17:20:47.000Z | 2022-03-12T06:26:23.000Z | benchmarks/__init__.py | cedric05/dothttp | ae68f2c3dbb8f1053a0feeab0ac829aa5d3a2a6e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
import os
from dothttp import Config
from dothttp.request_base import RequestCompiler
def run_model():
    """
    main use case is to check benchmarks for loading from file to textx model
    modifying http.tx would increase or lower performance.
    """
    example_file = os.path.join(os.path.dirname(__file__), "../examples/example.http")
    config = Config(
        file=example_file,
        env=[],
        properties=[],
        curl=False,
        property_file=None,
        debug=True,
        no_cookie=False,
        format=False,
        info=False,
        target="1",
    )
    compiler = RequestCompiler(config)
    compiler.load()
    compiler.load_def()
| 28.346154 | 110 | 0.671642 |
import os
from dothttp import Config
from dothttp.request_base import RequestCompiler
def run_model():
    """Benchmark loading examples/example.http into the textx model.

    Modifying http.tx would increase or lower this benchmark's performance.
    """
    filename = os.path.join(os.path.dirname(__file__), "../examples/example.http")
    envs = []
    target = "1"
    nocookie = False
    curl = False
    properties = []
    config = Config(file=filename, env=envs, properties=properties, curl=curl, property_file=None, debug=True,
                    no_cookie=nocookie, format=False, info=False, target=target)
    comp = RequestCompiler(config)
    comp.load()
    comp.load_def()
    return
| true | true |
f73192bc3cb337f8236fb50957755b303a00785e | 6,757 | py | Python | cheating/client.py | totu/challenge-fi-2021 | 7e41581e3dd5a61b838ecb7aadfde501b94802c5 | [
"Unlicense"
] | null | null | null | cheating/client.py | totu/challenge-fi-2021 | 7e41581e3dd5a61b838ecb7aadfde501b94802c5 | [
"Unlicense"
] | null | null | null | cheating/client.py | totu/challenge-fi-2021 | 7e41581e3dd5a61b838ecb7aadfde501b94802c5 | [
"Unlicense"
] | null | null | null | import pygame
from pygame.locals import *
from _thread import *
import os
import random
import socket
import json
import math
from gamelogic import objects, world, common
SOCK_COUNT = 2  # number of parallel server connections opened below
pygame.init()
W, H = 800, 437  # window size in pixels
win = pygame.display.set_mode((W, H))
pygame.display.set_caption("Projekt Kyyber 2021 Client")
# Parallax background layers (ordered back-to-front after the reverse()).
bg_orig = [
    pygame.image.load(os.path.join("images/bg1/", str(x) + ".png")).convert_alpha()
    for x in range(1, 8)
]
# bg_orig = [pygame.image.load(os.path.join('images/bg2/', str(x) + '.png')).convert_alpha() for x in range(1,7)]
bg = []
# Width of one background tile once scaled to the window height.
BGW = int(bg_orig[0].get_width() * (H / float(bg_orig[0].get_height())))
for i in bg_orig:
    bg.append(pygame.transform.scale(i, (BGW, H)))
bg.reverse()
clock = pygame.time.Clock()
camera_pos = 0  # world x-coordinate of the camera's left edge
gameoverfade = 0  # alpha of the game-over veil, ramps up toward 255
def redrawWindow():
    """Render one frame: parallax background, entities, HUD/achievements."""
    global camera_pos, gameoverfade
    largeFont = pygame.font.SysFont("comicsans", 30)
    hugeFont = pygame.font.SysFont("comicsans", 90)
    gameover = False
    # Follow our own player with the camera; if our id is no longer in the
    # player list we are out of the game -> show the game-over text.
    for player in gamestate.players:
        if player.id == my_id:
            camera_pos = player.x - W / 2
            # Clamp the camera to the world bounds.
            camera_pos = min(camera_pos, world.WORLD_SIZE - W)
            camera_pos = max(camera_pos, 0)
            text = largeFont.render(
                "AMMO: {}, RELOAD: {}, HP: {}".format(
                    player.ammo, player.reloadleft, player.hp
                ),
                1,
                (255, 255, 255),
            )
            break
    else:
        text = hugeFont.render("TRY HARDER!", 1, (255, 255, 255))
        gameover = True
    # Deeper layers scroll slower than the camera (parallax effect).
    for j, layer in enumerate(bg):
        for i in range(0, W * 2, BGW):
            camera_pos_bg = (camera_pos * (float(j) / float(len(bg)))) % BGW
            win.blit(bg[j], (i - camera_pos_bg, 0))
    for player in gamestate.players:
        player.draw(win, camera_pos, my_id)
    sorted_enemies = sorted(gamestate.enemies, key=lambda i: i.y_pos)
    sorted_enemies.reverse()  # Closest ones to front
    for enemy in sorted_enemies:
        enemy.draw(win, camera_pos)
    for boss in gamestate.bosses:
        boss.draw(win, camera_pos)
    for projectile in gamestate.projectiles:
        projectile.draw(win, camera_pos)
    if gameover:
        # Fade a black veil in over the scene, then center the text.
        veil = pygame.Surface((W, H))
        veil.fill((0, 0, 0))
        veil.set_alpha(gameoverfade)
        gameoverfade += 0.1
        if gameoverfade > 255:
            gameoverfade = 255
        win.blit(veil, (0, 0))
        win.blit(text, (W / 2 - text.get_width() / 2, H / 2 - text.get_height() / 2))
    else:
        win.blit(text, (20, 400))
    # Achievement list in the top-left corner, one line per entry.
    for i, achievement in enumerate(achievements):
        win.blit(
            largeFont.render(
                str(achievement) + ": " + str(achievements[achievement]),
                1,
                (255, 255, 255),
            ),
            (10, 10 + 30 * i),
        )
    pygame.display.update()
def update_gamestate_thread():
    """Background thread: tick local physics and redraw at world.SPEED fps."""
    global gamestate
    while True:
        clock.tick(world.SPEED)
        gamestate = common.physics(gamestate)
        redrawWindow()
# --- Connection setup and main client loop ---
gamestate = objects.gamestate()
achievements = []
my_id = 0
sendevent = []  # events queued for the server this tick
start_new_thread(update_gamestate_thread, ())
sockets = set()
main = None  # the connection we read game state from
for i in range(SOCK_COUNT):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sockets.add(sock)
serveraddr = "127.0.0.1"
serveraddr = "challenge.fi" # Join remote server
for sock in sockets:
    if main is None:
        main = sock
    sock.connect((serveraddr, 9999))
# First packet from the server assigns our player id.
data = main.recv(1024).strip()
my_id = json.loads(data)["player_id"]
while True:
    # Find our own player object in the latest game state.
    me = None
    for player in gamestate.players:
        if player.id == my_id:
            me = player
    if me:
        # Auto-fire at a fixed world position every tick.
        sendevent.append(
            [
                ['shoot', 4900, 100, 0.38]
            ])
        keys = pygame.key.get_pressed()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
            if keys[pygame.K_x]:
                eka = me.x #+ math.cos(me.mouseDir(camera_pos)) * 60
                toka = me.y + 60 #- math.sin(me.mouseDir(camera_pos)) * 60
                mouse = me.mouseDir(camera_pos)
                sendevent.append(
                    [
                        "shoot", eka, toka, mouse
                    ]
                )
                print(f"{eka}, {toka}, {mouse}")
            if keys[pygame.K_r]:
                eka = 4900
                sendevent.append(
                    # ['shoot', 4931.029795212544, 260.67432742841345, 1.7208623491309805]
                    #['shoot', 4754.656506303356, 100, 0]
                    ['shoot', 4900, 100, 0.38]
                    #205.57772405728508, 297.39211224788403, 0.38633721482131445]
                )
            if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
                # Shoot from slightly in front of the player toward the mouse.
                eka = me.x - 20 + math.cos(me.mouseDir(camera_pos)) * 60
                print(eka)
                toka = me.y - math.sin(me.mouseDir(camera_pos)) * 60
                mouse = me.mouseDir(camera_pos)
                sendevent.append(
                    [
                        "shoot", eka, toka, mouse
                    ]
                )
                print(f"{eka}, {toka}, {mouse}")
        if keys[pygame.K_SPACE]:
            sendevent.append(["jump"])
        if keys[pygame.K_m]:
            sendevent.append(["jump"])
            sendevent.append(["reload"])
            sendevent.append(["jump"])
        if keys[pygame.K_a] and not me.dir == 1:
            sendevent.append(["left"])
        elif keys[pygame.K_d] and not me.dir == -1:
            sendevent.append(["right"])
        elif not me.dir == 0:
            sendevent.append(["stop"])
        # Apply our own events locally for instant feedback.
        common.parse_clientevents(my_id, json.dumps(sendevent), gamestate)
        # for boss in gamestate.bosses:
        #     print(f"{boss.id} {boss.x},{boss.y}, hp:{boss.hp}")
        # Send the same events on every open connection.
        for sock in sockets:
            sock.sendall(bytes(json.dumps(sendevent), "utf-8"))
        # if gamestate.projectiles:
        #     for projectile in gamestate.projectiles:
        #         print(f"{projectile.x},{projectile.y} d:{projectile.dir} r:{projectile.moveremaining}")
        if sendevent:
            print(sendevent)
            print(f"{me.x},{me.y}")
        sendevent = []
        for boss in gamestate.bosses:
            print(boss.x-me.x, boss.y-me.y, boss.hp)
        # receive packets until a valid json can be formed
        response = b""
        while True:
            chunk = main.recv(1000)
            response += chunk
            try:
                gamestate_dict, achievements = json.loads(response)
                gamestate = common.update_gamestate_from_dict(
                    gamestate, gamestate_dict
                )
                break
            except Exception as e:
                pass
| 31.282407 | 113 | 0.547432 | import pygame
from pygame.locals import *
from _thread import *
import os
import random
import socket
import json
import math
from gamelogic import objects, world, common
# Comment-stripped copy of the client setup: window, parallax layers, camera.
SOCK_COUNT = 2  # number of parallel server connections
pygame.init()
W, H = 800, 437
win = pygame.display.set_mode((W, H))
pygame.display.set_caption("Projekt Kyyber 2021 Client")
bg_orig = [
    pygame.image.load(os.path.join("images/bg1/", str(x) + ".png")).convert_alpha()
    for x in range(1, 8)
]
bg = []
# Width of one background tile once scaled to window height.
BGW = int(bg_orig[0].get_width() * (H / float(bg_orig[0].get_height())))
for i in bg_orig:
    bg.append(pygame.transform.scale(i, (BGW, H)))
bg.reverse()
clock = pygame.time.Clock()
camera_pos = 0  # world x of the camera's left edge
gameoverfade = 0  # alpha of the game-over veil
def redrawWindow():
    """Render one frame: parallax background, entities, HUD/achievements."""
    global camera_pos, gameoverfade
    largeFont = pygame.font.SysFont("comicsans", 30)
    hugeFont = pygame.font.SysFont("comicsans", 90)
    gameover = False
    # Camera follows our player; absence from the list means game over.
    for player in gamestate.players:
        if player.id == my_id:
            camera_pos = player.x - W / 2
            camera_pos = min(camera_pos, world.WORLD_SIZE - W)
            camera_pos = max(camera_pos, 0)
            text = largeFont.render(
                "AMMO: {}, RELOAD: {}, HP: {}".format(
                    player.ammo, player.reloadleft, player.hp
                ),
                1,
                (255, 255, 255),
            )
            break
    else:
        text = hugeFont.render("TRY HARDER!", 1, (255, 255, 255))
        gameover = True
    # Deeper layers scroll slower (parallax).
    for j, layer in enumerate(bg):
        for i in range(0, W * 2, BGW):
            camera_pos_bg = (camera_pos * (float(j) / float(len(bg)))) % BGW
            win.blit(bg[j], (i - camera_pos_bg, 0))
    for player in gamestate.players:
        player.draw(win, camera_pos, my_id)
    # Nearest enemies are drawn last so they end up on top.
    sorted_enemies = sorted(gamestate.enemies, key=lambda i: i.y_pos)
    sorted_enemies.reverse()
    for enemy in sorted_enemies:
        enemy.draw(win, camera_pos)
    for boss in gamestate.bosses:
        boss.draw(win, camera_pos)
    for projectile in gamestate.projectiles:
        projectile.draw(win, camera_pos)
    if gameover:
        # Fade a black veil in over the scene.
        veil = pygame.Surface((W, H))
        veil.fill((0, 0, 0))
        veil.set_alpha(gameoverfade)
        gameoverfade += 0.1
        if gameoverfade > 255:
            gameoverfade = 255
        win.blit(veil, (0, 0))
        win.blit(text, (W / 2 - text.get_width() / 2, H / 2 - text.get_height() / 2))
    else:
        win.blit(text, (20, 400))
    for i, achievement in enumerate(achievements):
        win.blit(
            largeFont.render(
                str(achievement) + ": " + str(achievements[achievement]),
                1,
                (255, 255, 255),
            ),
            (10, 10 + 30 * i),
        )
    pygame.display.update()
def update_gamestate_thread():
    """Background thread: tick local physics and redraw at world.SPEED fps."""
    global gamestate
    while True:
        clock.tick(world.SPEED)
        gamestate = common.physics(gamestate)
        redrawWindow()
# --- Connection setup and main client loop (comment-stripped copy) ---
gamestate = objects.gamestate()
achievements = []
my_id = 0
sendevent = []  # events queued for the server this tick
start_new_thread(update_gamestate_thread, ())
sockets = set()
main = None  # connection we read game state from
for i in range(SOCK_COUNT):
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sockets.add(sock)
serveraddr = "127.0.0.1"
serveraddr = "challenge.fi"
for sock in sockets:
    if main is None:
        main = sock
    sock.connect((serveraddr, 9999))
# First packet from the server assigns our player id.
data = main.recv(1024).strip()
my_id = json.loads(data)["player_id"]
while True:
    me = None
    for player in gamestate.players:
        if player.id == my_id:
            me = player
    if me:
        # Auto-fire at a fixed world position every tick.
        sendevent.append(
            [
                ['shoot', 4900, 100, 0.38]
            ])
        keys = pygame.key.get_pressed()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
            if keys[pygame.K_x]:
                eka = me.x
                toka = me.y + 60
                mouse = me.mouseDir(camera_pos)
                sendevent.append(
                    [
                        "shoot", eka, toka, mouse
                    ]
                )
                print(f"{eka}, {toka}, {mouse}")
            if keys[pygame.K_r]:
                eka = 4900
                sendevent.append(
                    ['shoot', 4900, 100, 0.38]
                )
            if event.type == pygame.MOUSEBUTTONDOWN and event.button == 1:
                # Shoot from slightly in front of the player toward the mouse.
                eka = me.x - 20 + math.cos(me.mouseDir(camera_pos)) * 60
                print(eka)
                toka = me.y - math.sin(me.mouseDir(camera_pos)) * 60
                mouse = me.mouseDir(camera_pos)
                sendevent.append(
                    [
                        "shoot", eka, toka, mouse
                    ]
                )
                print(f"{eka}, {toka}, {mouse}")
        if keys[pygame.K_SPACE]:
            sendevent.append(["jump"])
        if keys[pygame.K_m]:
            sendevent.append(["jump"])
            sendevent.append(["reload"])
            sendevent.append(["jump"])
        if keys[pygame.K_a] and not me.dir == 1:
            sendevent.append(["left"])
        elif keys[pygame.K_d] and not me.dir == -1:
            sendevent.append(["right"])
        elif not me.dir == 0:
            sendevent.append(["stop"])
        # Apply our own events locally, then send them on every connection.
        common.parse_clientevents(my_id, json.dumps(sendevent), gamestate)
        for sock in sockets:
            sock.sendall(bytes(json.dumps(sendevent), "utf-8"))
        if sendevent:
            print(sendevent)
            print(f"{me.x},{me.y}")
        sendevent = []
        for boss in gamestate.bosses:
            print(boss.x-me.x, boss.y-me.y, boss.hp)
        # Receive packets until a valid json can be formed.
        response = b""
        while True:
            chunk = main.recv(1000)
            response += chunk
            try:
                gamestate_dict, achievements = json.loads(response)
                gamestate = common.update_gamestate_from_dict(
                    gamestate, gamestate_dict
                )
                break
            except Exception as e:
                pass
f73192e61c63b58e1cc57017f11c01d7531dc764 | 3,051 | py | Python | pyaz/pyaz_utils.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/pyaz_utils.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | null | null | null | pyaz/pyaz_utils.py | py-az-cli/py-az-cli | 9a7dc44e360c096a5a2f15595353e9dad88a9792 | [
"MIT"
] | 1 | 2022-02-03T09:12:01.000Z | 2022-02-03T09:12:01.000Z | """Utility functions for the pyaz generated code to use."""
import json
import logging
import shlex
import shutil
import subprocess
from typing import Dict
def _call_az(command: str, parameters: Dict) -> object:
    """
    Call an az command (supplied as a string, and parameters as dictionary).

    Calls az cli via a subprocess.
    Returns the az cli output: the parsed JSON when stdout is valid JSON,
    otherwise the raw stdout text; None if az produced no output at all.

    Raises:
        subprocess.CalledProcessError: if az exits with a non-zero status
            (because check=True below).
        Exception: if az exited successfully with empty stdout but wrote
            to stderr.

    Example:
        `
        _call_az("az group create", locals())
        `
    """
    # format the parameters into a list
    params = _get_params(parameters)
    # split commands into a list and append the parameters
    commands = command.split()
    commands.extend(params)
    full_command = " ".join(commands)
    print(f"Executing command: {full_command}")
    logging.info("Executing command: %s", full_command)
    # split full command using shlex rules (keeps quoted values intact)
    commands = shlex.split(full_command)
    # strip off az and replace it with the full path to az, to accommodate
    # Windows where plain "az" may not resolve without the shell
    commands.pop(0)
    commands.insert(0, shutil.which("az"))
    output = subprocess.run(
        commands,
        shell=False,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=True,
    )
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        try:
            return json.loads(stdout)
        except ValueError:
            # Not JSON (e.g. az table/tsv output): hand back the raw text.
            # ValueError covers json.JSONDecodeError and, unlike the previous
            # bare except, doesn't swallow KeyboardInterrupt/SystemExit.
            return stdout
    elif stderr:
        raise Exception(stderr)
def _get_cli_param_name(name: str) -> str:
"""
Convert parameter name back to cli format from pythonic version.
- Strips trailing underscore from keywords
- Converts remaining underscores to dashes
- Adds leading dashes
"""
if name[-1] == "_":
name = name[0:-1]
name = name.replace("_", "-")
name = f"--{name}"
return name
def _get_params(params: Dict) -> list:
    """
    Given the built-in locals dictionary returns a formatted list of parameters.

    The list returned contains the az cli formatted parameter names and
    values.  Falsy values (None, "", False, 0, ...) are omitted entirely;
    booleans become bare flags; "tags" may hold several shell-quoted
    values and is split with shlex.
    """
    output = []
    for name, value in params.items():
        if not value:
            # Unset / falsy parameters are skipped altogether.
            continue
        # Both flags and valued parameters start with the cli name.
        # (The original duplicated this append in both branches.)
        output.append(_get_cli_param_name(name))
        if isinstance(value, bool):
            # Booleans are flags: name only, no value.
            continue
        if name == "tags":
            # special case to handle tags, need to apply shlex.split
            # to handle case where there are multiple tags
            output.extend(f'"{tag}"' for tag in shlex.split(value))
        else:
            # wrap parameter value in quotes
            output.append(f'"{value}"')
    return output
| 28.514019 | 87 | 0.621763 |
import json
import logging
import shlex
import shutil
import subprocess
from typing import Dict
def _call_az(command: str, parameters: Dict) -> object:
    """Run an az CLI command in a subprocess; parameters come as a dict.

    Returns parsed JSON when stdout is valid JSON, otherwise the raw
    stdout text; raises on non-zero exit (check=True) or when az wrote
    only to stderr.
    """
    params = _get_params(parameters)
    commands = command.split()
    commands.extend(params)
    full_command = " ".join(commands)
    print(f"Executing command: {full_command}")
    logging.info("Executing command: %s", full_command)
    # Re-split with shlex so quoted parameter values stay intact.
    commands = shlex.split(full_command)
    # Replace "az" with its resolved full path (needed on Windows).
    commands.pop(0)
    commands.insert(0, shutil.which("az"))
    output = subprocess.run(
        commands,
        shell=False,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        check=True,
    )
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        try:
            return json.loads(stdout)
        except:
            # NOTE(review): bare except — consider narrowing to ValueError.
            return stdout
    elif stderr:
        raise Exception(stderr)
def _get_cli_param_name(name: str) -> str:
if name[-1] == "_":
name = name[0:-1]
name = name.replace("_", "-")
name = f"--{name}"
return name
def _get_params(params: Dict) -> list:
    """Format a parameter dict into the az CLI argument list.

    Falsy values are skipped; booleans become bare flags; other values
    are appended quoted after their CLI name.
    """
    output = []
    for param in params:
        if params[param]:
            if isinstance(params[param], bool):
                # Flag: parameter name only, no value.
                output.append(_get_cli_param_name(param))
            else:
                output.append(_get_cli_param_name(param))
                # special case to handle tags, need to apply shlex.split
                # to handle case where there are multiple tags
                if param == "tags":
                    param_values = shlex.split(params[param])
                    param_values = [f'"{value}"' for value in param_values]
                    output.extend(param_values)
                else:
                    # wrap parameter value in quotes
                    output.append(f'"{params[param]}"')
    return output
| true | true |
f731932dfc82782e68c3b57938cdc030d69e56ef | 53,591 | py | Python | electrum/wallet_db.py | L-47/qtum-electrum | dd1b0a8b6ef6e96015a6210de36b23949eaad359 | [
"MIT"
] | 1 | 2020-07-21T18:37:59.000Z | 2020-07-21T18:37:59.000Z | electrum/wallet_db.py | L-47/qtum-electrum | dd1b0a8b6ef6e96015a6210de36b23949eaad359 | [
"MIT"
] | null | null | null | electrum/wallet_db.py | L-47/qtum-electrum | dd1b0a8b6ef6e96015a6210de36b23949eaad359 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2015 Thomas Voegtlin
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os
import ast
import json
import copy
import threading
from collections import defaultdict
from functools import reduce
from typing import Dict, Optional, List, Tuple, Set, Iterable, NamedTuple, Sequence, TYPE_CHECKING, Union
import binascii
from . import util, bitcoin
from .util import profiler, WalletFileException, multisig_type, TxMinedInfo, bfh
from .invoices import PR_TYPE_ONCHAIN, Invoice
from .keystore import bip44_derivation
from .transaction import Transaction, TxOutpoint, tx_from_any, PartialTransaction, PartialTxOutput
from .logging import Logger
from .lnutil import LOCAL, REMOTE, FeeUpdate, UpdateAddHtlc, LocalConfig, RemoteConfig, Keypair, OnlyPubkeyKeypair, RevocationStore, ChannelBackupStorage
from .lnutil import ChannelConstraints, Outpoint, ShachainElement
from .json_db import StoredDict, JsonDB, locked, modifier
from .plugin import run_hook, plugin_loaders
from .paymentrequest import PaymentRequest
from .submarine_swaps import SwapData
from .bitcoin import Token, Delegation
if TYPE_CHECKING:
from .storage import WalletStorage
# seed_version is now used for the version of the wallet file
OLD_SEED_VERSION = 4        # electrum versions < 2.0
NEW_SEED_VERSION = 11       # electrum versions >= 2.0
FINAL_SEED_VERSION = 32     # electrum >= 2.7 will set this to prevent
                            # old versions from overwriting new format
class TxFeesValue(NamedTuple):
    """Fee-related info stored per transaction."""
    fee: Optional[int] = None                 # absolute fee, if known
    is_calculated_by_us: bool = False         # True if we computed the fee ourselves
    num_inputs: Optional[int] = None          # input count of the tx, if known
class WalletDB(JsonDB):
    def __init__(self, raw, *, manual_upgrades: bool):
        """`raw` is the wallet file contents (falsy -> create a new wallet).

        With manual_upgrades=True an outdated file is loaded as-is and the
        caller must invoke upgrade() explicitly.
        """
        JsonDB.__init__(self, {})
        self._manual_upgrades = manual_upgrades
        self._called_after_upgrade_tasks = False
        if raw:  # loading existing db
            self.load_data(raw)
            self.load_plugins()
        else:  # creating new db
            self.put('seed_version', FINAL_SEED_VERSION)
            self._after_upgrade_tasks()
    def load_data(self, s):
        """Parse wallet file contents `s` into self.data.

        Tries JSON first, then falls back to ast.literal_eval for very old
        repr()-style wallet files, keeping only entries that survive a JSON
        round-trip.  May trigger the upgrade path depending on
        _manual_upgrades.
        """
        try:
            self.data = json.loads(s)
        except:
            # Pre-JSON wallet file: parse with literal_eval and keep only
            # JSON-serializable key/value pairs.
            try:
                d = ast.literal_eval(s)
                labels = d.get('labels', {})
            except Exception as e:
                raise WalletFileException("Cannot read wallet file. (parsing failed)")
            self.data = {}
            for key, value in d.items():
                try:
                    json.dumps(key)
                    json.dumps(value)
                except:
                    self.logger.info(f'Failed to convert label to json format: {key}')
                    continue
                self.data[key] = value
        if not isinstance(self.data, dict):
            raise WalletFileException("Malformed wallet file (not dict)")
        if not self._manual_upgrades and self.requires_split():
            raise WalletFileException("This wallet has multiple accounts and must be split")
        if not self.requires_upgrade():
            self._after_upgrade_tasks()
        elif not self._manual_upgrades:
            self.upgrade()
def requires_split(self):
d = self.get('accounts', {})
return len(d) > 1
    def get_split_accounts(self):
        """Split a multi-account legacy wallet into per-account data dicts.

        Returns a list of wallet-data dicts (one per account), or None if
        there is nothing to split.  Raises WalletFileException for wallet
        types it cannot split.
        """
        result = []
        # backward compatibility with old wallets
        d = self.get('accounts', {})
        if len(d) < 2:
            return
        wallet_type = self.get('wallet_type')
        if wallet_type == 'old':
            # Old-seed wallets have exactly one deterministic account ('0')
            # and one imported account ('/x').
            assert len(d) == 2
            data1 = copy.deepcopy(self.data)
            data1['accounts'] = {'0': d['0']}
            data1['suffix'] = 'deterministic'
            data2 = copy.deepcopy(self.data)
            data2['accounts'] = {'/x': d['/x']}
            data2['seed'] = None
            data2['seed_version'] = None
            data2['master_public_key'] = None
            data2['wallet_type'] = 'imported'
            data2['suffix'] = 'imported'
            result = [data1, data2]
        elif wallet_type in ['bip44', 'trezor', 'keepkey', 'ledger', 'btchip', 'digitalbitbox', 'safe_t']:
            mpk = self.get('master_public_keys')
            for k in d.keys():
                i = int(k)
                x = d[k]
                if x.get("pending"):
                    continue
                xpub = mpk["x/%d'"%i]
                new_data = copy.deepcopy(self.data)
                # save account, derivation and xpub at index 0
                new_data['accounts'] = {'0': x}
                new_data['master_public_keys'] = {"x/0'": xpub}
                new_data['derivation'] = bip44_derivation(k)
                new_data['suffix'] = k
                result.append(new_data)
        else:
            raise WalletFileException("This wallet has multiple accounts and must be split")
        return result
def requires_upgrade(self):
return self.get_seed_version() < FINAL_SEED_VERSION
    @profiler
    def upgrade(self):
        """Run every storage-format conversion, in order, up to FINAL_SEED_VERSION.

        Each _convert_* step checks internally whether it applies, so the
        full chain is always called; ordering must not be changed.
        """
        self.logger.info('upgrading wallet format')
        if self._called_after_upgrade_tasks:
            # we need strict ordering between upgrade() and after_upgrade_tasks()
            raise Exception("'after_upgrade_tasks' must NOT be called before 'upgrade'")
        self._convert_imported()
        self._convert_wallet_type()
        self._convert_account()
        self._convert_version_13_b()
        self._convert_version_14()
        self._convert_version_15()
        self._convert_version_16()
        self._convert_version_17()
        self._convert_version_18()
        self._convert_version_19()
        self._convert_version_20()
        self._convert_version_21()
        self._convert_version_22()
        self._convert_version_23()
        self._convert_version_24()
        self._convert_version_25()
        self._convert_version_26()
        self._convert_version_27()
        self._convert_version_28()
        self._convert_version_29()
        self._convert_version_30()
        self._convert_version_31()
        self._convert_version_32()
        self.put('seed_version', FINAL_SEED_VERSION)  # just to be sure
        self._after_upgrade_tasks()
    def _after_upgrade_tasks(self):
        """Post-upgrade initialization; must run once, only after upgrade()."""
        self._called_after_upgrade_tasks = True
        self._load_transactions()
    def _convert_wallet_type(self):
        """Upgrade step: migrate pre-keystore wallets to the 'keystore' layout.

        Translates the various legacy layouts (old seed, imported keypairs,
        xpub/bip44, hardware, 2fa/multisig) into keystore dicts, then strips
        the superseded top-level keys.
        """
        if not self._is_upgrade_method_needed(0, 13):
            return
        wallet_type = self.get('wallet_type')
        if wallet_type == 'btchip': wallet_type = 'ledger'
        if self.get('keystore') or self.get('x1/') or wallet_type=='imported':
            # Already in the new format.
            return False
        assert not self.requires_split()
        seed_version = self.get_seed_version()
        seed = self.get('seed')
        xpubs = self.get('master_public_keys')
        xprvs = self.get('master_private_keys', {})
        mpk = self.get('master_public_key')
        keypairs = self.get('keypairs')
        key_type = self.get('key_type')
        if seed_version == OLD_SEED_VERSION or wallet_type == 'old':
            d = {
                'type': 'old',
                'seed': seed,
                'mpk': mpk,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif key_type == 'imported':
            d = {
                'type': 'imported',
                'keypairs': keypairs,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif wallet_type in ['xpub', 'standard']:
            xpub = xpubs["x/"]
            xprv = xprvs.get("x/")
            d = {
                'type': 'bip32',
                'xpub': xpub,
                'xprv': xprv,
                'seed': seed,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif wallet_type in ['bip44']:
            xpub = xpubs["x/0'"]
            xprv = xprvs.get("x/0'")
            d = {
                'type': 'bip32',
                'xpub': xpub,
                'xprv': xprv,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif wallet_type in ['trezor', 'keepkey', 'ledger', 'digitalbitbox', 'safe_t']:
            xpub = xpubs["x/0'"]
            derivation = self.get('derivation', bip44_derivation(0))
            d = {
                'type': 'hardware',
                'hw_type': wallet_type,
                'xpub': xpub,
                'derivation': derivation,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif (wallet_type == '2fa') or multisig_type(wallet_type):
            # One keystore entry per cosigner ('x1/', 'x2/', ...).
            for key in xpubs.keys():
                d = {
                    'type': 'bip32',
                    'xpub': xpubs[key],
                    'xprv': xprvs.get(key),
                }
                if key == 'x1/' and seed:
                    d['seed'] = seed
                self.put(key, d)
        else:
            raise WalletFileException('Unable to tell wallet type. Is this even a wallet file?')
        # remove junk
        self.put('master_public_key', None)
        self.put('master_public_keys', None)
        self.put('master_private_keys', None)
        self.put('derivation', None)
        self.put('seed', None)
        self.put('keypairs', None)
        self.put('key_type', None)
    def _convert_version_13_b(self):
        # version 13 is ambiguous, and has an earlier and a later structure
        if not self._is_upgrade_method_needed(0, 13):
            return
        if self.get('wallet_type') == 'standard':
            if self.get('keystore').get('type') == 'imported':
                # Derive the receiving-address list from the imported pubkeys.
                pubkeys = self.get('keystore').get('keypairs').keys()
                d = {'change': []}
                receiving_addresses = []
                for pubkey in pubkeys:
                    addr = bitcoin.pubkey_to_address('p2pkh', pubkey)
                    receiving_addresses.append(addr)
                d['receiving'] = receiving_addresses
                self.put('addresses', d)
                self.put('pubkeys', None)
        self.put('seed_version', 13)
    def _convert_version_14(self):
        # convert imported wallets for 3.0
        if not self._is_upgrade_method_needed(13, 13):
            return
        if self.get('wallet_type') =='imported':
            # Address list becomes an address -> details mapping.
            addresses = self.get('addresses')
            if type(addresses) is list:
                addresses = dict([(x, None) for x in addresses])
                self.put('addresses', addresses)
        elif self.get('wallet_type') == 'standard':
            if self.get('keystore').get('type')=='imported':
                # Standard wallets with imported keypairs become 'imported'
                # wallets, keyed by address with pubkey details.
                addresses = set(self.get('addresses').get('receiving'))
                pubkeys = self.get('keystore').get('keypairs').keys()
                assert len(addresses) == len(pubkeys)
                d = {}
                for pubkey in pubkeys:
                    addr = bitcoin.pubkey_to_address('p2pkh', pubkey)
                    assert addr in addresses
                    d[addr] = {
                        'pubkey': pubkey,
                        'redeem_script': None,
                        'type': 'p2pkh'
                    }
                self.put('addresses', d)
                self.put('pubkeys', None)
                self.put('wallet_type', 'imported')
        self.put('seed_version', 14)
    def _convert_version_15(self):
        if not self._is_upgrade_method_needed(14, 14):
            return
        if self.get('seed_type') == 'segwit':
            # should not get here; get_seed_version should have caught this
            raise Exception('unsupported derivation (development segwit, v14)')
        self.put('seed_version', 15)
def _convert_version_16(self):
    """Upgrade to version 16: scrub invalid imported addresses.

    Removes entries that are not valid addresses from the imported
    wallet's address dict and from the per-address side tables
    (history, labels, payment requests, frozen list).
    """
    # fixes issue #3193 for Imported_Wallets with addresses
    # also, previous versions allowed importing any garbage as an address
    # which we now try to remove, see pr #3191
    if not self._is_upgrade_method_needed(15, 15):
        return

    def remove_address(addr):
        # Purge `addr` from every side table that is keyed by address.
        def remove_from_dict(dict_name):
            d = self.get(dict_name, None)
            if d is not None:
                d.pop(addr, None)
                self.put(dict_name, d)

        def remove_from_list(list_name):
            lst = self.get(list_name, None)
            if lst is not None:
                s = set(lst)
                s -= {addr}
                self.put(list_name, list(s))

        # note: we don't remove 'addr' from self.get('addresses')
        remove_from_dict('addr_history')
        remove_from_dict('labels')
        remove_from_dict('payment_requests')
        remove_from_list('frozen_addresses')

    if self.get('wallet_type') == 'imported':
        addresses = self.get('addresses')
        assert isinstance(addresses, dict)
        addresses_new = dict()
        for address, details in addresses.items():
            if not bitcoin.is_address(address):
                remove_address(address)
                continue
            if details is None:
                addresses_new[address] = {}
            else:
                addresses_new[address] = details
        self.put('addresses', addresses_new)
    self.put('seed_version', 16)
def _convert_version_17(self):
    """Upgrade to version 17: build 'spent_outpoints' from stored txs and
    convert the token table from lists to Token records."""
    # delete pruned_txo; construct spent_outpoints
    if not self._is_upgrade_method_needed(16, 16):
        return
    self.put('pruned_txo', None)
    transactions = self.get('transactions', {})  # txid -> raw_tx
    spent_outpoints = defaultdict(dict)
    for txid, raw_tx in transactions.items():
        tx = Transaction(raw_tx)
        for txin in tx.inputs():
            if txin.is_coinbase_input():
                continue
            prevout_hash = txin.prevout.txid.hex()
            prevout_n = txin.prevout.out_idx
            spent_outpoints[prevout_hash][str(prevout_n)] = txid
    self.put('spent_outpoints', spent_outpoints)
    # re-key tokens via Token.get_key()
    tokens = self.get('tokens', {})  # contractAddr_bindAddr -> [name, symbol, decimals, balance]
    new_tokens = {}
    for key, value in tokens.items():
        contract_addr, bind_addr = key.split('_')
        new_token = Token(contract_addr, bind_addr, value[0], value[1], value[2], value[3])
        new_tokens[new_token.get_key()] = new_token
    self.put('tokens', new_tokens)
    self.put('seed_version', 17)

def _convert_version_18(self):
    """Upgrade to version 18: drop 'verified_tx3' (its structure changed)."""
    # delete verified_tx3 as its structure changed
    if not self._is_upgrade_method_needed(17, 17):
        return
    self.put('verified_tx3', None)
    self.put('seed_version', 18)

def _convert_version_19(self):
    """Upgrade to version 19: drop 'tx_fees' (its structure changed)."""
    # delete tx_fees as its structure changed
    if not self._is_upgrade_method_needed(18, 18):
        return
    self.put('tx_fees', None)
    self.put('seed_version', 19)
def _convert_version_20(self):
    """Upgrade to version 20: add 'derivation' and 'root_fingerprint' to xpub keystores."""
    # store 'derivation' (prefix) and 'root_fingerprint' in all xpub-based keystores.
    # store explicit None values if we cannot retroactively determine them
    if not self._is_upgrade_method_needed(19, 19):
        return
    from .bip32 import BIP32Node, convert_bip32_intpath_to_strpath
    # note: This upgrade method reimplements bip32.root_fp_and_der_prefix_from_xkey.
    # This is done deliberately, to avoid introducing that method as a dependency to this upgrade.
    # 'keystore' is the single-sig slot; 'x1/' .. 'x15/' are multisig cosigner slots.
    for ks_name in ('keystore', *['x{}/'.format(i) for i in range(1, 16)]):
        ks = self.get(ks_name, None)
        if ks is None: continue
        xpub = ks.get('xpub', None)
        if xpub is None: continue
        bip32node = BIP32Node.from_xkey(xpub)
        # derivation prefix: only determinable for depth 0 ('m') or depth 1
        derivation_prefix = ks.get('derivation', None)
        if derivation_prefix is None:
            assert bip32node.depth >= 0, bip32node.depth
            if bip32node.depth == 0:
                derivation_prefix = 'm'
            elif bip32node.depth == 1:
                child_number_int = int.from_bytes(bip32node.child_number, 'big')
                derivation_prefix = convert_bip32_intpath_to_strpath([child_number_int])
            ks['derivation'] = derivation_prefix
        # root fingerprint: prefer a Coldcard-stored 'ckcc_xfp' int, else derive from the node
        root_fingerprint = ks.get('ckcc_xfp', None)
        if root_fingerprint is not None:
            root_fingerprint = root_fingerprint.to_bytes(4, byteorder="little", signed=False).hex().lower()
        if root_fingerprint is None:
            if bip32node.depth == 0:
                root_fingerprint = bip32node.calc_fingerprint_of_this_node().hex().lower()
            elif bip32node.depth == 1:
                root_fingerprint = bip32node.fingerprint.hex()
        ks['root_fingerprint'] = root_fingerprint
        ks.pop('ckcc_xfp', None)
        self.put(ks_name, ks)
    self.put('seed_version', 20)
def _convert_version_21(self):
    """Upgrade to version 21: mark all stored lightning channels as 'OPENING'."""
    if not self._is_upgrade_method_needed(20, 20):
        return
    channels = self.get('channels')
    if channels:
        for channel in channels:
            channel['state'] = 'OPENING'
        self.put('channels', channels)
    self.put('seed_version', 21)

def _convert_version_22(self):
    """Upgrade to version 22: build the 'prevouts_by_scripthash' index from stored txs."""
    # construct prevouts_by_scripthash
    if not self._is_upgrade_method_needed(21, 21):
        return
    from .bitcoin import script_to_scripthash
    transactions = self.get('transactions', {})  # txid -> raw_tx
    prevouts_by_scripthash = defaultdict(list)
    for txid, raw_tx in transactions.items():
        tx = Transaction(raw_tx)
        for idx, txout in enumerate(tx.outputs()):
            outpoint = f"{txid}:{idx}"
            scripthash = script_to_scripthash(txout.scriptpubkey.hex())
            prevouts_by_scripthash[scripthash].append((outpoint, txout.value))
    self.put('prevouts_by_scripthash', prevouts_by_scripthash)
    self.put('seed_version', 22)

def _convert_version_23(self):
    """Upgrade to version 23: move the channel revocation store out of
    remote_config and convert fee-update lists to str-keyed dicts."""
    if not self._is_upgrade_method_needed(22, 22):
        return
    channels = self.get('channels', [])
    LOCAL = 1
    REMOTE = -1
    for c in channels:
        # move revocation store from remote_config
        r = c['remote_config'].pop('revocation_store')
        c['revocation_store'] = r
        # convert fee updates
        log = c.get('log', {})
        for sub in LOCAL, REMOTE:
            l = log[str(sub)]['fee_updates']
            d = {}
            for i, fu in enumerate(l):
                d[str(i)] = {
                    'rate': fu['rate'],
                    'ctn_local': fu['ctns'][str(LOCAL)],
                    'ctn_remote': fu['ctns'][str(REMOTE)]
                }
            log[str(int(sub))]['fee_updates'] = d
    self.data['channels'] = channels
    self.data['seed_version'] = 23
def _convert_version_24(self):
    """Upgrade to version 24: convert channel revocation buckets, the channels
    list, and txi/txo from list-based to dict-based layouts."""
    if not self._is_upgrade_method_needed(23, 23):
        return
    channels = self.get('channels', [])
    for c in channels:
        # convert revocation store to dict (skip empty buckets)
        r = c['revocation_store']
        d = {}
        for i in range(49):
            v = r['buckets'][i]
            if v is not None:
                d[str(i)] = v
        r['buckets'] = d
        c['revocation_store'] = r
    # convert channels to dict keyed by channel_id
    self.data['channels'] = {x['channel_id']: x for x in channels}
    # convert txi & txo
    # txi: addr -> [(ser, v), ...]  becomes  addr -> {ser: v}
    txi = self.get('txi', {})
    for tx_hash, d in txi.items():
        d2 = {}
        for addr, l in d.items():
            d2[addr] = {}
            for ser, v in l:
                d2[addr][ser] = v
        txi[tx_hash] = d2
    self.data['txi'] = txi
    # txo: addr -> [(n, v, cb), ...]  becomes  addr -> {str(n): (v, cb)}
    txo = self.get('txo', {})
    for tx_hash, d in txo.items():
        d2 = {}
        for addr, l in d.items():
            d2[addr] = {}
            for n, v, cb in l:
                d2[addr][str(n)] = (v, cb)
        txo[tx_hash] = d2
    self.data['txo'] = txo
    self.data['seed_version'] = 24

def _convert_version_25(self):
    """Upgrade to version 25: tag onchain payment requests with a 'type'
    field and normalize bip70 invoices into plain dicts."""
    if not self._is_upgrade_method_needed(24, 24):
        return
    # add 'type' field to onchain requests
    requests = self.data.get('payment_requests', {})
    for k, r in list(requests.items()):
        if r.get('address') == k:
            requests[k] = {
                'address': r['address'],
                'amount': r.get('amount'),
                'exp': r.get('exp'),
                'id': r.get('id'),
                'memo': r.get('memo'),
                'time': r.get('time'),
                'type': PR_TYPE_ONCHAIN,
            }
    # convert bip70 invoices
    invoices = self.data.get('invoices', {})
    for k, r in list(invoices.items()):
        data = r.get("hex")
        if data:
            pr = PaymentRequest(bytes.fromhex(data))
            if pr.id != k:
                # stored key does not match the request's own id; leave untouched
                continue
            invoices[k] = {
                'type': PR_TYPE_ONCHAIN,
                'amount': pr.get_amount(),
                'bip70': data,
                'exp': pr.get_expiration_date() - pr.get_time(),
                'id': pr.id,
                'message': pr.get_memo(),
                'outputs': [x.to_legacy_tuple() for x in pr.get_outputs()],
                'time': pr.get_time(),
                'requestor': pr.get_requestor(),
            }
    self.data['seed_version'] = 25
def _convert_version_26(self):
    """Upgrade to version 26: fold 'lightning_channel_timestamps' into the
    per-channel 'funding_height' / 'closing_height' fields."""
    if not self._is_upgrade_method_needed(25, 25):
        return
    channels = self.data.get('channels', {})
    channel_timestamps = self.data.pop('lightning_channel_timestamps', {})
    for channel_id, c in channels.items():
        item = channel_timestamps.get(channel_id)
        if item:
            funding_txid, funding_height, funding_timestamp, closing_txid, closing_height, closing_timestamp = item
            if funding_txid:
                c['funding_height'] = funding_txid, funding_height, funding_timestamp
            if closing_txid:
                c['closing_height'] = closing_txid, closing_height, closing_timestamp
    self.data['seed_version'] = 26

def _convert_version_27(self):
    """Upgrade to version 27: default htlc_minimum_msat=1 in channel local_config."""
    if not self._is_upgrade_method_needed(26, 26):
        return
    channels = self.data.get('channels', {})
    for channel_id, c in channels.items():
        c['local_config']['htlc_minimum_msat'] = 1
    self.data['seed_version'] = 27

def _convert_version_28(self):
    """Upgrade to version 28: add a None 'channel_seed' to channel local_config."""
    if not self._is_upgrade_method_needed(27, 27):
        return
    channels = self.data.get('channels', {})
    for channel_id, c in channels.items():
        c['local_config']['channel_seed'] = None
    self.data['seed_version'] = 28

def _convert_version_29(self):
    """Upgrade to version 29: normalize invoices and payment requests to a
    common dict shape, with type-specific extra fields."""
    if not self._is_upgrade_method_needed(28, 28):
        return
    requests = self.data.get('payment_requests', {})
    invoices = self.data.get('invoices', {})
    for d in [invoices, requests]:
        for key, r in list(d.items()):
            _type = r.get('type', 0)
            item = {
                'type': _type,
                'message': r.get('message') or r.get('memo', ''),
                'amount': r.get('amount'),
                'exp': r.get('exp') or 0,
                'time': r.get('time', 0),
            }
            if _type == PR_TYPE_ONCHAIN:
                # a bare 'address' entry becomes a single-output outputs list
                address = r.pop('address', None)
                if address:
                    outputs = [(0, address, r.get('amount'))]
                else:
                    outputs = r.get('outputs')
                item.update({
                    'outputs': outputs,
                    'id': r.get('id'),
                    'bip70': r.get('bip70'),
                    'requestor': r.get('requestor'),
                })
            else:
                item.update({
                    'rhash': r['rhash'],
                    'invoice': r['invoice'],
                })
            d[key] = item
    self.data['seed_version'] = 29
def _convert_version_30(self):
    """Upgrade to version 30: rename invoice 'amount' to 'amount_sat' (onchain)
    or 'amount_msat' (lightning), and drop redundant lightning fields."""
    if not self._is_upgrade_method_needed(29, 29):
        return
    from .invoices import PR_TYPE_ONCHAIN, PR_TYPE_LN
    requests = self.data.get('payment_requests', {})
    invoices = self.data.get('invoices', {})
    for d in [invoices, requests]:
        for key, item in list(d.items()):
            _type = item['type']
            if _type == PR_TYPE_ONCHAIN:
                item['amount_sat'] = item.pop('amount')
            elif _type == PR_TYPE_LN:
                amount_sat = item.pop('amount')
                item['amount_msat'] = 1000 * amount_sat if amount_sat is not None else None
                item.pop('exp')
                item.pop('message')
                item.pop('rhash')
                item.pop('time')
            else:
                raise Exception(f"unknown invoice type: {_type}")
    self.data['seed_version'] = 30

def _convert_version_31(self):
    """Upgrade to version 31: coerce None amount/exp/time of onchain items to 0."""
    if not self._is_upgrade_method_needed(30, 30):
        return
    from .invoices import PR_TYPE_ONCHAIN
    requests = self.data.get('payment_requests', {})
    invoices = self.data.get('invoices', {})
    for d in [invoices, requests]:
        for key, item in list(d.items()):
            if item['type'] == PR_TYPE_ONCHAIN:
                item['amount_sat'] = item['amount_sat'] or 0
                item['exp'] = item['exp'] or 0
                item['time'] = item['time'] or 0
    self.data['seed_version'] = 31

def _convert_version_32(self):
    """Upgrade to version 32: drop onchain invoices that have no outputs."""
    if not self._is_upgrade_method_needed(31, 31):
        return
    from .invoices import PR_TYPE_ONCHAIN
    invoices_old = self.data.get('invoices', {})
    invoices_new = {k: item for k, item in invoices_old.items()
                    if not (item['type'] == PR_TYPE_ONCHAIN and item['outputs'] is None)}
    self.data['invoices'] = invoices_new
    self.data['seed_version'] = 32

def _convert_imported(self):
    """Pre-v13 upgrade: split the legacy imported account ('/x') into either
    an address list (watch-only) or an imported keypair wallet; mixing both
    is a corrupt file."""
    if not self._is_upgrade_method_needed(0, 13):
        return
    # '/x' is the internal ID for imported accounts
    d = self.get('accounts', {}).get('/x', {}).get('imported', {})
    if not d:
        return False
    addresses = []
    keypairs = {}
    for addr, v in d.items():
        pubkey, privkey = v
        if privkey:
            keypairs[pubkey] = privkey
        else:
            addresses.append(addr)
    if addresses and keypairs:
        raise WalletFileException('mixed addresses and privkeys')
    elif addresses:
        self.put('addresses', addresses)
        self.put('accounts', None)
    elif keypairs:
        self.put('wallet_type', 'standard')
        self.put('key_type', 'imported')
        self.put('keypairs', keypairs)
        self.put('accounts', None)
    else:
        raise WalletFileException('no addresses or privkeys')

def _convert_account(self):
    """Pre-v13 upgrade: drop the legacy 'accounts' table."""
    if not self._is_upgrade_method_needed(0, 13):
        return
    self.put('accounts', None)
def _is_upgrade_method_needed(self, min_version, max_version):
assert min_version <= max_version
cur_version = self.get_seed_version()
if cur_version > max_version:
return False
elif cur_version < min_version:
raise WalletFileException(
'storage upgrade: unexpected version {} (should be {}-{})'
.format(cur_version, min_version, max_version))
else:
return True
@locked
def get_seed_version(self):
    """Return the wallet's storage format version, validating that this
    codebase can handle it.

    Missing version => legacy wallet: a 128-hex-char 'master_public_key'
    marks OLD_SEED_VERSION, otherwise NEW_SEED_VERSION is assumed.
    Raises WalletFileException for versions newer than FINAL_SEED_VERSION,
    and delegates to _raise_unsupported_version for known-bad old ones.
    """
    seed_version = self.get('seed_version')
    if not seed_version:
        seed_version = OLD_SEED_VERSION if len(self.get('master_public_key', '')) == 128 else NEW_SEED_VERSION
    if seed_version > FINAL_SEED_VERSION:
        raise WalletFileException('This version of Electrum is too old to open this wallet.\n'
                                  '(highest supported storage version: {}, version of this file: {})'
                                  .format(FINAL_SEED_VERSION, seed_version))
    if seed_version == 14 and self.get('seed_type') == 'segwit':
        # development segwit wallets (v14) are unsupported
        self._raise_unsupported_version(seed_version)
    if seed_version >= 12:
        return seed_version
    if seed_version not in [OLD_SEED_VERSION, NEW_SEED_VERSION]:
        self._raise_unsupported_version(seed_version)
    return seed_version
def _raise_unsupported_version(self, seed_version):
    """Raise WalletFileException with a user-facing explanation of why this
    seed version cannot be opened, including recovery hints for known
    historical versions."""
    parts = [f"Your wallet has an unsupported seed version: {seed_version}."]
    if seed_version in (5, 7, 8, 9, 10, 14):
        parts.append("\n\nTo open this wallet, try 'git checkout seed_v%d'" % seed_version)
    if seed_version == 6:
        # version 1.9.8 created v6 wallets when an incorrect seed was entered in the restore dialog
        parts.append('\n\nThis file was created because of a bug in version 1.9.8.')
        keyless = (self.get('master_public_keys') is None
                   and self.get('master_private_keys') is None
                   and self.get('imported_keys') is None)
        if keyless:
            # pbkdf2 (at that time an additional dependency) was not included with the binaries, and wallet creation aborted.
            parts.append("\nIt does not contain any keys, and can safely be removed.")
        else:
            # creation was complete if electrum was run from source
            parts.append("\nPlease open this file with Electrum 1.9.8, and move your coins to a new wallet.")
    raise WalletFileException(''.join(parts))
@locked
def get_txi_addresses(self, tx_hash: str) -> List[str]:
    """Returns list of is_mine addresses that appear as inputs in tx."""
    assert isinstance(tx_hash, str)
    return list(self.txi.get(tx_hash, {}).keys())

@locked
def get_txo_addresses(self, tx_hash: str) -> List[str]:
    """Returns list of is_mine addresses that appear as outputs in tx."""
    assert isinstance(tx_hash, str)
    return list(self.txo.get(tx_hash, {}).keys())

@locked
def get_txi_addr(self, tx_hash: str, address: str) -> Iterable[Tuple[str, int]]:
    """Returns an iterable of (prev_outpoint, value)."""
    assert isinstance(tx_hash, str)
    assert isinstance(address, str)
    d = self.txi.get(tx_hash, {}).get(address, {})
    return list(d.items())

@locked
def get_txo_addr(self, tx_hash: str, address: str) -> Iterable[Tuple[int, int, bool]]:
    """Returns an iterable of (output_index, value, is_coinbase)."""
    assert isinstance(tx_hash, str)
    assert isinstance(address, str)
    d = self.txo.get(tx_hash, {}).get(address, {})
    # output indices are stored as str keys; convert back to int for callers
    return [(int(n), v, cb) for (n, (v, cb)) in d.items()]

@modifier
def add_txi_addr(self, tx_hash: str, addr: str, ser: str, v: int) -> None:
    """Record that input `ser` of tx `tx_hash` spends value `v` from `addr`."""
    assert isinstance(tx_hash, str)
    assert isinstance(addr, str)
    assert isinstance(ser, str)
    assert isinstance(v, int)
    if tx_hash not in self.txi:
        self.txi[tx_hash] = {}
    d = self.txi[tx_hash]
    if addr not in d:
        d[addr] = {}
    d[addr][ser] = v

@modifier
def add_txo_addr(self, tx_hash: str, addr: str, n: Union[int, str], v: int, is_coinbase: bool) -> None:
    """Record output `n` of tx `tx_hash`, paying value `v` to `addr`."""
    n = str(n)  # output indices are stored as str keys
    assert isinstance(tx_hash, str)
    assert isinstance(addr, str)
    assert isinstance(n, str)
    assert isinstance(v, int)
    assert isinstance(is_coinbase, bool)
    if tx_hash not in self.txo:
        self.txo[tx_hash] = {}
    d = self.txo[tx_hash]
    if addr not in d:
        d[addr] = {}
    d[addr][n] = (v, is_coinbase)
@locked
def list_txi(self) -> Sequence[str]:
    """Return txids that have recorded wallet inputs."""
    return list(self.txi.keys())

@locked
def list_txo(self) -> Sequence[str]:
    """Return txids that have recorded wallet outputs."""
    return list(self.txo.keys())

@modifier
def remove_txi(self, tx_hash: str) -> None:
    """Drop all recorded inputs of `tx_hash` (no-op if absent)."""
    assert isinstance(tx_hash, str)
    self.txi.pop(tx_hash, None)

@modifier
def remove_txo(self, tx_hash: str) -> None:
    """Drop all recorded outputs of `tx_hash` (no-op if absent)."""
    assert isinstance(tx_hash, str)
    self.txo.pop(tx_hash, None)

@locked
def list_spent_outpoints(self) -> Sequence[Tuple[str, str]]:
    """Return all known spent outpoints as (prevout_txid, output_index_str) pairs."""
    return [(h, n)
            for h in self.spent_outpoints.keys()
            for n in self.get_spent_outpoints(h)
            ]

@locked
def get_spent_outpoints(self, prevout_hash: str) -> Sequence[str]:
    """Return the spent output indices (as str) of tx `prevout_hash`."""
    assert isinstance(prevout_hash, str)
    return list(self.spent_outpoints.get(prevout_hash, {}).keys())

@locked
def get_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str]) -> Optional[str]:
    """Return the txid spending outpoint (prevout_hash, prevout_n), or None."""
    assert isinstance(prevout_hash, str)
    prevout_n = str(prevout_n)
    return self.spent_outpoints.get(prevout_hash, {}).get(prevout_n)

@modifier
def remove_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str]) -> None:
    """Forget the spender of (prevout_hash, prevout_n); removes the tx entry once empty."""
    assert isinstance(prevout_hash, str)
    prevout_n = str(prevout_n)
    self.spent_outpoints[prevout_hash].pop(prevout_n, None)
    if not self.spent_outpoints[prevout_hash]:
        self.spent_outpoints.pop(prevout_hash)

@modifier
def set_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str], tx_hash: str) -> None:
    """Record that `tx_hash` spends outpoint (prevout_hash, prevout_n)."""
    assert isinstance(prevout_hash, str)
    assert isinstance(tx_hash, str)
    prevout_n = str(prevout_n)
    if prevout_hash not in self.spent_outpoints:
        self.spent_outpoints[prevout_hash] = {}
    self.spent_outpoints[prevout_hash][prevout_n] = tx_hash
@modifier
def add_prevout_by_scripthash(self, scripthash: str, *, prevout: TxOutpoint, value: int) -> None:
    """Index (prevout, value) under `scripthash` (outpoints stored as strings)."""
    assert isinstance(scripthash, str)
    assert isinstance(prevout, TxOutpoint)
    assert isinstance(value, int)
    if scripthash not in self._prevouts_by_scripthash:
        self._prevouts_by_scripthash[scripthash] = set()
    self._prevouts_by_scripthash[scripthash].add((prevout.to_str(), value))

@modifier
def remove_prevout_by_scripthash(self, scripthash: str, *, prevout: TxOutpoint, value: int) -> None:
    """Remove (prevout, value) from the scripthash index; drops the set once empty."""
    assert isinstance(scripthash, str)
    assert isinstance(prevout, TxOutpoint)
    assert isinstance(value, int)
    self._prevouts_by_scripthash[scripthash].discard((prevout.to_str(), value))
    if not self._prevouts_by_scripthash[scripthash]:
        self._prevouts_by_scripthash.pop(scripthash)

@locked
def get_prevouts_by_scripthash(self, scripthash: str) -> Set[Tuple[TxOutpoint, int]]:
    """Return the set of (TxOutpoint, value) indexed under `scripthash`."""
    assert isinstance(scripthash, str)
    prevouts_and_values = self._prevouts_by_scripthash.get(scripthash, set())
    return {(TxOutpoint.from_str(prevout), value) for prevout, value in prevouts_and_values}
@modifier
def add_transaction(self, tx_hash: str, tx: Transaction) -> None:
    """Store `tx` under `tx_hash`; a complete tx is never overwritten by a partial one.

    Raises if tx_hash is empty or inconsistent with tx.txid().
    """
    assert isinstance(tx_hash, str)
    assert isinstance(tx, Transaction), tx
    # note that tx might be a PartialTransaction
    if not tx_hash:
        raise Exception("trying to add tx to db without txid")
    if tx_hash != tx.txid():
        raise Exception(f"trying to add tx to db with inconsistent txid: {tx_hash} != {tx.txid()}")
    # don't allow overwriting complete tx with partial tx
    tx_we_already_have = self.transactions.get(tx_hash, None)
    if tx_we_already_have is None or isinstance(tx_we_already_have, PartialTransaction):
        self.transactions[tx_hash] = tx

@modifier
def remove_transaction(self, tx_hash: str) -> Optional[Transaction]:
    """Remove and return the tx stored under `tx_hash` (None if absent)."""
    assert isinstance(tx_hash, str)
    return self.transactions.pop(tx_hash, None)

@locked
def get_transaction(self, tx_hash: Optional[str]) -> Optional[Transaction]:
    """Return the tx stored under `tx_hash`, or None."""
    if tx_hash is None:
        return None
    assert isinstance(tx_hash, str)
    return self.transactions.get(tx_hash)

@locked
def list_transactions(self) -> Sequence[str]:
    """Return all stored txids."""
    return list(self.transactions.keys())

@locked
def get_history(self) -> Sequence[str]:
    """Return all addresses that have a (possibly empty) history entry."""
    return list(self.history.keys())
def is_addr_in_history(self, addr: str) -> bool:
    """Return whether `addr` has an entry in the address-history table.

    A True result does not imply the stored history is non-empty.
    """
    assert isinstance(addr, str)
    return addr in self.history.keys()
@locked
def get_addr_history(self, addr: str) -> Sequence[Tuple[str, int]]:
    """Return the (txid, height) history list for `addr` ([] if none)."""
    assert isinstance(addr, str)
    return self.history.get(addr, [])

@modifier
def set_addr_history(self, addr: str, hist) -> None:
    """Replace the stored history for `addr` with `hist`."""
    assert isinstance(addr, str)
    self.history[addr] = hist

@modifier
def remove_addr_history(self, addr: str) -> None:
    """Delete the stored history for `addr` (no-op if absent)."""
    assert isinstance(addr, str)
    self.history.pop(addr, None)

@locked
def list_verified_tx(self) -> Sequence[str]:
    """Return txids that have verification info stored."""
    return list(self.verified_tx.keys())

@locked
def get_verified_tx(self, txid: str) -> Optional[TxMinedInfo]:
    """Return stored TxMinedInfo for `txid`, or None.

    Confirmation count is not persisted, so `conf` is always None here.
    """
    assert isinstance(txid, str)
    if txid not in self.verified_tx:
        return None
    height, timestamp, txpos, header_hash = self.verified_tx[txid]
    return TxMinedInfo(height=height,
                       conf=None,
                       timestamp=timestamp,
                       txpos=txpos,
                       header_hash=header_hash)

@modifier
def add_verified_tx(self, txid: str, info: TxMinedInfo):
    """Persist (height, timestamp, txpos, header_hash) for `txid`."""
    assert isinstance(txid, str)
    assert isinstance(info, TxMinedInfo)
    self.verified_tx[txid] = (info.height, info.timestamp, info.txpos, info.header_hash)

@modifier
def remove_verified_tx(self, txid: str):
    """Drop verification info for `txid` (no-op if absent)."""
    assert isinstance(txid, str)
    self.verified_tx.pop(txid, None)
def is_in_verified_tx(self, txid: str) -> bool:
    """Return whether verification info is stored for `txid`."""
    assert isinstance(txid, str)
    verified = self.verified_tx
    return txid in verified
@modifier
def add_tx_fee_from_server(self, txid: str, fee_sat: Optional[int]) -> None:
    """Record a server-reported fee for `txid`.

    Never overwrites a fee we calculated ourselves.
    """
    assert isinstance(txid, str)
    # note: when called with (fee_sat is None), rm currently saved value
    if txid not in self.tx_fees:
        self.tx_fees[txid] = TxFeesValue()
    tx_fees_value = self.tx_fees[txid]
    if tx_fees_value.is_calculated_by_us:
        return
    self.tx_fees[txid] = tx_fees_value._replace(fee=fee_sat, is_calculated_by_us=False)

@modifier
def add_tx_fee_we_calculated(self, txid: str, fee_sat: Optional[int]) -> None:
    """Record a fee we calculated ourselves for `txid`; ignored if fee_sat is None."""
    assert isinstance(txid, str)
    if fee_sat is None:
        return
    assert isinstance(fee_sat, int)
    if txid not in self.tx_fees:
        self.tx_fees[txid] = TxFeesValue()
    self.tx_fees[txid] = self.tx_fees[txid]._replace(fee=fee_sat, is_calculated_by_us=True)
@locked
def get_tx_fee(self, txid: str, *, trust_server: bool = False) -> Optional[int]:
    """Returns tx_fee.

    Returns None if no fee is stored, or if the stored fee was
    server-reported and trust_server is False.
    """
    # fix: the docstring used to appear after the assert, making it a
    # dead string expression rather than the function's docstring.
    assert isinstance(txid, str)
    tx_fees_value = self.tx_fees.get(txid)
    if tx_fees_value is None:
        return None
    if not trust_server and not tx_fees_value.is_calculated_by_us:
        return None
    return tx_fees_value.fee
@modifier
def add_num_inputs_to_tx(self, txid: str, num_inputs: int) -> None:
    """Store the total input count of `txid` alongside its fee info."""
    assert isinstance(txid, str)
    assert isinstance(num_inputs, int)
    if txid not in self.tx_fees:
        self.tx_fees[txid] = TxFeesValue()
    self.tx_fees[txid] = self.tx_fees[txid]._replace(num_inputs=num_inputs)

@locked
def get_num_all_inputs_of_tx(self, txid: str) -> Optional[int]:
    """Return the stored total input count for `txid`, or None if unknown."""
    assert isinstance(txid, str)
    tx_fees_value = self.tx_fees.get(txid)
    if tx_fees_value is None:
        return None
    return tx_fees_value.num_inputs

@locked
def get_num_ismine_inputs_of_tx(self, txid: str) -> int:
    """Return how many inputs of `txid` spend from our own addresses."""
    assert isinstance(txid, str)
    txins = self.txi.get(txid, {})
    return sum([len(tupls) for addr, tupls in txins.items()])

@modifier
def remove_tx_fee(self, txid: str) -> None:
    """Drop fee info for `txid` (no-op if absent)."""
    assert isinstance(txid, str)
    self.tx_fees.pop(txid, None)

@locked
def get_dict(self, name):
    """Return self.data[name], creating an empty dict there if missing."""
    # Warning: interacts un-intuitively with 'put': certain parts
    # of 'data' will have pointers saved as separate variables.
    if name not in self.data:
        self.data[name] = {}
    return self.data[name]
@locked
def num_change_addresses(self) -> int:
    """Number of derived change addresses."""
    return len(self.change_addresses)

@locked
def num_receiving_addresses(self) -> int:
    """Number of derived receiving addresses."""
    return len(self.receiving_addresses)

@locked
def get_change_addresses(self, *, slice_start=None, slice_stop=None) -> List[str]:
    """Return (a slice of) the change addresses."""
    # note: slicing makes a shallow copy
    return self.change_addresses[slice_start:slice_stop]

@locked
def get_receiving_addresses(self, *, slice_start=None, slice_stop=None) -> List[str]:
    """Return (a slice of) the receiving addresses."""
    # note: slicing makes a shallow copy
    return self.receiving_addresses[slice_start:slice_stop]

@modifier
def add_change_address(self, addr: str) -> None:
    """Append a new change address and index it as (1, position)."""
    assert isinstance(addr, str)
    self._addr_to_addr_index[addr] = (1, len(self.change_addresses))
    self.change_addresses.append(addr)

@modifier
def add_receiving_address(self, addr: str) -> None:
    """Append a new receiving address and index it as (0, position)."""
    assert isinstance(addr, str)
    self._addr_to_addr_index[addr] = (0, len(self.receiving_addresses))
    self.receiving_addresses.append(addr)

@locked
def get_address_index(self, address: str) -> Optional[Sequence[int]]:
    """Return (is_change, index) for a derived address, or None if unknown."""
    assert isinstance(address, str)
    return self._addr_to_addr_index.get(address)

@modifier
def add_imported_address(self, addr: str, d: dict) -> None:
    """Store metadata dict `d` for imported address `addr`."""
    assert isinstance(addr, str)
    self.imported_addresses[addr] = d

@modifier
def remove_imported_address(self, addr: str) -> None:
    """Delete imported address `addr`; raises KeyError if absent."""
    assert isinstance(addr, str)
    self.imported_addresses.pop(addr)

@locked
def has_imported_address(self, addr: str) -> bool:
    """Return whether `addr` is an imported address."""
    assert isinstance(addr, str)
    return addr in self.imported_addresses

@locked
def get_imported_addresses(self) -> Sequence[str]:
    """Return all imported addresses, sorted."""
    return list(sorted(self.imported_addresses.keys()))

@locked
def get_imported_address(self, addr: str) -> Optional[dict]:
    """Return the metadata dict for imported address `addr`, or None."""
    assert isinstance(addr, str)
    return self.imported_addresses.get(addr)
def load_addresses(self, wallet_type):
    """ called from Abstract_Wallet.__init__ """
    # Bind the address tables onto attributes; for deterministic wallets
    # also build the reverse index address -> (is_change, position).
    if wallet_type == 'imported':
        self.imported_addresses = self.get_dict('addresses')  # type: Dict[str, dict]
    else:
        self.get_dict('addresses')
        for name in ['receiving', 'change']:
            if name not in self.data['addresses']:
                self.data['addresses'][name] = []
        self.change_addresses = self.data['addresses']['change']
        self.receiving_addresses = self.data['addresses']['receiving']
        self._addr_to_addr_index = {}  # type: Dict[str, Sequence[int]]  # key: address, value: (is_change, index)
        for i, addr in enumerate(self.receiving_addresses):
            self._addr_to_addr_index[addr] = (0, i)
        for i, addr in enumerate(self.change_addresses):
            self._addr_to_addr_index[addr] = (1, i)
@profiler
def _load_transactions(self):
    """Wrap self.data in a StoredDict, bind the main data tables onto
    attributes, and prune entries no longer referenced by any tx."""
    self.data = StoredDict(self.data, self, [])
    # references in self.data
    # TODO make all these private
    # txid -> address -> prev_outpoint -> value
    self.txi = self.get_dict('txi')  # type: Dict[str, Dict[str, Dict[str, int]]]
    # txid -> address -> output_index -> (value, is_coinbase)
    self.txo = self.get_dict('txo')  # type: Dict[str, Dict[str, Dict[str, Tuple[int, bool]]]]
    self.transactions = self.get_dict('transactions')  # type: Dict[str, Transaction]
    self.spent_outpoints = self.get_dict('spent_outpoints')  # txid -> output_index -> next_txid
    self.history = self.get_dict('addr_history')  # address -> list of (txid, height)
    self.verified_tx = self.get_dict('verified_tx3')  # txid -> (height, timestamp, txpos, header_hash)
    self.tx_fees = self.get_dict('tx_fees')  # type: Dict[str, TxFeesValue]
    # scripthash -> set of (outpoint, value)
    self._prevouts_by_scripthash = self.get_dict('prevouts_by_scripthash')  # type: Dict[str, Set[Tuple[str, int]]]
    # remove unreferenced tx
    for tx_hash in list(self.transactions.keys()):
        if not self.get_txi_addresses(tx_hash) and not self.get_txo_addresses(tx_hash):
            self.logger.info(f"removing unreferenced tx: {tx_hash}")
            self.transactions.pop(tx_hash)
    # remove unreferenced outpoints
    for prevout_hash in self.spent_outpoints.keys():
        d = self.spent_outpoints[prevout_hash]
        for prevout_n, spending_txid in list(d.items()):
            if spending_txid not in self.transactions:
                self.logger.info("removing unreferenced spent outpoint")
                d.pop(prevout_n)
    # token-related tables
    self.tokens = self.get_dict('tokens')
    # contract_addr + '_' + b58addr -> list(txid, height, log_index)
    self.token_history = self.get_dict('addr_token_history')
    # txid -> tx receipt
    self.tx_receipt = self.get_dict('tx_receipt')
    # txid -> raw tx
    self.token_txs = self.get_dict('token_txs')
    self.smart_contracts = self.get_dict('smart_contracts')
    self.delegations = self.get_dict('delegations')
    if self.token_history:
        token_hist_txids = [x2[0] for x2 in reduce(lambda x1, y1: x1 + y1, self.token_history.values())]
    else:
        token_hist_txids = []
    # deserialize token txs that are referenced by token history
    for tx_hash, raw in self.token_txs.items():
        if tx_hash in token_hist_txids:
            tx = Transaction(raw)
            self.token_txs[tx_hash] = tx
@modifier
def set_token(self, token: Token):
    """Add or update a token, keyed by token.get_key()."""
    self.tokens[token.get_key()] = token

@modifier
def delete_token(self, key: str):
    """Remove a token and its history (no-op if absent)."""
    self.tokens.pop(key, None)
    self.token_history.pop(key, None)
@locked
def get_token(self, key: str) -> Optional[Token]:
    """Return the Token stored under `key`, or None if unknown.

    Fix: previously an unknown key crashed with TypeError
    (``Token(*None)``); now it returns None, matching the
    Optional[Token] return annotation.
    """
    stored = self.tokens.get(key)
    if stored is None:
        return None
    return Token(*stored)
@locked
def list_tokens(self) -> list:
    """Return all token keys."""
    return list(self.tokens.keys())

@modifier
def set_token_history(self, key: str, hist: list):
    """Replace the token history list stored under `key`."""
    self.token_history[key] = hist

@modifier
def delete_token_history(self, key: str):
    """Drop the token history for `key` (no-op if absent)."""
    self.token_history.pop(key, None)

@locked
def get_token_history(self, key: str) -> list:
    """Return the token history for `key` ([] if none)."""
    return self.token_history.get(key, [])

@locked
def list_token_histories(self) -> list:
    """Return all keys that have token history."""
    return list(self.token_history.keys())

@modifier
def set_token_tx(self, txid: str, raw: str):
    """Store the raw token tx under `txid`."""
    self.token_txs[txid] = raw

@modifier
def delete_token_tx(self, txid: str):
    """Drop the token tx for `txid` (no-op if absent)."""
    self.token_txs.pop(txid, None)

@locked
def get_token_tx(self, txid: str):
    """Return the token tx stored under `txid`, or None."""
    return self.token_txs.get(txid)

@locked
def list_token_txs(self) -> list:
    """Return all token tx ids."""
    return list(self.token_txs.keys())

@modifier
def set_tx_receipt(self, txid: str, receipt: list):
    """Store the receipt for `txid`."""
    self.tx_receipt[txid] = receipt

@modifier
def delete_tx_receipt(self, txid: str):
    """Remove and return the receipt for `txid` (None if absent)."""
    return self.tx_receipt.pop(txid, None)

@locked
def get_tx_receipt(self, txid: str) -> list:
    """Return the receipt for `txid` ([] if none)."""
    return self.tx_receipt.get(txid, [])

@locked
def list_tx_receipts(self) -> list:
    """Return all txids that have receipts."""
    return list(self.tx_receipt.keys())

@modifier
def set_delegation(self, dele: Delegation):
    """Store a delegation as [staker, fee], keyed by its address."""
    self.delegations[dele.addr] = [dele.staker, dele.fee]

@modifier
def delete_delegation(self, addr: str):
    """Drop the delegation for `addr` (no-op if absent)."""
    self.delegations.pop(addr, None)

@locked
def get_delegation(self, addr: str) -> Optional[Delegation]:
    """Return the Delegation for `addr`, or None if absent or malformed."""
    dele = self.delegations.get(addr, [])
    if len(dele) != 2:
        return None
    return Delegation(addr=addr, staker=dele[0], fee=dele[1])

@locked
def list_delegations(self) -> Sequence[str]:
    """Return all delegated addresses."""
    return list(self.delegations.keys())
@modifier
def clear_history(self):
    """Wipe all history-derived tables (txi/txo, stored txs, spent outpoints,
    address histories, verification info, fees, token history/receipts and
    the scripthash index). Address lists and token definitions are kept."""
    self.txi.clear()
    self.txo.clear()
    self.spent_outpoints.clear()
    self.transactions.clear()
    self.history.clear()
    self.verified_tx.clear()
    self.tx_fees.clear()
    self.token_txs.clear()
    self.token_history.clear()
    self.tx_receipt.clear()
    self._prevouts_by_scripthash.clear()
def _convert_dict(self, path, key, v):
    """Deserialize a stored dict of values into runtime objects, dispatching
    on the parent dict's key; unknown keys pass through unchanged."""
    if key == 'transactions':
        # note: for performance, "deserialize=False" so that we will deserialize these on-demand
        v = dict((k, tx_from_any(x, deserialize=False)) for k, x in v.items())
    if key == 'invoices':
        v = dict((k, Invoice.from_json(x)) for k, x in v.items())
    if key == 'payment_requests':
        v = dict((k, Invoice.from_json(x)) for k, x in v.items())
    elif key == 'adds':
        v = dict((k, UpdateAddHtlc.from_tuple(*x)) for k, x in v.items())
    elif key == 'fee_updates':
        v = dict((k, FeeUpdate(**x)) for k, x in v.items())
    elif key == 'submarine_swaps':
        v = dict((k, SwapData(**x)) for k, x in v.items())
    elif key == 'channel_backups':
        v = dict((k, ChannelBackupStorage(**x)) for k, x in v.items())
    elif key == 'tx_fees':
        v = dict((k, TxFeesValue(*x)) for k, x in v.items())
    elif key == 'prevouts_by_scripthash':
        # JSON lists back to sets of (outpoint, value) tuples
        v = dict((k, {(prevout, value) for (prevout, value) in x}) for k, x in v.items())
    elif key == 'buckets':
        v = dict((k, ShachainElement(bfh(x[0]), int(x[1]))) for k, x in v.items())
    elif key == 'data_loss_protect_remote_pcp':
        v = dict((k, bfh(x)) for k, x in v.items())
    return v
def _convert_value(self, path, key, v):
if key == 'local_config':
v = LocalConfig(**v)
elif key == 'remote_config':
v = RemoteConfig(**v)
elif key == 'constraints':
v = ChannelConstraints(**v)
elif key == 'funding_outpoint':
v = Outpoint(**v)
return v
def write(self, storage: 'WalletStorage'):
    """Thread-safe write of the db to `storage` (takes self.lock)."""
    with self.lock:
        self._write(storage)
def _write(self, storage: 'WalletStorage'):
if threading.currentThread().isDaemon():
self.logger.warning('daemon thread cannot write db')
return
if not self.modified():
return
storage.write(self.dump())
self.set_modified(False)
def is_ready_to_be_used_by_wallet(self):
    """Return True once no storage upgrade is pending and the post-upgrade
    tasks have already run."""
    if self.requires_upgrade():
        return False
    return self._called_after_upgrade_tasks
def split_accounts(self, root_path):
    """Split a multi-account legacy wallet into one upgraded wallet file per
    account; returns the list of new file paths ("<root_path>.<suffix>")."""
    from .storage import WalletStorage
    out = []
    result = self.get_split_accounts()
    for data in result:
        path = root_path + '.' + data['suffix']
        storage = WalletStorage(path)
        db = WalletDB(json.dumps(data), manual_upgrades=False)
        db._called_after_upgrade_tasks = False
        db.upgrade()
        db.write(storage)
        out.append(path)
    return out

def get_action(self):
    """Return the result of the 'get_action' plugin hook for this db
    (None if no plugin provides one)."""
    action = run_hook('get_action', self)
    return action

def load_plugins(self):
    """Invoke the plugin loader registered for this wallet's type, if any."""
    wallet_type = self.get('wallet_type')
    if wallet_type in plugin_loaders:
        plugin_loaders[wallet_type]()

def set_keystore_encryption(self, enable):
    """Persist the 'use_encryption' flag for this wallet."""
    self.put('use_encryption', enable)
| 39.088986 | 153 | 0.582038 |
import os
import ast
import json
import copy
import threading
from collections import defaultdict
from functools import reduce
from typing import Dict, Optional, List, Tuple, Set, Iterable, NamedTuple, Sequence, TYPE_CHECKING, Union
import binascii
from . import util, bitcoin
from .util import profiler, WalletFileException, multisig_type, TxMinedInfo, bfh
from .invoices import PR_TYPE_ONCHAIN, Invoice
from .keystore import bip44_derivation
from .transaction import Transaction, TxOutpoint, tx_from_any, PartialTransaction, PartialTxOutput
from .logging import Logger
from .lnutil import LOCAL, REMOTE, FeeUpdate, UpdateAddHtlc, LocalConfig, RemoteConfig, Keypair, OnlyPubkeyKeypair, RevocationStore, ChannelBackupStorage
from .lnutil import ChannelConstraints, Outpoint, ShachainElement
from .json_db import StoredDict, JsonDB, locked, modifier
from .plugin import run_hook, plugin_loaders
from .paymentrequest import PaymentRequest
from .submarine_swaps import SwapData
from .bitcoin import Token, Delegation
if TYPE_CHECKING:
from .storage import WalletStorage
# Wallet file format versions.  OLD_SEED_VERSION is assumed for files with
# no stored 'seed_version' and a 128-char 'master_public_key' (see
# get_seed_version); NEW_SEED_VERSION otherwise.  New files are created at,
# and old files are upgraded to, FINAL_SEED_VERSION.
OLD_SEED_VERSION = 4
NEW_SEED_VERSION = 11
FINAL_SEED_VERSION = 32
class TxFeesValue(NamedTuple):
    """Per-transaction fee cache entry.

    fee: fee in satoshis, or None if unknown.
    is_calculated_by_us: True if we computed the fee ourselves; such a value
        takes precedence over server-reported fees (see add_tx_fee_from_server).
    num_inputs: total number of tx inputs, or None if unknown.
    """
    fee: Optional[int] = None
    is_calculated_by_us: bool = False
    num_inputs: Optional[int] = None
class WalletDB(JsonDB):
    def __init__(self, raw, *, manual_upgrades: bool):
        """Create a wallet DB from serialized *raw* data.

        manual_upgrades: when True, old formats are not auto-upgraded on
        load; the caller must invoke upgrade() explicitly (load_data raises
        instead of upgrading).
        """
        JsonDB.__init__(self, {})
        self._manual_upgrades = manual_upgrades
        self._called_after_upgrade_tasks = False
        if raw:
            self.load_data(raw)
            self.load_plugins()
        else:
            # fresh (empty) wallet: start directly at the current version
            self.put('seed_version', FINAL_SEED_VERSION)
            self._after_upgrade_tasks()
def load_data(self, s):
try:
self.data = json.loads(s)
except:
try:
d = ast.literal_eval(s)
labels = d.get('labels', {})
except Exception as e:
raise WalletFileException("Cannot read wallet file. (parsing failed)")
self.data = {}
for key, value in d.items():
try:
json.dumps(key)
json.dumps(value)
except:
self.logger.info(f'Failed to convert label to json format: {key}')
continue
self.data[key] = value
if not isinstance(self.data, dict):
raise WalletFileException("Malformed wallet file (not dict)")
if not self._manual_upgrades and self.requires_split():
raise WalletFileException("This wallet has multiple accounts and must be split")
if not self.requires_upgrade():
self._after_upgrade_tasks()
elif not self._manual_upgrades:
self.upgrade()
def requires_split(self):
d = self.get('accounts', {})
return len(d) > 1
    def get_split_accounts(self):
        """Build one wallet-data dict per account of a legacy multi-account file.

        Returns a list of dicts (each tagged with a 'suffix' used for the new
        file name), or None if there is nothing to split.  Raises
        WalletFileException for unsupported multi-account wallet types.
        """
        result = []
        d = self.get('accounts', {})
        if len(d) < 2:
            return
        wallet_type = self.get('wallet_type')
        if wallet_type == 'old':
            # old-seed wallets have exactly a deterministic account ('0')
            # and an imported account ('/x')
            assert len(d) == 2
            data1 = copy.deepcopy(self.data)
            data1['accounts'] = {'0': d['0']}
            data1['suffix'] = 'deterministic'
            data2 = copy.deepcopy(self.data)
            data2['accounts'] = {'/x': d['/x']}
            # the imported half carries no seed material
            data2['seed'] = None
            data2['seed_version'] = None
            data2['master_public_key'] = None
            data2['wallet_type'] = 'imported'
            data2['suffix'] = 'imported'
            result = [data1, data2]
        elif wallet_type in ['bip44', 'trezor', 'keepkey', 'ledger', 'btchip', 'digitalbitbox', 'safe_t']:
            mpk = self.get('master_public_keys')
            for k in d.keys():
                i = int(k)
                x = d[k]
                if x.get("pending"):
                    continue
                xpub = mpk["x/%d'"%i]
                new_data = copy.deepcopy(self.data)
                # save account, derivation and xpub at index 0
                new_data['accounts'] = {'0': x}
                new_data['master_public_keys'] = {"x/0'": xpub}
                new_data['derivation'] = bip44_derivation(k)
                new_data['suffix'] = k
                result.append(new_data)
        else:
            raise WalletFileException("This wallet has multiple accounts and must be split")
        return result
def requires_upgrade(self):
return self.get_seed_version() < FINAL_SEED_VERSION
    @profiler
    def upgrade(self):
        """Run the full sequence of storage-format conversions.

        Each _convert_* step internally checks whether it applies to the
        current seed_version, so the whole ordered chain is always invoked.
        Must be called before _after_upgrade_tasks().
        """
        self.logger.info('upgrading wallet format')
        if self._called_after_upgrade_tasks:
            raise Exception("'after_upgrade_tasks' must NOT be called before 'upgrade'")
        # NOTE: order matters -- each step consumes the previous step's output
        self._convert_imported()
        self._convert_wallet_type()
        self._convert_account()
        self._convert_version_13_b()
        self._convert_version_14()
        self._convert_version_15()
        self._convert_version_16()
        self._convert_version_17()
        self._convert_version_18()
        self._convert_version_19()
        self._convert_version_20()
        self._convert_version_21()
        self._convert_version_22()
        self._convert_version_23()
        self._convert_version_24()
        self._convert_version_25()
        self._convert_version_26()
        self._convert_version_27()
        self._convert_version_28()
        self._convert_version_29()
        self._convert_version_30()
        self._convert_version_31()
        self._convert_version_32()
        self.put('seed_version', FINAL_SEED_VERSION)
        self._after_upgrade_tasks()
    def _after_upgrade_tasks(self):
        """Post-upgrade initialization: wrap self.data and build the tx indexes."""
        self._called_after_upgrade_tasks = True
        self._load_transactions()
    def _convert_wallet_type(self):
        """Upgrade pre-v13 wallets to the keystore-based layout.

        Detects the legacy wallet flavor from loose top-level keys (seed,
        master_public_keys, keypairs, ...) and rewrites them as a 'keystore'
        dict, then removes the legacy keys.
        """
        if not self._is_upgrade_method_needed(0, 13):
            return
        wallet_type = self.get('wallet_type')
        if wallet_type == 'btchip': wallet_type = 'ledger'
        # already converted (or nothing to convert)
        if self.get('keystore') or self.get('x1/') or wallet_type=='imported':
            return False
        assert not self.requires_split()
        seed_version = self.get_seed_version()
        seed = self.get('seed')
        xpubs = self.get('master_public_keys')
        xprvs = self.get('master_private_keys', {})
        mpk = self.get('master_public_key')
        keypairs = self.get('keypairs')
        key_type = self.get('key_type')
        if seed_version == OLD_SEED_VERSION or wallet_type == 'old':
            d = {
                'type': 'old',
                'seed': seed,
                'mpk': mpk,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif key_type == 'imported':
            d = {
                'type': 'imported',
                'keypairs': keypairs,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif wallet_type in ['xpub', 'standard']:
            xpub = xpubs["x/"]
            xprv = xprvs.get("x/")
            d = {
                'type': 'bip32',
                'xpub': xpub,
                'xprv': xprv,
                'seed': seed,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif wallet_type in ['bip44']:
            xpub = xpubs["x/0'"]
            xprv = xprvs.get("x/0'")
            d = {
                'type': 'bip32',
                'xpub': xpub,
                'xprv': xprv,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif wallet_type in ['trezor', 'keepkey', 'ledger', 'digitalbitbox', 'safe_t']:
            xpub = xpubs["x/0'"]
            derivation = self.get('derivation', bip44_derivation(0))
            d = {
                'type': 'hardware',
                'hw_type': wallet_type,
                'xpub': xpub,
                'derivation': derivation,
            }
            self.put('wallet_type', 'standard')
            self.put('keystore', d)
        elif (wallet_type == '2fa') or multisig_type(wallet_type):
            # multisig: one keystore per cosigner, stored under its 'xN/' key
            for key in xpubs.keys():
                d = {
                    'type': 'bip32',
                    'xpub': xpubs[key],
                    'xprv': xprvs.get(key),
                }
                if key == 'x1/' and seed:
                    d['seed'] = seed
                self.put(key, d)
        else:
            raise WalletFileException('Unable to tell wallet type. Is this even a wallet file?')
        # remove junk
        self.put('master_public_key', None)
        self.put('master_public_keys', None)
        self.put('master_private_keys', None)
        self.put('derivation', None)
        self.put('seed', None)
        self.put('keypairs', None)
        self.put('key_type', None)
    def _convert_version_13_b(self):
        # version 13 is ambiguous, and has an earlier and a later structure
        """Normalize v13 imported-keystore wallets: derive p2pkh receiving
        addresses from the imported pubkeys and drop the 'pubkeys' key."""
        if not self._is_upgrade_method_needed(0, 13):
            return
        if self.get('wallet_type') == 'standard':
            if self.get('keystore').get('type') == 'imported':
                pubkeys = self.get('keystore').get('keypairs').keys()
                d = {'change': []}
                receiving_addresses = []
                for pubkey in pubkeys:
                    addr = bitcoin.pubkey_to_address('p2pkh', pubkey)
                    receiving_addresses.append(addr)
                d['receiving'] = receiving_addresses
                self.put('addresses', d)
                self.put('pubkeys', None)
        self.put('seed_version', 13)
    def _convert_version_14(self):
        # convert imported wallets for 3.0
        """Re-shape 'addresses' for imported wallets: list -> dict, and for
        imported-keystore standard wallets attach pubkey/type per address,
        turning the wallet_type into 'imported'."""
        if not self._is_upgrade_method_needed(13, 13):
            return
        if self.get('wallet_type') =='imported':
            addresses = self.get('addresses')
            if type(addresses) is list:
                addresses = dict([(x, None) for x in addresses])
                self.put('addresses', addresses)
        elif self.get('wallet_type') == 'standard':
            if self.get('keystore').get('type')=='imported':
                addresses = set(self.get('addresses').get('receiving'))
                pubkeys = self.get('keystore').get('keypairs').keys()
                assert len(addresses) == len(pubkeys)
                d = {}
                for pubkey in pubkeys:
                    addr = bitcoin.pubkey_to_address('p2pkh', pubkey)
                    assert addr in addresses
                    d[addr] = {
                        'pubkey': pubkey,
                        'redeem_script': None,
                        'type': 'p2pkh'
                    }
                self.put('addresses', d)
                self.put('pubkeys', None)
                self.put('wallet_type', 'imported')
        self.put('seed_version', 14)
    def _convert_version_15(self):
        """Reject development-era segwit seeds; otherwise just bump the version."""
        if not self._is_upgrade_method_needed(14, 14):
            return
        if self.get('seed_type') == 'segwit':
            # should not get here; get_seed_version should have caught this
            raise Exception('unsupported derivation (development segwit, v14)')
        self.put('seed_version', 15)
    def _convert_version_16(self):
        # fixes issue #3193 for Imported_Wallets with addresses
        # also, previous versions allowed importing any garbage as an address
        # which we now try to remove, see pr #3191
        """Purge invalid imported addresses (and their history/labels/requests)
        and normalize per-address details from None to {}."""
        if not self._is_upgrade_method_needed(15, 15):
            return
        def remove_address(addr):
            # scrub every auxiliary structure that may reference addr
            def remove_from_dict(dict_name):
                d = self.get(dict_name, None)
                if d is not None:
                    d.pop(addr, None)
                    self.put(dict_name, d)
            def remove_from_list(list_name):
                lst = self.get(list_name, None)
                if lst is not None:
                    s = set(lst)
                    s -= {addr}
                    self.put(list_name, list(s))
            # note: we don't remove 'addr' from self.get('addresses')
            remove_from_dict('addr_history')
            remove_from_dict('labels')
            remove_from_dict('payment_requests')
            remove_from_list('frozen_addresses')
        if self.get('wallet_type') == 'imported':
            addresses = self.get('addresses')
            assert isinstance(addresses, dict)
            addresses_new = dict()
            for address, details in addresses.items():
                if not bitcoin.is_address(address):
                    remove_address(address)
                    continue
                if details is None:
                    addresses_new[address] = {}
                else:
                    addresses_new[address] = details
            self.put('addresses', addresses_new)
        self.put('seed_version', 16)
    def _convert_version_17(self):
        """Rebuild the spent_outpoints index from raw transactions, drop the
        old 'pruned_txo' key, and convert token entries to Token objects."""
        if not self._is_upgrade_method_needed(16, 16):
            return
        self.put('pruned_txo', None)
        transactions = self.get('transactions', {})
        spent_outpoints = defaultdict(dict)
        for txid, raw_tx in transactions.items():
            tx = Transaction(raw_tx)
            for txin in tx.inputs():
                if txin.is_coinbase_input():
                    continue
                prevout_hash = txin.prevout.txid.hex()
                prevout_n = txin.prevout.out_idx
                spent_outpoints[prevout_hash][str(prevout_n)] = txid
        self.put('spent_outpoints', spent_outpoints)
        # token keys are '<contract_addr>_<bind_addr>'
        tokens = self.get('tokens', {})
        new_tokens = {}
        for key, value in tokens.items():
            contract_addr, bind_addr = key.split('_')
            new_token = Token(contract_addr, bind_addr, value[0], value[1], value[2], value[3])
            new_tokens[new_token.get_key()] = new_token
        self.put('tokens', new_tokens)
        self.put('seed_version', 17)
    def _convert_version_18(self):
        """Drop the 'verified_tx3' cache; it will be rebuilt by verification."""
        if not self._is_upgrade_method_needed(17, 17):
            return
        self.put('verified_tx3', None)
        self.put('seed_version', 18)
    def _convert_version_19(self):
        """Drop the 'tx_fees' cache; its format changed and it is recomputed."""
        if not self._is_upgrade_method_needed(18, 18):
            return
        self.put('tx_fees', None)
        self.put('seed_version', 19)
    def _convert_version_20(self):
        """Backfill 'derivation' and 'root_fingerprint' on every keystore.

        Both can be derived from the xpub for depth-0/1 nodes; a legacy
        Coldcard 'ckcc_xfp' integer, if present, is converted to the hex
        fingerprint and removed.
        """
        if not self._is_upgrade_method_needed(19, 19):
            return
        from .bip32 import BIP32Node, convert_bip32_intpath_to_strpath
        # the main keystore plus multisig cosigner keystores x1/..x15/
        for ks_name in ('keystore', *['x{}/'.format(i) for i in range(1, 16)]):
            ks = self.get(ks_name, None)
            if ks is None: continue
            xpub = ks.get('xpub', None)
            if xpub is None: continue
            bip32node = BIP32Node.from_xkey(xpub)
            derivation_prefix = ks.get('derivation', None)
            if derivation_prefix is None:
                assert bip32node.depth >= 0, bip32node.depth
                if bip32node.depth == 0:
                    derivation_prefix = 'm'
                elif bip32node.depth == 1:
                    child_number_int = int.from_bytes(bip32node.child_number, 'big')
                    derivation_prefix = convert_bip32_intpath_to_strpath([child_number_int])
                ks['derivation'] = derivation_prefix
            root_fingerprint = ks.get('ckcc_xfp', None)
            if root_fingerprint is not None:
                root_fingerprint = root_fingerprint.to_bytes(4, byteorder="little", signed=False).hex().lower()
            if root_fingerprint is None:
                if bip32node.depth == 0:
                    root_fingerprint = bip32node.calc_fingerprint_of_this_node().hex().lower()
                elif bip32node.depth == 1:
                    root_fingerprint = bip32node.fingerprint.hex()
            ks['root_fingerprint'] = root_fingerprint
            ks.pop('ckcc_xfp', None)
            self.put(ks_name, ks)
        self.put('seed_version', 20)
    def _convert_version_21(self):
        """Add an explicit 'state' field (OPENING) to every lightning channel."""
        if not self._is_upgrade_method_needed(20, 20):
            return
        channels = self.get('channels')
        if channels:
            for channel in channels:
                channel['state'] = 'OPENING'
            self.put('channels', channels)
        self.put('seed_version', 21)
    def _convert_version_22(self):
        """Build the prevouts_by_scripthash index from stored transactions."""
        if not self._is_upgrade_method_needed(21, 21):
            return
        from .bitcoin import script_to_scripthash
        transactions = self.get('transactions', {})
        prevouts_by_scripthash = defaultdict(list)
        for txid, raw_tx in transactions.items():
            tx = Transaction(raw_tx)
            for idx, txout in enumerate(tx.outputs()):
                outpoint = f"{txid}:{idx}"
                scripthash = script_to_scripthash(txout.scriptpubkey.hex())
                prevouts_by_scripthash[scripthash].append((outpoint, txout.value))
        self.put('prevouts_by_scripthash', prevouts_by_scripthash)
        self.put('seed_version', 22)
    def _convert_version_23(self):
        """Move each channel's revocation_store out of remote_config and
        re-key fee_updates as an index->dict mapping."""
        if not self._is_upgrade_method_needed(22, 22):
            return
        channels = self.get('channels', [])
        # deliberately shadow the imported LOCAL/REMOTE enums with the raw
        # integers used as keys in the on-disk 'log' structure
        LOCAL = 1
        REMOTE = -1
        for c in channels:
            r = c['remote_config'].pop('revocation_store')
            c['revocation_store'] = r
            log = c.get('log', {})
            for sub in LOCAL, REMOTE:
                l = log[str(sub)]['fee_updates']
                d = {}
                for i, fu in enumerate(l):
                    d[str(i)] = {
                        'rate':fu['rate'],
                        'ctn_local':fu['ctns'][str(LOCAL)],
                        'ctn_remote':fu['ctns'][str(REMOTE)]
                    }
                log[str(int(sub))]['fee_updates'] = d
        self.data['channels'] = channels
        self.data['seed_version'] = 23
    def _convert_version_24(self):
        """Re-shape channels (list -> dict keyed by channel_id, sparse
        revocation buckets) and txi/txo (lists of tuples -> nested dicts)."""
        if not self._is_upgrade_method_needed(23, 23):
            return
        channels = self.get('channels', [])
        for c in channels:
            # sparse dict of shachain buckets: keep only non-None entries
            r = c['revocation_store']
            d = {}
            for i in range(49):
                v = r['buckets'][i]
                if v is not None:
                    d[str(i)] = v
            r['buckets'] = d
            c['revocation_store'] = r
        self.data['channels'] = { x['channel_id']: x for x in channels }
        # txi: txid -> addr -> prevout_str -> value
        txi = self.get('txi', {})
        for tx_hash, d in txi.items():
            d2 = {}
            for addr, l in d.items():
                d2[addr] = {}
                for ser, v in l:
                    d2[addr][ser] = v
            txi[tx_hash] = d2
        self.data['txi'] = txi
        # txo: txid -> addr -> output_index_str -> (value, is_coinbase)
        txo = self.get('txo', {})
        for tx_hash, d in txo.items():
            d2 = {}
            for addr, l in d.items():
                d2[addr] = {}
                for n, v, cb in l:
                    d2[addr][str(n)] = (v, cb)
            txo[tx_hash] = d2
        self.data['txo'] = txo
        self.data['seed_version'] = 24
    def _convert_version_25(self):
        """Normalize payment_requests (add a 'type' field) and rebuild bip70
        invoices from their stored hex payloads."""
        if not self._is_upgrade_method_needed(24, 24):
            return
        # only rewrite entries keyed by their own address
        requests = self.data.get('payment_requests', {})
        for k, r in list(requests.items()):
            if r.get('address') == k:
                requests[k] = {
                    'address': r['address'],
                    'amount': r.get('amount'),
                    'exp': r.get('exp'),
                    'id': r.get('id'),
                    'memo': r.get('memo'),
                    'time': r.get('time'),
                    'type': PR_TYPE_ONCHAIN,
                }
        invoices = self.data.get('invoices', {})
        for k, r in list(invoices.items()):
            data = r.get("hex")
            if data:
                pr = PaymentRequest(bytes.fromhex(data))
                # skip entries whose key does not match the request id
                if pr.id != k:
                    continue
                invoices[k] = {
                    'type': PR_TYPE_ONCHAIN,
                    'amount': pr.get_amount(),
                    'bip70': data,
                    'exp': pr.get_expiration_date() - pr.get_time(),
                    'id': pr.id,
                    'message': pr.get_memo(),
                    'outputs': [x.to_legacy_tuple() for x in pr.get_outputs()],
                    'time': pr.get_time(),
                    'requestor': pr.get_requestor(),
                }
        self.data['seed_version'] = 25
    def _convert_version_26(self):
        """Fold the separate lightning_channel_timestamps map into each
        channel as 'funding_height'/'closing_height' triples."""
        if not self._is_upgrade_method_needed(25, 25):
            return
        channels = self.data.get('channels', {})
        channel_timestamps = self.data.pop('lightning_channel_timestamps', {})
        for channel_id, c in channels.items():
            item = channel_timestamps.get(channel_id)
            if item:
                funding_txid, funding_height, funding_timestamp, closing_txid, closing_height, closing_timestamp = item
                if funding_txid:
                    c['funding_height'] = funding_txid, funding_height, funding_timestamp
                if closing_txid:
                    c['closing_height'] = closing_txid, closing_height, closing_timestamp
        self.data['seed_version'] = 26
    def _convert_version_27(self):
        """Add the htlc_minimum_msat field (set to 1) to every channel's local_config."""
        if not self._is_upgrade_method_needed(26, 26):
            return
        channels = self.data.get('channels', {})
        for channel_id, c in channels.items():
            c['local_config']['htlc_minimum_msat'] = 1
        self.data['seed_version'] = 27
    def _convert_version_28(self):
        """Add an (empty) channel_seed field to every channel's local_config."""
        if not self._is_upgrade_method_needed(27, 27):
            return
        channels = self.data.get('channels', {})
        for channel_id, c in channels.items():
            c['local_config']['channel_seed'] = None
        self.data['seed_version'] = 28
    def _convert_version_29(self):
        """Normalize invoices and payment_requests into a common schema.

        Onchain entries get 'outputs'/'id'/'bip70'/'requestor'; lightning
        entries get 'rhash'/'invoice'; both share type/message/amount/exp/time.
        """
        if not self._is_upgrade_method_needed(28, 28):
            return
        requests = self.data.get('payment_requests', {})
        invoices = self.data.get('invoices', {})
        for d in [invoices, requests]:
            for key, r in list(d.items()):
                _type = r.get('type', 0)
                item = {
                    'type': _type,
                    'message': r.get('message') or r.get('memo', ''),
                    'amount': r.get('amount'),
                    'exp': r.get('exp') or 0,
                    'time': r.get('time', 0),
                }
                if _type == PR_TYPE_ONCHAIN:
                    address = r.pop('address', None)
                    if address:
                        # legacy address-only entries become a single output
                        outputs = [(0, address, r.get('amount'))]
                    else:
                        outputs = r.get('outputs')
                    item.update({
                        'outputs': outputs,
                        'id': r.get('id'),
                        'bip70': r.get('bip70'),
                        'requestor': r.get('requestor'),
                    })
                else:
                    item.update({
                        'rhash': r['rhash'],
                        'invoice': r['invoice'],
                    })
                d[key] = item
        self.data['seed_version'] = 29
    def _convert_version_30(self):
        """Rename 'amount' to 'amount_sat' (onchain) / 'amount_msat'
        (lightning, x1000) in invoices and requests, dropping fields that
        are redundant for lightning entries."""
        if not self._is_upgrade_method_needed(29, 29):
            return
        from .invoices import PR_TYPE_ONCHAIN, PR_TYPE_LN
        requests = self.data.get('payment_requests', {})
        invoices = self.data.get('invoices', {})
        for d in [invoices, requests]:
            for key, item in list(d.items()):
                _type = item['type']
                if _type == PR_TYPE_ONCHAIN:
                    item['amount_sat'] = item.pop('amount')
                elif _type == PR_TYPE_LN:
                    amount_sat = item.pop('amount')
                    item['amount_msat'] = 1000 * amount_sat if amount_sat is not None else None
                    item.pop('exp')
                    item.pop('message')
                    item.pop('rhash')
                    item.pop('time')
                else:
                    raise Exception(f"unknown invoice type: {_type}")
        self.data['seed_version'] = 30
    def _convert_version_31(self):
        """Coerce None amount_sat/exp/time of onchain entries to 0."""
        if not self._is_upgrade_method_needed(30, 30):
            return
        from .invoices import PR_TYPE_ONCHAIN
        requests = self.data.get('payment_requests', {})
        invoices = self.data.get('invoices', {})
        for d in [invoices, requests]:
            for key, item in list(d.items()):
                if item['type'] == PR_TYPE_ONCHAIN:
                    item['amount_sat'] = item['amount_sat'] or 0
                    item['exp'] = item['exp'] or 0
                    item['time'] = item['time'] or 0
        self.data['seed_version'] = 31
    def _convert_version_32(self):
        """Drop malformed onchain invoices that have no outputs."""
        if not self._is_upgrade_method_needed(31, 31):
            return
        from .invoices import PR_TYPE_ONCHAIN
        invoices_old = self.data.get('invoices', {})
        invoices_new = {k: item for k, item in invoices_old.items()
                        if not (item['type'] == PR_TYPE_ONCHAIN and item['outputs'] is None)}
        self.data['invoices'] = invoices_new
        self.data['seed_version'] = 32
    def _convert_imported(self):
        """Upgrade the legacy imported account ('/x') of pre-v13 wallets.

        Splits its entries into either a plain address list or an imported
        keypairs keystore; mixing both is considered corrupt.
        """
        if not self._is_upgrade_method_needed(0, 13):
            return
        d = self.get('accounts', {}).get('/x', {}).get('imported',{})
        if not d:
            return False
        addresses = []
        keypairs = {}
        for addr, v in d.items():
            pubkey, privkey = v
            if privkey:
                keypairs[pubkey] = privkey
            else:
                addresses.append(addr)
        if addresses and keypairs:
            raise WalletFileException('mixed addresses and privkeys')
        elif addresses:
            self.put('addresses', addresses)
            self.put('accounts', None)
        elif keypairs:
            self.put('wallet_type', 'standard')
            self.put('key_type', 'imported')
            self.put('keypairs', keypairs)
            self.put('accounts', None)
        else:
            raise WalletFileException('no addresses or privkeys')
    def _convert_account(self):
        """Drop the legacy 'accounts' key for pre-v13 wallets."""
        if not self._is_upgrade_method_needed(0, 13):
            return
        self.put('accounts', None)
def _is_upgrade_method_needed(self, min_version, max_version):
assert min_version <= max_version
cur_version = self.get_seed_version()
if cur_version > max_version:
return False
elif cur_version < min_version:
raise WalletFileException(
'storage upgrade: unexpected version {} (should be {}-{})'
.format(cur_version, min_version, max_version))
else:
return True
    @locked
    def get_seed_version(self):
        """Return the wallet file's storage version, validating it.

        Files with no stored version are classified by the length of their
        legacy master_public_key.  Raises WalletFileException for versions
        newer than this code supports, and delegates other unsupported
        versions to _raise_unsupported_version.
        """
        seed_version = self.get('seed_version')
        if not seed_version:
            # 128 hex chars == legacy (pre-xpub) master public key
            seed_version = OLD_SEED_VERSION if len(self.get('master_public_key','')) == 128 else NEW_SEED_VERSION
        if seed_version > FINAL_SEED_VERSION:
            raise WalletFileException('This version of Electrum is too old to open this wallet.\n'
                                      '(highest supported storage version: {}, version of this file: {})'
                                      .format(FINAL_SEED_VERSION, seed_version))
        if seed_version==14 and self.get('seed_type') == 'segwit':
            # development segwit seeds cannot be upgraded
            self._raise_unsupported_version(seed_version)
        if seed_version >=12:
            return seed_version
        if seed_version not in [OLD_SEED_VERSION, NEW_SEED_VERSION]:
            self._raise_unsupported_version(seed_version)
        return seed_version
    def _raise_unsupported_version(self, seed_version):
        """Raise WalletFileException with version-specific recovery advice."""
        msg = f"Your wallet has an unsupported seed version: {seed_version}."
        if seed_version in [5, 7, 8, 9, 10, 14]:
            msg += "\n\nTo open this wallet, try 'git checkout seed_v%d'"%seed_version
        if seed_version == 6:
            msg += '\n\nThis file was created because of a bug in version 1.9.8.'
            if self.get('master_public_keys') is None and self.get('master_private_keys') is None and self.get('imported_keys') is None:
                msg += "\nIt does not contain any keys, and can safely be removed."
            else:
                msg += "\nPlease open this file with Electrum 1.9.8, and move your coins to a new wallet."
        raise WalletFileException(msg)
@locked
def get_txi_addresses(self, tx_hash: str) -> List[str]:
assert isinstance(tx_hash, str)
return list(self.txi.get(tx_hash, {}).keys())
@locked
def get_txo_addresses(self, tx_hash: str) -> List[str]:
assert isinstance(tx_hash, str)
return list(self.txo.get(tx_hash, {}).keys())
@locked
def get_txi_addr(self, tx_hash: str, address: str) -> Iterable[Tuple[str, int]]:
assert isinstance(tx_hash, str)
assert isinstance(address, str)
d = self.txi.get(tx_hash, {}).get(address, {})
return list(d.items())
@locked
def get_txo_addr(self, tx_hash: str, address: str) -> Iterable[Tuple[int, int, bool]]:
assert isinstance(tx_hash, str)
assert isinstance(address, str)
d = self.txo.get(tx_hash, {}).get(address, {})
return [(int(n), v, cb) for (n, (v, cb)) in d.items()]
@modifier
def add_txi_addr(self, tx_hash: str, addr: str, ser: str, v: int) -> None:
assert isinstance(tx_hash, str)
assert isinstance(addr, str)
assert isinstance(ser, str)
assert isinstance(v, int)
if tx_hash not in self.txi:
self.txi[tx_hash] = {}
d = self.txi[tx_hash]
if addr not in d:
d[addr] = {}
d[addr][ser] = v
@modifier
def add_txo_addr(self, tx_hash: str, addr: str, n: Union[int, str], v: int, is_coinbase: bool) -> None:
n = str(n)
assert isinstance(tx_hash, str)
assert isinstance(addr, str)
assert isinstance(n, str)
assert isinstance(v, int)
assert isinstance(is_coinbase, bool)
if tx_hash not in self.txo:
self.txo[tx_hash] = {}
d = self.txo[tx_hash]
if addr not in d:
d[addr] = {}
d[addr][n] = (v, is_coinbase)
@locked
def list_txi(self) -> Sequence[str]:
return list(self.txi.keys())
@locked
def list_txo(self) -> Sequence[str]:
return list(self.txo.keys())
    @modifier
    def remove_txi(self, tx_hash: str) -> None:
        """Forget all input records for *tx_hash* (no-op if absent)."""
        assert isinstance(tx_hash, str)
        self.txi.pop(tx_hash, None)
    @modifier
    def remove_txo(self, tx_hash: str) -> None:
        """Forget all output records for *tx_hash* (no-op if absent)."""
        assert isinstance(tx_hash, str)
        self.txo.pop(tx_hash, None)
@locked
def list_spent_outpoints(self) -> Sequence[Tuple[str, str]]:
return [(h, n)
for h in self.spent_outpoints.keys()
for n in self.get_spent_outpoints(h)
]
@locked
def get_spent_outpoints(self, prevout_hash: str) -> Sequence[str]:
assert isinstance(prevout_hash, str)
return list(self.spent_outpoints.get(prevout_hash, {}).keys())
@locked
def get_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str]) -> Optional[str]:
assert isinstance(prevout_hash, str)
prevout_n = str(prevout_n)
return self.spent_outpoints.get(prevout_hash, {}).get(prevout_n)
    @modifier
    def remove_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str]) -> None:
        """Forget the spender of prevout_hash:prevout_n, pruning empty entries."""
        assert isinstance(prevout_hash, str)
        prevout_n = str(prevout_n)
        self.spent_outpoints[prevout_hash].pop(prevout_n, None)
        # drop the per-tx dict once its last outpoint is gone
        if not self.spent_outpoints[prevout_hash]:
            self.spent_outpoints.pop(prevout_hash)
    @modifier
    def set_spent_outpoint(self, prevout_hash: str, prevout_n: Union[int, str], tx_hash: str) -> None:
        """Record that prevout_hash:prevout_n is spent by *tx_hash*."""
        assert isinstance(prevout_hash, str)
        assert isinstance(tx_hash, str)
        prevout_n = str(prevout_n)
        if prevout_hash not in self.spent_outpoints:
            self.spent_outpoints[prevout_hash] = {}
        self.spent_outpoints[prevout_hash][prevout_n] = tx_hash
    @modifier
    def add_prevout_by_scripthash(self, scripthash: str, *, prevout: TxOutpoint, value: int) -> None:
        """Index (prevout, value) under *scripthash*; outpoints are stored as strings."""
        assert isinstance(scripthash, str)
        assert isinstance(prevout, TxOutpoint)
        assert isinstance(value, int)
        if scripthash not in self._prevouts_by_scripthash:
            self._prevouts_by_scripthash[scripthash] = set()
        self._prevouts_by_scripthash[scripthash].add((prevout.to_str(), value))
    @modifier
    def remove_prevout_by_scripthash(self, scripthash: str, *, prevout: TxOutpoint, value: int) -> None:
        """Un-index (prevout, value) from *scripthash*, pruning empty sets."""
        assert isinstance(scripthash, str)
        assert isinstance(prevout, TxOutpoint)
        assert isinstance(value, int)
        self._prevouts_by_scripthash[scripthash].discard((prevout.to_str(), value))
        # drop the scripthash entry once its last prevout is gone
        if not self._prevouts_by_scripthash[scripthash]:
            self._prevouts_by_scripthash.pop(scripthash)
@locked
def get_prevouts_by_scripthash(self, scripthash: str) -> Set[Tuple[TxOutpoint, int]]:
assert isinstance(scripthash, str)
prevouts_and_values = self._prevouts_by_scripthash.get(scripthash, set())
return {(TxOutpoint.from_str(prevout), value) for prevout, value in prevouts_and_values}
    @modifier
    def add_transaction(self, tx_hash: str, tx: Transaction) -> None:
        """Cache *tx* under *tx_hash*, validating consistency.

        An existing complete transaction is never overwritten; only a
        missing entry or a PartialTransaction placeholder is replaced.
        """
        assert isinstance(tx_hash, str)
        assert isinstance(tx, Transaction), tx
        if not tx_hash:
            raise Exception("trying to add tx to db without txid")
        if tx_hash != tx.txid():
            raise Exception(f"trying to add tx to db with inconsistent txid: {tx_hash} != {tx.txid()}")
        tx_we_already_have = self.transactions.get(tx_hash, None)
        # only overwrite if we don't have the tx yet, or if what we have is partial
        if tx_we_already_have is None or isinstance(tx_we_already_have, PartialTransaction):
            self.transactions[tx_hash] = tx
    @modifier
    def remove_transaction(self, tx_hash: str) -> Optional[Transaction]:
        """Drop *tx_hash* from the cache; return the removed tx or None."""
        assert isinstance(tx_hash, str)
        return self.transactions.pop(tx_hash, None)
@locked
def get_transaction(self, tx_hash: Optional[str]) -> Optional[Transaction]:
if tx_hash is None:
return None
assert isinstance(tx_hash, str)
return self.transactions.get(tx_hash)
@locked
def list_transactions(self) -> Sequence[str]:
return list(self.transactions.keys())
@locked
def get_history(self) -> Sequence[str]:
return list(self.history.keys())
    def is_addr_in_history(self, addr: str) -> bool:
        """Whether *addr* has an addr_history entry (possibly an empty one)."""
        # does not mean history is non-empty!
        assert isinstance(addr, str)
        return addr in self.history
    @locked
    def get_addr_history(self, addr: str) -> Sequence[Tuple[str, int]]:
        """Return the (txid, height) history of *addr* ([] if unknown)."""
        assert isinstance(addr, str)
        return self.history.get(addr, [])
    @modifier
    def set_addr_history(self, addr: str, hist) -> None:
        """Replace the stored history of *addr* with *hist*."""
        assert isinstance(addr, str)
        self.history[addr] = hist
    @modifier
    def remove_addr_history(self, addr: str) -> None:
        """Forget the history of *addr* (no-op if absent)."""
        assert isinstance(addr, str)
        self.history.pop(addr, None)
@locked
def list_verified_tx(self) -> Sequence[str]:
return list(self.verified_tx.keys())
@locked
def get_verified_tx(self, txid: str) -> Optional[TxMinedInfo]:
assert isinstance(txid, str)
if txid not in self.verified_tx:
return None
height, timestamp, txpos, header_hash = self.verified_tx[txid]
return TxMinedInfo(height=height,
conf=None,
timestamp=timestamp,
txpos=txpos,
header_hash=header_hash)
    @modifier
    def add_verified_tx(self, txid: str, info: TxMinedInfo):
        """Store mined-position info for *txid* (conf is not persisted)."""
        assert isinstance(txid, str)
        assert isinstance(info, TxMinedInfo)
        self.verified_tx[txid] = (info.height, info.timestamp, info.txpos, info.header_hash)
    @modifier
    def remove_verified_tx(self, txid: str):
        """Forget the verification record of *txid* (no-op if absent)."""
        assert isinstance(txid, str)
        self.verified_tx.pop(txid, None)
    def is_in_verified_tx(self, txid: str) -> bool:
        """Whether *txid* has a stored SPV verification record."""
        assert isinstance(txid, str)
        return txid in self.verified_tx
    @modifier
    def add_tx_fee_from_server(self, txid: str, fee_sat: Optional[int]) -> None:
        """Cache a server-reported fee for *txid*.

        A fee we calculated ourselves always takes precedence and is never
        overwritten by server data.
        """
        assert isinstance(txid, str)
        # note: when called with (fee_sat is None), rm currently saved value
        if txid not in self.tx_fees:
            self.tx_fees[txid] = TxFeesValue()
        tx_fees_value = self.tx_fees[txid]
        if tx_fees_value.is_calculated_by_us:
            return
        self.tx_fees[txid] = tx_fees_value._replace(fee=fee_sat, is_calculated_by_us=False)
    @modifier
    def add_tx_fee_we_calculated(self, txid: str, fee_sat: Optional[int]) -> None:
        """Cache a fee we computed ourselves (overrides server-reported fees)."""
        assert isinstance(txid, str)
        if fee_sat is None:
            return
        assert isinstance(fee_sat, int)
        if txid not in self.tx_fees:
            self.tx_fees[txid] = TxFeesValue()
        self.tx_fees[txid] = self.tx_fees[txid]._replace(fee=fee_sat, is_calculated_by_us=True)
@locked
def get_tx_fee(self, txid: str, *, trust_server: bool = False) -> Optional[int]:
assert isinstance(txid, str)
tx_fees_value = self.tx_fees.get(txid)
if tx_fees_value is None:
return None
if not trust_server and not tx_fees_value.is_calculated_by_us:
return None
return tx_fees_value.fee
    @modifier
    def add_num_inputs_to_tx(self, txid: str, num_inputs: int) -> None:
        """Record the total input count of *txid* in the fee cache."""
        assert isinstance(txid, str)
        assert isinstance(num_inputs, int)
        if txid not in self.tx_fees:
            self.tx_fees[txid] = TxFeesValue()
        self.tx_fees[txid] = self.tx_fees[txid]._replace(num_inputs=num_inputs)
@locked
def get_num_all_inputs_of_tx(self, txid: str) -> Optional[int]:
assert isinstance(txid, str)
tx_fees_value = self.tx_fees.get(txid)
if tx_fees_value is None:
return None
return tx_fees_value.num_inputs
@locked
def get_num_ismine_inputs_of_tx(self, txid: str) -> int:
assert isinstance(txid, str)
txins = self.txi.get(txid, {})
return sum([len(tupls) for addr, tupls in txins.items()])
    @modifier
    def remove_tx_fee(self, txid: str) -> None:
        """Drop the fee-cache entry of *txid* (no-op if absent)."""
        assert isinstance(txid, str)
        self.tx_fees.pop(txid, None)
    @locked
    def get_dict(self, name) -> dict:
        """Return self.data[name], creating an empty dict if missing.

        Callers keep a live reference to the returned dict.
        """
        # Warning: interacts un-intuitively with 'put': certain parts
        # of 'data' will have pointers saved as separate variables.
        if name not in self.data:
            self.data[name] = {}
        return self.data[name]
    @locked
    def num_change_addresses(self) -> int:
        """Number of derived change addresses."""
        return len(self.change_addresses)
    @locked
    def num_receiving_addresses(self) -> int:
        """Number of derived receiving addresses."""
        return len(self.receiving_addresses)
    @locked
    def get_change_addresses(self, *, slice_start=None, slice_stop=None) -> List[str]:
        """Return change addresses (optionally a sub-range by index)."""
        # note: slicing makes a shallow copy
        return self.change_addresses[slice_start:slice_stop]
    @locked
    def get_receiving_addresses(self, *, slice_start=None, slice_stop=None) -> List[str]:
        """Return receiving addresses (optionally a sub-range by index)."""
        # note: slicing makes a shallow copy
        return self.receiving_addresses[slice_start:slice_stop]
    @modifier
    def add_change_address(self, addr: str) -> None:
        """Append *addr* to the change chain and index it as (1, position)."""
        assert isinstance(addr, str)
        # index must be computed before the append
        self._addr_to_addr_index[addr] = (1, len(self.change_addresses))
        self.change_addresses.append(addr)
    @modifier
    def add_receiving_address(self, addr: str) -> None:
        """Append *addr* to the receiving chain and index it as (0, position)."""
        assert isinstance(addr, str)
        # index must be computed before the append
        self._addr_to_addr_index[addr] = (0, len(self.receiving_addresses))
        self.receiving_addresses.append(addr)
    @locked
    def get_address_index(self, address: str) -> Optional[Sequence[int]]:
        """Return the (is_change, index) pair of *address*, or None."""
        assert isinstance(address, str)
        return self._addr_to_addr_index.get(address)
    @modifier
    def add_imported_address(self, addr: str, d: dict) -> None:
        """Store the details dict *d* for imported address *addr*."""
        assert isinstance(addr, str)
        self.imported_addresses[addr] = d
    @modifier
    def remove_imported_address(self, addr: str) -> None:
        """Remove imported address *addr*; raises KeyError if not present."""
        assert isinstance(addr, str)
        self.imported_addresses.pop(addr)
    @locked
    def has_imported_address(self, addr: str) -> bool:
        """Whether *addr* is among the imported addresses."""
        assert isinstance(addr, str)
        return addr in self.imported_addresses
@locked
def get_imported_addresses(self) -> Sequence[str]:
return list(sorted(self.imported_addresses.keys()))
    @locked
    def get_imported_address(self, addr: str) -> Optional[dict]:
        """Return the details dict of imported address *addr*, or None."""
        assert isinstance(addr, str)
        return self.imported_addresses.get(addr)
    def load_addresses(self, wallet_type):
        """Bind address containers to live references inside self.data.

        Imported wallets keep a flat addr->details dict; deterministic
        wallets keep 'receiving'/'change' lists plus a reverse index from
        address to (is_change, index).
        """
        if wallet_type == 'imported':
            self.imported_addresses = self.get_dict('addresses')  # type: Dict[str, dict]
        else:
            self.get_dict('addresses')
            for name in ['receiving', 'change']:
                if name not in self.data['addresses']:
                    self.data['addresses'][name] = []
            self.change_addresses = self.data['addresses']['change']
            self.receiving_addresses = self.data['addresses']['receiving']
            self._addr_to_addr_index = {}  # type: Dict[str, Sequence[int]]  # key: address, value: (is_change, index)
            for i, addr in enumerate(self.receiving_addresses):
                self._addr_to_addr_index[addr] = (0, i)
            for i, addr in enumerate(self.change_addresses):
                self._addr_to_addr_index[addr] = (1, i)
@profiler
def _load_transactions(self):
    """Wrap ``self.data`` in a StoredDict, bind the tx tables and garbage-collect.

    Removes transactions that are referenced from neither txi nor txo,
    prunes spent-outpoint entries whose spending tx is gone, and
    deserializes raw token transactions referenced from the token history.
    """
    self.data = StoredDict(self.data, self, [])
    # references in self.data
    # TODO make all these private
    # txid -> address -> prev_outpoint -> value
    self.txi = self.get_dict('txi')  # type: Dict[str, Dict[str, Dict[str, int]]]
    # txid -> address -> output_index -> (value, is_coinbase)
    self.txo = self.get_dict('txo')  # type: Dict[str, Dict[str, Dict[str, Tuple[int, bool]]]]
    self.transactions = self.get_dict('transactions')  # type: Dict[str, Transaction]
    self.spent_outpoints = self.get_dict('spent_outpoints')  # txid -> output_index -> next_txid
    self.history = self.get_dict('addr_history')  # address -> list of (txid, height)
    self.verified_tx = self.get_dict('verified_tx3')  # txid -> (height, timestamp, txpos, header_hash)
    self.tx_fees = self.get_dict('tx_fees')  # type: Dict[str, TxFeesValue]
    # scripthash -> set of (outpoint, value)
    self._prevouts_by_scripthash = self.get_dict('prevouts_by_scripthash')  # type: Dict[str, Set[Tuple[str, int]]]
    # remove unreferenced tx
    for tx_hash in list(self.transactions.keys()):
        if not self.get_txi_addresses(tx_hash) and not self.get_txo_addresses(tx_hash):
            self.logger.info(f"removing unreferenced tx: {tx_hash}")
            self.transactions.pop(tx_hash)
    # remove unreferenced outpoints
    for prevout_hash in self.spent_outpoints.keys():
        d = self.spent_outpoints[prevout_hash]
        for prevout_n, spending_txid in list(d.items()):
            if spending_txid not in self.transactions:
                self.logger.info("removing unreferenced spent outpoint")
                d.pop(prevout_n)
    self.tokens = self.get_dict('tokens')
    # contract_addr + '_' + b58addr -> list(txid, height, log_index)
    self.token_history = self.get_dict('addr_token_history')
    # txid -> tx receipt
    self.tx_receipt = self.get_dict('tx_receipt')
    # txid -> raw tx
    self.token_txs = self.get_dict('token_txs')
    self.smart_contracts = self.get_dict('smart_contracts')
    self.delegations = self.get_dict('delegations')
    # Collect every txid referenced by the token history.  A flat set
    # comprehension replaces the previous ``reduce(lambda x, y: x + y, ...)``
    # flattening, which was O(n^2) in total history length, needed a special
    # case for an empty dict (``reduce`` raises on an empty sequence), and
    # produced a list whose membership test below was O(n).
    token_hist_txids = {item[0]
                        for hist in self.token_history.values()
                        for item in hist}
    for tx_hash, raw in self.token_txs.items():
        if tx_hash in token_hist_txids:
            tx = Transaction(raw)
            self.token_txs[tx_hash] = tx
@modifier
def set_token(self, token: Token):
    """Insert or replace *token*, keyed by its own ``get_key()``."""
    self.tokens[token.get_key()] = token
@modifier
def delete_token(self, key: str):
    """Remove token *key* and its history; missing keys are ignored."""
    self.tokens.pop(key, None)
    self.token_history.pop(key, None)
@locked
def get_token(self, key: str) -> Optional[Token]:
    """Return the stored Token for *key*, or None if unknown.

    The previous implementation unconditionally unpacked
    ``self.tokens.get(key)``, so a missing key raised
    ``TypeError: argument after * must be an iterable, not NoneType``
    instead of returning None as the signature promises.
    """
    token_data = self.tokens.get(key)
    if token_data is None:
        return None
    return Token(*token_data)
@locked
def list_tokens(self) -> list:
    """Return the keys of every stored token."""
    return list(self.tokens)
@modifier
def set_token_history(self, key: str, hist: list):
    """Replace the stored history list for token *key*."""
    self.token_history[key] = hist
@modifier
def delete_token_history(self, key: str):
    """Forget the history for token *key* (no-op if absent)."""
    self.token_history.pop(key, None)
@locked
def get_token_history(self, key: str) -> list:
    """Return the history list for token *key* ([] when none recorded)."""
    return self.token_history.get(key, [])
@locked
def list_token_histories(self) -> list:
    """Return every token key that has a recorded history."""
    return list(self.token_history)
@modifier
def set_token_tx(self, txid: str, raw: str):
    """Store the raw serialized token transaction *raw* under *txid*."""
    self.token_txs[txid] = raw
@modifier
def delete_token_tx(self, txid: str):
    """Forget the token transaction *txid* (no-op if absent)."""
    self.token_txs.pop(txid, None)
@locked
def get_token_tx(self, txid: str):
    """Return the token tx stored for *txid*, or None.

    The value is a raw string, or a deserialized Transaction when the tx
    is referenced by the token history (see _load_transactions).
    """
    return self.token_txs.get(txid)
@locked
def list_token_txs(self) -> list:
    """Return the txids of every stored token transaction."""
    return list(self.token_txs)
@modifier
def set_tx_receipt(self, txid: str, receipt: list):
    """Store *receipt* for transaction *txid*."""
    self.tx_receipt[txid] = receipt
@modifier
def delete_tx_receipt(self, txid: str):
    """Remove and return the receipt stored for *txid* (None if absent)."""
    return self.tx_receipt.pop(txid, None)
@locked
def get_tx_receipt(self, txid: str) -> list:
    """Return the receipt stored for *txid* ([] when none recorded)."""
    return self.tx_receipt.get(txid, [])
@locked
def list_tx_receipts(self) -> list:
    """Return the txids of every stored receipt."""
    return list(self.tx_receipt)
@modifier
def set_delegation(self, dele: Delegation):
    """Store *dele* as a [staker, fee] pair keyed by its address."""
    self.delegations[dele.addr] = [dele.staker, dele.fee]
@modifier
def delete_delegation(self, addr: str):
    """Forget the delegation stored for *addr* (no-op if absent)."""
    self.delegations.pop(addr, None)
@locked
def get_delegation(self, addr: str) -> Optional[Delegation]:
    """Return the Delegation stored for *addr*, or None if absent/malformed."""
    record = self.delegations.get(addr, [])
    if len(record) != 2:
        return None
    staker, fee = record
    return Delegation(addr=addr, staker=staker, fee=fee)
@locked
def list_delegations(self) -> Sequence[str]:
    """Return every address that has a stored delegation."""
    return list(self.delegations)
@modifier
def clear_history(self):
    """Drop every cached transaction/token table (txi, txo, spent outpoints,
    transactions, address history, verified txs, fees, token data, receipts,
    prevouts-by-scripthash)."""
    self.txi.clear()
    self.txo.clear()
    self.spent_outpoints.clear()
    self.transactions.clear()
    self.history.clear()
    self.verified_tx.clear()
    self.tx_fees.clear()
    self.token_txs.clear()
    self.token_history.clear()
    self.tx_receipt.clear()
    self._prevouts_by_scripthash.clear()
def _convert_dict(self, path, key, v):
if key == 'transactions':
# note: for performance, "deserialize=False" so that we will deserialize these on-demand
v = dict((k, tx_from_any(x, deserialize=False)) for k, x in v.items())
if key == 'invoices':
v = dict((k, Invoice.from_json(x)) for k, x in v.items())
if key == 'payment_requests':
v = dict((k, Invoice.from_json(x)) for k, x in v.items())
elif key == 'adds':
v = dict((k, UpdateAddHtlc.from_tuple(*x)) for k, x in v.items())
elif key == 'fee_updates':
v = dict((k, FeeUpdate(**x)) for k, x in v.items())
elif key == 'submarine_swaps':
v = dict((k, SwapData(**x)) for k, x in v.items())
elif key == 'channel_backups':
v = dict((k, ChannelBackupStorage(**x)) for k, x in v.items())
elif key == 'tx_fees':
v = dict((k, TxFeesValue(*x)) for k, x in v.items())
elif key == 'prevouts_by_scripthash':
v = dict((k, {(prevout, value) for (prevout, value) in x}) for k, x in v.items())
elif key == 'buckets':
v = dict((k, ShachainElement(bfh(x[0]), int(x[1]))) for k, x in v.items())
elif key == 'data_loss_protect_remote_pcp':
v = dict((k, bfh(x)) for k, x in v.items())
return v
def _convert_value(self, path, key, v):
    """Convert a plain JSON value at *path*/*key* into its rich config type.

    Unrecognized keys are returned unchanged.  Each branch resolves its
    constructor lazily, so unrelated keys never touch those names.
    """
    if key == 'local_config':
        v = LocalConfig(**v)
    elif key == 'remote_config':
        v = RemoteConfig(**v)
    elif key == 'constraints':
        v = ChannelConstraints(**v)
    elif key == 'funding_outpoint':
        v = Outpoint(**v)
    return v
def write(self, storage: 'WalletStorage'):
    """Serialize the DB to *storage*, holding the instance lock throughout."""
    with self.lock:
        self._write(storage)
def _write(self, storage: 'WalletStorage'):
if threading.currentThread().isDaemon():
self.logger.warning('daemon thread cannot write db')
return
if not self.modified():
return
storage.write(self.dump())
self.set_modified(False)
def is_ready_to_be_used_by_wallet(self):
    """True once no upgrade is pending and the post-upgrade tasks have run."""
    if self.requires_upgrade():
        return False
    return self._called_after_upgrade_tasks
def split_accounts(self, root_path):
    """Split this wallet DB into one file per account.

    Each account dict produced by get_split_accounts() is written to
    ``root_path + '.' + suffix`` after being upgraded to the current
    format.  Returns the list of file paths written.
    """
    # Imported locally, presumably to avoid a circular import at module load.
    from .storage import WalletStorage
    out = []
    result = self.get_split_accounts()
    for data in result:
        path = root_path + '.' + data['suffix']
        storage = WalletStorage(path)
        db = WalletDB(json.dumps(data), manual_upgrades=False)
        # Force the upgrade tasks to run for the freshly created DB.
        db._called_after_upgrade_tasks = False
        db.upgrade()
        db.write(storage)
        out.append(path)
    return out
def get_action(self):
    """Return the result of the 'get_action' plugin hook for this DB."""
    return run_hook('get_action', self)
def load_plugins(self):
    """Invoke the plugin loader registered for this wallet's type, if any."""
    wtype = self.get('wallet_type')
    if wtype in plugin_loaders:
        plugin_loaders[wtype]()
def set_keystore_encryption(self, enable):
    """Persist the 'use_encryption' flag for this wallet."""
    self.put('use_encryption', enable)
| true | true |
f7319439ecbf5ae98691f76abcedaa8bb60c23b1 | 4,513 | py | Python | lib/bindings/samples/server/API/social_api.py | tlalexander/stitchEm | cdff821ad2c500703e6cb237ec61139fce7bf11c | [
"MIT"
] | 182 | 2019-04-19T12:38:30.000Z | 2022-03-20T16:48:20.000Z | lib/bindings/samples/server/API/social_api.py | doymcc/stitchEm | 20693a55fa522d7a196b92635e7a82df9917c2e2 | [
"MIT"
] | 107 | 2019-04-23T10:49:35.000Z | 2022-03-02T18:12:28.000Z | lib/bindings/samples/server/API/social_api.py | doymcc/stitchEm | 20693a55fa522d7a196b92635e7a82df9917c2e2 | [
"MIT"
] | 59 | 2019-06-04T11:27:25.000Z | 2022-03-17T23:49:49.000Z | from concurrent.futures import ThreadPoolExecutor
from tornado.concurrent import run_on_executor
from API.handlers import APIHandler
from API.schema import api
import errors
from social.social_factory import SOCIAL_NETWORKS
class SocialAPI(APIHandler):
    """REST interface related to social networks.

    Exposes endpoints to link/unlink the box to a user account and to
    forward arbitrary API calls to a connected network.
    """

    # Single worker thread: social-network operations run off the IO loop,
    # serialized with respect to each other.
    executor = ThreadPoolExecutor(1)

    def __init__(self, extra):
        """Initialize the handler with the server's shared *extra* context."""
        super(SocialAPI, self).__init__(extra)

    @staticmethod
    def _get_network(social_network_name):
        """Return the registered network object for *social_network_name*.

        Raises:
            errors.InvalidParameter: if no such network is implemented.
        """
        if social_network_name not in SOCIAL_NETWORKS:
            raise errors.InvalidParameter("the social network {} is not implemented".format(social_network_name))
        return SOCIAL_NETWORKS[social_network_name]

    @api(name="MakeLink",
         endpoint="social.make_link",
         description="Link the box to a user account on a social network",
         parameters={
             "type": "object",
             "properties":
                 {
                     "social_network": {
                         "type": "string",
                         "enum": ["facebook"]
                     },
                     "token": {
                         "type": "string"
                     }
                 },
             "required": ["social_network", "token"]
         }
         )
    @run_on_executor
    def make_link(self, parameters):
        """Link the box to a user account using the provided token."""
        social_network_name = parameters.get("social_network")
        token = parameters.get("token")
        network = self._get_network(social_network_name)
        if not network.is_Disconnected():
            raise errors.InvalidParameter("cannot link to social network {} as it is in state {}".format(
                social_network_name, network.state))
        network.t_make_connection(token)

    @api(name="UnmakeLink",  # was "MakeLink": copy-paste slip, clashed with the endpoint above
         endpoint="social.unmake_link",
         description="Remove the link between the box and a user account on a social network",
         parameters={
             "type": "object",
             "properties":
                 {
                     "social_network": {
                         "type": "string",
                         "enum": ["facebook"]
                     },
                 },
             "required": ["social_network"]
         }
         )
    @run_on_executor
    def unmake_link(self, parameters):
        """Unlink the box from a previously connected user account."""
        social_network_name = parameters.get("social_network")
        network = self._get_network(social_network_name)
        if not network.is_Connected():
            raise errors.InvalidParameter("cannot unlink from social network {} as it is in state {}".format(
                social_network_name, network.state))
        network.t_remove_connection()

    @api(name="Call",
         endpoint="social.call",
         description="Make a call to a social network",
         parameters={
             "type": "object",
             "properties":
                 {
                     "social_network": {
                         "type": "string",
                         "enum": ["facebook"]
                     },
                     "endpoint": {
                         "type": "string"
                     },
                     "parameters": {
                         "type": "object"
                     }
                 },
             "required": ["social_network", "endpoint"]
         },
         result={
             "type": "object"
         }
         )
    @run_on_executor
    def call(self, parameters):
        """Forward *endpoint*/*parameters* to a connected social network.

        Returns the network's response, or {} when the call yields None.
        """
        social_network_name = parameters.get("social_network")
        endpoint = parameters.get("endpoint")
        call_parameters = parameters.get("parameters")
        network = self._get_network(social_network_name)
        if not network.is_Connected():
            raise errors.InvalidParameter("cannot make API call to social network {} as it is in state {}".format(
                social_network_name, network.state))
        result = network.call_api(endpoint, call_parameters)
        return result if result is not None else {}
from tornado.concurrent import run_on_executor
from API.handlers import APIHandler
from API.schema import api
import errors
from social.social_factory import SOCIAL_NETWORKS
class SocialAPI(APIHandler):
    """REST interface related to social networks (link, unlink, raw API calls)."""

    # Single worker thread: social-network operations run off the IO loop.
    executor = ThreadPoolExecutor(1)

    def __init__(self, extra):
        """Initialize the handler with the server's shared *extra* context."""
        super(SocialAPI, self).__init__(extra)

    # NOTE(review): name="MakeLink" is repeated on the unmake endpoint below —
    # looks like a copy-paste slip; confirm whether it should be "UnmakeLink".
    @api(name="MakeLink",
         endpoint="social.make_link",
         description="Link the box to a user account on a social network",
         parameters={
             "type": "object",
             "properties":
                 {
                     "social_network": {
                         "type": "string",
                         "enum": ["facebook"]
                     },
                     "token": {
                         "type": "string"
                     }
                 },
             "required": ["social_network", "token"]
         }
         )
    @run_on_executor
    def make_link(self, parameters):
        """Link the box to a user account using the provided token."""
        social_network_name = parameters.get("social_network")
        token = parameters.get("token")
        if social_network_name not in SOCIAL_NETWORKS:
            raise errors.InvalidParameter("the social network {} is not implemented".format(social_network_name))
        if not SOCIAL_NETWORKS[social_network_name].is_Disconnected():
            raise errors.InvalidParameter("cannot link to social network {} as it is in state {}".format(
                social_network_name, SOCIAL_NETWORKS[social_network_name].state))
        SOCIAL_NETWORKS[social_network_name].t_make_connection(token)

    @api(name="MakeLink",
         endpoint="social.unmake_link",
         description="Remove the link between the box and a user account on a social network",
         parameters={
             "type": "object",
             "properties":
                 {
                     "social_network": {
                         "type": "string",
                         "enum": ["facebook"]
                     },
                 },
             "required": ["social_network"]
         }
         )
    @run_on_executor
    def unmake_link(self, parameters):
        """Unlink the box from a previously connected user account."""
        social_network_name = parameters.get("social_network")
        if social_network_name not in SOCIAL_NETWORKS:
            raise errors.InvalidParameter("the social network {} is not implemented".format(social_network_name))
        if not SOCIAL_NETWORKS[social_network_name].is_Connected():
            raise errors.InvalidParameter("cannot unlink from social network {} as it is in state {}".format(
                social_network_name, SOCIAL_NETWORKS[social_network_name].state))
        SOCIAL_NETWORKS[social_network_name].t_remove_connection()

    @api(name="Call",
         endpoint="social.call",
         description="Make a call to a social network",
         parameters={
             "type": "object",
             "properties":
                 {
                     "social_network": {
                         "type": "string",
                         "enum": ["facebook"]
                     },
                     "endpoint": {
                         "type": "string"
                     },
                     "parameters": {
                         "type": "object"
                     }
                 },
             "required": ["social_network", "endpoint"]
         },
         result={
             "type": "object"
         }
         )
    @run_on_executor
    def call(self, parameters):
        """Forward *endpoint*/*parameters* to a connected network; {} when the result is None."""
        social_network_name = parameters.get("social_network")
        endpoint = parameters.get("endpoint")
        call_parameters = parameters.get("parameters")
        if social_network_name not in SOCIAL_NETWORKS:
            raise errors.InvalidParameter("the social network {} is not implemented".format(social_network_name))
        if not SOCIAL_NETWORKS[social_network_name].is_Connected():
            raise errors.InvalidParameter("cannot make API call to social network {} as it is in state {}".format(
                social_network_name, SOCIAL_NETWORKS[social_network_name].state))
        result = SOCIAL_NETWORKS[social_network_name].call_api(endpoint, call_parameters)
        return result if result is not None else {}
f7319447be1c583c56c689293955c02433bab7f1 | 4,941 | py | Python | docs/source/conf.py | Geofroy/meerkat_dev | ebff9d16b3edc6efdc580f940ca8d60e733d9da1 | [
"MIT"
] | null | null | null | docs/source/conf.py | Geofroy/meerkat_dev | ebff9d16b3edc6efdc580f940ca8d60e733d9da1 | [
"MIT"
] | 4 | 2017-11-06T13:11:03.000Z | 2018-05-04T14:57:18.000Z | docs/source/conf.py | meerkat-code/meerkat_dev | c3f4bef39a72aa99a460ed1a0022c7ea594e037c | [
"MIT"
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/stable/config
# os/sys are only needed for the (commented) sys.path example below.
import os
import sys
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('../../'))
# -- Project information -----------------------------------------------------
project = 'meerkat_dev'
copyright = '2018, Meerkat Developers'
author = 'Jonathan Berry'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = ''
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinxcontrib.autohttp.flask',
    'sphinxcontrib.napoleon'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path .
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself.  Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
# NOTE(review): the basename says "auth" although the project is meerkat_dev —
# possibly copied from another project; confirm the intended value.
htmlhelp_basename = 'meerkat_authdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'meerkat_dev.tex', 'meerkat\\_dev Documentation',
     'Jonathan Berry', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'meerkat_dev', 'meerkat_dev Documentation',
     [author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'meerkat_dev', 'meerkat_dev Documentation',
     author, 'meerkat_dev', 'One line description of project.',
     'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
| 30.5 | 79 | 0.65088 |
# Sphinx documentation builder configuration.
# os/sys would support a sys.path insertion for autodoc if needed.
import os
import sys
# -- Project information ------------------------------------------------------
project = 'meerkat_dev'
copyright = '2018, Meerkat Developers'
author = 'Jonathan Berry'
# Short and full version strings (left empty).
version = ''
release = ''
# Sphinx extension modules to load.
extensions = [
    'sphinx.ext.autodoc',
    'sphinxcontrib.autohttp.flask',
    'sphinxcontrib.napoleon'
]
# Paths containing templates, relative to this directory.
templates_path = ['_templates']
# Source filename suffix and master toctree document.
source_suffix = '.rst'
master_doc = 'index'
language = None
# Patterns to ignore when looking for source files.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# Pygments (syntax highlighting) style.
pygments_style = 'sphinx'
# -- Options for HTML output --------------------------------------------------
html_theme = 'alabaster'
html_static_path = ['_static']
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'meerkat_authdoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'meerkat_dev.tex', 'meerkat\\_dev Documentation',
     'Jonathan Berry', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'meerkat_dev', 'meerkat_dev Documentation',
     [author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'meerkat_dev', 'meerkat_dev Documentation',
     author, 'meerkat_dev', 'One line description of project.',
     'Miscellaneous'),
]
# -- Extension configuration -------------------------------------------------
| true | true |
f731958aa03e65b62779fd052badafaa8d11eb1d | 8,117 | py | Python | 07_train/privacy/tensorflow_privacy/privacy/membership_inference_attack/membership_inference_attack.py | ichen20/oreilly_book | 8098d8096d9decca6aa5afbb267b9f05ce0570f2 | [
"Apache-2.0"
] | 2,327 | 2020-03-01T09:47:34.000Z | 2021-11-25T12:38:42.000Z | 07_train/privacy/tensorflow_privacy/privacy/membership_inference_attack/membership_inference_attack.py | ichen20/oreilly_book | 8098d8096d9decca6aa5afbb267b9f05ce0570f2 | [
"Apache-2.0"
] | 209 | 2020-03-01T17:14:12.000Z | 2021-11-08T20:35:42.000Z | 07_train/privacy/tensorflow_privacy/privacy/membership_inference_attack/membership_inference_attack.py | ichen20/oreilly_book | 8098d8096d9decca6aa5afbb267b9f05ce0570f2 | [
"Apache-2.0"
] | 686 | 2020-03-03T17:24:51.000Z | 2021-11-25T23:39:12.000Z | # Copyright 2020, The TensorFlow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Code that runs membership inference attacks based on the model outputs.
This file belongs to the new API for membership inference attacks. This file
will be renamed to membership_inference_attack.py after the old API is removed.
"""
from typing import Iterable
import numpy as np
from sklearn import metrics
from tensorflow_privacy.privacy.membership_inference_attack import models
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackInputData
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackResults
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackType
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import \
PrivacyReportMetadata
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import RocCurve
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SingleAttackResult
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SingleSliceSpec
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SlicingSpec
from tensorflow_privacy.privacy.membership_inference_attack.dataset_slicing import get_single_slice_specs
from tensorflow_privacy.privacy.membership_inference_attack.dataset_slicing import get_slice
def _get_slice_spec(data: AttackInputData) -> SingleSliceSpec:
if hasattr(data, 'slice_spec'):
return data.slice_spec
return SingleSliceSpec()
def _run_trained_attack(attack_input: AttackInputData,
                        attack_type: AttackType,
                        balance_attacker_training: bool = True):
    """Classification attack carried out by a trained ML model."""
    # Map each supported attack type to its attacker factory; built lazily
    # per call so unsupported types fail with the same NotImplementedError.
    attacker_factories = {
        AttackType.LOGISTIC_REGRESSION: models.LogisticRegressionAttacker,
        AttackType.MULTI_LAYERED_PERCEPTRON: models.MultilayerPerceptronAttacker,
        AttackType.RANDOM_FOREST: models.RandomForestAttacker,
        AttackType.K_NEAREST_NEIGHBORS: models.KNearestNeighborsAttacker,
    }
    if attack_type not in attacker_factories:
        raise NotImplementedError('Attack type %s not implemented yet.' %
                                  attack_type)
    attacker = attacker_factories[attack_type]()

    training_data = models.create_attacker_data(
        attack_input, balance=balance_attacker_training)
    attacker.train_model(training_data.features_train,
                         training_data.is_training_labels_train)

    # Score the (permuted) held-out examples and derive the ROC curve.
    test_scores = attacker.predict(training_data.features_test)
    fpr, tpr, thresholds = metrics.roc_curve(
        training_data.is_training_labels_test, test_scores)

    return SingleAttackResult(
        slice_spec=_get_slice_spec(attack_input),
        attack_type=attack_type,
        roc_curve=RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds))
def _run_threshold_attack(attack_input: AttackInputData):
    """Threshold attack using the per-example loss as the membership score."""
    train_count = attack_input.get_train_size()
    test_count = attack_input.get_test_size()
    # Ground truth: train examples labeled 0, test examples labeled 1.
    membership = np.concatenate((np.zeros(train_count), np.ones(test_count)))
    scores = np.concatenate(
        (attack_input.get_loss_train(), attack_input.get_loss_test()))
    fpr, tpr, thresholds = metrics.roc_curve(membership, scores)
    return SingleAttackResult(
        slice_spec=_get_slice_spec(attack_input),
        attack_type=AttackType.THRESHOLD_ATTACK,
        roc_curve=RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds))
def _run_threshold_entropy_attack(attack_input: AttackInputData):
    """Threshold attack using the per-example entropy as the membership score."""
    train_count = attack_input.get_train_size()
    test_count = attack_input.get_test_size()
    # Ground truth: train examples labeled 0, test examples labeled 1.
    membership = np.concatenate((np.zeros(train_count), np.ones(test_count)))
    scores = np.concatenate(
        (attack_input.get_entropy_train(), attack_input.get_entropy_test()))
    fpr, tpr, thresholds = metrics.roc_curve(membership, scores)
    return SingleAttackResult(
        slice_spec=_get_slice_spec(attack_input),
        attack_type=AttackType.THRESHOLD_ENTROPY_ATTACK,
        roc_curve=RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds))
def _run_attack(attack_input: AttackInputData,
                attack_type: AttackType,
                balance_attacker_training: bool = True):
    """Validate the input and dispatch to the matching attack implementation."""
    attack_input.validate()
    if attack_type.is_trained_attack:
        result = _run_trained_attack(attack_input, attack_type,
                                     balance_attacker_training)
    elif attack_type == AttackType.THRESHOLD_ENTROPY_ATTACK:
        result = _run_threshold_entropy_attack(attack_input)
    else:
        result = _run_threshold_attack(attack_input)
    return result
def run_attacks(attack_input: AttackInputData,
                slicing_spec: SlicingSpec = None,
                attack_types: Iterable[AttackType] = (
                    AttackType.THRESHOLD_ATTACK,),
                privacy_report_metadata: PrivacyReportMetadata = None,
                balance_attacker_training: bool = True) -> AttackResults:
    """Runs membership inference attacks on a classification model.

    It runs attacks specified by attack_types on each attack_input slice which is
    specified by slicing_spec.

    Args:
      attack_input: input data for running an attack
      slicing_spec: specifies attack_input slices to run attack on
      attack_types: attacks to run
      privacy_report_metadata: the metadata of the model under attack.
      balance_attacker_training: Whether the training and test sets for the
            membership inference attacker should have a balanced (roughly equal)
            number of samples from the training and test sets used to develop
            the model under attack.

    Returns:
      the attack result.
    """
    attack_input.validate()
    attack_results = []

    if slicing_spec is None:
        slicing_spec = SlicingSpec(entire_dataset=True)
    # Expand the slicing specification into concrete single-slice specs.
    input_slice_specs = get_single_slice_specs(slicing_spec,
                                               attack_input.num_classes)
    # Run every requested attack type on every slice of the input data.
    for single_slice_spec in input_slice_specs:
        attack_input_slice = get_slice(attack_input, single_slice_spec)
        for attack_type in attack_types:
            attack_results.append(
                _run_attack(attack_input_slice, attack_type,
                            balance_attacker_training))

    # Fill in train/test accuracy and loss if the caller did not supply them.
    privacy_report_metadata = _compute_missing_privacy_report_metadata(
        privacy_report_metadata, attack_input)

    return AttackResults(
        single_attack_results=attack_results,
        privacy_report_metadata=privacy_report_metadata)
def _compute_missing_privacy_report_metadata(
metadata: PrivacyReportMetadata,
attack_input: AttackInputData) -> PrivacyReportMetadata:
"""Populates metadata fields if they are missing."""
if metadata is None:
metadata = PrivacyReportMetadata()
if metadata.accuracy_train is None:
metadata.accuracy_train = _get_accuracy(attack_input.logits_train,
attack_input.labels_train)
if metadata.accuracy_test is None:
metadata.accuracy_test = _get_accuracy(attack_input.logits_test,
attack_input.labels_test)
if metadata.loss_train is None:
metadata.loss_train = np.average(attack_input.get_loss_train())
if metadata.loss_test is None:
metadata.loss_test = np.average(attack_input.get_loss_test())
return metadata
def _get_accuracy(logits, labels):
"""Computes the accuracy if it is missing."""
if logits is None or labels is None:
return None
return metrics.accuracy_score(labels, np.argmax(logits, axis=1))
| 41.203046 | 105 | 0.756684 |
from typing import Iterable
import numpy as np
from sklearn import metrics
from tensorflow_privacy.privacy.membership_inference_attack import models
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackInputData
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackResults
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import AttackType
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import \
PrivacyReportMetadata
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import RocCurve
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SingleAttackResult
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SingleSliceSpec
from tensorflow_privacy.privacy.membership_inference_attack.data_structures import SlicingSpec
from tensorflow_privacy.privacy.membership_inference_attack.dataset_slicing import get_single_slice_specs
from tensorflow_privacy.privacy.membership_inference_attack.dataset_slicing import get_slice
def _get_slice_spec(data: AttackInputData) -> SingleSliceSpec:
    """Return the slice spec attached to *data*, or a default one if absent."""
    if hasattr(data, 'slice_spec'):
        return data.slice_spec
    return SingleSliceSpec()
def _run_trained_attack(attack_input: AttackInputData,
                        attack_type: AttackType,
                        balance_attacker_training: bool = True):
    """Classification attack carried out by a trained ML model."""
    attacker = None
    if attack_type == AttackType.LOGISTIC_REGRESSION:
        attacker = models.LogisticRegressionAttacker()
    elif attack_type == AttackType.MULTI_LAYERED_PERCEPTRON:
        attacker = models.MultilayerPerceptronAttacker()
    elif attack_type == AttackType.RANDOM_FOREST:
        attacker = models.RandomForestAttacker()
    elif attack_type == AttackType.K_NEAREST_NEIGHBORS:
        attacker = models.KNearestNeighborsAttacker()
    else:
        raise NotImplementedError('Attack type %s not implemented yet.' %
                                  attack_type)

    prepared_attacker_data = models.create_attacker_data(
        attack_input, balance=balance_attacker_training)
    attacker.train_model(prepared_attacker_data.features_train,
                         prepared_attacker_data.is_training_labels_train)

    # Score the held-out examples with the trained attacker.
    predictions_test = attacker.predict(prepared_attacker_data.features_test)
    # Derive the ROC curve from the attacker's scores.
    fpr, tpr, thresholds = metrics.roc_curve(
        prepared_attacker_data.is_training_labels_test, predictions_test)
    roc_curve = RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds)

    return SingleAttackResult(
        slice_spec=_get_slice_spec(attack_input),
        attack_type=attack_type,
        roc_curve=roc_curve)
def _run_threshold_attack(attack_input: AttackInputData):
  """Run a loss-threshold attack: score membership by per-example loss."""
  # Label 0 = training example, 1 = test example.
  membership_labels = np.concatenate(
      (np.zeros(attack_input.get_train_size()),
       np.ones(attack_input.get_test_size())))
  loss_scores = np.concatenate(
      (attack_input.get_loss_train(), attack_input.get_loss_test()))
  fpr, tpr, thresholds = metrics.roc_curve(membership_labels, loss_scores)
  return SingleAttackResult(
      slice_spec=_get_slice_spec(attack_input),
      attack_type=AttackType.THRESHOLD_ATTACK,
      roc_curve=RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds))
def _run_threshold_entropy_attack(attack_input: AttackInputData):
  """Run an entropy-threshold attack: score membership by prediction entropy."""
  # Label 0 = training example, 1 = test example.
  membership_labels = np.concatenate(
      (np.zeros(attack_input.get_train_size()),
       np.ones(attack_input.get_test_size())))
  entropy_scores = np.concatenate(
      (attack_input.get_entropy_train(), attack_input.get_entropy_test()))
  fpr, tpr, thresholds = metrics.roc_curve(membership_labels, entropy_scores)
  return SingleAttackResult(
      slice_spec=_get_slice_spec(attack_input),
      attack_type=AttackType.THRESHOLD_ENTROPY_ATTACK,
      roc_curve=RocCurve(tpr=tpr, fpr=fpr, thresholds=thresholds))
def _run_attack(attack_input: AttackInputData,
                attack_type: AttackType,
                balance_attacker_training: bool = True):
  """Dispatch a single attack of `attack_type` on validated `attack_input`.

  Trained attacks go through _run_trained_attack; the two threshold
  attacks have dedicated helpers. Returns a SingleAttackResult.
  """
  attack_input.validate()
  if attack_type.is_trained_attack:
    return _run_trained_attack(attack_input, attack_type,
                               balance_attacker_training)
  # Non-trained attacks: entropy threshold, else loss threshold.
  if attack_type == AttackType.THRESHOLD_ENTROPY_ATTACK:
    return _run_threshold_entropy_attack(attack_input)
  return _run_threshold_attack(attack_input)
def run_attacks(attack_input: AttackInputData,
                slicing_spec: SlicingSpec = None,
                attack_types: Iterable[AttackType] = (
                    AttackType.THRESHOLD_ATTACK,),
                privacy_report_metadata: PrivacyReportMetadata = None,
                balance_attacker_training: bool = True) -> AttackResults:
  """Run every requested attack type on every slice of the input data.

  Args:
    attack_input: train/test model outputs to attack.
    slicing_spec: how to partition the data; defaults to one slice over
      the entire dataset.
    attack_types: attack types to run on each slice.
    privacy_report_metadata: optional metadata; any missing fields that
      can be computed from `attack_input` are filled in.
    balance_attacker_training: forwarded to trained attacks.

  Returns:
    AttackResults with one SingleAttackResult per (slice, attack type).
  """
  attack_input.validate()
  attack_results = []

  if slicing_spec is None:
    slicing_spec = SlicingSpec(entire_dataset=True)
  input_slice_specs = get_single_slice_specs(slicing_spec,
                                             attack_input.num_classes)
  # Cartesian product: each attack type runs on each data slice.
  for single_slice_spec in input_slice_specs:
    attack_input_slice = get_slice(attack_input, single_slice_spec)
    for attack_type in attack_types:
      attack_results.append(
          _run_attack(attack_input_slice, attack_type,
                      balance_attacker_training))

  privacy_report_metadata = _compute_missing_privacy_report_metadata(
      privacy_report_metadata, attack_input)

  return AttackResults(
      single_attack_results=attack_results,
      privacy_report_metadata=privacy_report_metadata)
def _compute_missing_privacy_report_metadata(
    metadata: PrivacyReportMetadata,
    attack_input: AttackInputData) -> PrivacyReportMetadata:
  """Fill in any unset metadata fields that can be derived from the input.

  Only fields that are currently None are computed; caller-supplied
  values are never overwritten. Accuracies stay None when logits or
  labels are unavailable (see _get_accuracy).
  """
  if metadata is None:
    metadata = PrivacyReportMetadata()
  if metadata.accuracy_train is None:
    metadata.accuracy_train = _get_accuracy(attack_input.logits_train,
                                            attack_input.labels_train)
  if metadata.accuracy_test is None:
    metadata.accuracy_test = _get_accuracy(attack_input.logits_test,
                                           attack_input.labels_test)
  if metadata.loss_train is None:
    metadata.loss_train = np.average(attack_input.get_loss_train())
  if metadata.loss_test is None:
    metadata.loss_test = np.average(attack_input.get_loss_test())
  return metadata
def _get_accuracy(logits, labels):
if logits is None or labels is None:
return None
return metrics.accuracy_score(labels, np.argmax(logits, axis=1))
| true | true |
f73196366aa14046b85680925a5d5d15177b1086 | 1,779 | py | Python | chainer/snap2model.py | fujibo/poseHG | e582a6ca1badc9a894b8b7e2a5e0acf9eb348c5c | [
"BSD-3-Clause"
] | null | null | null | chainer/snap2model.py | fujibo/poseHG | e582a6ca1badc9a894b8b7e2a5e0acf9eb348c5c | [
"BSD-3-Clause"
] | null | null | null | chainer/snap2model.py | fujibo/poseHG | e582a6ca1badc9a894b8b7e2a5e0acf9eb348c5c | [
"BSD-3-Clause"
] | 1 | 2018-06-30T07:07:25.000Z | 2018-06-30T07:07:25.000Z | import numpy as np
import tempfile
def snap2model_parser(path_snapshot, path_model=None):
    """Extract the model arrays from a Chainer trainer snapshot (.npz).

    Trainer snapshots store model parameters under keys of the form
    ``updater/optimizer:main/model/<path...>/<leaf>``; those arrays are
    copied into a plain model archive keyed by ``<path...>`` (the final
    key component is dropped, mirroring the original parsing scheme).

    :param path_snapshot: str, path to the snapshot .npz file
    :param path_model: str, default None
    :return: file descriptor positioned at 0 (path_model is None)
        or None (otherwise)
    """
    model = dict()
    # Use the NpzFile as a context manager so the underlying file handle
    # is closed once the arrays are copied out (the original leaked it).
    with np.load(path_snapshot) as snapshot:
        for key in snapshot.keys():
            parse = key.split('/')
            # Prefix comparison via slicing is safe for short keys, where
            # indexing parse[1]/parse[2] could raise IndexError.
            if parse[:3] == ['updater', 'optimizer:main', 'model']:
                model_key = '/'.join(parse[3:-1])
                model[model_key] = snapshot[key]
    if path_model is None:
        outfile = tempfile.TemporaryFile()
        np.savez(outfile, **model)
        outfile.seek(0)
        return outfile
    np.savez(path_model, **model)
    return None
def snap2model_trainer(path_snapshot, path_model=None):
    """Recover model weights from a trainer snapshot via full deserialization.

    Rebuilds the whole training pipeline (dataset, train chain, optimizer,
    updater, trainer) so Chainer can load the trainer snapshot, then saves
    only the contained StackedHG model.
    NOTE(review): assumes the snapshot was produced by the matching
    training setup (same model/optimizer layout) -- confirm against train.py.

    :param path_snapshot: str, path to the trainer snapshot
    :param path_model: str, default None
    :return: file descriptor positioned at 0 (path_model is None)
        or None (otherwise)
    """
    import chainer
    from dataset import MPIIDataset
    from train import TrainChain
    from net import StackedHG
    train_data = MPIIDataset(split='train')
    model = StackedHG(16)
    train_chain = TrainChain(model)
    optimizer = chainer.optimizers.RMSprop(lr=2.5e-4)
    optimizer.setup(train_chain)
    # original batch size 6; a batch size of 1 is enough just to rebuild
    # the object graph for deserialization.
    train_iter = chainer.iterators.SerialIterator(train_data, 1, repeat=True, shuffle=True)
    updater = chainer.training.StandardUpdater(train_iter, optimizer, device=-1)
    trainer = chainer.training.Trainer(updater, (100, 'epoch'), out='')
    chainer.serializers.load_npz(path_snapshot, trainer)
    if path_model is None:
        outfile = tempfile.TemporaryFile()
        chainer.serializers.save_npz(outfile, model)
        outfile.seek(0)
        return outfile
    else:
        chainer.serializers.save_npz(path_model, model)
        return None
| 29.65 | 91 | 0.658797 | import numpy as np
import tempfile
def snap2model_parser(path_snapshot, path_model=None):
snapshot = np.load(path_snapshot)
model = dict()
for key in snapshot.keys():
parse = key.split('/')
if parse[0] == 'updater' and parse[1] == 'optimizer:main':
if parse[2] == 'model':
model_key = '/'.join(parse[3:-1])
model[model_key] = snapshot[key]
if path_model is None:
outfile = tempfile.TemporaryFile()
np.savez(outfile, **model)
outfile.seek(0)
return outfile
else:
np.savez(path_model, **model)
return None
def snap2model_trainer(path_snapshot, path_model=None):
import chainer
from dataset import MPIIDataset
from train import TrainChain
from net import StackedHG
train_data = MPIIDataset(split='train')
model = StackedHG(16)
train_chain = TrainChain(model)
optimizer = chainer.optimizers.RMSprop(lr=2.5e-4)
optimizer.setup(train_chain)
train_iter = chainer.iterators.SerialIterator(train_data, 1, repeat=True, shuffle=True)
updater = chainer.training.StandardUpdater(train_iter, optimizer, device=-1)
trainer = chainer.training.Trainer(updater, (100, 'epoch'), out='')
chainer.serializers.load_npz(path_snapshot, trainer)
if path_model is None:
outfile = tempfile.TemporaryFile()
chainer.serializers.save_npz(outfile, model)
outfile.seek(0)
return outfile
else:
chainer.serializers.save_npz(path_model, model)
return None
| true | true |
f73196906eb86fee01bdd6f96402a0ac464b613b | 1,524 | py | Python | symengine/__init__.py | Meldanya/symengine.py | ec3ce7aef387878abd10a3991e4c3610c98cc70d | [
"MIT"
] | null | null | null | symengine/__init__.py | Meldanya/symengine.py | ec3ce7aef387878abd10a3991e4c3610c98cc70d | [
"MIT"
] | 1 | 2015-10-17T13:22:48.000Z | 2015-10-18T11:43:13.000Z | symengine/__init__.py | Meldanya/symengine.py | ec3ce7aef387878abd10a3991e4c3610c98cc70d | [
"MIT"
] | null | null | null | from .lib.symengine_wrapper import (
have_mpfr, have_mpc, have_flint, have_piranha, have_llvm,
I, E, pi, oo, zoo, nan, Symbol, Dummy, S, sympify, SympifyError,
Integer, Rational, Float, Number, RealNumber, RealDouble, ComplexDouble,
add, Add, Mul, Pow, function_symbol,
Max, Min, DenseMatrix, Matrix,
ImmutableMatrix, ImmutableDenseMatrix, MutableDenseMatrix,
MatrixBase, Basic, DictBasic, symarray, series, diff, zeros,
eye, diag, ones, Derivative, Subs, expand, has_symbol,
UndefFunction, Function, latex,
have_numpy, true, false, Equality, Unequality, GreaterThan,
LessThan, StrictGreaterThan, StrictLessThan, Eq, Ne, Ge, Le,
Gt, Lt, And, Or, Not, Nand, Nor, Xor, Xnor, perfect_power, integer_nthroot,
isprime, sqrt_mod, Expr, cse, count_ops, ccode, Piecewise, Contains, Interval, FiniteSet,
FunctionSymbol as AppliedUndef,
golden_ratio as GoldenRatio,
catalan as Catalan,
eulergamma as EulerGamma
)
from .utilities import var, symbols
from .functions import *
from .printing import init_printing
if have_mpfr:
from .lib.symengine_wrapper import RealMPFR
if have_mpc:
from .lib.symengine_wrapper import ComplexMPC
if have_numpy:
from .lib.symengine_wrapper import (Lambdify, LambdifyCSE)
def lambdify(args, exprs, **kwargs):
    """Return a numeric callable evaluating the iterable ``exprs`` at ``args``.

    Thin convenience wrapper over ``Lambdify`` (available only when
    symengine was built with NumPy support, i.e. ``have_numpy`` is True).
    """
    return Lambdify(args, *exprs, **kwargs)
__version__ = "0.4.0"
def test():
    """Run the bundled test suite; return True iff every test passes."""
    import pytest
    import os
    # pytest's main() returns 0 on success, so negate for a truthy result.
    package_dir = os.path.dirname(os.path.abspath(__file__))
    return not pytest.cmdline.main([package_dir])
| 33.866667 | 93 | 0.723097 | from .lib.symengine_wrapper import (
have_mpfr, have_mpc, have_flint, have_piranha, have_llvm,
I, E, pi, oo, zoo, nan, Symbol, Dummy, S, sympify, SympifyError,
Integer, Rational, Float, Number, RealNumber, RealDouble, ComplexDouble,
add, Add, Mul, Pow, function_symbol,
Max, Min, DenseMatrix, Matrix,
ImmutableMatrix, ImmutableDenseMatrix, MutableDenseMatrix,
MatrixBase, Basic, DictBasic, symarray, series, diff, zeros,
eye, diag, ones, Derivative, Subs, expand, has_symbol,
UndefFunction, Function, latex,
have_numpy, true, false, Equality, Unequality, GreaterThan,
LessThan, StrictGreaterThan, StrictLessThan, Eq, Ne, Ge, Le,
Gt, Lt, And, Or, Not, Nand, Nor, Xor, Xnor, perfect_power, integer_nthroot,
isprime, sqrt_mod, Expr, cse, count_ops, ccode, Piecewise, Contains, Interval, FiniteSet,
FunctionSymbol as AppliedUndef,
golden_ratio as GoldenRatio,
catalan as Catalan,
eulergamma as EulerGamma
)
from .utilities import var, symbols
from .functions import *
from .printing import init_printing
if have_mpfr:
from .lib.symengine_wrapper import RealMPFR
if have_mpc:
from .lib.symengine_wrapper import ComplexMPC
if have_numpy:
from .lib.symengine_wrapper import (Lambdify, LambdifyCSE)
def lambdify(args, exprs, **kwargs):
return Lambdify(args, *exprs, **kwargs)
__version__ = "0.4.0"
def test():
import pytest
import os
return not pytest.cmdline.main(
[os.path.dirname(os.path.abspath(__file__))])
| true | true |
f731975854283587ddab9488f9c9c75d42aa8ed2 | 7,107 | py | Python | databricks/koalas/missing/series.py | dennyglee/koalas | 9781c1ece43b1fa2dce78a0813602e03db0b1fa3 | [
"Apache-2.0"
] | 1 | 2019-05-29T05:46:19.000Z | 2019-05-29T05:46:19.000Z | databricks/koalas/missing/series.py | dennyglee/koalas | 9781c1ece43b1fa2dce78a0813602e03db0b1fa3 | [
"Apache-2.0"
] | null | null | null | databricks/koalas/missing/series.py | dennyglee/koalas | 9781c1ece43b1fa2dce78a0813602e03db0b1fa3 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (C) 2019 Databricks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from databricks.koalas.missing import _unsupported_function, _unsupported_property, common
def unsupported_function(method_name, deprecated=False, reason=""):
    """Build a stub that raises for a pandas Series method Koalas lacks."""
    return _unsupported_function(
        class_name='pd.Series', method_name=method_name,
        deprecated=deprecated, reason=reason)
def unsupported_property(property_name, deprecated=False, reason=""):
    """Build a stub that raises for a pandas Series property Koalas lacks."""
    return _unsupported_property(
        class_name='pd.Series', property_name=property_name,
        deprecated=deprecated, reason=reason)
class _MissingPandasLikeSeries(object):
    """Mixin declaring pandas.Series APIs that Koalas does not implement.

    Each attribute is replaced by a stub that raises a helpful error
    (PandasNotImplementedError) when accessed/called, grouped into:
    supported-in-pandas properties/functions not yet ported, APIs pandas
    itself deprecated, and APIs deliberately unsupported on Spark.
    """

    # Properties
    axes = unsupported_property('axes')
    iat = unsupported_property('iat')

    # Deprecated properties
    blocks = unsupported_property('blocks', deprecated=True)
    ftypes = unsupported_property('ftypes', deprecated=True)
    ftype = unsupported_property('ftype', deprecated=True)
    is_copy = unsupported_property('is_copy', deprecated=True)
    ix = unsupported_property('ix', deprecated=True)
    asobject = unsupported_property('asobject', deprecated=True)
    strides = unsupported_property('strides', deprecated=True)
    imag = unsupported_property('imag', deprecated=True)
    itemsize = unsupported_property('itemsize', deprecated=True)
    data = unsupported_property('data', deprecated=True)
    base = unsupported_property('base', deprecated=True)
    flags = unsupported_property('flags', deprecated=True)

    # Functions
    align = unsupported_function('align')
    argsort = unsupported_function('argsort')
    asfreq = unsupported_function('asfreq')
    asof = unsupported_function('asof')
    at_time = unsupported_function('at_time')
    autocorr = unsupported_function('autocorr')
    between_time = unsupported_function('between_time')
    bfill = unsupported_function('bfill')
    combine = unsupported_function('combine')
    combine_first = unsupported_function('combine_first')
    cov = unsupported_function('cov')
    divmod = unsupported_function('divmod')
    dot = unsupported_function('dot')
    droplevel = unsupported_function('droplevel')
    duplicated = unsupported_function('duplicated')
    ewm = unsupported_function('ewm')
    factorize = unsupported_function('factorize')
    ffill = unsupported_function('ffill')
    filter = unsupported_function('filter')
    first = unsupported_function('first')
    get = unsupported_function('get')
    infer_objects = unsupported_function('infer_objects')
    interpolate = unsupported_function('interpolate')
    items = unsupported_function('items')
    iteritems = unsupported_function('iteritems')
    last = unsupported_function('last')
    last_valid_index = unsupported_function('last_valid_index')
    mad = unsupported_function('mad')
    pct_change = unsupported_function('pct_change')
    prod = unsupported_function('prod')
    product = unsupported_function('product')
    ravel = unsupported_function('ravel')
    rdivmod = unsupported_function('rdivmod')
    reindex = unsupported_function('reindex')
    reindex_like = unsupported_function('reindex_like')
    rename_axis = unsupported_function('rename_axis')
    reorder_levels = unsupported_function('reorder_levels')
    repeat = unsupported_function('repeat')
    resample = unsupported_function('resample')
    searchsorted = unsupported_function('searchsorted')
    sem = unsupported_function('sem')
    set_axis = unsupported_function('set_axis')
    slice_shift = unsupported_function('slice_shift')
    squeeze = unsupported_function('squeeze')
    swapaxes = unsupported_function('swapaxes')
    swaplevel = unsupported_function('swaplevel')
    tail = unsupported_function('tail')
    take = unsupported_function('take')
    to_hdf = unsupported_function('to_hdf')
    to_period = unsupported_function('to_period')
    to_sql = unsupported_function('to_sql')
    to_timestamp = unsupported_function('to_timestamp')
    tshift = unsupported_function('tshift')
    tz_convert = unsupported_function('tz_convert')
    tz_localize = unsupported_function('tz_localize')
    unstack = unsupported_function('unstack')
    view = unsupported_function('view')

    # Deprecated functions
    as_blocks = unsupported_function('as_blocks', deprecated=True)
    as_matrix = unsupported_function('as_matrix', deprecated=True)
    clip_lower = unsupported_function('clip_lower', deprecated=True)
    clip_upper = unsupported_function('clip_upper', deprecated=True)
    compress = unsupported_function('compress', deprecated=True)
    convert_objects = unsupported_function('convert_objects', deprecated=True)
    get_ftype_counts = unsupported_function('get_ftype_counts', deprecated=True)
    get_value = unsupported_function('get_value', deprecated=True)
    nonzero = unsupported_function('nonzero', deprecated=True)
    reindex_axis = unsupported_function('reindex_axis', deprecated=True)
    select = unsupported_function('select', deprecated=True)
    set_value = unsupported_function('set_value', deprecated=True)
    valid = unsupported_function('valid', deprecated=True)
    get_values = unsupported_function('get_values', deprecated=True)
    to_dense = unsupported_function('to_dense', deprecated=True)
    to_sparse = unsupported_function('to_sparse', deprecated=True)
    to_msgpack = unsupported_function('to_msgpack', deprecated=True)
    compound = unsupported_function('compound', deprecated=True)
    put = unsupported_function('put', deprecated=True)
    item = unsupported_function('item', deprecated=True)
    ptp = unsupported_function('ptp', deprecated=True)
    argmax = unsupported_function('argmax', deprecated=True)
    argmin = unsupported_function('argmin', deprecated=True)

    # Properties we won't support.
    values = common.values(unsupported_property)
    array = common.array(unsupported_property)
    real = unsupported_property(
        'real',
        reason="If you want to collect your data as an NumPy array, use 'to_numpy()' instead.")
    nbytes = unsupported_property(
        'nbytes',
        reason="'nbytes' requires to compute whole dataset. You can calculate manually it, "
               "with its 'itemsize', by explicitly executing its count. Use Spark's web UI "
               "to monitor disk and memory usage of your application in general.")

    # Functions we won't support.
    memory_usage = common.memory_usage(unsupported_function)
    to_pickle = common.to_pickle(unsupported_function)
    to_xarray = common.to_xarray(unsupported_function)
    __iter__ = common.__iter__(unsupported_function)
| 47.066225 | 95 | 0.74307 |
from databricks.koalas.missing import _unsupported_function, _unsupported_property, common
def unsupported_function(method_name, deprecated=False, reason=""):
return _unsupported_function(class_name='pd.Series', method_name=method_name,
deprecated=deprecated, reason=reason)
def unsupported_property(property_name, deprecated=False, reason=""):
return _unsupported_property(class_name='pd.Series', property_name=property_name,
deprecated=deprecated, reason=reason)
class _MissingPandasLikeSeries(object):
axes = unsupported_property('axes')
iat = unsupported_property('iat')
blocks = unsupported_property('blocks', deprecated=True)
ftypes = unsupported_property('ftypes', deprecated=True)
ftype = unsupported_property('ftype', deprecated=True)
is_copy = unsupported_property('is_copy', deprecated=True)
ix = unsupported_property('ix', deprecated=True)
asobject = unsupported_property('asobject', deprecated=True)
strides = unsupported_property('strides', deprecated=True)
imag = unsupported_property('imag', deprecated=True)
itemsize = unsupported_property('itemsize', deprecated=True)
data = unsupported_property('data', deprecated=True)
base = unsupported_property('base', deprecated=True)
flags = unsupported_property('flags', deprecated=True)
align = unsupported_function('align')
argsort = unsupported_function('argsort')
asfreq = unsupported_function('asfreq')
asof = unsupported_function('asof')
at_time = unsupported_function('at_time')
autocorr = unsupported_function('autocorr')
between_time = unsupported_function('between_time')
bfill = unsupported_function('bfill')
combine = unsupported_function('combine')
combine_first = unsupported_function('combine_first')
cov = unsupported_function('cov')
divmod = unsupported_function('divmod')
dot = unsupported_function('dot')
droplevel = unsupported_function('droplevel')
duplicated = unsupported_function('duplicated')
ewm = unsupported_function('ewm')
factorize = unsupported_function('factorize')
ffill = unsupported_function('ffill')
filter = unsupported_function('filter')
first = unsupported_function('first')
get = unsupported_function('get')
infer_objects = unsupported_function('infer_objects')
interpolate = unsupported_function('interpolate')
items = unsupported_function('items')
iteritems = unsupported_function('iteritems')
last = unsupported_function('last')
last_valid_index = unsupported_function('last_valid_index')
mad = unsupported_function('mad')
pct_change = unsupported_function('pct_change')
prod = unsupported_function('prod')
product = unsupported_function('product')
ravel = unsupported_function('ravel')
rdivmod = unsupported_function('rdivmod')
reindex = unsupported_function('reindex')
reindex_like = unsupported_function('reindex_like')
rename_axis = unsupported_function('rename_axis')
reorder_levels = unsupported_function('reorder_levels')
repeat = unsupported_function('repeat')
resample = unsupported_function('resample')
searchsorted = unsupported_function('searchsorted')
sem = unsupported_function('sem')
set_axis = unsupported_function('set_axis')
slice_shift = unsupported_function('slice_shift')
squeeze = unsupported_function('squeeze')
swapaxes = unsupported_function('swapaxes')
swaplevel = unsupported_function('swaplevel')
tail = unsupported_function('tail')
take = unsupported_function('take')
to_hdf = unsupported_function('to_hdf')
to_period = unsupported_function('to_period')
to_sql = unsupported_function('to_sql')
to_timestamp = unsupported_function('to_timestamp')
tshift = unsupported_function('tshift')
tz_convert = unsupported_function('tz_convert')
tz_localize = unsupported_function('tz_localize')
unstack = unsupported_function('unstack')
view = unsupported_function('view')
as_blocks = unsupported_function('as_blocks', deprecated=True)
as_matrix = unsupported_function('as_matrix', deprecated=True)
clip_lower = unsupported_function('clip_lower', deprecated=True)
clip_upper = unsupported_function('clip_upper', deprecated=True)
compress = unsupported_function('compress', deprecated=True)
convert_objects = unsupported_function('convert_objects', deprecated=True)
get_ftype_counts = unsupported_function('get_ftype_counts', deprecated=True)
get_value = unsupported_function('get_value', deprecated=True)
nonzero = unsupported_function('nonzero', deprecated=True)
reindex_axis = unsupported_function('reindex_axis', deprecated=True)
select = unsupported_function('select', deprecated=True)
set_value = unsupported_function('set_value', deprecated=True)
valid = unsupported_function('valid', deprecated=True)
get_values = unsupported_function('get_values', deprecated=True)
to_dense = unsupported_function('to_dense', deprecated=True)
to_sparse = unsupported_function('to_sparse', deprecated=True)
to_msgpack = unsupported_function('to_msgpack', deprecated=True)
compound = unsupported_function('compound', deprecated=True)
put = unsupported_function('put', deprecated=True)
item = unsupported_function('item', deprecated=True)
ptp = unsupported_function('ptp', deprecated=True)
argmax = unsupported_function('argmax', deprecated=True)
argmin = unsupported_function('argmin', deprecated=True)
values = common.values(unsupported_property)
array = common.array(unsupported_property)
real = unsupported_property(
'real',
reason="If you want to collect your data as an NumPy array, use 'to_numpy()' instead.")
nbytes = unsupported_property(
'nbytes',
reason="'nbytes' requires to compute whole dataset. You can calculate manually it, "
"with its 'itemsize', by explicitly executing its count. Use Spark's web UI "
"to monitor disk and memory usage of your application in general.")
memory_usage = common.memory_usage(unsupported_function)
to_pickle = common.to_pickle(unsupported_function)
to_xarray = common.to_xarray(unsupported_function)
__iter__ = common.__iter__(unsupported_function)
| true | true |
f73197e08ced9d0164ad4b85df778656c785b84a | 3,279 | py | Python | cube_root.py | scottwedge/CSC110 | 8d1e39de183e3ea477dac993d0fa9b721feca1ca | [
"MIT"
] | null | null | null | cube_root.py | scottwedge/CSC110 | 8d1e39de183e3ea477dac993d0fa9b721feca1ca | [
"MIT"
] | 1 | 2020-09-04T02:04:33.000Z | 2020-09-04T02:04:33.000Z | cube_root.py | scottwedge/CSC110 | 8d1e39de183e3ea477dac993d0fa9b721feca1ca | [
"MIT"
] | 1 | 2020-09-04T01:51:55.000Z | 2020-09-04T01:51:55.000Z |
# Sample program illustrating programmer-defined functions
# along with code that calls the functions
#
# The 'cube_root' function uses a 'return' statement. Its
# only job is to calculate and "return" the cube root of a
# number. It does not print anything. Notice that statements
# that "call" the 'cube_root' function need to USE the value
# returned. They can do this by a) saving it in a variable,
# b) using it in a 'print' statement, or c) using the value in
# ANY general expression. Imagine that the value returned by
# the function REPLACES the function call wherever it occurs.
# This is EXACTLY the same way you use built-in functions like
# 'input()', 'abs()', 'round()', etc.
#
# The 'show_table' function does NOT use a 'return' statement.
# It's job is to PRINT a table. Different functions may be
# used in different ways.
#
# CSC 110
# Winter 2013
# The cube_root function calculates and RETURNS the cube root
# of a number. If the value of 'x' is negative, a negative
# real number is returned.
def cube_root(x):
    """Return the real cube root of x; negative input yields a negative root."""
    # x ** (1/3) is only well-defined for non-negative x, so factor the
    # sign out and apply it to the root of the magnitude.
    magnitude = abs(x) ** (1.0 / 3.0)
    return -magnitude if x < 0 else magnitude
# Main program
def main():
    """Demonstrate the different ways of using the cube_root function."""
    print('Let\'s examine the cube roots of some numbers.\n')

    # CALL the function and save the value returned in a variable:
    num = 27
    root = cube_root(num)  # The ARGUMENT is a variable
    print('Cube root of ' + str(num) + ' is ' + format(root, '.1f'))

    root = cube_root(num + 98)  # The argument is an expression
    print(root)

    # Use the function call directly in a 'print' statement:
    print('Cube root of ' + str(num) + ' is ' + format(cube_root(num), '.1f'))

    # Use multiple function calls in an expression:
    print('The answer is', cube_root(8) + cube_root(1000) / 2)
    print('The answer (rounded) is', round(cube_root(8) + cube_root(1000) / 2))

    # Here is a table of some cube roots. The original repeated the same
    # assignment/print pair five times; a loop produces identical output.
    print('\n n cube_root(n)')  # print header row
    for num in [8, 31, 1727, 1728, 1729]:
        print(format(num, '8.1f'), format(cube_root(num), '10.3f'))

    # here is a table of cube roots
    print("using loop")
    x = [8, 31, 1727, 1728, 1729]
    for num in x:
        print(format(num, '8.1f'), format(cube_root(num), '10.3f'))

    # Here are a couple of longer tables:
    show_table(0, 1000, 10)
    show_table(42875, 1000000, 20)
# This function shows a table of cube roots.
# The first two parameters are the minimum and maximum values for 'n'.
# The third parameter is the number of rows to show in the table.
def show_table(minN, maxN, rows):
    """Print a 'rows'-row table of n and cube_root(n), n in [minN, maxN]."""
    # Calculate the step size. There are (rows - 1) intervals between the
    # 'rows' sample points, so both endpoints are included in the table.
    step = (maxN - minN) / (rows - 1.0)

    print('\n n cube_root(n)')  # print header row

    # Loop 'rows' times to print the rows in the table:
    for i in range(rows):
        n = minN + i * step  # calculate the value of 'n' for row 'i'
        print(format(n, '12.3f'), format(cube_root(n), '8.3f'))
# Run the program only when executed as a script, not when imported:
if __name__ == '__main__':
    main()
# used in different ways.
#
# CSC 110
# Winter 2013
# The cube_root function calculates and RETURNS the cube root
# of a number. If the value of 'x' is negative, a negative
# real number is returned.
def cube_root(x):
    """Return the real cube root of x; negative input yields a negative root."""
    # Factor out the sign so the fractional power sees a non-negative base.
    magnitude = abs(x) ** (1.0 / 3.0)
    return -magnitude if x < 0 else magnitude
# Main program
def main():
print('Let\'s examine the cube roots of some numbers.\n')
num = 27
root = cube_root(num)
print('Cube root of ' + str(num) + ' is ' + format(root, '.1f'))
root = cube_root(num + 98)
print(root)
print('Cube root of ' + str(num) + ' is ' + format(cube_root(num), '.1f'))
print('The answer is', cube_root(8) + cube_root(1000) / 2)
print('The answer (rounded) is', round(cube_root(8) + cube_root(1000) / 2))
print('\n n cube_root(n)')
num = 8
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
num = 31
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
num = 1727
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
num = 1728
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
num = 1729
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
print("using loop")
x = [8,31,1727,1728,1729]
for num in x:
print(format(num, '8.1f'), format(cube_root(num), '10.3f'))
show_table(0, 1000, 10)
show_table(42875, 1000000, 20)
def show_table(minN, maxN, rows):
step = (maxN - minN) / (rows - 1.0)
print('\n n cube_root(n)')
for i in range(rows):
n = minN + i * step
print(format(n, '12.3f'), format(cube_root(n), '8.3f'))
# Run the program only when executed as a script, not when imported:
if __name__ == '__main__':
    main()
f73198cc95d621ef4641d770730e34b065a75e7a | 1,470 | py | Python | MultiMediaAnalyse/Task2/main.py | wsh-nie/Assignments | e1f800ca9abe54218f9b695dce843ec96a99710c | [
"MIT"
] | null | null | null | MultiMediaAnalyse/Task2/main.py | wsh-nie/Assignments | e1f800ca9abe54218f9b695dce843ec96a99710c | [
"MIT"
] | null | null | null | MultiMediaAnalyse/Task2/main.py | wsh-nie/Assignments | e1f800ca9abe54218f9b695dce843ec96a99710c | [
"MIT"
] | null | null | null | import cv2
def SIFT(imgname1, imgname2):
    """Match SIFT keypoints between two images and draw the good matches."""
    detector = cv2.xfeatures2d.SIFT_create()
    img1 = cv2.imread(imgname1)
    img2 = cv2.imread(imgname2)

    # Detect keypoints and compute SIFT descriptors for both images.
    kp1, des1 = detector.detectAndCompute(img1, None)
    kp2, des2 = detector.detectAndCompute(img2, None)

    # Approximate nearest-neighbour matching with a FLANN KD-tree,
    # keeping the two best candidates per query descriptor.
    FLANN_INDEX_KDTREE = 0
    matcher = cv2.FlannBasedMatcher(
        dict(algorithm=FLANN_INDEX_KDTREE, trees=5), dict(checks=50))
    matches = matcher.knnMatch(des1, des2, k=2)

    # Lowe's ratio test: accept only clearly unambiguous matches.
    good = [[m] for m, n in matches if m.distance < 0.70 * n.distance]
    return cv2.drawMatchesKnn(img1, kp1, img2, kp2, good, None, flags=2)
def ORB(imgname1, imgname2):
    """Match ORB keypoints between two images and draw the good matches."""
    orb = cv2.ORB_create()
    img1 = cv2.imread(imgname1)
    img2 = cv2.imread(imgname2)

    # Binary ORB descriptors for both images.
    kp1, des1 = orb.detectAndCompute(img1, None)
    kp2, des2 = orb.detectAndCompute(img2, None)

    # ORB descriptors are binary, so Hamming distance is the correct
    # metric; the default BFMatcher() uses NORM_L2, which is meant for
    # float descriptors like SIFT (per OpenCV's feature-matching docs).
    bf = cv2.BFMatcher(cv2.NORM_HAMMING)
    matches = bf.knnMatch(des1, des2, k=2)

    # Ratio test (looser 0.8 threshold than the SIFT variant).
    good = []
    for m, n in matches:
        if m.distance < 0.8 * n.distance:
            good.append([m])
    img3 = cv2.drawMatchesKnn(img1, kp1, img2, kp2, good, None, flags=2)
    return img3
if __name__ == '__main__':
    # Compare the two feature-matching pipelines on the same image pair.
    imgname1 = '1.jpg'
    imgname2 = '2.jpg'
    img3 = SIFT(imgname1, imgname2)
    img4 = ORB(imgname1, imgname2)
    # Display both match visualisations and also persist them to disk.
    cv2.imshow("SIFT", img3)
    cv2.imwrite("sift.jpg",img3)
    cv2.imshow("ORB", img4)
    cv2.imwrite("orb.jpg",img4)
    # Block until a key is pressed, then tear down the windows.
    cv2.waitKey(0)
    cv2.destroyAllWindows()
| 25.344828 | 66 | 0.646259 | import cv2
def SIFT(imgname1, imgname2):
sift = cv2.xfeatures2d.SIFT_create()
img1 = cv2.imread(imgname1)
img2 = cv2.imread(imgname2)
kp1, des1 = sift.detectAndCompute(img1, None)
kp2, des2 = sift.detectAndCompute(img2, None)
FLANN_INDEX_KDTREE = 0
index_params = dict(algorithm=FLANN_INDEX_KDTREE, trees=5)
search_params = dict(checks=50)
flann = cv2.FlannBasedMatcher(index_params, search_params)
matches = flann.knnMatch(des1,des2,k=2)
good = []
for m,n in matches:
if m.distance < 0.70*n.distance:
good.append([m])
img3 = cv2.drawMatchesKnn(img1,kp1,img2,kp2,good,None,flags=2)
return img3
def ORB(imgname1, imgname2):
orb = cv2.ORB_create()
img1 = cv2.imread(imgname1)
img2 = cv2.imread(imgname2)
kp1, des1 = orb.detectAndCompute(img1,None)
kp2, des2 = orb.detectAndCompute(img2,None)
bf = cv2.BFMatcher()
matches = bf.knnMatch(des1,des2, k=2)
good = []
for m,n in matches:
if m.distance < 0.8*n.distance:
good.append([m])
img3 = cv2.drawMatchesKnn(img1,kp1,img2,kp2,good,None,flags=2)
return img3
if __name__ == '__main__':
imgname1 = '1.jpg'
imgname2 = '2.jpg'
img3 = SIFT(imgname1, imgname2)
img4 = ORB(imgname1, imgname2)
cv2.imshow("SIFT", img3)
cv2.imwrite("sift.jpg",img3)
cv2.imshow("ORB", img4)
cv2.imwrite("orb.jpg",img4)
cv2.waitKey(0)
cv2.destroyAllWindows()
| true | true |
f73198d4417ffcba32dc97836964e3ca95693c56 | 3,900 | py | Python | romp/lib/loss_funcs/params_loss.py | vltmedia/ROMP | 1d2d96bd39f67a0a86ce7e397e3af856b3c5ee00 | [
"MIT"
] | 385 | 2021-03-30T15:40:31.000Z | 2022-03-31T21:52:52.000Z | romp/lib/loss_funcs/params_loss.py | vltmedia/ROMP | 1d2d96bd39f67a0a86ce7e397e3af856b3c5ee00 | [
"MIT"
] | 154 | 2021-03-31T11:28:27.000Z | 2022-03-31T08:00:45.000Z | romp/lib/loss_funcs/params_loss.py | vltmedia/ROMP | 1d2d96bd39f67a0a86ce7e397e3af856b3c5ee00 | [
"MIT"
] | 102 | 2021-04-15T06:43:00.000Z | 2022-03-31T12:40:08.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
import torch.nn as nn
import time
import pickle
import numpy as np
import config
import constants
from config import args
from utils import batch_rodrigues, rotation_matrix_to_angle_axis
def batch_l2_loss(real, predict):
    """Mean over the batch of the per-sample L2 distance along dim 1."""
    per_sample_dist = torch.norm(real - predict, p=2, dim=1)
    return per_sample_dist.mean()
def batch_l2_loss_param(real,predict):
    """Pose-parameter loss computed in rotation-matrix space.

    Axis-angle vectors are first converted to rotation matrices because
    multiple axis-angle values map to the same rotation (period 2*pi),
    so comparing matrices avoids penalising equivalent poses.

    real/predict: (N, J*3) axis-angle pose parameters.
    Returns a (N,) tensor: mean per-joint Frobenius distance per sample.
    """
    batch_size = real.shape[0]
    real = batch_rodrigues(real.reshape(-1,3)).contiguous()  # (N*J)*3 -> (N*J)*3*3
    predict = batch_rodrigues(predict.reshape(-1,3)).contiguous()  # (N*J)*3 -> (N*J)*3*3
    # Frobenius norm of the matrix difference, per joint.
    loss = torch.norm((real-predict).view(-1,9), p=2, dim=-1)
    loss = loss.reshape(batch_size, -1).mean(-1)
    return loss
def _calc_MPJAE(rel_pose_pred, rel_pose_real):
    """Mean Per-Joint Angle Error between predicted and real poses.

    Converts both relative poses to global rotation matrices (including
    the root rotation) and measures their geodesic distance in degrees.

    :returns: numpy array of per-joint angle errors.
    """
    pred_global = trans_relative_rot_to_global_rotmat(
        rel_pose_pred, with_global_rot=True)
    real_global = trans_relative_rot_to_global_rotmat(
        rel_pose_real, with_global_rot=True)
    return _calc_joint_angle_error(pred_global, real_global).cpu().numpy()
def trans_relative_rot_to_global_rotmat(params, with_global_rot=False):
    """Compute absolute rotation matrices of K body parts in global frame.

    The rotation maps each local bone coordinate frame to the global one,
    accumulated along the SMPL kinematic tree
    (``constants.kintree_parents``).

    K = 9 parts, in order: root (0), left hip (1), right hip (2),
    left knee (4), right knee (5), left shoulder (16), right shoulder (17),
    left elbow (18), right elbow (19).

    :param params: axis-angle pose parameters, shape (N, J*3).
    :param with_global_rot: include the root (global) rotation; when False
        the root is replaced by the identity and the joint selection is
        shifted by one.
    :returns: tensor of shape (N, 9, 3, 3) with the selected global
        rotation matrices.
    """
    batch_size, param_num = params.shape[0], params.shape[1] // 3
    pose_rotmat = batch_rodrigues(params.reshape(-1, 3)).view(
        batch_size, param_num, 3, 3).contiguous()
    if with_global_rot:
        # BUGFIX: np.int was deprecated in NumPy 1.20 and removed in 1.24;
        # the builtin int is the documented equivalent.
        sellect_joints = np.array([0, 1, 2, 4, 5, 16, 17, 18, 19], dtype=int)
        results = [pose_rotmat[:, 0]]
        for idx in range(param_num - 1):
            i_val = int(idx + 1)
            joint_rot = pose_rotmat[:, i_val]
            parent = constants.kintree_parents[i_val]
            glob_transf_mat = torch.matmul(results[parent], joint_rot)
            results.append(glob_transf_mat)
    else:
        sellect_joints = np.array([1, 2, 4, 5, 16, 17, 18, 19], dtype=int) - 1
        # Root rotation is replaced by identity when the global rotation is
        # excluded.  NOTE(review): .cuda() hard-codes GPU placement — confirm
        # this path is only hit on CUDA builds.
        results = [torch.eye(3, 3)[None].cuda().repeat(batch_size, 1, 1)]
        for i_val in range(param_num - 1):
            joint_rot = pose_rotmat[:, i_val]
            parent = constants.kintree_parents[i_val + 1]
            glob_transf_mat = torch.matmul(results[parent], joint_rot)
            results.append(glob_transf_mat)
    global_rotmat = torch.stack(results, dim=1)[:, sellect_joints].contiguous()
    return global_rotmat
def _calc_joint_angle_error(pred_mat, gt_mat, return_axis_angle=False):
    """Geodesic distance, in degrees, between two batches of rotations.

    :param pred_mat: predicted rotation matrices, shape (Seq, 9, 3, 3).
    :param gt_mat: ground-truth rotation matrices, same shape.
    :param return_axis_angle: also return the axis-angle residuals.
    :return: per-matrix geodesic angles in degrees (and, optionally, the
        axis-angle residual rotations).
    """
    # Flatten both batches into B x 3 x 3 arrays.
    pred_flat = pred_mat.reshape(-1, 3, 3)
    gt_flat = gt_mat.reshape(-1, 3, 3)
    # R_pred @ R_gt^T is the residual rotation between the two.
    gt_transposed = gt_flat.permute(0, 2, 1)
    residual = torch.matmul(pred_flat, gt_transposed)
    # The magnitude of the axis-angle vector is the geodesic angle.
    axis_angles = rotation_matrix_to_angle_axis(residual)
    angles = torch.norm(axis_angles, dim=-1) * (180. / np.pi)
    if return_axis_angle:
        return angles, axis_angles
    return angles
| 42.857143 | 120 | 0.691538 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import torch
import torch.nn as nn
import time
import pickle
import numpy as np
import config
import constants
from config import args
from utils import batch_rodrigues, rotation_matrix_to_angle_axis
def batch_l2_loss(real,predict):
loss_batch = torch.norm(real-predict, p=2, dim=1)
return loss_batch.mean()
def batch_l2_loss_param(real,predict):
batch_size = real.shape[0]
real = batch_rodrigues(real.reshape(-1,3)).contiguous()
predict = batch_rodrigues(predict.reshape(-1,3)).contiguous()
    loss = torch.norm((real-predict).view(-1,9), p=2, dim=-1)
    loss = loss.reshape(batch_size, -1).mean(-1)
return loss
def _calc_MPJAE(rel_pose_pred,rel_pose_real):
global_pose_rotmat_pred = trans_relative_rot_to_global_rotmat(rel_pose_pred, with_global_rot=True)
global_pose_rotmat_real = trans_relative_rot_to_global_rotmat(rel_pose_real, with_global_rot=True)
MPJAE_error = _calc_joint_angle_error(global_pose_rotmat_pred, global_pose_rotmat_real).cpu().numpy()
return MPJAE_error
def trans_relative_rot_to_global_rotmat(params, with_global_rot=False):
batch_size, param_num = params.shape[0], params.shape[1]//3
pose_rotmat = batch_rodrigues(params.reshape(-1,3)).view(batch_size, param_num, 3, 3).contiguous()
if with_global_rot:
sellect_joints = np.array([0,1,2,4,5,16,17,18,19],dtype=np.int)
results = [pose_rotmat[:, 0]]
for idx in range(param_num-1):
i_val = int(idx + 1)
joint_rot = pose_rotmat[:, i_val]
parent = constants.kintree_parents[i_val]
glob_transf_mat = torch.matmul(results[parent], joint_rot)
results.append(glob_transf_mat)
else:
sellect_joints = np.array([1,2,4,5,16,17,18,19],dtype=np.int)-1
results = [torch.eye(3,3)[None].cuda().repeat(batch_size,1,1)]
for i_val in range(param_num-1):
joint_rot = pose_rotmat[:, i_val]
parent = constants.kintree_parents[i_val+1]
glob_transf_mat = torch.matmul(results[parent], joint_rot)
results.append(glob_transf_mat)
global_rotmat = torch.stack(results, axis=1)[:, sellect_joints].contiguous()
return global_rotmat
def _calc_joint_angle_error(pred_mat, gt_mat, return_axis_angle=False):
r1 = pred_mat.reshape(-1,3,3)
r2 = gt_mat.reshape(-1,3,3)
r2t = r2.permute(0,2,1)
r = torch.matmul(r1, r2t)
axis_angles = rotation_matrix_to_angle_axis(r)
angles = torch.norm(axis_angles, dim=-1)*(180./np.pi)
if return_axis_angle:
return angles,axis_angles
return angles
| true | true |
f7319a140d0f0b8ef8a302e45f0ea738b572ba7a | 63,805 | py | Python | ironic/drivers/modules/drac/raid.py | inmotionhosting/ironic | 1c7b5f82592e23ab66dddca56e0b059d3cb0710b | [
"Apache-2.0"
] | 1 | 2021-02-27T02:48:59.000Z | 2021-02-27T02:48:59.000Z | ironic/drivers/modules/drac/raid.py | inmotionhosting/ironic | 1c7b5f82592e23ab66dddca56e0b059d3cb0710b | [
"Apache-2.0"
] | null | null | null | ironic/drivers/modules/drac/raid.py | inmotionhosting/ironic | 1c7b5f82592e23ab66dddca56e0b059d3cb0710b | [
"Apache-2.0"
] | null | null | null | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC RAID specific methods
"""
from collections import defaultdict
import math
from futurist import periodics
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
from oslo_utils import units
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import raid as raid_common
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.conf import CONF
from ironic.drivers import base
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules.drac import common as drac_common
from ironic.drivers.modules.drac import job as drac_job
drac_exceptions = importutils.try_import('dracclient.exceptions')
drac_constants = importutils.try_import('dracclient.constants')
LOG = logging.getLogger(__name__)
METRICS = metrics_utils.get_metrics_logger(__name__)
# Attribute names/values of the iDRAC RAID-settings describing the
# controller's personality mode (RAID vs. Enhanced HBA).
_CURRENT_RAID_CONTROLLER_MODE = "RAIDCurrentControllerMode"
_REQUESTED_RAID_CONTROLLER_MODE = "RAIDRequestedControllerMode"
_EHBA_MODE = "Enhanced HBA"
_RAID_MODE = "RAID"

# Static description of every RAID level the driver supports.
#   min_disks/max_disks: allowed physical disk counts for one span.
#   overhead: disks consumed by redundancy (parity/mirror) per span.
#   'spanned' levels nest a base level ('span_type') across several spans.
RAID_LEVELS = {
    '0': {
        'min_disks': 1,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 0
    },
    '1': {
        'min_disks': 2,
        'max_disks': 2,
        'type': 'simple',
        'overhead': 1
    },
    '5': {
        'min_disks': 3,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 1
    },
    '6': {
        'min_disks': 4,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 2
    },
    '1+0': {
        'type': 'spanned',
        'span_type': '1'
    },
    '5+0': {
        'type': 'spanned',
        'span_type': '5'
    },
    '6+0': {
        'type': 'spanned',
        'span_type': '6'
    }
}
def list_raid_controllers(node):
    """Return the RAID controllers present on the node.

    :param node: an ironic node object.
    :returns: a list of RAIDController objects from dracclient.
    :raises: DracOperationError on an error from python-dracclient.
    """
    drac_client = drac_common.get_drac_client(node)

    try:
        controllers = drac_client.list_raid_controllers()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to get the list of RAID controllers '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'error': exc, 'node_uuid': node.uuid})
        raise exception.DracOperationError(error=exc)
    return controllers
def list_virtual_disks(node):
    """Return the virtual disks configured on the node.

    :param node: an ironic node object.
    :returns: a list of VirtualDisk objects from dracclient.
    :raises: DracOperationError on an error from python-dracclient.
    """
    drac_client = drac_common.get_drac_client(node)

    try:
        virtual_disks = drac_client.list_virtual_disks()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to get the list of virtual disks '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'error': exc, 'node_uuid': node.uuid})
        raise exception.DracOperationError(error=exc)
    return virtual_disks
def list_physical_disks(node):
    """Return the physical disks attached to the node.

    :param node: an ironic node object.
    :returns: a list of PhysicalDisk objects from dracclient.
    :raises: DracOperationError on an error from python-dracclient.
    """
    drac_client = drac_common.get_drac_client(node)

    try:
        physical_disks = drac_client.list_physical_disks()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to get the list of physical disks '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'error': exc, 'node_uuid': node.uuid})
        raise exception.DracOperationError(error=exc)
    return physical_disks
def _is_raid_controller(node, raid_controller_fqdd, raid_controllers=None):
    """Tell whether an object's FQDD identifies a RAID controller.

    :param node: an ironic node object
    :param raid_controller_fqdd: The object's fqdd we are testing to see
           if it is a raid controller or not.
    :param raid_controllers: A list of RAIDControllers used to check for
           the presence of BOSS cards. If None, the iDRAC will be
           queried for the list of controllers.
    :returns: boolean, True if the device is a RAID controller,
              False if not.
    """
    drac_client = drac_common.get_drac_client(node)

    try:
        is_raid = drac_client.is_raid_controller(raid_controller_fqdd,
                                                 raid_controllers)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('Unable to determine if controller %(raid_controller_fqdd)s '
                  'on node %(node_uuid)s is a RAID controller. '
                  'Reason: %(error)s. ',
                  {'error': exc, 'node_uuid': node.uuid,
                   'raid_controller_fqdd': raid_controller_fqdd})
        raise exception.DracOperationError(error=exc)
    return is_raid
def _validate_job_queue(node, raid_controller=None):
    """Fail if the iDRAC job queue has pending jobs for this controller.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller; when given, only
        jobs whose name starts with "Config:RAID:<id>" are considered.
    """
    if raid_controller:
        drac_job.validate_job_queue(
            node, name_prefix="Config:RAID:%s" % raid_controller)
    else:
        drac_job.validate_job_queue(node)
def create_virtual_disk(node, raid_controller, physical_disks, raid_level,
                        size_mb, disk_name=None, span_length=None,
                        span_depth=None):
    """Create a single virtual disk on a RAID controller.

    The virtual disk is only marked pending; the DRAC card applies the
    change once ``commit_config`` creates a config job.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller.
    :param physical_disks: ids of the physical disks.
    :param raid_level: RAID level of the virtual disk.
    :param size_mb: size of the virtual disk.
    :param disk_name: name of the virtual disk. (optional)
    :param span_depth: Number of spans in virtual disk. (optional)
    :param span_length: Number of disks per span. (optional)
    :returns: a dictionary containing the commit_needed key with a boolean
              value indicating whether a config job must be created for the
              values to be applied.
    :raises: DracOperationError on an error from python-dracclient.
    """
    # A pending (uncommitted) job — e.g. a boot mode change — would make
    # this config operation fail, so check the queue first.
    _validate_job_queue(node, raid_controller)

    drac_client = drac_common.get_drac_client(node)
    try:
        result = drac_client.create_virtual_disk(
            raid_controller, physical_disks, raid_level, size_mb,
            disk_name, span_length, span_depth)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to create virtual disk for node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'error': exc, 'node_uuid': node.uuid})
        raise exception.DracOperationError(error=exc)
    return result
def delete_virtual_disk(node, virtual_disk):
    """Delete a single virtual disk on a RAID controller.

    The deletion is only marked pending; the DRAC card applies the change
    once ``commit_config`` creates a config job.

    :param node: an ironic node object.
    :param virtual_disk: id of the virtual disk.
    :returns: a dictionary containing the commit_needed key with a boolean
              value indicating whether a config job must be created for the
              values to be applied.
    :raises: DracOperationError on an error from python-dracclient.
    """
    # NOTE(mgoddard): Cannot specify raid_controller as we don't know it.
    _validate_job_queue(node)

    drac_client = drac_common.get_drac_client(node)
    try:
        result = drac_client.delete_virtual_disk(virtual_disk)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to delete virtual disk '
                  '%(virtual_disk_fqdd)s for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'error': exc, 'node_uuid': node.uuid,
                   'virtual_disk_fqdd': virtual_disk})
        raise exception.DracOperationError(error=exc)
    return result
def _reset_raid_config(node, raid_controller):
    """Delete all virtual disks and unassign all hotspare physical disks.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller.
    :returns: a dictionary containing
              - The is_commit_required needed key with a
                boolean value indicating whether a config job must be created
                for the values to be applied.
              - The is_reboot_required key with a RebootRequired enumerated
                value indicating whether the server must be rebooted to
                reset configuration.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        # Both the queue check and the reset call can raise client errors,
        # so they are handled together.
        _validate_job_queue(node, raid_controller)
        drac_client = drac_common.get_drac_client(node)
        result = drac_client.reset_raid_config(raid_controller)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to delete all virtual disk '
                  'and unassign all hotspares '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'error': exc, 'node_uuid': node.uuid,
                   'raid_controller_fqdd': raid_controller})
        raise exception.DracOperationError(error=exc)
    return result
def clear_foreign_config(node, raid_controller):
    """Free up drives carrying a foreign RAID configuration.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller.
    :returns: a dictionary containing
              - The is_commit_required needed key with a
                boolean value indicating whether a config job must be created
                for the values to be applied.
              - The is_reboot_required key with a RebootRequired enumerated
                value indicating whether the server must be rebooted to
                clear foreign configuration.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        _validate_job_queue(node, raid_controller)
        drac_client = drac_common.get_drac_client(node)
        result = drac_client.clear_foreign_config(raid_controller)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to free foreign driver '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'error': exc, 'node_uuid': node.uuid,
                   'raid_controller_fqdd': raid_controller})
        raise exception.DracOperationError(error=exc)
    return result
def set_raid_settings(node, controller_fqdd, settings):
    """Set pending values on the RAID controller's settings.

    Only the pending_value of each attribute is changed; a config job must
    be created afterwards for the values to take effect.

    :param node: an ironic node object.
    :param controller_fqdd: the ID of the RAID controller.
    :param settings: a dictionary containing the proposed values, with
                     each key being the name of attribute and the value
                     being the proposed value.
    :returns: a dictionary containing:
              - The is_commit_required key with a boolean value indicating
                whether a config job must be created for the values to be
                applied.
              - The is_reboot_required key with a RebootRequired enumerated
                value indicating whether the server must be rebooted for the
                values to be applied. Possible values are true and false.
    :raises: DRACOperationFailed on error reported back by the DRAC
             interface
    """
    try:
        drac_job.validate_job_queue(node)
        drac_client = drac_common.get_drac_client(node)
        result = drac_client.set_raid_settings(controller_fqdd, settings)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to set raid settings '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'error': exc, 'node_uuid': node.uuid,
                   'raid_controller_fqdd': controller_fqdd})
        raise exception.DracOperationError(error=exc)
    return result
def list_raid_settings(node):
    """Return the node's RAID configuration settings.

    :param node: an ironic node object.
    :returns: a dictionary with the RAID settings using InstanceID as the
              key. The attributes are RAIDEnumerableAttribute,
              RAIDStringAttribute and RAIDIntegerAttribute objects.
    :raises: DRACOperationFailed on error reported back by the DRAC
             interface
    """
    try:
        drac_job.validate_job_queue(node)
        drac_client = drac_common.get_drac_client(node)
        settings = drac_client.list_raid_settings()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to list raid settings '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'error': exc, 'node_uuid': node.uuid})
        raise exception.DracOperationError(error=exc)
    return settings
def change_physical_disk_state(node, mode=None,
                               controllers_to_physical_disk_ids=None):
    """Convert physical disks between RAID and JBOD state.

    Only disks that are not already in the requested state are converted.

    :param node: an ironic node object.
    :param mode: the mode to change the disks either to RAID or JBOD.
    :param controllers_to_physical_disk_ids: Dictionary of controllers and
           corresponding disk ids to convert to the requested mode.
    :return: a dictionary containing:
             - conversion_results, a dictionary that maps controller ids
               to the conversion results for that controller.
               The conversion results are a dict that contains:
               - The is_commit_required key with the value always set to
                 True indicating that a config job must be created to
                 complete disk conversion.
               - The is_reboot_required key with a RebootRequired
                 enumerated value indicating whether the server must be
                 rebooted to complete disk conversion.
    :raises: DRACOperationError on an error from python-dracclient.
    """
    try:
        drac_job.validate_job_queue(node)
        drac_client = drac_common.get_drac_client(node)
        result = drac_client.change_physical_disk_state(
            mode, controllers_to_physical_disk_ids)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to change physical drives '
                  'to %(mode)s mode for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'error': exc, 'mode': mode, 'node_uuid': node.uuid})
        raise exception.DracOperationError(error=exc)
    return result
def commit_config(node, raid_controller, reboot=False, realtime=False):
    """Apply all pending changes on a RAID controller.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller.
    :param reboot: indicates whether a reboot job should be automatically
                   created with the config job. (optional, defaults to False)
    :param realtime: indicates RAID controller supports realtime.
                     (optional, defaults to False)
    :returns: id of the created job
    :raises: DracOperationError on an error from python-dracclient.
    """
    drac_client = drac_common.get_drac_client(node)

    try:
        job_id = drac_client.commit_pending_raid_changes(
            raid_controller=raid_controller,
            reboot=reboot,
            realtime=realtime)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to commit pending RAID config for'
                  ' controller %(raid_controller_fqdd)s on node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'error': exc, 'node_uuid': node.uuid,
                   'raid_controller_fqdd': raid_controller})
        raise exception.DracOperationError(error=exc)
    return job_id
def _change_physical_disk_mode(node, mode=None,
                               controllers_to_physical_disk_ids=None,
                               substep="completed"):
    """Convert physical drives to RAID or JBOD and commit the change.

    :param node: an ironic node object.
    :param mode: the mode to change the disks either to RAID or JBOD.
    :param controllers_to_physical_disk_ids: Dictionary of controllers and
           corresponding disk ids to convert to the requested mode.
    :returns: states.CLEANWAIT if deletion is in progress asynchronously
              or None if it is completed.
    """
    conversion = change_physical_disk_state(
        node, mode, controllers_to_physical_disk_ids)

    # Flatten the per-controller conversion results into the structure
    # expected by _commit_to_controllers.
    controllers = [
        {'raid_controller': controller_id,
         'is_reboot_required': result['is_reboot_required'],
         'is_commit_required': result['is_commit_required']}
        for controller_id, result
        in conversion['conversion_results'].items()]

    return _commit_to_controllers(node, controllers, substep=substep)
def abandon_config(node, raid_controller):
    """Discard all pending changes on a RAID controller.

    :param node: an ironic node object.
    :param raid_controller: id of the RAID controller.
    :raises: DracOperationError on an error from python-dracclient.
    """
    drac_client = drac_common.get_drac_client(node)

    try:
        drac_client.abandon_pending_raid_changes(raid_controller)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to delete pending RAID config '
                  'for controller %(raid_controller_fqdd)s on node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'error': exc, 'node_uuid': node.uuid,
                   'raid_controller_fqdd': raid_controller})
        raise exception.DracOperationError(error=exc)
def _calculate_spans(raid_level, disks_count):
"""Calculates number of spans for a RAID level given a physical disk count
:param raid_level: RAID level of the virtual disk.
:param disk_count: number of physical disks used for the virtual disk.
:returns: number of spans.
"""
if raid_level in ['0', '1', '5', '6']:
return 1
elif raid_level in ['5+0', '6+0']:
return 2
elif raid_level in ['1+0']:
return disks_count >> 1
else:
reason = (_('Cannot calculate spans for RAID level "%s"') %
raid_level)
raise exception.DracOperationError(error=reason)
def _usable_disks_count(raid_level, disks_count):
"""Calculates the number of disks usable for a RAID level
...given a physical disk count
:param raid_level: RAID level of the virtual disk.
:param disk_count: number of physical disks used for the virtual disk.
:returns: number of disks.
"""
if raid_level in ['0', '1', '5', '6']:
return disks_count
elif raid_level in ['5+0', '6+0', '1+0']:
# largest even number less than disk_count
return (disks_count >> 1) << 1
else:
reason = (_('RAID level %(raid_level)s is not supported by the '
'driver. Supported RAID levels: %(supported_raid_levels)s')
% {'raid_level': raid_level,
'supported_raid_levels': list(RAID_LEVELS)})
raise exception.DracOperationError(error=reason)
def _get_raid_level_info(raid_level, spans_count):
    """Look up the RAID level descriptor, resolving spanned levels.

    :param raid_level: RAID level of the virtual disk.
    :param spans_count: number of spans of the virtual disk.
    :returns: the per-span RAID_LEVELS entry for this level.
    :raises: DracOperationError if the RAID level is unknown, or if a
        spanned level is requested with a single span.
    """
    try:
        raid_level_info = RAID_LEVELS[raid_level]
    except KeyError:
        reason = (_('RAID level %(raid_level)s is not supported by the '
                    'driver. Supported RAID levels: %(supported_raid_levels)s')
                  % {'raid_level': raid_level,
                     'supported_raid_levels': list(RAID_LEVELS)})
        raise exception.DracOperationError(error=reason)

    if raid_level_info['type'] == 'spanned':
        if spans_count <= 1:
            reason = _('Spanned RAID volumes cannot contain a single span')
            raise exception.DracOperationError(error=reason)
        # A spanned level (e.g. '5+0') is described by its base level ('5').
        raid_level_info = RAID_LEVELS[raid_level_info['span_type']]
    return raid_level_info


def _raid_level_min_disks(raid_level, spans_count=1):
    """Minimum physical disk count for a RAID level and span count."""
    return (_get_raid_level_info(raid_level, spans_count)['min_disks']
            * spans_count)


def _raid_level_max_disks(raid_level, spans_count=1):
    """Maximum physical disk count for a RAID level and span count."""
    return (_get_raid_level_info(raid_level, spans_count)['max_disks']
            * spans_count)


def _raid_level_overhead(raid_level, spans_count=1):
    """Number of disks consumed by redundancy for a RAID level."""
    return (_get_raid_level_info(raid_level, spans_count)['overhead']
            * spans_count)
def _max_volume_size_mb(raid_level, physical_disks, free_space_mb,
                        spans_count=1, stripe_size_kb=64 * units.Ki):
    """Return the largest volume size (MiB) the given disks can hold.

    :param raid_level: RAID level of the virtual disk.
    :param physical_disks: disks (keys of free_space_mb) backing the volume.
    :param free_space_mb: mapping of disk -> free space in MiB.
    :param spans_count: number of spans of the virtual disk.
    :param stripe_size_kb: stripe size in KiB.
    """
    # The smallest free space across the chosen disks bounds the volume.
    smallest_free_kb = min(
        free_space_mb[disk] for disk in physical_disks) * units.Ki
    # math.floor so the volume never exceeds the available space.
    stripes_per_disk = int(math.floor(float(smallest_free_kb)
                                      / stripe_size_kb))
    data_disks_count = (len(physical_disks)
                        - _raid_level_overhead(raid_level, spans_count))
    return int(stripes_per_disk * stripe_size_kb * data_disks_count
               / units.Ki)
def _volume_usage_per_disk_mb(logical_disk, physical_disks, spans_count=1,
                              stripe_size_kb=64 * units.Ki):
    """Return how many MiB of each physical disk the volume consumes.

    :param logical_disk: logical disk definition with 'raid_level' and
        a numeric 'size_mb'.
    :param physical_disks: disks backing the volume.
    :param spans_count: number of spans of the virtual disk.
    :param stripe_size_kb: stripe size in KiB.
    """
    data_disks_count = (len(physical_disks)
                        - _raid_level_overhead(logical_disk['raid_level'],
                                               spans_count))
    volume_size_kb = logical_disk['size_mb'] * units.Ki
    # math.ceil so per-disk usage is never under-estimated, which avoids
    # over-committing the disks.
    stripes_per_volume = math.ceil(float(volume_size_kb) / stripe_size_kb)
    stripes_per_disk = math.ceil(float(stripes_per_volume) / data_disks_count)
    return int(stripes_per_disk * stripe_size_kb / units.Ki)
def _find_configuration(logical_disks, physical_disks, pending_delete):
    """Find RAID configuration.

    This method transforms the RAID configuration defined in Ironic to a format
    that is required by dracclient. This includes matching the physical disks
    to RAID volumes when it's not pre-defined, or in general calculating
    missing properties.

    :param logical_disks: list of logical disk definitions.
    :param physical_disks: list of physical disk definitions.
    :param pending_delete: Whether there is a pending deletion of virtual
        disks that should be accounted for.
    :returns: the logical disk definitions with physical disks matched and
        missing properties (size, spans, controller) filled in.
    :raises: DracOperationError if no matching physical disks can be found
        for all logical disks.
    """
    # shared physical disks of RAID volumes size_gb='MAX' should be
    # deprioritized during the matching process to reserve as much space as
    # possible. Reserved means it won't be used during matching.
    volumes_with_reserved_physical_disks = [
        volume for volume in logical_disks
        if ('physical_disks' in volume and volume['size_mb'] == 'MAX'
            and volume.get('share_physical_disks', False))]
    reserved_physical_disks = [
        disk for disk in physical_disks
        for volume in volumes_with_reserved_physical_disks
        if disk.id in volume['physical_disks']]
    # we require each logical disk contain only homogeneous physical disks, so
    # sort them by type
    physical_disks_by_type = {}
    reserved_physical_disks_by_type = {}
    free_space_mb = {}
    for disk in physical_disks:
        # calculate free disk space
        free_space_mb[disk] = _get_disk_free_size_mb(disk, pending_delete)
        # a disk "type" is the tuple (controller, media, interface, size);
        # only disks sharing all four may end up in the same volume
        disk_type = (disk.controller, disk.media_type, disk.interface_type,
                     disk.size_mb)
        if disk_type not in physical_disks_by_type:
            physical_disks_by_type[disk_type] = []
            reserved_physical_disks_by_type[disk_type] = []
        if disk in reserved_physical_disks:
            reserved_physical_disks_by_type[disk_type].append(disk)
        else:
            physical_disks_by_type[disk_type].append(disk)
    # exclude non-shared physical disks (predefined by the user) from
    # physical_disks_by_type because they are not going to be used during
    # matching
    for volume in logical_disks:
        if ('physical_disks' in volume
                and not volume.get('share_physical_disks', False)):
            for disk in physical_disks:
                if disk.id in volume['physical_disks']:
                    disk_type = (disk.controller, disk.media_type,
                                 disk.interface_type, disk.size_mb)
                    if disk in physical_disks_by_type[disk_type]:
                        physical_disks_by_type[disk_type].remove(disk)
    processed_volumes = []
    # step 1 - process volumes with predefined disks and exact size
    for volume in [volume for volume in logical_disks
                   if ('physical_disks' in volume
                       and volume['size_mb'] != 'MAX')]:
        _calculate_volume_props(volume, physical_disks, free_space_mb)
        processed_volumes.append(volume)
    # step 2 - process volumes without predefined disks
    volumes_without_disks = [disk for disk in logical_disks
                             if 'physical_disks' not in disk]
    if volumes_without_disks:
        result, free_space_mb = (
            _assign_disks_to_volume(volumes_without_disks,
                                    physical_disks_by_type, free_space_mb,
                                    pending_delete))
        if not result:
            # try again using the reserved physical disks in addition
            for disk_type, disks in physical_disks_by_type.items():
                physical_disks_by_type[disk_type] += (
                    reserved_physical_disks_by_type[disk_type])
            result, free_space_mb = (
                _assign_disks_to_volume(volumes_without_disks,
                                        physical_disks_by_type,
                                        free_space_mb,
                                        pending_delete))
            if not result:
                error_msg = _('failed to find matching physical disks for all '
                              'logical disks')
                LOG.error('DRAC driver failed to create RAID '
                          'configuration. Reason: %(error)s.',
                          {'error': error_msg})
                raise exception.DracOperationError(error=error_msg)
    processed_volumes += volumes_without_disks
    # step 3 - process volumes with predefined disks and size_mb == 'MAX'
    # (deferred until now so they consume only the space left over)
    for volume in [volume for volume in logical_disks
                   if ('physical_disks' in volume
                       and volume['size_mb'] == 'MAX')]:
        _calculate_volume_props(volume, physical_disks, free_space_mb)
        processed_volumes.append(volume)
    return processed_volumes
def _calculate_volume_props(logical_disk, physical_disks, free_space_mb):
    """Fill in span/size/controller properties of one logical disk.

    Mutates ``logical_disk`` in place (span_depth, span_length, size_mb,
    controller) and deducts the volume's per-disk usage from
    ``free_space_mb``.

    :param logical_disk: logical disk definition with 'physical_disks',
        'raid_level' and 'size_mb' (a number or 'MAX').
    :param physical_disks: list of physical disk objects to select from.
    :param free_space_mb: mapping of disk -> free space in MiB; updated
        in place.
    :raises: DracOperationError if the disk count is invalid for the RAID
        level or there is not enough space.
    """
    selected_disks = [disk for disk in physical_disks
                      if disk.id in logical_disk['physical_disks']]
    spans_count = _calculate_spans(
        logical_disk['raid_level'], len(selected_disks))
    # every span must get the same number of disks
    if len(selected_disks) % spans_count != 0:
        error_msg = _('invalid number of physical disks was provided')
        raise exception.DracOperationError(error=error_msg)
    disks_per_span = int(len(selected_disks) / spans_count)
    # Best practice is to not pass span_length and span_depth when creating a
    # RAID10. The iDRAC will dynamically calculate these values using maximum
    # values obtained from the RAID controller.
    logical_disk['span_depth'] = None
    logical_disk['span_length'] = None
    if logical_disk['raid_level'] != '1+0':
        logical_disk['span_depth'] = spans_count
        logical_disk['span_length'] = disks_per_span
    max_volume_size_mb = _max_volume_size_mb(
        logical_disk['raid_level'], selected_disks, free_space_mb,
        spans_count=spans_count)
    if logical_disk['size_mb'] == 'MAX':
        if max_volume_size_mb == 0:
            error_msg = _("size set to 'MAX' but could not allocate physical "
                          "disk space")
            raise exception.DracOperationError(error=error_msg)
        # 'MAX' resolves to the largest size the selected disks can hold
        logical_disk['size_mb'] = max_volume_size_mb
    elif max_volume_size_mb < logical_disk['size_mb']:
        if max_volume_size_mb == 0:
            error_msg = _('not enough physical disk space for the logical '
                          'disk')
            raise exception.DracOperationError(error=error_msg)
    disk_usage = _volume_usage_per_disk_mb(logical_disk, selected_disks,
                                           spans_count=spans_count)
    # charge every selected disk for its share of the volume
    for disk in selected_disks:
        if free_space_mb[disk] < disk_usage:
            error_msg = _('not enough free space on physical disks for the '
                          'logical disk')
            raise exception.DracOperationError(error=error_msg)
        else:
            free_space_mb[disk] -= disk_usage
    if 'controller' not in logical_disk:
        logical_disk['controller'] = selected_disks[0].controller
def _assign_disks_to_volume(logical_disks, physical_disks_by_type,
                            free_space_mb, pending_delete):
    """Try to place the first queued logical disk on physical disks.

    Pops the first entry off ``logical_disks``, tries every
    (controller, disk type, interface type, size) disk group and every
    allowed disk count against it, then recurses to place the remaining
    queue with the candidate allocation.  On success the completed volume
    is appended back to ``logical_disks`` and the updated free-space map
    is returned; on failure the entry is pushed back to the front.

    :param logical_disks: queue of logical disk dicts still to be placed;
        mutated in place (backtracking).
    :param physical_disks_by_type: dict keyed by
        (controller, disk_type, interface_type, size_mb) mapping to lists
        of PhysicalDisk objects.
    :param free_space_mb: dict mapping physical disk to free space in MiB.
    :param pending_delete: whether all virtual disks are scheduled for
        deletion, i.e. whole disks count as free.
    :returns: a (success, free_space_mb) tuple reflecting the allocation.
    """
    logical_disk = logical_disks.pop(0)
    raid_level = logical_disk['raid_level']

    # iterate over all possible configurations
    for (controller, disk_type,
         interface_type, size_mb), disks in physical_disks_by_type.items():

        if ('disk_type' in logical_disk
                and logical_disk['disk_type'] != disk_type):
            continue

        if ('interface_type' in logical_disk
                and logical_disk['interface_type'] != interface_type):
            continue

        # filter out disks without free disk space
        disks = [disk for disk in disks if free_space_mb[disk] > 0]

        # sort disks by free size which is important if we have max disks limit
        # on a volume
        disks = sorted(
            disks,
            key=lambda disk: free_space_mb[disk])

        # filter out disks already in use if sharing is disabled
        if ('share_physical_disks' not in logical_disk
                or not logical_disk['share_physical_disks']):
            initial_free_size_mb = {
                disk: _get_disk_free_size_mb(disk, pending_delete)
                for disk in disks
            }
            disks = [disk for disk in disks
                     if initial_free_size_mb[disk] == free_space_mb[disk]]

        max_spans = _calculate_spans(raid_level, len(disks))
        min_spans = min([2, max_spans])
        min_disks = _raid_level_min_disks(raid_level,
                                          spans_count=min_spans)
        max_disks = _raid_level_max_disks(raid_level,
                                          spans_count=max_spans)
        candidate_max_disks = min([max_disks, len(disks)])

        for disks_count in range(min_disks, candidate_max_disks + 1):
            if ('number_of_physical_disks' in logical_disk
                    and (logical_disk['number_of_physical_disks']
                         != disks_count)):
                continue

            # skip invalid disks_count
            if disks_count != _usable_disks_count(logical_disk['raid_level'],
                                                  disks_count):
                continue

            selected_disks = disks[0:disks_count]

            # Work on copies so a failed recursion leaves the caller's
            # state untouched (backtracking).
            candidate_volume = logical_disk.copy()
            candidate_free_space_mb = free_space_mb.copy()
            candidate_volume['physical_disks'] = [disk.id for disk
                                                 in selected_disks]
            try:
                _calculate_volume_props(candidate_volume, selected_disks,
                                        candidate_free_space_mb)
            except exception.DracOperationError:
                continue

            if len(logical_disks) > 0:
                result, candidate_free_space_mb = (
                    _assign_disks_to_volume(logical_disks,
                                            physical_disks_by_type,
                                            candidate_free_space_mb,
                                            pending_delete))
                if result:
                    logical_disks.append(candidate_volume)
                    return (True, candidate_free_space_mb)
            else:
                logical_disks.append(candidate_volume)
                return (True, candidate_free_space_mb)

    # put back the logical_disk to queue
    logical_disks.insert(0, logical_disk)
    return (False, free_space_mb)
def _filter_logical_disks(logical_disks, include_root_volume,
include_nonroot_volumes):
filtered_disks = []
for disk in logical_disks:
if include_root_volume and disk.get('is_root_volume'):
filtered_disks.append(disk)
if include_nonroot_volumes and not disk.get('is_root_volume'):
filtered_disks.append(disk)
return filtered_disks
def _create_config_job(node, controller, reboot=False, realtime=False,
                       raid_config_job_ids=None,
                       raid_config_parameters=None):
    """Commit pending RAID changes on a controller as a DRAC config job.

    :param node: an ironic node object.
    :param controller: FQDD of the RAID controller to commit.
    :param reboot: whether the job should reboot the server.
    :param realtime: whether the job may run in realtime (no reboot).
    :param raid_config_job_ids: list accumulating created job IDs; a fresh
        list is used when not supplied.
    :param raid_config_parameters: list accumulating controller FQDDs; a
        fresh list is used when not supplied.
    :returns: dict with 'raid_config_job_ids' and 'raid_config_parameters'
        (the same list objects passed in, mutated).
    :raises: DracOperationError on an error from python-dracclient.
    """
    # NOTE: the previous defaults were mutable literals ([]), which Python
    # evaluates once and shares across every call that omits the argument,
    # leaking job IDs between unrelated invocations. Default to None and
    # create fresh lists instead.
    if raid_config_job_ids is None:
        raid_config_job_ids = []
    if raid_config_parameters is None:
        raid_config_parameters = []

    job_id = commit_config(node, raid_controller=controller,
                           reboot=reboot, realtime=realtime)

    raid_config_job_ids.append(job_id)
    if controller not in raid_config_parameters:
        raid_config_parameters.append(controller)

    LOG.info('Change has been committed to RAID controller '
             '%(controller)s on node %(node)s. '
             'DRAC job id: %(job_id)s',
             {'controller': controller, 'node': node.uuid,
              'job_id': job_id})
    return {'raid_config_job_ids': raid_config_job_ids,
            'raid_config_parameters': raid_config_parameters}
def _validate_volume_size(node, logical_disks):
    """Re-check requested volume sizes against the current physical disks.

    Re-reads the node's physical disks and, for any logical disk whose
    requested size no longer fits, shrinks it to the new maximum and
    recalculates its volume properties.

    :param node: an ironic node object.
    :param logical_disks: list of logical disk dicts; entries may be
        modified in place.
    :returns: the list of resized volumes if any size was adjusted,
        otherwise the original ``logical_disks`` list.
    """
    new_physical_disks = list_physical_disks(node)
    free_space_mb = {}
    new_processed_volumes = []
    for disk in new_physical_disks:
        free_space_mb[disk] = disk.free_size_mb

    for logical_disk in logical_disks:
        selected_disks = [disk for disk in new_physical_disks
                          if disk.id in logical_disk['physical_disks']]

        spans_count = _calculate_spans(
            logical_disk['raid_level'], len(selected_disks))

        new_max_vol_size_mb = _max_volume_size_mb(
            logical_disk['raid_level'],
            selected_disks,
            free_space_mb,
            spans_count=spans_count)

        if logical_disk['size_mb'] > new_max_vol_size_mb:
            logical_disk['size_mb'] = new_max_vol_size_mb
            LOG.info("Logical size does not match so calculating volume "
                     "properties for current logical_disk")
            _calculate_volume_props(
                logical_disk, new_physical_disks, free_space_mb)
            new_processed_volumes.append(logical_disk)

    # NOTE(review): when only some volumes were resized, only those are
    # returned and the unchanged ones are dropped from the result —
    # presumably intentional, but confirm against the caller
    # (_create_virtual_disks) which creates every returned volume.
    if new_processed_volumes:
        return new_processed_volumes

    return logical_disks
def _switch_to_raid_mode(node, controller_fqdd):
    """Convert the controller mode from Enhanced HBA to RAID mode

    :param node: an ironic node object
    :param controller_fqdd: the ID of the RAID controller.
    :returns: a dictionary containing
              - The raid_controller key with a ID of the
              RAID controller value.
              - The is_commit_required needed key with a
              boolean value indicating whether a config job must be created
              for the values to be applied.
              - The is_reboot_required key with a RebootRequired enumerated
              value indicating whether the server must be rebooted to
              switch the controller mode to RAID.
    """
    # wait for pending jobs to complete
    drac_job.wait_for_job_completion(node)

    # Request the RAID mode through the controller's RAID attribute.
    attribute = "{}:{}".format(controller_fqdd,
                               _REQUESTED_RAID_CONTROLLER_MODE)
    results = set_raid_settings(node, controller_fqdd,
                                {attribute: _RAID_MODE})

    return {
        'raid_controller': controller_fqdd,
        'is_reboot_required': results['is_reboot_required'],
        'is_commit_required': results['is_commit_required'],
    }
def _commit_to_controllers(node, controllers, substep="completed"):
    """Commit changes to RAID controllers on the node.

    :param node: an ironic node object
    :param controllers: a list of dictionary containing
                        - The raid_controller key with raid controller
                        fqdd value indicating on which raid configuration
                        job needs to be perform.
                        - The is_commit_required needed key with a
                        boolean value indicating whether a config job must
                        be created.
                        - The is_reboot_required key with a RebootRequired
                        enumerated value indicating whether the server must
                        be rebooted only if raid controller does not support
                        realtime.
    :param substep: contain sub cleaning or deploy step which executes any raid
                    configuration job if set after cleaning or deploy step.
                    (default to completed)
    :returns: states.CLEANWAIT (cleaning) or states.DEPLOYWAIT (deployment) if
              configuration is in progress asynchronously or None if it is
              completed.
    """
    # remove controller which does not require configuration job
    controllers = [controller for controller in controllers
                   if controller['is_commit_required']]

    if not controllers:
        LOG.debug('No changes on any of the controllers on node %s',
                  node.uuid)
        driver_internal_info = node.driver_internal_info
        driver_internal_info['raid_config_substep'] = substep
        driver_internal_info['raid_config_parameters'] = []
        node.driver_internal_info = driver_internal_info
        node.save()
        return

    driver_internal_info = node.driver_internal_info
    driver_internal_info['raid_config_substep'] = substep
    driver_internal_info['raid_config_parameters'] = []

    if 'raid_config_job_ids' not in driver_internal_info:
        driver_internal_info['raid_config_job_ids'] = []

    optional = drac_constants.RebootRequired.optional

    # all realtime controllers
    all_realtime = all(
        (cntlr['is_reboot_required'] == optional)
        and not(cntlr.get('is_ehba_mode'))
        for cntlr in controllers)

    # check any controller with ehba mode
    any_ehba_controllers = any(
        cntrl.get('is_ehba_mode') is True for cntrl in controllers)

    # These two lists are shared with every _create_config_job call below,
    # which appends to them in place; job_details after the branches holds
    # the same accumulated lists regardless of which call assigned it last.
    raid_config_job_ids = []
    raid_config_parameters = []
    if all_realtime:
        # Every controller supports realtime: no reboot needed at all.
        for controller in controllers:
            realtime_controller = controller['raid_controller']
            job_details = _create_config_job(
                node, controller=realtime_controller,
                reboot=False, realtime=True,
                raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)

    elif any_ehba_controllers:
        # Commit EHBA controllers in realtime, request their switch to RAID
        # mode, then create reboot jobs to apply the mode change (the last
        # one actually triggering the reboot).
        commit_to_ehba_controllers = []
        for controller in controllers:
            if controller.get('is_ehba_mode'):
                job_details = _create_config_job(
                    node, controller=controller['raid_controller'],
                    reboot=False, realtime=True,
                    raid_config_job_ids=raid_config_job_ids,
                    raid_config_parameters=raid_config_parameters)

                ehba_controller = _switch_to_raid_mode(
                    node, controller['raid_controller'])
                commit_to_ehba_controllers.append(
                    ehba_controller['raid_controller'])
            else:
                job_details = _create_config_job(
                    node, controller=controller['raid_controller'],
                    reboot=False, realtime=False,
                    raid_config_job_ids=raid_config_job_ids,
                    raid_config_parameters=raid_config_parameters)

        for controller in commit_to_ehba_controllers:
            LOG.debug("Create job with Reboot to apply configuration "
                      "changes for ehba controllers")
            job_details = _create_config_job(
                node, controller=controller,
                reboot=(controller == commit_to_ehba_controllers[-1]),
                realtime=False, raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)
    else:
        # Mixed/no-realtime case: only the job for the last controller
        # reboots the server.
        for controller in controllers:
            mix_controller = controller['raid_controller']
            reboot = (controller == controllers[-1])
            job_details = _create_config_job(
                node, controller=mix_controller,
                reboot=reboot, realtime=False,
                raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)

    driver_internal_info['raid_config_job_ids'].extend(job_details[
        'raid_config_job_ids'])

    driver_internal_info['raid_config_parameters'].extend(job_details[
        'raid_config_parameters'])

    node.driver_internal_info = driver_internal_info

    # Signal whether the node has been rebooted, that we do not need to execute
    # the step again, and that this completion of this step is triggered
    # through async polling.
    # NOTE(mgoddard): set_async_step_flags calls node.save().
    deploy_utils.set_async_step_flags(
        node,
        reboot=not all_realtime,
        skip_current_step=True,
        polling=True)

    return deploy_utils.get_async_step_return_state(node)
def _create_virtual_disks(task, node):
    """Create the virtual disks queued in driver_internal_info.

    Reads 'logical_disks_to_create' (and the 'volume_validation' flag) from
    the node's driver_internal_info, creates each virtual disk on its
    controller and commits the changes.

    :param task: a TaskManager instance containing the node to act on.
    :param node: an ironic node object.
    :returns: the result of _commit_to_controllers (async step state or
        None when completed).
    :raises: DracOperationError on an error from python-dracclient.
    """
    logical_disks_to_create = node.driver_internal_info[
        'logical_disks_to_create']

    # Check valid properties attached to volume after drives conversion
    isVolValidationNeeded = node.driver_internal_info[
        'volume_validation']
    if isVolValidationNeeded:
        logical_disks_to_create = _validate_volume_size(
            node, logical_disks_to_create)

    controllers = list()
    for logical_disk in logical_disks_to_create:
        controller = dict()
        controller_cap = create_virtual_disk(
            node,
            raid_controller=logical_disk['controller'],
            physical_disks=logical_disk['physical_disks'],
            raid_level=logical_disk['raid_level'],
            size_mb=logical_disk['size_mb'],
            disk_name=logical_disk.get('name'),
            span_length=logical_disk.get('span_length'),
            span_depth=logical_disk.get('span_depth'))
        controller['raid_controller'] = logical_disk['controller']
        controller['is_reboot_required'] = controller_cap[
            'is_reboot_required']
        controller['is_commit_required'] = controller_cap[
            'is_commit_required']
        # avoid committing the same controller twice
        if controller not in controllers:
            controllers.append(controller)

    return _commit_to_controllers(node, controllers)
def _controller_in_hba_mode(raid_settings, controller_fqdd):
    """Check whether the controller currently runs in Enhanced HBA mode.

    :param raid_settings: dict of RAID settings keyed by attribute ID.
    :param controller_fqdd: FQDD of the RAID controller.
    :returns: True if the controller's current mode is Enhanced HBA.
    """
    attr_id = '{}:{}'.format(controller_fqdd, _CURRENT_RAID_CONTROLLER_MODE)
    mode_setting = raid_settings.get(attr_id)
    return _EHBA_MODE in mode_setting.current_value
def _controller_supports_ehba_mode(settings, controller_fqdd):
    """Check whether the controller can be switched to Enhanced HBA mode.

    :param settings: dict of RAID settings keyed by attribute ID.
    :param controller_fqdd: FQDD of the RAID controller.
    :returns: True if Enhanced HBA is among the mode attribute's possible
        values, False when the attribute is absent or does not allow it.
    """
    attr_id = "{}:{}".format(controller_fqdd, _CURRENT_RAID_CONTROLLER_MODE)
    mode_setting = settings.get(attr_id)
    if not mode_setting:
        return False
    return _EHBA_MODE in mode_setting.possible_values
def _get_disk_free_size_mb(disk, pending_delete):
"""Return the size of free space on the disk in MB.
:param disk: a PhysicalDisk object.
:param pending_delete: Whether there is a pending deletion of all virtual
disks.
"""
return disk.size_mb if pending_delete else disk.free_size_mb
class DracWSManRAID(base.RAIDInterface):
    """WS-Man RAID interface implementation for Dell EMC iDRAC.

    Builds and deletes RAID configuration through python-dracclient and
    tracks the asynchronous DRAC config jobs these operations create via
    a periodic polling task and substep state stored in the node's
    driver_internal_info.
    """

    def get_properties(self):
        """Return the properties of the interface."""
        return drac_common.COMMON_PROPERTIES

    @base.deploy_step(priority=0,
                      argsinfo=base.RAID_APPLY_CONFIGURATION_ARGSINFO)
    def apply_configuration(self, task, raid_config, create_root_volume=True,
                            create_nonroot_volumes=False,
                            delete_existing=True):
        # Deploy-step wrapper; delegates to the generic implementation on
        # base.RAIDInterface, which in turn calls create_configuration()
        # and delete_configuration() on this class.
        return super(DracWSManRAID, self).apply_configuration(
            task, raid_config, create_root_volume=create_root_volume,
            create_nonroot_volumes=create_nonroot_volumes,
            delete_existing=delete_existing)

    @METRICS.timer('DracRAID.create_configuration')
    @base.clean_step(priority=0, abortable=False, argsinfo={
        'create_root_volume': {
            'description': (
                'This specifies whether to create the root volume. '
                'Defaults to `True`.'
            ),
            'required': False
        },
        'create_nonroot_volumes': {
            'description': (
                'This specifies whether to create the non-root volumes. '
                'Defaults to `True`.'
            ),
            'required': False
        },
        "delete_existing": {
            "description": (
                "Setting this to 'True' indicates to delete existing RAID "
                "configuration prior to creating the new configuration. "
                "Default value is 'False'."
            ),
            "required": False,
        }
    })
    def create_configuration(self, task,
                             create_root_volume=True,
                             create_nonroot_volumes=True,
                             delete_existing=False):
        """Create the RAID configuration.

        This method creates the RAID configuration on the given node.

        :param task: a TaskManager instance containing the node to act on.
        :param create_root_volume: If True, a root volume is created
            during RAID configuration. Otherwise, no root volume is
            created. Default is True.
        :param create_nonroot_volumes: If True, non-root volumes are
            created. If False, no non-root volumes are created. Default
            is True.
        :param delete_existing: Setting this to True indicates to delete RAID
            configuration prior to creating the new configuration. Default is
            False.
        :returns: states.CLEANWAIT (cleaning) or states.DEPLOYWAIT (deployment)
            if creation is in progress asynchronously or None if it is
            completed.
        :raises: MissingParameterValue, if node.target_raid_config is missing
            or empty.
        :raises: DracOperationError on an error from python-dracclient.
        """
        node = task.node

        logical_disks = node.target_raid_config['logical_disks']

        # Normalize sizes: 'MAX' stays symbolic (resolved later against the
        # selected disks), numeric sizes are converted from GiB to MiB.
        for disk in logical_disks:
            if disk['size_gb'] == 'MAX' and 'physical_disks' not in disk:
                raise exception.InvalidParameterValue(
                    _("create_configuration called with invalid "
                      "target_raid_configuration for node %(node_id)s. "
                      "'physical_disks' is missing from logical_disk while "
                      "'size_gb'='MAX' was requested: "
                      "%(logical_disk)s") % {'node_id': node.uuid,
                                             'logical_disk': disk})

            if disk['size_gb'] == 'MAX':
                disk['size_mb'] = 'MAX'
            else:
                disk['size_mb'] = disk['size_gb'] * units.Ki

            del disk['size_gb']

        if delete_existing:
            self._delete_configuration_no_commit(task)

        physical_disks = list_physical_disks(node)
        logical_disks = _find_configuration(logical_disks, physical_disks,
                                            pending_delete=delete_existing)

        logical_disks_to_create = _filter_logical_disks(
            logical_disks, create_root_volume, create_nonroot_volumes)

        # Collect, per controller, the physical disks that back the volumes
        # so they can be converted to RAID mode first.
        controllers_to_physical_disk_ids = defaultdict(list)
        for logical_disk in logical_disks_to_create:
            # Not applicable to JBOD logical disks.
            if logical_disk['raid_level'] == 'JBOD':
                continue

            for physical_disk_name in logical_disk['physical_disks']:
                controllers_to_physical_disk_ids[
                    logical_disk['controller']].append(
                    physical_disk_name)

        # adding logical_disks to driver_internal_info to create virtual disks
        driver_internal_info = node.driver_internal_info
        driver_internal_info[
            "logical_disks_to_create"] = logical_disks_to_create

        commit_results = None
        if logical_disks_to_create:
            LOG.debug(
                "Converting physical disks configured to back RAID "
                "logical disks to RAID mode for node %(node_uuid)s ",
                {"node_uuid": node.uuid})
            raid_mode = drac_constants.RaidStatus.raid
            commit_results = _change_physical_disk_mode(
                node, raid_mode,
                controllers_to_physical_disk_ids,
                substep="create_virtual_disks")

        # Volume sizes must be re-validated after a disk-mode conversion.
        volume_validation = True if commit_results else False
        driver_internal_info['volume_validation'] = volume_validation
        node.driver_internal_info = driver_internal_info
        node.save()

        if commit_results:
            return commit_results
        else:
            LOG.debug("Controller does not support drives conversion "
                      "so creating virtual disks")
            return _create_virtual_disks(task, node)

    @METRICS.timer('DracRAID.delete_configuration')
    @base.clean_step(priority=0)
    @base.deploy_step(priority=0)
    def delete_configuration(self, task):
        """Delete the RAID configuration.

        :param task: a TaskManager instance containing the node to act on.
        :returns: states.CLEANWAIT (cleaning) or states.DEPLOYWAIT (deployment)
            if deletion is in progress asynchronously or None if it is
            completed.
        :raises: DracOperationError on an error from python-dracclient.
        """
        controllers = self._delete_configuration_no_commit(task)

        return _commit_to_controllers(task.node, controllers,
                                      substep="delete_foreign_config")

    @METRICS.timer('DracRAID.get_logical_disks')
    def get_logical_disks(self, task):
        """Get the RAID configuration of the node.

        :param task: a TaskManager instance containing the node to act on.
        :returns: A dictionary of properties.
        :raises: DracOperationError on an error from python-dracclient.
        """
        node = task.node

        logical_disks = []
        for disk in list_virtual_disks(node):
            logical_disk = {
                'id': disk.id,
                'controller': disk.controller,
                'size_gb': int(disk.size_mb / units.Ki),
                'raid_level': disk.raid_level
            }

            if disk.name is not None:
                logical_disk['name'] = disk.name

            logical_disks.append(logical_disk)

        return {'logical_disks': logical_disks}

    @METRICS.timer('DracRAID._query_raid_config_job_status')
    @periodics.periodic(
        spacing=CONF.drac.query_raid_config_job_status_interval)
    def _query_raid_config_job_status(self, manager, context):
        """Periodic task to check the progress of running RAID config jobs."""
        filters = {'reserved': False, 'maintenance': False}
        fields = ['driver_internal_info']

        node_list = manager.iter_nodes(fields=fields, filters=filters)
        for (node_uuid, driver, conductor_group,
             driver_internal_info) in node_list:
            try:
                lock_purpose = 'checking async raid configuration jobs'
                with task_manager.acquire(context, node_uuid,
                                          purpose=lock_purpose,
                                          shared=True) as task:
                    # only poll nodes managed by this RAID interface with
                    # outstanding config jobs
                    if not isinstance(task.driver.raid, DracWSManRAID):
                        continue

                    job_ids = driver_internal_info.get('raid_config_job_ids')
                    if not job_ids:
                        continue

                    self._check_node_raid_jobs(task)

            except exception.NodeNotFound:
                LOG.info("During query_raid_config_job_status, node "
                         "%(node)s was not found and presumed deleted by "
                         "another process.", {'node': node_uuid})
            except exception.NodeLocked:
                LOG.info("During query_raid_config_job_status, node "
                         "%(node)s was already locked by another process. "
                         "Skip.", {'node': node_uuid})

    @METRICS.timer('DracRAID._check_node_raid_jobs')
    def _check_node_raid_jobs(self, task):
        """Check the progress of running RAID config jobs of a node."""
        node = task.node
        raid_config_job_ids = node.driver_internal_info['raid_config_job_ids']
        finished_job_ids = []

        for config_job_id in raid_config_job_ids:
            config_job = drac_job.get_job(node, job_id=config_job_id)

            if config_job is None or config_job.status == 'Completed':
                finished_job_ids.append(config_job_id)
            elif config_job.status == 'Failed':
                finished_job_ids.append(config_job_id)
                self._set_raid_config_job_failure(node)

        if not finished_job_ids:
            return

        # shared lock is not enough to mutate driver_internal_info
        task.upgrade_lock()
        self._delete_cached_config_job_id(node, finished_job_ids)

        if not node.driver_internal_info.get('raid_config_job_failure',
                                             False):
            # Dispatch on the recorded substep to continue the multi-stage
            # RAID workflow (foreign-config cleanup -> disk conversion ->
            # virtual disk creation -> completion).
            if 'raid_config_substep' in node.driver_internal_info:
                substep = node.driver_internal_info['raid_config_substep']

                if substep == 'delete_foreign_config':
                    foreign_drives = self._execute_foreign_drives(task, node)
                    if foreign_drives is None:
                        return self._convert_drives(task, node)
                elif substep == 'physical_disk_conversion':
                    self._convert_drives(task, node)
                elif substep == "create_virtual_disks":
                    return _create_virtual_disks(task, node)
                elif substep == 'completed':
                    self._complete_raid_substep(task, node)
            else:
                self._complete_raid_substep(task, node)
        else:
            self._clear_raid_substep(node)
            self._clear_raid_config_job_failure(node)
            # NOTE(review): config_job here is the last job examined in the
            # loop above, which is not necessarily the one that failed when
            # several jobs were polled — confirm this is acceptable for the
            # error message.
            self._set_failed(task, config_job)

    def _execute_foreign_drives(self, task, node):
        # Clear foreign configuration on every controller recorded in
        # 'raid_config_parameters'; returns None when no follow-up config
        # job is needed, otherwise the async commit result.
        controllers = list()
        jobs_required = False

        for controller_id in node.driver_internal_info[
                'raid_config_parameters']:
            controller_cap = clear_foreign_config(
                node, controller_id)
            controller = {
                'raid_controller': controller_id,
                'is_reboot_required': controller_cap['is_reboot_required'],
                'is_commit_required': controller_cap['is_commit_required']}
            controllers.append(controller)
            jobs_required = jobs_required or controller_cap[
                'is_commit_required']

        if not jobs_required:
            LOG.info(
                "No foreign drives detected, so "
                "resume %s", "cleaning" if node.clean_step else "deployment")
            return None
        else:
            return _commit_to_controllers(
                node,
                controllers,
                substep='physical_disk_conversion')

    def _complete_raid_substep(self, task, node):
        # Final stage: drop the substep bookkeeping and resume the
        # clean/deploy step.
        self._clear_raid_substep(node)
        self._resume(task)

    def _convert_drives(self, task, node):
        # Convert drives back to JBOD; when the controller does not support
        # conversion the workflow is finished immediately.
        jbod = drac_constants.RaidStatus.jbod
        drives_results = _change_physical_disk_mode(
            node, mode=jbod)
        if drives_results is None:
            LOG.debug("Controller does not support drives "
                      "conversion on %(node_uuid)s",
                      {'node_uuid': node.uuid})
            self._complete_raid_substep(task, node)

    def _clear_raid_substep(self, node):
        # Remove the substep state from driver_internal_info.
        driver_internal_info = node.driver_internal_info
        driver_internal_info.pop('raid_config_substep', None)
        driver_internal_info.pop('raid_config_parameters', None)
        node.driver_internal_info = driver_internal_info
        node.save()

    def _set_raid_config_job_failure(self, node):
        # Flag that at least one RAID config job failed.
        driver_internal_info = node.driver_internal_info
        driver_internal_info['raid_config_job_failure'] = True
        node.driver_internal_info = driver_internal_info
        node.save()

    def _clear_raid_config_job_failure(self, node):
        driver_internal_info = node.driver_internal_info
        del driver_internal_info['raid_config_job_failure']
        node.driver_internal_info = driver_internal_info
        node.save()

    def _delete_cached_config_job_id(self, node, finished_config_job_ids=None):
        # Drop finished job IDs from the cached 'raid_config_job_ids' list.
        if finished_config_job_ids is None:
            finished_config_job_ids = []
        driver_internal_info = node.driver_internal_info
        unfinished_job_ids = [job_id for job_id
                              in driver_internal_info['raid_config_job_ids']
                              if job_id not in finished_config_job_ids]
        driver_internal_info['raid_config_job_ids'] = unfinished_job_ids
        node.driver_internal_info = driver_internal_info
        node.save()

    def _set_failed(self, task, config_job):
        # Route the failure to the cleaning or deploying error handler
        # depending on which phase the node is in.
        error_msg = (_("Failed config job: %(config_job_id)s. "
                       "Message: '%(message)s'.") %
                     {'config_job_id': config_job.id,
                      'message': config_job.message})
        log_msg = ("RAID configuration job failed for node %(node)s. "
                   "%(error)s" %
                   {'node': task.node.uuid, 'error': error_msg})
        if task.node.clean_step:
            manager_utils.cleaning_error_handler(task, error_msg)
        else:
            manager_utils.deploying_error_handler(task, log_msg, error_msg)

    def _resume(self, task):
        # Record the achieved RAID config on the node and hand control back
        # to the conductor.
        raid_common.update_raid_info(
            task.node, self.get_logical_disks(task))
        if task.node.clean_step:
            manager_utils.notify_conductor_resume_clean(task)
        else:
            manager_utils.notify_conductor_resume_deploy(task)

    def _delete_configuration_no_commit(self, task):
        """Delete existing RAID configuration without committing the change.

        :param task: A TaskManager instance.
        :returns: A set of names of RAID controllers which need RAID changes to
            be committed.
        """
        node = task.node
        controllers = list()
        drac_raid_controllers = list_raid_controllers(node)
        drac_raid_settings = list_raid_settings(node)
        for cntrl in drac_raid_controllers:
            if _is_raid_controller(node, cntrl.id, drac_raid_controllers):
                controller = dict()
                # remember EHBA controllers so the commit phase can switch
                # them back to RAID mode
                if _controller_supports_ehba_mode(
                        drac_raid_settings,
                        cntrl.id) and _controller_in_hba_mode(
                            drac_raid_settings, cntrl.id):
                    controller['is_ehba_mode'] = True
                controller_cap = _reset_raid_config(node, cntrl.id)
                controller["raid_controller"] = cntrl.id
                controller["is_reboot_required"] = controller_cap[
                    "is_reboot_required"]
                controller["is_commit_required"] = controller_cap[
                    "is_commit_required"]
                controllers.append(controller)

        return controllers
class DracRAID(DracWSManRAID):
    """Deprecated 'idrac' entrypoint alias for DracWSManRAID.

    Kept only so the legacy 'idrac' RAID interface entrypoint keeps
    working. Implement all fixes and features on DracWSManRAID so both
    the deprecated 'idrac' and the 'idrac-wsman' entrypoints benefit;
    never add behavior here.
    """

    def __init__(self):
        super().__init__()
        # Warn operators once per instantiation about the deprecation.
        LOG.warning("RAID interface 'idrac' is deprecated and may be removed "
                    "in a future release. Use 'idrac-wsman' instead.")
| 40.926876 | 79 | 0.635797 |
from collections import defaultdict
import math
from futurist import periodics
from ironic_lib import metrics_utils
from oslo_log import log as logging
from oslo_utils import importutils
from oslo_utils import units
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common import raid as raid_common
from ironic.conductor import task_manager
from ironic.conductor import utils as manager_utils
from ironic.conf import CONF
from ironic.drivers import base
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules.drac import common as drac_common
from ironic.drivers.modules.drac import job as drac_job
# python-dracclient is an optional dependency; these resolve to None when
# the package is not installed.
drac_exceptions = importutils.try_import('dracclient.exceptions')
drac_constants = importutils.try_import('dracclient.constants')

LOG = logging.getLogger(__name__)

METRICS = metrics_utils.get_metrics_logger(__name__)

# iDRAC RAID attribute names used to read/request the controller mode.
_CURRENT_RAID_CONTROLLER_MODE = "RAIDCurrentControllerMode"
_REQUESTED_RAID_CONTROLLER_MODE = "RAIDRequestedControllerMode"
# Attribute values for the two controller modes of interest.
_EHBA_MODE = "Enhanced HBA"
_RAID_MODE = "RAID"

# Supported RAID levels and their placement rules:
#   min_disks / max_disks - allowed member-disk counts ('simple' levels)
#   overhead              - disks' worth of capacity lost to parity/mirroring
#   type                  - 'simple' or 'spanned'; spanned levels take their
#                           per-span rules from the level named by 'span_type'
RAID_LEVELS = {
    '0': {
        'min_disks': 1,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 0
    },
    '1': {
        'min_disks': 2,
        'max_disks': 2,
        'type': 'simple',
        'overhead': 1
    },
    '5': {
        'min_disks': 3,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 1
    },
    '6': {
        'min_disks': 4,
        'max_disks': 1000,
        'type': 'simple',
        'overhead': 2
    },
    '1+0': {
        'type': 'spanned',
        'span_type': '1'
    },
    '5+0': {
        'type': 'spanned',
        'span_type': '5'
    },
    '6+0': {
        'type': 'spanned',
        'span_type': '6'
    }
}
def list_raid_controllers(node):
    """Return the RAID controllers reported by the node's DRAC.

    :param node: an ironic node object.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)

    try:
        return client.list_raid_controllers()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to get the list of RAID controllers '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid, 'error': exc})
        raise exception.DracOperationError(error=exc)
def list_virtual_disks(node):
    """Return the virtual disks reported by the node's DRAC.

    :param node: an ironic node object.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)

    try:
        return client.list_virtual_disks()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to get the list of virtual disks '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid, 'error': exc})
        raise exception.DracOperationError(error=exc)
def list_physical_disks(node):
    """Return the physical disks reported by the node's DRAC.

    :param node: an ironic node object.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)

    try:
        return client.list_physical_disks()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to get the list of physical disks '
                  'for node %(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid, 'error': exc})
        raise exception.DracOperationError(error=exc)
def _is_raid_controller(node, raid_controller_fqdd, raid_controllers=None):
    """Check whether an FQDD identifies a RAID controller.

    :param node: an ironic node object.
    :param raid_controller_fqdd: FQDD to check.
    :param raid_controllers: optional pre-fetched controller list, to
        avoid another DRAC round-trip.
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)

    try:
        return client.is_raid_controller(raid_controller_fqdd,
                                         raid_controllers)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('Unable to determine if controller %(raid_controller_fqdd)s '
                  'on node %(node_uuid)s is a RAID controller. '
                  'Reason: %(error)s. ',
                  {'raid_controller_fqdd': raid_controller_fqdd,
                   'node_uuid': node.uuid, 'error': exc})

        raise exception.DracOperationError(error=exc)
def _validate_job_queue(node, raid_controller=None):
    """Fail if the DRAC job queue holds jobs that would conflict.

    When a controller FQDD is given, only jobs whose name carries the
    RAID-config prefix for that controller are considered.
    """
    if raid_controller:
        prefix = "Config:RAID:%s" % raid_controller
        drac_job.validate_job_queue(node, name_prefix=prefix)
    else:
        drac_job.validate_job_queue(node)
def create_virtual_disk(node, raid_controller, physical_disks, raid_level,
                        size_mb, disk_name=None, span_length=None,
                        span_depth=None):
    """Create a virtual disk on a RAID controller.

    Validates the DRAC job queue for the controller first.

    :param node: an ironic node object.
    :param raid_controller: FQDD of the RAID controller.
    :param physical_disks: IDs of the member physical disks.
    :param raid_level: RAID level of the virtual disk.
    :param size_mb: size of the virtual disk in MiB.
    :param disk_name: optional name for the virtual disk.
    :param span_length: optional number of disks per span.
    :param span_depth: optional number of spans.
    :raises: DracOperationError on an error from python-dracclient.
    """
    _validate_job_queue(node, raid_controller)

    client = drac_common.get_drac_client(node)

    try:
        return client.create_virtual_disk(raid_controller, physical_disks,
                                          raid_level, size_mb, disk_name,
                                          span_length, span_depth)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to create virtual disk for node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def delete_virtual_disk(node, virtual_disk):
    """Delete a virtual disk.

    Validates the DRAC job queue first.

    :param node: an ironic node object.
    :param virtual_disk: FQDD of the virtual disk to delete.
    :raises: DracOperationError on an error from python-dracclient.
    """
    _validate_job_queue(node)

    client = drac_common.get_drac_client(node)

    try:
        return client.delete_virtual_disk(virtual_disk)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to delete virtual disk '
                  '%(virtual_disk_fqdd)s for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'virtual_disk_fqdd': virtual_disk,
                   'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def _reset_raid_config(node, raid_controller):
    """Delete all virtual disks and unassign all hotspares on a controller.

    :param node: an ironic node object.
    :param raid_controller: FQDD of the RAID controller to reset.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:

        _validate_job_queue(node, raid_controller)

        client = drac_common.get_drac_client(node)
        return client.reset_raid_config(raid_controller)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to delete all virtual disk '
                  'and unassign all hotspares '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def clear_foreign_config(node, raid_controller):
    """Clear foreign (previously-configured) drives on a controller.

    :param node: an ironic node object.
    :param raid_controller: FQDD of the RAID controller.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        _validate_job_queue(node, raid_controller)

        client = drac_common.get_drac_client(node)
        return client.clear_foreign_config(raid_controller)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to free foreign driver '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def set_raid_settings(node, controller_fqdd, settings):
    """Apply RAID attribute settings on a controller.

    :param node: an ironic node object.
    :param controller_fqdd: FQDD of the RAID controller.
    :param settings: dict of attribute-ID to requested value.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        drac_job.validate_job_queue(node)

        client = drac_common.get_drac_client(node)
        return client.set_raid_settings(controller_fqdd, settings)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to set raid settings '
                  'on %(raid_controller_fqdd)s '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'raid_controller_fqdd': controller_fqdd,
                   'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def list_raid_settings(node):
    """Return the RAID settings reported by the node's DRAC.

    :param node: an ironic node object.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        drac_job.validate_job_queue(node)

        client = drac_common.get_drac_client(node)
        return client.list_raid_settings()
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to list raid settings '
                  'for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def change_physical_disk_state(node, mode=None,
                               controllers_to_physical_disk_ids=None):
    """Convert physical disks to the given RAID status (e.g. RAID/JBOD).

    :param node: an ironic node object.
    :param mode: target drive mode (a dracclient RaidStatus value).
    :param controllers_to_physical_disk_ids: optional dict mapping
        controller FQDD to the IDs of the disks to convert.
    :raises: DracOperationError on an error from python-dracclient.
    """
    try:
        drac_job.validate_job_queue(node)

        client = drac_common.get_drac_client(node)
        return client.change_physical_disk_state(
            mode, controllers_to_physical_disk_ids)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to change physical drives '
                  'to %(mode)s mode for node %(node_uuid)s. '
                  'Reason: %(error)s.',
                  {'mode': mode, 'node_uuid': node.uuid, 'error': exc})
        raise exception.DracOperationError(error=exc)
def commit_config(node, raid_controller, reboot=False, realtime=False):
    """Commit pending RAID changes on a controller as a config job.

    :param node: an ironic node object.
    :param raid_controller: FQDD of the RAID controller.
    :param reboot: whether the job should reboot the server.
    :param realtime: whether the job may run in realtime (without reboot).
    :raises: DracOperationError on an error from python-dracclient.
    """
    client = drac_common.get_drac_client(node)

    try:
        return client.commit_pending_raid_changes(
            raid_controller=raid_controller,
            reboot=reboot,
            realtime=realtime)
    except drac_exceptions.BaseClientException as exc:
        LOG.error('DRAC driver failed to commit pending RAID config for'
                  ' controller %(raid_controller_fqdd)s on node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': exc})
        raise exception.DracOperationError(error=exc)
def _change_physical_disk_mode(node, mode=None,
                               controllers_to_physical_disk_ids=None,
                               substep="completed"):
    """Convert physical drives to *mode* and commit per-controller changes.

    :param node: an ironic node object.
    :param mode: target drive mode (a dracclient RaidStatus value).
    :param controllers_to_physical_disk_ids: optional dict mapping
        controller FQDD to the IDs of the disks to convert.
    :param substep: substep to record for the follow-up config jobs.
    :returns: the result of _commit_to_controllers.
    """
    state = change_physical_disk_state(
        node, mode, controllers_to_physical_disk_ids)

    controllers = [
        {'raid_controller': fqdd,
         'is_reboot_required': outcome['is_reboot_required'],
         'is_commit_required': outcome['is_commit_required']}
        for fqdd, outcome in state['conversion_results'].items()
    ]

    return _commit_to_controllers(node, controllers, substep=substep)
def abandon_config(node, raid_controller):
    """Discard all pending RAID changes on a controller.

    :param node: an ironic node object.
    :param raid_controller: FQDD of the RAID controller.
    :raises: DracOperationError on an error from the DRAC client.
    """
    client = drac_common.get_drac_client(node)

    try:
        client.abandon_pending_raid_changes(raid_controller)
    except drac_exceptions.BaseClientException as error:
        LOG.error('DRAC driver failed to delete pending RAID config '
                  'for controller %(raid_controller_fqdd)s on node '
                  '%(node_uuid)s. Reason: %(error)s.',
                  {'raid_controller_fqdd': raid_controller,
                   'node_uuid': node.uuid,
                   'error': error})
        raise exception.DracOperationError(error=error)
def _calculate_spans(raid_level, disks_count):
if raid_level in ['0', '1', '5', '6']:
return 1
elif raid_level in ['5+0', '6+0']:
return 2
elif raid_level in ['1+0']:
return disks_count >> 1
else:
reason = (_('Cannot calculate spans for RAID level "%s"') %
raid_level)
raise exception.DracOperationError(error=reason)
def _usable_disks_count(raid_level, disks_count):
if raid_level in ['0', '1', '5', '6']:
return disks_count
elif raid_level in ['5+0', '6+0', '1+0']:
# largest even number less than disk_count
return (disks_count >> 1) << 1
else:
reason = (_('RAID level %(raid_level)s is not supported by the '
'driver. Supported RAID levels: %(supported_raid_levels)s')
% {'raid_level': raid_level,
'supported_raid_levels': list(RAID_LEVELS)})
raise exception.DracOperationError(error=reason)
def _raid_level_min_disks(raid_level, spans_count=1):
    """Return the minimum number of disks required for *raid_level*.

    :raises: DracOperationError for an unsupported RAID level or a
        spanned level with a single span.
    """
    try:
        info = RAID_LEVELS[raid_level]
    except KeyError:
        reason = (_('RAID level %(raid_level)s is not supported by the '
                    'driver. Supported RAID levels: %(supported_raid_levels)s')
                  % {'raid_level': raid_level,
                     'supported_raid_levels': list(RAID_LEVELS)})
        raise exception.DracOperationError(error=reason)

    if info['type'] == 'spanned':
        if spans_count <= 1:
            reason = _('Spanned RAID volumes cannot contain a single span')
            raise exception.DracOperationError(error=reason)
        # A spanned level inherits the constraint of its base span level.
        info = RAID_LEVELS[info['span_type']]

    return info['min_disks'] * spans_count
def _raid_level_max_disks(raid_level, spans_count=1):
    """Return the maximum number of disks allowed for *raid_level*.

    :raises: DracOperationError for an unsupported RAID level or a
        spanned level with a single span.
    """
    try:
        info = RAID_LEVELS[raid_level]
    except KeyError:
        reason = (_('RAID level %(raid_level)s is not supported by the '
                    'driver. Supported RAID levels: %(supported_raid_levels)s')
                  % {'raid_level': raid_level,
                     'supported_raid_levels': list(RAID_LEVELS)})
        raise exception.DracOperationError(error=reason)

    if info['type'] == 'spanned':
        if spans_count <= 1:
            reason = _('Spanned RAID volumes cannot contain a single span')
            raise exception.DracOperationError(error=reason)
        # A spanned level inherits the constraint of its base span level.
        info = RAID_LEVELS[info['span_type']]

    return info['max_disks'] * spans_count
def _raid_level_overhead(raid_level, spans_count=1):
    """Return the number of disks consumed by redundancy for *raid_level*.

    :raises: DracOperationError for an unsupported RAID level or a
        spanned level with a single span.
    """
    try:
        info = RAID_LEVELS[raid_level]
    except KeyError:
        reason = (_('RAID level %(raid_level)s is not supported by the '
                    'driver. Supported RAID levels: %(supported_raid_levels)s')
                  % {'raid_level': raid_level,
                     'supported_raid_levels': list(RAID_LEVELS)})
        raise exception.DracOperationError(error=reason)

    if info['type'] == 'spanned':
        if spans_count <= 1:
            reason = _('Spanned RAID volumes cannot contain a single span')
            raise exception.DracOperationError(error=reason)
        # Overhead of a spanned level comes from its base span level.
        info = RAID_LEVELS[info['span_type']]

    return info['overhead'] * spans_count
def _max_volume_size_mb(raid_level, physical_disks, free_space_mb,
                        spans_count=1, stripe_size_kb=64 * units.Ki):
    """Return the largest volume size (MiB) that fits on the given disks.

    :param free_space_mb: mapping of disk object to free space in MiB.
    """
    # The member disk with the least free space caps the per-disk share.
    smallest_free_kb = min(
        free_space_mb[disk] for disk in physical_disks) * units.Ki

    # NOTE(ifarkas): using math.floor so we get a volume size that does not
    # exceed the available space
    stripes_per_disk = int(math.floor(float(smallest_free_kb)
                                      / stripe_size_kb))

    data_disks_count = (len(physical_disks)
                        - _raid_level_overhead(raid_level, spans_count))

    return int(stripes_per_disk * stripe_size_kb
               * data_disks_count / units.Ki)
def _volume_usage_per_disk_mb(logical_disk, physical_disks, spans_count=1,
                              stripe_size_kb=64 * units.Ki):
    """Return how many MiB *logical_disk* consumes on each member disk."""
    data_disks_count = (len(physical_disks)
                        - _raid_level_overhead(logical_disk['raid_level'],
                                               spans_count))
    volume_size_kb = logical_disk['size_mb'] * units.Ki

    # NOTE(ifarkas): using math.ceil so we get the largest disk usage
    # possible, so we can avoid over-committing
    stripes_per_volume = math.ceil(float(volume_size_kb) / stripe_size_kb)
    stripes_per_disk = math.ceil(float(stripes_per_volume) / data_disks_count)

    return int(stripes_per_disk * stripe_size_kb / units.Ki)
def _find_configuration(logical_disks, physical_disks, pending_delete):
    """Match every logical disk to concrete physical disks.

    :param logical_disks: logical disk definitions (sizes already in MiB
        or the string 'MAX').
    :param physical_disks: physical disk objects reported by the DRAC.
    :param pending_delete: True when existing virtual disks are scheduled
        for deletion, which frees their space for matching.
    :returns: the logical disks with 'physical_disks' and volume
        properties filled in.
    :raises: DracOperationError when no complete matching exists.
    """
    # shared physical disks of RAID volumes size_gb='MAX' should be
    # deprioritized during the matching process to reserve as much space as
    # possible. Reserved means it won't be used during matching.
    volumes_with_reserved_physical_disks = [
        volume for volume in logical_disks
        if ('physical_disks' in volume and volume['size_mb'] == 'MAX'
            and volume.get('share_physical_disks', False))]
    reserved_physical_disks = [
        disk for disk in physical_disks
        for volume in volumes_with_reserved_physical_disks
        if disk.id in volume['physical_disks']]

    # Bucket the disks by (controller, media type, interface type, size);
    # only identical disks are combined into one volume.
    physical_disks_by_type = {}
    reserved_physical_disks_by_type = {}
    free_space_mb = {}
    for disk in physical_disks:
        free_space_mb[disk] = _get_disk_free_size_mb(disk, pending_delete)
        disk_type = (disk.controller, disk.media_type, disk.interface_type,
                     disk.size_mb)
        if disk_type not in physical_disks_by_type:
            physical_disks_by_type[disk_type] = []
            reserved_physical_disks_by_type[disk_type] = []
        if disk in reserved_physical_disks:
            reserved_physical_disks_by_type[disk_type].append(disk)
        else:
            physical_disks_by_type[disk_type].append(disk)

    # Disks claimed exclusively by a volume are removed from the pool.
    for volume in logical_disks:
        if ('physical_disks' in volume
                and not volume.get('share_physical_disks', False)):
            for disk in physical_disks:
                if disk.id in volume['physical_disks']:
                    disk_type = (disk.controller, disk.media_type,
                                 disk.interface_type, disk.size_mb)
                    if disk in physical_disks_by_type[disk_type]:
                        physical_disks_by_type[disk_type].remove(disk)

    # Pass 1: volumes with explicit disks and an exact size.
    processed_volumes = []
    for volume in [volume for volume in logical_disks
                   if ('physical_disks' in volume
                       and volume['size_mb'] != 'MAX')]:
        _calculate_volume_props(volume, physical_disks, free_space_mb)
        processed_volumes.append(volume)

    # Pass 2: volumes without explicit disks; try matching without the
    # reserved disks first, then retry with them included.
    volumes_without_disks = [disk for disk in logical_disks
                             if 'physical_disks' not in disk]
    if volumes_without_disks:
        result, free_space_mb = (
            _assign_disks_to_volume(volumes_without_disks,
                                    physical_disks_by_type, free_space_mb,
                                    pending_delete))
        if not result:
            for disk_type, disks in physical_disks_by_type.items():
                physical_disks_by_type[disk_type] += (
                    reserved_physical_disks_by_type[disk_type])
            result, free_space_mb = (
                _assign_disks_to_volume(volumes_without_disks,
                                        physical_disks_by_type,
                                        free_space_mb,
                                        pending_delete))
            if not result:
                error_msg = _('failed to find matching physical disks for all '
                              'logical disks')
                LOG.error('DRAC driver failed to create RAID '
                          'configuration. Reason: %(error)s.',
                          {'error': error_msg})
                raise exception.DracOperationError(error=error_msg)
    processed_volumes += volumes_without_disks

    # Pass 3: volumes with explicit disks sized 'MAX' — they absorb the
    # remaining free space, so they must go last.
    for volume in [volume for volume in logical_disks
                   if ('physical_disks' in volume
                       and volume['size_mb'] == 'MAX')]:
        _calculate_volume_props(volume, physical_disks, free_space_mb)
        processed_volumes.append(volume)

    return processed_volumes
def _calculate_volume_props(logical_disk, physical_disks, free_space_mb):
    """Fill in span/size properties of a logical disk and claim disk space.

    Mutates *logical_disk* in place ('span_depth', 'span_length',
    'size_mb', 'controller') and decrements *free_space_mb* for every
    selected disk.

    :raises: DracOperationError when the selected disks cannot hold the
        volume.
    """
    selected_disks = [disk for disk in physical_disks
                      if disk.id in logical_disk['physical_disks']]

    spans_count = _calculate_spans(
        logical_disk['raid_level'], len(selected_disks))

    # Every span must get the same number of disks.
    if len(selected_disks) % spans_count != 0:
        error_msg = _('invalid number of physical disks was provided')
        raise exception.DracOperationError(error=error_msg)

    disks_per_span = int(len(selected_disks) / spans_count)

    # For RAID 1+0 the span layout is left to the controller.
    logical_disk['span_depth'] = None
    logical_disk['span_length'] = None
    if logical_disk['raid_level'] != '1+0':
        logical_disk['span_depth'] = spans_count
        logical_disk['span_length'] = disks_per_span

    max_volume_size_mb = _max_volume_size_mb(
        logical_disk['raid_level'], selected_disks, free_space_mb,
        spans_count=spans_count)

    if logical_disk['size_mb'] == 'MAX':
        if max_volume_size_mb == 0:
            error_msg = _("size set to 'MAX' but could not allocate physical "
                          "disk space")
            raise exception.DracOperationError(error=error_msg)
        # 'MAX' resolves to the largest size that still fits.
        logical_disk['size_mb'] = max_volume_size_mb
    elif max_volume_size_mb < logical_disk['size_mb']:
        if max_volume_size_mb == 0:
            error_msg = _('not enough physical disk space for the logical '
                          'disk')
            raise exception.DracOperationError(error=error_msg)

    disk_usage = _volume_usage_per_disk_mb(logical_disk, selected_disks,
                                           spans_count=spans_count)

    # Claim the per-disk usage from the free-space bookkeeping.
    for disk in selected_disks:
        if free_space_mb[disk] < disk_usage:
            error_msg = _('not enough free space on physical disks for the '
                          'logical disk')
            raise exception.DracOperationError(error=error_msg)
        else:
            free_space_mb[disk] -= disk_usage

    if 'controller' not in logical_disk:
        logical_disk['controller'] = selected_disks[0].controller
def _assign_disks_to_volume(logical_disks, physical_disks_by_type,
                            free_space_mb, pending_delete):
    """Assign disks to the first volume, then recurse on the rest.

    Backtracking matcher: pops the first volume, tries each disk-type
    bucket and each legal disk count for it, and recursively assigns the
    remaining volumes against a tentative copy of the free-space map.
    On success the volume (with 'physical_disks' filled in) is appended
    back to *logical_disks*.

    :returns: tuple of (success flag, resulting free-space map).
    """
    logical_disk = logical_disks.pop(0)
    raid_level = logical_disk['raid_level']

    # Try every bucket of identical disks for this volume.
    for (controller, disk_type,
         interface_type, size_mb), disks in physical_disks_by_type.items():

        if ('disk_type' in logical_disk
                and logical_disk['disk_type'] != disk_type):
            continue

        if ('interface_type' in logical_disk
                and logical_disk['interface_type'] != interface_type):
            continue

        # Filter out disks without any free space left.
        disks = [disk for disk in disks if free_space_mb[disk] > 0]

        # Sort by free size — matters when a volume has a max-disk limit.
        disks = sorted(
            disks,
            key=lambda disk: free_space_mb[disk])

        # Without sharing, only disks that are still untouched qualify.
        if ('share_physical_disks' not in logical_disk
                or not logical_disk['share_physical_disks']):
            initial_free_size_mb = {
                disk: _get_disk_free_size_mb(disk, pending_delete)
                for disk in disks
            }
            disks = [disk for disk in disks
                     if initial_free_size_mb[disk] == free_space_mb[disk]]

        max_spans = _calculate_spans(raid_level, len(disks))
        min_spans = min([2, max_spans])
        min_disks = _raid_level_min_disks(raid_level,
                                          spans_count=min_spans)
        max_disks = _raid_level_max_disks(raid_level,
                                          spans_count=max_spans)
        candidate_max_disks = min([max_disks, len(disks)])

        for disks_count in range(min_disks, candidate_max_disks + 1):
            if ('number_of_physical_disks' in logical_disk
                    and (logical_disk['number_of_physical_disks']
                         != disks_count)):
                continue

            # Skip counts the RAID level cannot fully use (odd counts on
            # spanned levels).
            if disks_count != _usable_disks_count(logical_disk['raid_level'],
                                                  disks_count):
                continue

            selected_disks = disks[0:disks_count]

            # Work on copies so a failed branch leaves no trace.
            candidate_volume = logical_disk.copy()
            candidate_free_space_mb = free_space_mb.copy()
            candidate_volume['physical_disks'] = [disk.id for disk
                                                  in selected_disks]
            try:
                _calculate_volume_props(candidate_volume, selected_disks,
                                        candidate_free_space_mb)
            except exception.DracOperationError:
                continue

            if len(logical_disks) > 0:
                result, candidate_free_space_mb = (
                    _assign_disks_to_volume(logical_disks,
                                            physical_disks_by_type,
                                            candidate_free_space_mb,
                                            pending_delete))
                if result:
                    logical_disks.append(candidate_volume)
                    return (True, candidate_free_space_mb)
            else:
                logical_disks.append(candidate_volume)
                return (True, candidate_free_space_mb)
    else:
        # No bucket worked — put the volume back for the caller to retry.
        logical_disks.insert(0, logical_disk)
        return (False, free_space_mb)
def _filter_logical_disks(logical_disks, include_root_volume,
include_nonroot_volumes):
filtered_disks = []
for disk in logical_disks:
if include_root_volume and disk.get('is_root_volume'):
filtered_disks.append(disk)
if include_nonroot_volumes and not disk.get('is_root_volume'):
filtered_disks.append(disk)
return filtered_disks
def _create_config_job(node, controller, reboot=False, realtime=False,
                       raid_config_job_ids=None,
                       raid_config_parameters=[]):
    """Commit pending changes on *controller* and record the job id.

    :param node: an ironic node object.
    :param controller: FQDD of the RAID controller to commit.
    :param reboot: whether the committed job should reboot the node.
    :param realtime: whether the job may run without a reboot.
    :param raid_config_job_ids: list accumulating created job ids; a new
        list is used when omitted.
    :param raid_config_parameters: list accumulating controller FQDDs.
    :returns: dict with 'raid_config_job_ids' and
        'raid_config_parameters'.
    :raises: DracOperationError when the commit fails.
    """
    # NOTE: mutable default arguments are shared between calls and would
    # silently accumulate job ids across invocations; use a fresh list.
    if raid_config_job_ids is None:
        raid_config_job_ids = []
    job_id = commit_config(node, raid_controller=controller,
                           reboot=reboot, realtime=realtime)

    raid_config_job_ids.append(job_id)
    if controller not in raid_config_parameters:
        raid_config_parameters.append(controller)

    LOG.info('Change has been committed to RAID controller '
             '%(controller)s on node %(node)s. '
             'DRAC job id: %(job_id)s',
             {'controller': controller, 'node': node.uuid,
              'job_id': job_id})
    return {'raid_config_job_ids': raid_config_job_ids,
            'raid_config_parameters': raid_config_parameters}
def _validate_volume_size(node, logical_disks):
    """Re-check logical disk sizes against the current physical disks.

    After drives are converted to RAID mode their usable size can
    shrink; any logical disk that no longer fits is clamped to the new
    maximum and has its volume properties recomputed.

    :returns: the recomputed logical disks, or *logical_disks* unchanged
        when every size still fits.
    """
    new_physical_disks = list_physical_disks(node)
    free_space_mb = {}
    new_processed_volumes = []
    for disk in new_physical_disks:
        free_space_mb[disk] = disk.free_size_mb

    for logical_disk in logical_disks:
        selected_disks = [disk for disk in new_physical_disks
                          if disk.id in logical_disk['physical_disks']]

        spans_count = _calculate_spans(
            logical_disk['raid_level'], len(selected_disks))

        new_max_vol_size_mb = _max_volume_size_mb(
            logical_disk['raid_level'],
            selected_disks,
            free_space_mb,
            spans_count=spans_count)

        if logical_disk['size_mb'] > new_max_vol_size_mb:
            # Clamp and recompute — the requested size no longer fits.
            logical_disk['size_mb'] = new_max_vol_size_mb
            LOG.info("Logical size does not match so calculating volume "
                     "properties for current logical_disk")
            _calculate_volume_props(
                logical_disk, new_physical_disks, free_space_mb)
            new_processed_volumes.append(logical_disk)

    if new_processed_volumes:
        return new_processed_volumes

    return logical_disks
def _switch_to_raid_mode(node, controller_fqdd):
    """Request that a controller switch out of eHBA into RAID mode.

    :param controller_fqdd: FQDD of the controller to switch.
    :returns: dict describing the controller and whether a reboot and/or
        a commit is still required.
    """
    # Let any in-flight jobs finish before touching controller settings.
    drac_job.wait_for_job_completion(node)

    raid_attr = "{}:{}".format(controller_fqdd,
                               _REQUESTED_RAID_CONTROLLER_MODE)
    settings_results = set_raid_settings(
        node, controller_fqdd, {raid_attr: _RAID_MODE})

    return {'raid_controller': controller_fqdd,
            'is_reboot_required': settings_results['is_reboot_required'],
            'is_commit_required': settings_results['is_commit_required']}
def _commit_to_controllers(node, controllers, substep="completed"):
    """Commit pending changes to the given RAID controllers.

    :param controllers: list of controller dicts with
        'raid_controller', 'is_reboot_required', 'is_commit_required'
        and optionally 'is_ehba_mode'.
    :param substep: raid_config_substep recorded in driver_internal_info
        so the periodic job knows what to do when the jobs finish.
    :returns: None when nothing needed committing, otherwise the async
        step return state for the node.
    """
    # Drop controllers that have nothing to commit.
    controllers = [controller for controller in controllers
                   if controller['is_commit_required']]

    if not controllers:
        LOG.debug('No changes on any of the controllers on node %s',
                  node.uuid)
        driver_internal_info = node.driver_internal_info
        driver_internal_info['raid_config_substep'] = substep
        driver_internal_info['raid_config_parameters'] = []
        node.driver_internal_info = driver_internal_info
        node.save()
        return

    driver_internal_info = node.driver_internal_info
    driver_internal_info['raid_config_substep'] = substep
    driver_internal_info['raid_config_parameters'] = []

    if 'raid_config_job_ids' not in driver_internal_info:
        driver_internal_info['raid_config_job_ids'] = []

    optional = drac_constants.RebootRequired.optional

    # True when every controller can apply without a reboot and none is
    # in eHBA mode.
    all_realtime = all(
        (cntlr['is_reboot_required'] == optional)
        and not(cntlr.get('is_ehba_mode'))
        for cntlr in controllers)

    any_ehba_controllers = any(
        cntrl.get('is_ehba_mode') is True for cntrl in controllers)

    raid_config_job_ids = []
    raid_config_parameters = []
    if all_realtime:
        # Realtime everywhere: no reboot jobs needed.
        for controller in controllers:
            realtime_controller = controller['raid_controller']
            job_details = _create_config_job(
                node, controller=realtime_controller,
                reboot=False, realtime=True,
                raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)
    elif any_ehba_controllers:
        # eHBA controllers first get a realtime commit plus a switch to
        # RAID mode; the mode switch itself is then committed with a
        # reboot on the last one.
        commit_to_ehba_controllers = []
        for controller in controllers:
            if controller.get('is_ehba_mode'):
                job_details = _create_config_job(
                    node, controller=controller['raid_controller'],
                    reboot=False, realtime=True,
                    raid_config_job_ids=raid_config_job_ids,
                    raid_config_parameters=raid_config_parameters)
                ehba_controller = _switch_to_raid_mode(
                    node, controller['raid_controller'])
                commit_to_ehba_controllers.append(
                    ehba_controller['raid_controller'])
            else:
                job_details = _create_config_job(
                    node, controller=controller['raid_controller'],
                    reboot=False, realtime=False,
                    raid_config_job_ids=raid_config_job_ids,
                    raid_config_parameters=raid_config_parameters)
        for controller in commit_to_ehba_controllers:
            LOG.debug("Create job with Reboot to apply configuration "
                      "changes for ehba controllers")
            job_details = _create_config_job(
                node, controller=controller,
                reboot=(controller == commit_to_ehba_controllers[-1]),
                realtime=False, raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)
    else:
        # Mixed case: plain jobs, rebooting only after the last one.
        for controller in controllers:
            mix_controller = controller['raid_controller']
            reboot = (controller == controllers[-1])
            job_details = _create_config_job(
                node, controller=mix_controller,
                reboot=reboot, realtime=False,
                raid_config_job_ids=raid_config_job_ids,
                raid_config_parameters=raid_config_parameters)

    # job_details holds the accumulated lists shared by all calls above;
    # controllers is non-empty here, so it is always bound.
    driver_internal_info['raid_config_job_ids'].extend(job_details[
        'raid_config_job_ids'])
    driver_internal_info['raid_config_parameters'].extend(job_details[
        'raid_config_parameters'])
    node.driver_internal_info = driver_internal_info
    deploy_utils.set_async_step_flags(
        node,
        reboot=not all_realtime,
        skip_current_step=True,
        polling=True)
    return deploy_utils.get_async_step_return_state(node)
def _create_virtual_disks(task, node):
    """Create the virtual disks queued in driver_internal_info.

    :returns: whatever _commit_to_controllers returns for the affected
        controllers.
    """
    internal_info = node.driver_internal_info
    logical_disks_to_create = internal_info['logical_disks_to_create']

    if internal_info['volume_validation']:
        # Drive conversion may have shrunk the usable sizes; re-check
        # them against the current free space first.
        logical_disks_to_create = _validate_volume_size(
            node, logical_disks_to_create)

    controllers = []
    for logical_disk in logical_disks_to_create:
        controller_cap = create_virtual_disk(
            node,
            raid_controller=logical_disk['controller'],
            physical_disks=logical_disk['physical_disks'],
            raid_level=logical_disk['raid_level'],
            size_mb=logical_disk['size_mb'],
            disk_name=logical_disk.get('name'),
            span_length=logical_disk.get('span_length'),
            span_depth=logical_disk.get('span_depth'))

        controller = {'raid_controller': logical_disk['controller'],
                      'is_reboot_required':
                          controller_cap['is_reboot_required'],
                      'is_commit_required':
                          controller_cap['is_commit_required']}
        if controller not in controllers:
            controllers.append(controller)

    return _commit_to_controllers(node, controllers)
def _controller_in_hba_mode(raid_settings, controller_fqdd):
    """Return True when the controller currently runs in eHBA mode."""
    mode_attr = '{}:{}'.format(controller_fqdd,
                               _CURRENT_RAID_CONTROLLER_MODE)
    return _EHBA_MODE in raid_settings.get(mode_attr).current_value
def _controller_supports_ehba_mode(settings, controller_fqdd):
    """Return True when the controller can be switched to eHBA mode."""
    mode_attr = "{}:{}".format(controller_fqdd,
                               _CURRENT_RAID_CONTROLLER_MODE)
    current_mode = settings.get(mode_attr)
    # A missing mode attribute means the controller has no mode support.
    if not current_mode:
        return False
    return _EHBA_MODE in current_mode.possible_values
def _get_disk_free_size_mb(disk, pending_delete):
return disk.size_mb if pending_delete else disk.free_size_mb
class DracWSManRAID(base.RAIDInterface):
    """RAID interface implemented on top of the Dell iDRAC WS-Man API."""

    def get_properties(self):
        """Return the properties of the interface."""
        return drac_common.COMMON_PROPERTIES

    @base.deploy_step(priority=0,
                      argsinfo=base.RAID_APPLY_CONFIGURATION_ARGSINFO)
    def apply_configuration(self, task, raid_config, create_root_volume=True,
                            create_nonroot_volumes=False,
                            delete_existing=True):
        """Apply a RAID configuration as a deploy step."""
        return super(DracWSManRAID, self).apply_configuration(
            task, raid_config, create_root_volume=create_root_volume,
            create_nonroot_volumes=create_nonroot_volumes,
            delete_existing=delete_existing)

    @METRICS.timer('DracRAID.create_configuration')
    @base.clean_step(priority=0, abortable=False, argsinfo={
        'create_root_volume': {
            'description': (
                'This specifies whether to create the root volume. '
                'Defaults to `True`.'
            ),
            'required': False
        },
        'create_nonroot_volumes': {
            'description': (
                'This specifies whether to create the non-root volumes. '
                'Defaults to `True`.'
            ),
            'required': False
        },
        "delete_existing": {
            "description": (
                "Setting this to 'True' indicates to delete existing RAID "
                "configuration prior to creating the new configuration. "
                "Default value is 'False'."
            ),
            "required": False,
        }
    })
    def create_configuration(self, task,
                             create_root_volume=True,
                             create_nonroot_volumes=True,
                             delete_existing=False):
        """Create the RAID volumes described in target_raid_config.

        :raises: InvalidParameterValue when a 'MAX' sized volume does
            not name its physical disks explicitly.
        :raises: DracOperationError when no matching can be found.
        """
        node = task.node

        logical_disks = node.target_raid_config['logical_disks']
        for disk in logical_disks:
            # 'MAX' sized volumes must name their disks; otherwise the
            # matcher could not decide how much space 'MAX' means.
            if disk['size_gb'] == 'MAX' and 'physical_disks' not in disk:
                raise exception.InvalidParameterValue(
                    _("create_configuration called with invalid "
                      "target_raid_configuration for node %(node_id)s. "
                      "'physical_disks' is missing from logical_disk while "
                      "'size_gb'='MAX' was requested: "
                      "%(logical_disk)s") % {'node_id': node.uuid,
                                             'logical_disk': disk})

            # Internally all sizes are tracked in MiB.
            if disk['size_gb'] == 'MAX':
                disk['size_mb'] = 'MAX'
            else:
                disk['size_mb'] = disk['size_gb'] * units.Ki

            del disk['size_gb']

        if delete_existing:
            self._delete_configuration_no_commit(task)
        physical_disks = list_physical_disks(node)
        logical_disks = _find_configuration(logical_disks, physical_disks,
                                            pending_delete=delete_existing)

        logical_disks_to_create = _filter_logical_disks(
            logical_disks, create_root_volume, create_nonroot_volumes)

        # Collect, per controller, the disks that must be converted to
        # RAID mode before the virtual disks can be created.
        controllers_to_physical_disk_ids = defaultdict(list)
        for logical_disk in logical_disks_to_create:
            # Don't convert physical disks of a JBOD 'volume'.
            if logical_disk['raid_level'] == 'JBOD':
                continue
            for physical_disk_name in logical_disk['physical_disks']:
                controllers_to_physical_disk_ids[
                    logical_disk['controller']].append(
                    physical_disk_name)

        driver_internal_info = node.driver_internal_info
        driver_internal_info[
            "logical_disks_to_create"] = logical_disks_to_create

        commit_results = None
        if logical_disks_to_create:
            LOG.debug(
                "Converting physical disks configured to back RAID "
                "logical disks to RAID mode for node %(node_uuid)s ",
                {"node_uuid": node.uuid})
            raid_mode = drac_constants.RaidStatus.raid
            commit_results = _change_physical_disk_mode(
                node, raid_mode,
                controllers_to_physical_disk_ids,
                substep="create_virtual_disks")

        # Sizes must be re-validated later only if a conversion job was
        # actually committed.
        volume_validation = True if commit_results else False
        driver_internal_info['volume_validation'] = volume_validation
        node.driver_internal_info = driver_internal_info
        node.save()

        if commit_results:
            return commit_results
        else:
            LOG.debug("Controller does not support drives conversion "
                      "so creating virtual disks")
            return _create_virtual_disks(task, node)

    @METRICS.timer('DracRAID.delete_configuration')
    @base.clean_step(priority=0)
    @base.deploy_step(priority=0)
    def delete_configuration(self, task):
        """Delete the RAID configuration on every RAID controller."""
        controllers = self._delete_configuration_no_commit(task)
        return _commit_to_controllers(task.node, controllers,
                                      substep="delete_foreign_config")

    @METRICS.timer('DracRAID.get_logical_disks')
    def get_logical_disks(self, task):
        """Return the current virtual disks as a logical_disks mapping."""
        node = task.node

        logical_disks = []
        for disk in list_virtual_disks(node):
            logical_disk = {
                'id': disk.id,
                'controller': disk.controller,
                'size_gb': int(disk.size_mb / units.Ki),
                'raid_level': disk.raid_level
            }

            if disk.name is not None:
                logical_disk['name'] = disk.name

            logical_disks.append(logical_disk)

        return {'logical_disks': logical_disks}

    @METRICS.timer('DracRAID._query_raid_config_job_status')
    @periodics.periodic(
        spacing=CONF.drac.query_raid_config_job_status_interval)
    def _query_raid_config_job_status(self, manager, context):
        """Periodic task: poll unfinished RAID config jobs on all nodes."""
        filters = {'reserved': False, 'maintenance': False}
        fields = ['driver_internal_info']

        node_list = manager.iter_nodes(fields=fields, filters=filters)
        for (node_uuid, driver, conductor_group,
             driver_internal_info) in node_list:
            try:
                lock_purpose = 'checking async raid configuration jobs'
                with task_manager.acquire(context, node_uuid,
                                          purpose=lock_purpose,
                                          shared=True) as task:
                    # Only handle nodes driven by this RAID interface.
                    if not isinstance(task.driver.raid, DracWSManRAID):
                        continue

                    job_ids = driver_internal_info.get('raid_config_job_ids')
                    if not job_ids:
                        continue

                    self._check_node_raid_jobs(task)

            except exception.NodeNotFound:
                LOG.info("During query_raid_config_job_status, node "
                         "%(node)s was not found and presumed deleted by "
                         "another process.", {'node': node_uuid})
            except exception.NodeLocked:
                LOG.info("During query_raid_config_job_status, node "
                         "%(node)s was already locked by another process. "
                         "Skip.", {'node': node_uuid})

    @METRICS.timer('DracRAID._check_node_raid_jobs')
    def _check_node_raid_jobs(self, task):
        """Check the node's config jobs and advance the RAID sub-step."""
        node = task.node
        raid_config_job_ids = node.driver_internal_info['raid_config_job_ids']
        finished_job_ids = []

        for config_job_id in raid_config_job_ids:
            config_job = drac_job.get_job(node, job_id=config_job_id)

            if config_job is None or config_job.status == 'Completed':
                finished_job_ids.append(config_job_id)
            elif config_job.status == 'Failed':
                finished_job_ids.append(config_job_id)
                self._set_raid_config_job_failure(node)

        if not finished_job_ids:
            return

        # Node state is about to change; a shared lock is not enough.
        task.upgrade_lock()
        self._delete_cached_config_job_id(node, finished_job_ids)

        if not node.driver_internal_info.get('raid_config_job_failure',
                                             False):
            if 'raid_config_substep' in node.driver_internal_info:
                substep = node.driver_internal_info['raid_config_substep']

                if substep == 'delete_foreign_config':
                    foreign_drives = self._execute_foreign_drives(task, node)
                    if foreign_drives is None:
                        return self._convert_drives(task, node)
                elif substep == 'physical_disk_conversion':
                    self._convert_drives(task, node)
                elif substep == "create_virtual_disks":
                    return _create_virtual_disks(task, node)
                elif substep == 'completed':
                    self._complete_raid_substep(task, node)
            else:
                self._complete_raid_substep(task, node)
        else:
            # At least one job failed: clean up and fail the step.
            self._clear_raid_substep(node)
            self._clear_raid_config_job_failure(node)
            self._set_failed(task, config_job)

    def _execute_foreign_drives(self, task, node):
        """Clear foreign config on all recorded controllers.

        :returns: None when no controller requires a job, otherwise the
            async step return state from committing the changes.
        """
        controllers = list()
        jobs_required = False
        for controller_id in node.driver_internal_info[
                'raid_config_parameters']:
            controller_cap = clear_foreign_config(
                node, controller_id)
            controller = {
                'raid_controller': controller_id,
                'is_reboot_required': controller_cap['is_reboot_required'],
                'is_commit_required': controller_cap['is_commit_required']}
            controllers.append(controller)
            jobs_required = jobs_required or controller_cap[
                'is_commit_required']

        if not jobs_required:
            LOG.info(
                "No foreign drives detected, so "
                "resume %s", "cleaning" if node.clean_step else "deployment")
            return None
        else:
            return _commit_to_controllers(
                node,
                controllers,
                substep='physical_disk_conversion')

    def _complete_raid_substep(self, task, node):
        """Finish the RAID flow: clear sub-step state and resume."""
        self._clear_raid_substep(node)
        self._resume(task)

    def _convert_drives(self, task, node):
        """Convert drives back to JBOD and finish if nothing to commit."""
        jbod = drac_constants.RaidStatus.jbod
        drives_results = _change_physical_disk_mode(
            node, mode=jbod)
        if drives_results is None:
            LOG.debug("Controller does not support drives "
                      "conversion on %(node_uuid)s",
                      {'node_uuid': node.uuid})
            self._complete_raid_substep(task, node)

    def _clear_raid_substep(self, node):
        """Remove RAID sub-step bookkeeping from driver_internal_info."""
        driver_internal_info = node.driver_internal_info
        driver_internal_info.pop('raid_config_substep', None)
        driver_internal_info.pop('raid_config_parameters', None)
        node.driver_internal_info = driver_internal_info
        node.save()

    def _set_raid_config_job_failure(self, node):
        """Record that at least one RAID config job failed."""
        driver_internal_info = node.driver_internal_info
        driver_internal_info['raid_config_job_failure'] = True
        node.driver_internal_info = driver_internal_info
        node.save()

    def _clear_raid_config_job_failure(self, node):
        """Drop the RAID config job failure flag."""
        driver_internal_info = node.driver_internal_info
        del driver_internal_info['raid_config_job_failure']
        node.driver_internal_info = driver_internal_info
        node.save()

    def _delete_cached_config_job_id(self, node, finished_config_job_ids=None):
        """Remove finished job ids from the cached job list."""
        if finished_config_job_ids is None:
            finished_config_job_ids = []
        driver_internal_info = node.driver_internal_info
        unfinished_job_ids = [job_id for job_id
                              in driver_internal_info['raid_config_job_ids']
                              if job_id not in finished_config_job_ids]
        driver_internal_info['raid_config_job_ids'] = unfinished_job_ids
        node.driver_internal_info = driver_internal_info
        node.save()

    def _set_failed(self, task, config_job):
        """Fail the current clean or deploy step with the job's message."""
        error_msg = (_("Failed config job: %(config_job_id)s. "
                       "Message: '%(message)s'.") %
                     {'config_job_id': config_job.id,
                      'message': config_job.message})
        log_msg = ("RAID configuration job failed for node %(node)s. "
                   "%(error)s" %
                   {'node': task.node.uuid, 'error': error_msg})
        if task.node.clean_step:
            manager_utils.cleaning_error_handler(task, error_msg)
        else:
            manager_utils.deploying_error_handler(task, log_msg, error_msg)

    def _resume(self, task):
        """Update raid_config and notify the conductor to resume."""
        raid_common.update_raid_info(
            task.node, self.get_logical_disks(task))
        if task.node.clean_step:
            manager_utils.notify_conductor_resume_clean(task)
        else:
            manager_utils.notify_conductor_resume_deploy(task)

    def _delete_configuration_no_commit(self, task):
        """Queue deletion of the RAID config on every RAID controller.

        :returns: list of controller dicts describing the pending
            (uncommitted) changes; eHBA controllers are flagged with
            'is_ehba_mode'.
        """
        node = task.node
        controllers = list()
        drac_raid_controllers = list_raid_controllers(node)
        drac_raid_settings = list_raid_settings(node)
        for cntrl in drac_raid_controllers:
            if _is_raid_controller(node, cntrl.id, drac_raid_controllers):
                controller = dict()
                if _controller_supports_ehba_mode(
                        drac_raid_settings,
                        cntrl.id) and _controller_in_hba_mode(
                        drac_raid_settings, cntrl.id):
                    controller['is_ehba_mode'] = True
                controller_cap = _reset_raid_config(node, cntrl.id)
                controller["raid_controller"] = cntrl.id
                controller["is_reboot_required"] = controller_cap[
                    "is_reboot_required"]
                controller["is_commit_required"] = controller_cap[
                    "is_commit_required"]
                controllers.append(controller)
        return controllers
class DracRAID(DracWSManRAID):
    """Deprecated alias kept for the legacy 'idrac' RAID interface name."""

    def __init__(self):
        super().__init__()
        # Warn once per instantiation so operators migrate their config.
        LOG.warning("RAID interface 'idrac' is deprecated and may be removed "
                    "in a future release. Use 'idrac-wsman' instead.")
| true | true |
f7319a3273ea4a13ac90d44d92e546e36cb45026 | 20,249 | py | Python | Decompiler/BattleActionScript.py | AGraber/EDDecompiler | 6e00c7c6ba8a12cea7722fcef34ed8ff4ac6bc66 | [
"MIT"
] | 5 | 2021-05-23T19:34:38.000Z | 2021-12-05T05:57:36.000Z | Decompiler/BattleActionScript.py | AGraber/EDDecompiler | 6e00c7c6ba8a12cea7722fcef34ed8ff4ac6bc66 | [
"MIT"
] | null | null | null | Decompiler/BattleActionScript.py | AGraber/EDDecompiler | 6e00c7c6ba8a12cea7722fcef34ed8ff4ac6bc66 | [
"MIT"
] | 1 | 2021-05-06T05:50:14.000Z | 2021-05-06T05:50:14.000Z | from Assembler.Assembler2s import *
from Base.EDAOBase import *
import Instruction.ActionOpTableEDAO as edao
import BattleMonsterStatus as MSFile
# 0xFFFF in the action offset table marks a slot with no action script
# (see its use in DisassembleCraftActions below).
INVALID_ACTION_OFFSET = 0xFFFF
# An empty action is encoded with the same sentinel value.
EMPTY_ACTION = INVALID_ACTION_OFFSET
class CharacterPositionFactor:
    """A pair of byte factors (X, Y) describing a battle position.

    When constructed without a stream the instance is left without
    attributes so callers can fill X/Y in manually.
    """

    def __init__(self, fs=None):
        # Use an identity check rather than '== None': stream objects
        # may define __eq__, and 'is None' is the correct idiom anyway.
        if fs is None:
            return
        self.X = fs.ReadByte()
        self.Y = fs.ReadByte()
class BattleActionScriptInfo:
ActionFileType_Normal = 0
ActionFileType_Arts = 1
ActionFileType_Item = 2
def __init__(self):
self.ChrPosFactorOffset = 0
self.ActionListOffset = 0
self.UnknownTableOffset = 0
self.ActionStartOffset = 0
self.PreloadChipList = []
self.ModelFileList = []
self.UnknownTableList = []
self.ActionList = []
self.ChrPosFactor = []
self.CraftActions = []
self.GlobalLabelTable = {}
self.ChrName = None
self.ASFileName = ''
self.ActionFileType = self.ActionFileType_Normal
def open(self, asname):
fs = fileio.FileStream()
fs.Open(asname)
self.ASFileName = asname
asname = os.path.basename(asname).lower()
if asname == 'as90000.dat':
self.ActionFileType = self.ActionFileType_Arts
elif asname == 'as90001.dat':
self.ActionFileType = self.ActionFileType_Item
else:
self.ActionFileType = self.ActionFileType_Normal
self.ActionListOffset = fs.ReadUShort()
if self.ActionFileType == self.ActionFileType_Normal:
self.ChrPosFactorOffset = fs.ReadUShort()
self.UnknownTableOffset = fs.ReadUShort()
while True:
index = fs.ReadULong()
if index == 0xFFFFFFFF:
break
self.PreloadChipList.append(ChipFileIndex(index))
fs.seek(self.ChrPosFactorOffset)
for i in range(8):
self.ChrPosFactor.append(CharacterPositionFactor(fs))
minoffset = 0xFFFFFFFF
fs.seek(self.ActionListOffset)
while True:
if fs.tell() >= minoffset:
break
offset = fs.ReadUShort()
if offset == 0:
break
minoffset = min(minoffset, offset)
self.ActionList.append(offset)
if len(self.ActionList) == 0:
raise Exception('action number == 0')
self.CraftActions = self.DisassembleCraftActions(fs)
return
for i in range(0x69, fs.Length):
if i not in offsetlist:
print('%X' % i)
#input()
input()
def GetBuiltinNames(self):
    """Return the list of built-in action names for this file type.

    Arts files: names are looked up from the t_magic._dt text table next
    to the script, best effort -- any failure yields an empty list.
    Normal files: a fixed table of engine-defined system crafts.
    Item files (and anything else): no built-in names.

    Fix: the bare ``except:`` (which also swallowed KeyboardInterrupt /
    SystemExit) is narrowed to ``except Exception``.
    """
    if self.ActionFileType == self.ActionFileType_Arts:
        BuiltinArtsNames = []
        try:
            offsetlist = []
            t_magic = os.path.abspath(os.path.dirname(os.path.abspath(self.ASFileName)) + '\\..\\..\\text\\t_magic._dt')
            if not os.path.exists(t_magic):
                # patch dirs mirror the data layout; fall back to data/
                if t_magic.endswith('\\patch\\text\\t_magic._dt'):
                    t_magic = t_magic.replace('\\patch\\text\\t_magic._dt', '\\data\\text\\t_magic._dt')
                elif t_magic.endswith('\\patch2\\text\\t_magic._dt'):
                    t_magic = t_magic.replace('\\patch2\\text\\t_magic._dt', '\\data\\text\\t_magic._dt')
            magic = fileio.FileStream()
            magic.Open(t_magic)
            # one 16-bit record offset per action slot
            for i in range(len(self.ActionList)):
                offsetlist.append(magic.ReadUShort())
                BuiltinArtsNames.append('')
            NameConflict = {}
            for i in range(len(offsetlist)):
                offset = offsetlist[i]
                # records shorter than 0x1C bytes are padding, skip them
                if i != len(offsetlist) - 1 and offsetlist[i + 1] - offset < 0x1C:
                    continue
                magic.seek(offset + 0x18)
                offset = magic.ReadUShort()
                if offset == 0:
                    continue
                magic.seek(offset)
                name = magic.ReadMultiByte().replace(' ', '')
                if name == '':
                    continue
                # duplicate names get a numeric suffix (_2, _3, ...)
                if name not in NameConflict:
                    NameConflict[name] = 1
                else:
                    NameConflict[name] += 1
                    name += '_%d' % NameConflict[name]
                BuiltinArtsNames[i] = name
        except Exception:
            # name lookup is purely cosmetic; fall back to generic names
            BuiltinArtsNames = []
        return BuiltinArtsNames
    elif self.ActionFileType == self.ActionFileType_Normal:
        BuiltinCraftNames = \
        [
            'SysCraft_Init',                # 00 0
            'SysCraft_Stand',               # 01 1
            'SysCraft_Move',                # 02 2
            'SysCraft_UnderAttack',         # 03 3
            'SysCraft_Dead',                # 04 4
            'SysCraft_NormalAttack',        # 05 5
            'SysCraft_ArtsAria',            # 06 6
            'SysCraft_ArtsCast',            # 07 7
            'SysCraft_Win',                 # 08 8
            'SysCraft_EnterBattle',         # 09 9
            'SysCraft_UseItem',             # 0A 10
            'SysCraft_Stun',                # 0B 11
            'SysCraft_Unknown2',            # 0C 12
            'SysCraft_Reserve1',            # 0D 13
            'SysCraft_Reserve2',            # 0E 14
            'SysCraft_Counter',             # 0F 15
            '',                             # 10 16
            '',                             # 11 17
            '',                             # 12 18
            '',                             # 13 19
            '',                             # 14 20
            '',                             # 15 21
            '',                             # 16 22
            '',                             # 17 23
            '',                             # 18 24
            '',                             # 19 25
            '',                             # 1A 26
            '',                             # 1B 27
            '',                             # 1C 28
            '',                             # 1D 29
            'SysCraft_TeamRushInit',        # 1E 30
            'SysCraft_TeamRushAction',      # 1F 31
        ]
        return BuiltinCraftNames
    return []
def DiasmInstructionCallback(self, data):
    """Disassembler per-instruction hook; intentionally a no-op here."""
    return
def DisassembleCraftActions(self, fs):
    """Disassemble every craft action referenced by self.ActionList.

    Returns a list of CodeBlock objects, one per action slot.  Slots
    containing INVALID_ACTION_OFFSET get a placeholder block; slots that
    share an offset share one disassembled block.

    Fixes: removed the unused ``CraftNameMap`` local, narrowed the bare
    ``except:`` to ``except Exception``, and replaced ``!= None``
    comparisons with identity tests.
    """
    msfile = None
    try:
        # Best effort: load the matching monster status file (ms*.dat)
        # so crafts can be named after their in-game craft names.
        msfile = MSFile.BattleMonsterStatus()
        msfile.open(os.path.dirname(self.ASFileName) + '\\ms' + os.path.basename(self.ASFileName)[2:])
        self.ChrName = None if msfile.Name == '' or msfile.Name == ' ' else msfile.Name
    except Exception:
        # the ms file is optional; fall back to generic craft names
        msfile = None
    BuiltinCraftNames = self.GetBuiltinNames()
    disasm = Disassembler(edao.edao_as_op_table, self.DiasmInstructionCallback)
    index = -1
    codeblocks = []
    blockoffsetmap = {}
    for func in self.ActionList:
        index += 1
        if func == INVALID_ACTION_OFFSET:
            codeblocks.append(CodeBlock(INVALID_ACTION_OFFSET))
            continue
        if func in blockoffsetmap:
            # several slots may point at the same code; reuse the block
            codeblocks.append(blockoffsetmap[func])
            continue
        fs.seek(func)
        data = Disassembler.DisasmData()
        data.Stream = fs
        data.GlobalLabelTable = self.GlobalLabelTable
        block = disasm.DisasmBlock2(data)
        if index >= len(BuiltinCraftNames) or BuiltinCraftNames[index] == '':
            # generic name: Craft_<hex index>_<dec index>_<hex offset>
            name = 'Craft_%X_%d_%X' % (index, index, block.Offset)
            if msfile is not None:
                craft = msfile.FindCraftByActionIndex(index)
                if craft is not None:
                    if craft.Name != '' and craft.Name != ' ':
                        name += '_' + craft.Name.replace(' ', '_').replace(' ', '_').replace('·', '')
            block.Name = name
        else:
            block.Name = BuiltinCraftNames[index]
        codeblocks.append(block)
        blockoffsetmap[func] = block
    return codeblocks
def FormatCodeBlocks(self):
    """Format every disassembled craft block into source-text lines.

    Blocks are emitted in ascending offset order, each prefixed by a
    ``def Craft_...(): pass`` marker line; duplicate offsets and empty
    (INVALID_ACTION_OFFSET) slots are skipped.
    """
    disasm = Disassembler(edao.edao_as_op_table)
    blocks = []
    blockoffsetmap = {}  # offsets already emitted (dedup)
    for block in sorted(self.CraftActions, key=lambda x: x.Offset):
        if block.Offset == INVALID_ACTION_OFFSET:
            continue
        if block.Offset in blockoffsetmap:
            continue
        blockoffsetmap[block.Offset] = True
        data = Disassembler.FormatData()
        data.Block = block
        # instructions must be in offset order for label emission
        data.Block.Instructions = sorted(data.Block.Instructions, key=lambda x: x.Offset)
        data.GlobalLabelTable = self.GlobalLabelTable
        name = GetValidLabelName(block.Name)
        if not name.startswith('Craft_'): name = 'Craft_' + name
        blocks.append(['def %s(): pass' % name])
        blocks.append(disasm.FormatCodeBlock2(data))
        #for x in disasmtbl: print('%08X' % x)
        #input()
    return blocks
def SaveToFile(self, filename):
    """Render the parsed action file as an executable Python script.

    The generated script imports ActionHelper, recreates the file via
    CreateBattleAction/CreateArtsAction and replays every craft block.
    Written with CRLF line endings and a UTF-8 BOM.

    Fix: the output file handle was opened with ``open()`` and never
    closed; it is now managed by a ``with`` block.
    """
    lines = []
    #lines.append('from %s import *' % os.path.splitext(os.path.basename(__file__))[0])
    lines.append('from ActionHelper import *')
    lines.append('')
    lines.append('SetCodePage("%s")' % edao.CODE_PAGE)
    lines.append('')
    # strip both extensions (e.g. "as00000.dat.py" -> "as00000")
    name = os.path.splitext(os.path.basename(filename))[0]
    name = os.path.splitext(name)[0]
    if self.ActionFileType == self.ActionFileType_Arts:
        lines.append('CreateArtsAction("%s")' % (name + '.dat'))
    else:
        tmp = []
        for pos in self.ChrPosFactor:
            tmp.append('(%d, %d)' % (pos.X, pos.Y))
        lines.append('CreateBattleAction("%s", (%s))' % (name + '.dat', ', '.join(tmp)))
    lines.append('')
    lines.append('AddPreloadChip((')
    index = 0
    for chip in self.PreloadChipList:
        x = ljust_cn('    "%s",' % chip.Name(), 30)
        x += ' # %02X %d' % (index, index)
        lines.append(x)
        index += 1
    lines.append('))')
    lines.append('')
    lines.append('CraftAction((')
    index = 0
    for craft in self.CraftActions:
        name = ('"%s"' % craft.Name) if craft.Offset != INVALID_ACTION_OFFSET else 'EMPTY_ACTION'
        lines.append(ljust_cn('    %s,' % name, 40) + ('# %02X %d' % (index, index)))
        index += 1
    lines.append('))')
    lines.append('')
    blocks = self.FormatCodeBlocks()
    for block in blocks:
        lines += block
    lines.append('SaveToFile()')
    lines.append('')
    # Wrap everything after the import header in a main() function so the
    # generated script runs under Try(main) error handling.
    txt = '\r\n'.join(lines)
    lines = txt.replace('\r\n', '\n').replace('\r', '\n').split('\n')
    for i in range(2, len(lines)):
        if lines[i] != '':
            lines[i] = '    %s' % lines[i]
    lines.insert(2, 'def main():')
    lines.append('Try(main)')
    lines.append('')
    if self.ChrName != None:
        lines.insert(2, '# %s' % self.ChrName)
        lines.insert(3, '')
    with open(filename, 'wb') as fs:
        # encoding an empty string as utf_8_sig emits just the UTF-8 BOM
        fs.write(''.encode('utf_8_sig'))
        fs.write('\r\n'.join(lines).encode('UTF8'))
############################################################################################
# support functions
############################################################################################
class BattleActionScriptInfoPort(BattleActionScriptInfo):
    """Write-side state used while assembling an action file back to disk."""
    def __init__(self):
        super().__init__()
        self.FileName = ''
        self.Labels = {}  # map<name, offset>
        self.DelayFixLabels = []  # list of LabelEntry; patched in SaveToFile()
        self.PrevousHandlerData = None  # in-flight HandlerData for nested instructions
        self.fs = None  # output FileStream
actionfile = None  # module-global: the BattleActionScriptInfoPort currently being assembled
def label(labelname):
    """Record the current output offset under *labelname*.

    Raises if the same name was already recorded at a different offset.
    """
    offset = actionfile.fs.tell()
    pending = actionfile.PrevousHandlerData
    if pending is not None:
        # mid-instruction: account for bytes buffered by the handler
        offset += pending.FileStream.tell()
    plog('%08X: %s' % (offset, labelname))
    known = actionfile.Labels
    if labelname in known and known[labelname] != offset:
        raise Exception('label name conflict: %s' % labelname)
    known[labelname] = offset
def getlabel(name):
    """Return the offset previously recorded for label *name* (KeyError if unknown)."""
    return actionfile.Labels[name]
def CreateBattleAction(filename, ChrPosFactorList = None, ModelFileList = None, UnknownTableList = None):
    """Begin assembling a normal battle action file (called by generated scripts).

    Opens *filename* for writing, stores the per-character position
    factors and optional model/unknown tables, and sets the file type
    from the file name.

    Fix: ``sys.argv[1]`` was read unconditionally, raising IndexError
    whenever the generated script was run without CLI arguments; both
    reads are now guarded by a length check.
    """
    if not IsTupleOrList(ChrPosFactorList):
        raise Exception('ChrPosFactorList must be list')
    global actionfile
    actionfile = BattleActionScriptInfoPort()
    start_argv = 1
    global CODE_PAGE
    cp = CODE_PAGE
    # optional CLI overrides: --cp=<codec> or --cppy=<custom codec module>
    if len(sys.argv) > 1 and sys.argv[1].startswith('--cp='):
        cp = sys.argv[1][5:]
        start_argv = 2
    elif len(sys.argv) > 1 and sys.argv[1].startswith('--cppy='):
        cppy = os.path.abspath(sys.argv[1][7:])
        ccode = importlib.machinery.SourceFileLoader(os.path.basename(cppy).split('.')[0], cppy).load_module()
        ccode.register()
        cp = ccode.get_name()
        start_argv = 2
    if cp == 'NotSet':
        cp = 'gbk'
    CODE_PAGE = cp
    edao.CODE_PAGE = cp
    edao.edao_as_op_table.CodePage = cp
    # remaining argument (if any) is the output directory
    if len(sys.argv) > start_argv:
        filename = os.path.join(sys.argv[start_argv], filename)
    actionfile.fs = fileio.FileStream()
    actionfile.fs.Open(filename, 'wb+')
    actionfile.FileName = filename
    for factor in ChrPosFactorList:
        f = CharacterPositionFactor()
        f.X = factor[0]
        f.Y = factor[1]
        actionfile.ChrPosFactor.append(f)
    if IsTupleOrList(ModelFileList):
        actionfile.ModelFileList = ModelFileList
    if IsTupleOrList(UnknownTableList):
        actionfile.UnknownTableList = UnknownTableList
    asname = os.path.basename(filename).lower()
    if asname == 'as90000.dat':
        actionfile.ActionFileType = actionfile.ActionFileType_Arts
    elif asname == 'as90001.dat':
        actionfile.ActionFileType = actionfile.ActionFileType_Item
    else:
        actionfile.ActionFileType = actionfile.ActionFileType_Normal
def CreateArtsAction(filename):
    """Begin assembling an arts action file (as90000.dat style).

    Arts files have a minimal header: just the 16-bit action list
    offset, which is fixed at 2 and written immediately.
    """
    global actionfile

    out = BattleActionScriptInfoPort()
    out.ActionFileType = BattleActionScriptInfoPort.ActionFileType_Arts
    out.ActionListOffset = 2

    stream = fileio.FileStream()
    stream.Open(filename, 'wb+')
    out.fs = stream

    actionfile = out
    stream.WriteUShort(out.ActionListOffset)
def AddPreloadChip(ChipFileList):
    """Register the chip (sprite sheet) files to preload for this script."""
    if IsTupleOrList(ChipFileList):
        actionfile.ChipFileList = list(ChipFileList)
    else:
        raise Exception('ChipFileList must be list')
def CraftAction(CraftNameList):
    """Write the file header tables and reserve the craft action list.

    For normal files this writes, in order: the preload chip table, the
    model file names, the optional unknown table, the action offset
    list (placeholders, patched later via DelayFixLabels), and the
    character position factors; finally the header offsets at byte 0
    are back-patched.  Arts/item files only get the action list.
    """
    if not IsTupleOrList(CraftNameList):
        raise Exception('CraftNameList must be list')
    actionfile.ActionList = list(CraftNameList)
    fs = actionfile.fs
    if actionfile.ActionFileType == actionfile.ActionFileType_Normal:
        # skip the 3 header ushorts (action list / pos factor / unknown)
        fs.seek(6)
        for chip in actionfile.ChipFileList:
            fs.WriteULong(ChipFileIndex(chip).Index())
        fs.WriteULong(0xFFFFFFFF)
        for model in actionfile.ModelFileList:
            fs.WriteMultiByte(model, "cp932")
            fs.WriteByte(0)
        fs.WriteByte(0)
        if len(actionfile.UnknownTableList) > 0:
            actionfile.UnknownTableOffset = fs.tell()
            for factor in actionfile.UnknownTableList:
                fs.WriteUShort(factor)
    else:
        # arts/item files: only the 16-bit action list offset precedes us
        fs.seek(2)
    actionfile.ActionListOffset = fs.tell()
    for craft in CraftNameList:
        if craft != INVALID_ACTION_OFFSET:
            # remember where to patch in the real offset once known
            actionfile.DelayFixLabels.append(LabelEntry(craft, fs.tell()))
        fs.WriteUShort(INVALID_ACTION_OFFSET)
    fs.write(b'\x00\x00')
    actionfile.ChrPosFactorOffset = fs.tell()
    for factor in actionfile.ChrPosFactor:
        fs.WriteByte(factor.X)
        fs.WriteByte(factor.Y)
    actionfile.ActionStartOffset = fs.tell()
    # back-patch the header offsets now that they are known
    fs.seek(0)
    fs.WriteUShort(actionfile.ActionListOffset)
    if actionfile.ActionFileType == actionfile.ActionFileType_Normal:
        fs.WriteUShort(actionfile.ChrPosFactorOffset)
        fs.WriteUShort(actionfile.UnknownTableOffset)
    fs.seek(actionfile.ActionStartOffset)
# Generate one module-level assembler function per opcode so generated
# scripts can call instructions by mnemonic.  Each function forwards to
# OpCodeHandler with its opcode baked into the generated source, so
# there is no late-binding problem.
for op, inst in edao.edao_as_op_table.items():
    func = []
    func.append('def %s(*args):' % inst.OpName)
    func.append('    return OpCodeHandler(0x%02X, args)' % inst.OpCode)
    func.append('')
    exec('\r\n'.join(func))
    # also expose a generic AS_xx alias when the mnemonic differs
    opx = 'AS_%02X' % inst.OpCode
    if inst.OpName != opx:
        func[0] = 'def %s(*args):' % opx
        exec('\r\n'.join(func))
def AssembleForExec(expr):
    """Evaluate *expr* in this module's scope (used by nested operands)."""
    # NOTE(review): eval() on script-provided text -- acceptable only
    # because input scripts are trusted build inputs, not user data.
    return eval(expr)
def OpCodeHandler(op, args):
    """Assemble one instruction and append it to the output file.

    Instructions can nest (an operand may itself be an instruction
    call); nested calls reuse the outermost call's buffer via
    actionfile.PrevousHandlerData and return without flushing.  Only
    the outermost call records labels and writes the buffered bytes.
    """
    entry = edao.edao_as_op_table[op]
    data = HandlerData(HANDLER_REASON_ASSEMBLE)
    data.Instruction = Instruction(op)
    data.Arguments = list(args)
    data.TableEntry = entry
    data.Assemble = AssembleForExec
    data.Instruction.OperandFormat = entry.Operand
    # nested call if an outer handler is already buffering output
    UsePrevous = bool(actionfile.PrevousHandlerData != None)
    if UsePrevous:
        data.FileStream = actionfile.PrevousHandlerData.FileStream
        data.Instruction.Labels = actionfile.PrevousHandlerData.Instruction.Labels
    else:
        data.FileStream = fileio.FileStream(b'')
        actionfile.PrevousHandlerData = data
    #print(entry.OpName)
    inst = OpCodeHandlerPrivate(data)
    if UsePrevous:
        # inner instruction: leave flushing to the outermost call
        return inst
    actionfile.PrevousHandlerData = None
    # translate buffer-relative label offsets into file offsets
    offset = actionfile.fs.tell()
    for lb in inst.Labels:
        actionfile.DelayFixLabels.append(LabelEntry(lb.Label, lb.Offset + offset))
    data.FileStream.seek(0)
    actionfile.fs.write(data.FileStream.read())
    return inst
def SaveToFile():
    """Finalize the assembled file: patch every deferred label offset."""
    fs = actionfile.fs
    for lb in actionfile.DelayFixLabels:
        fs.seek(lb.Offset)
        fs.WriteUShort(getlabel(lb.Label))
'''
    if has_target: jump label
    Jc(0x16, 0x1, 0x0, "loc_A4A")
'''
def procfile(file, cp=None):
    """Disassemble one as*.dat file into a .py script next to it.

    Returns the output file path.  When *cp* is given, the code page is
    applied before disassembly.
    """
    if cp:
        edao.CODE_PAGE = cp
        # NOTE(review): the original only configured edao_op_table here,
        # but every path in this module disassembles with
        # edao_as_op_table (see DisassembleCraftActions / __main__).
        # Configure both to keep the old effect and fix the as-table.
        edao.edao_op_table.CodePage = cp
        edao.edao_as_op_table.CodePage = cp
    console.setTitle(os.path.basename(file))
    #print('disasm %s' % file)
    asdat = BattleActionScriptInfo()
    asdat.open(file)
    outfile = os.path.splitext(file)[0] + ".py"
    plog('SAVE %s' % outfile)
    asdat.SaveToFile(outfile)
    return outfile
if __name__ == '__main__':
    # iterlib.forEachFileMP(procfile, sys.argv[1:], 'as*.dat')
    # Usage: [--cp=<codec> | --cppy=<codec module>] <dirs/files...>
    # Fix: sys.argv[1] was read without checking argc, raising
    # IndexError when the tool was started with no arguments.
    cp = 'gbk'
    start_argv = 1
    if len(sys.argv) > 1 and sys.argv[1].startswith('--cp='):
        cp = sys.argv[1][5:]
        start_argv = 2
    elif len(sys.argv) > 1 and sys.argv[1].startswith('--cppy='):
        cppy = os.path.abspath(sys.argv[1][7:])
        ccode = importlib.machinery.SourceFileLoader(os.path.basename(cppy).split('.')[0], cppy).load_module()
        ccode.register()
        cp = ccode.get_name()
        start_argv = 2
    edao.CODE_PAGE = cp
    edao.edao_as_op_table.CodePage = cp
    files = iterlib.forEachGetFiles(sys.argv[start_argv:], 'as*.dat')
    #Log.OpenLog(sys.argv[start_argv] + '\..\log.txt')
    for file in files:
        plog('START %s' % file)
        procfile(file)
        plog('FINISHED %s' % file)
    #Log.CloseLog()
| 31.056748 | 125 | 0.521754 | from Assembler.Assembler2s import *
from Base.EDAOBase import *
import Instruction.ActionOpTableEDAO as edao
import BattleMonsterStatus as MSFile
INVALID_ACTION_OFFSET = 0xFFFF  # sentinel: action slot has no code block
EMPTY_ACTION = INVALID_ACTION_OFFSET  # public alias used by generated scripts
class CharacterPositionFactor:
    """Per-character (X, Y) position factor pair from the file header.

    When *fs* is omitted, X/Y are left unset and are expected to be
    filled in by the caller (see CreateBattleAction).

    Fix: ``fs == None`` replaced with the identity test ``fs is None``.
    """
    def __init__(self, fs = None):
        if fs is None:
            return
        self.X = fs.ReadByte()
        self.Y = fs.ReadByte()
class BattleActionScriptInfo:
    """Parsed representation of a battle action script (as*.dat)."""
    # file-type discriminator values (derived from the file name in open())
    ActionFileType_Normal = 0
    ActionFileType_Arts = 1
    ActionFileType_Item = 2
    def __init__(self):
        # header offsets (filled in by open())
        self.ChrPosFactorOffset = 0
        self.ActionListOffset = 0
        self.UnknownTableOffset = 0
        self.ActionStartOffset = 0
        # header tables
        self.PreloadChipList = []
        self.ModelFileList = []
        self.UnknownTableList = []
        self.ActionList = []  # 16-bit offset per action slot
        self.ChrPosFactor = []  # CharacterPositionFactor entries
        self.CraftActions = []  # disassembled CodeBlock per slot
        self.GlobalLabelTable = {}
        self.ChrName = None  # character name from the ms*.dat file, if any
        self.ASFileName = ''
        self.ActionFileType = self.ActionFileType_Normal
def open(self, asname):
fs = fileio.FileStream()
fs.Open(asname)
self.ASFileName = asname
asname = os.path.basename(asname).lower()
if asname == 'as90000.dat':
self.ActionFileType = self.ActionFileType_Arts
elif asname == 'as90001.dat':
self.ActionFileType = self.ActionFileType_Item
else:
self.ActionFileType = self.ActionFileType_Normal
self.ActionListOffset = fs.ReadUShort()
if self.ActionFileType == self.ActionFileType_Normal:
self.ChrPosFactorOffset = fs.ReadUShort()
self.UnknownTableOffset = fs.ReadUShort()
while True:
index = fs.ReadULong()
if index == 0xFFFFFFFF:
break
self.PreloadChipList.append(ChipFileIndex(index))
fs.seek(self.ChrPosFactorOffset)
for i in range(8):
self.ChrPosFactor.append(CharacterPositionFactor(fs))
minoffset = 0xFFFFFFFF
fs.seek(self.ActionListOffset)
while True:
if fs.tell() >= minoffset:
break
offset = fs.ReadUShort()
if offset == 0:
break
minoffset = min(minoffset, offset)
self.ActionList.append(offset)
if len(self.ActionList) == 0:
raise Exception('action number == 0')
self.CraftActions = self.DisassembleCraftActions(fs)
return
for i in range(0x69, fs.Length):
if i not in offsetlist:
print('%X' % i)
input()
def GetBuiltinNames(self):
if self.ActionFileType == self.ActionFileType_Arts:
BuiltinArtsNames = []
try:
offsetlist = []
t_magic = os.path.abspath(os.path.dirname(os.path.abspath(self.ASFileName)) + '\\..\\..\\text\\t_magic._dt')
if not os.path.exists(t_magic):
if t_magic.endswith('\\patch\\text\\t_magic._dt'):
t_magic = t_magic.replace('\\patch\\text\\t_magic._dt', '\\data\\text\\t_magic._dt')
elif t_magic.endswith('\\patch2\\text\\t_magic._dt'):
t_magic = t_magic.replace('\\patch2\\text\\t_magic._dt', '\\data\\text\\t_magic._dt')
magic = fileio.FileStream()
magic.Open(t_magic)
for i in range(len(self.ActionList)):
offsetlist.append(magic.ReadUShort())
BuiltinArtsNames.append('')
NameConflict = {}
for i in range(len(offsetlist)):
offset = offsetlist[i]
if i != len(offsetlist) - 1 and offsetlist[i + 1] - offset < 0x1C:
continue
magic.seek(offset + 0x18)
offset = magic.ReadUShort()
if offset == 0:
continue
magic.seek(offset)
name = magic.ReadMultiByte().replace(' ', '')
if name == '':
continue
if name not in NameConflict:
NameConflict[name] = 1
else:
NameConflict[name] += 1
name += '_%d' % NameConflict[name]
BuiltinArtsNames[i] = name
except:
BuiltinArtsNames = []
return BuiltinArtsNames
elif self.ActionFileType == self.ActionFileType_Normal:
BuiltinCraftNames = \
[
'SysCraft_Init',
'SysCraft_Stand',
'SysCraft_Move',
'SysCraft_UnderAttack',
'SysCraft_Dead',
'SysCraft_NormalAttack',
'SysCraft_ArtsAria',
'SysCraft_ArtsCast',
'SysCraft_Win',
'SysCraft_EnterBattle',
'SysCraft_UseItem',
'SysCraft_Stun',
'SysCraft_Unknown2',
'SysCraft_Reserve1',
'SysCraft_Reserve2',
'SysCraft_Counter',
'',
'',
'',
'',
'',
'',
'',
'',
'',
'',
'',
'',
'',
'',
'SysCraft_TeamRushInit',
'SysCraft_TeamRushAction',
]
return BuiltinCraftNames
return []
def DiasmInstructionCallback(self, data):
return
def DisassembleCraftActions(self, fs):
CraftNameMap = {}
msfile = None
try:
msfile = MSFile.BattleMonsterStatus()
msfile.open(os.path.dirname(self.ASFileName) + '\\ms' + os.path.basename(self.ASFileName)[2:])
self.ChrName = None if msfile.Name == '' or msfile.Name == ' ' else msfile.Name
except:
msfile = None
BuiltinCraftNames = self.GetBuiltinNames()
disasm = Disassembler(edao.edao_as_op_table, self.DiasmInstructionCallback)
index = -1
codeblocks = []
blockoffsetmap = {}
for func in self.ActionList:
index += 1
if func == INVALID_ACTION_OFFSET:
codeblocks.append(CodeBlock(INVALID_ACTION_OFFSET))
continue
if func in blockoffsetmap:
codeblocks.append(blockoffsetmap[func])
continue
fs.seek(func)
data = Disassembler.DisasmData()
data.Stream = fs
data.GlobalLabelTable = self.GlobalLabelTable
block = disasm.DisasmBlock2(data)
if index >= len(BuiltinCraftNames) or BuiltinCraftNames[index] == '':
name = 'Craft_%X_%d_%X' % (index, index, block.Offset)
if msfile != None:
craft = msfile.FindCraftByActionIndex(index)
if craft != None:
if craft.Name != '' and craft.Name != ' ':
name += '_' + craft.Name.replace(' ', '_').replace(' ', '_').replace('·', '')
block.Name = name
else:
block.Name = BuiltinCraftNames[index]
codeblocks.append(block)
blockoffsetmap[func] = block
return codeblocks
def FormatCodeBlocks(self):
disasm = Disassembler(edao.edao_as_op_table)
blocks = []
blockoffsetmap = {}
for block in sorted(self.CraftActions, key=lambda x: x.Offset):
if block.Offset == INVALID_ACTION_OFFSET:
continue
if block.Offset in blockoffsetmap:
continue
blockoffsetmap[block.Offset] = True
data = Disassembler.FormatData()
data.Block = block
data.Block.Instructions = sorted(data.Block.Instructions, key=lambda x: x.Offset)
data.GlobalLabelTable = self.GlobalLabelTable
name = GetValidLabelName(block.Name)
if not name.startswith('Craft_'): name = 'Craft_' + name
blocks.append(['def %s(): pass' % name])
blocks.append(disasm.FormatCodeBlock2(data))
return blocks
def SaveToFile(self, filename):
lines = []
lines.append('from ActionHelper import *')
lines.append('')
lines.append('SetCodePage("%s")' % edao.CODE_PAGE)
lines.append('')
name = os.path.splitext(os.path.basename(filename))[0]
name = os.path.splitext(name)[0]
if self.ActionFileType == self.ActionFileType_Arts:
lines.append('CreateArtsAction("%s")' % (name + '.dat'))
else:
tmp = []
for pos in self.ChrPosFactor:
tmp.append('(%d, %d)' % (pos.X, pos.Y))
lines.append('CreateBattleAction("%s", (%s))' % (name + '.dat', ', '.join(tmp)))
lines.append('')
lines.append('AddPreloadChip((')
index = 0
for chip in self.PreloadChipList:
x = ljust_cn(' "%s",' % chip.Name(), 30)
x += ' # %02X %d' % (index, index)
lines.append(x)
index += 1
lines.append('))')
lines.append('')
lines.append('CraftAction((')
index = 0
for craft in self.CraftActions:
name = ('"%s"'% craft.Name) if craft.Offset != INVALID_ACTION_OFFSET else 'EMPTY_ACTION'
lines.append( ljust_cn(' %s,' % name, 40) + ('# %02X %d' % (index, index)))
index += 1
lines.append('))')
lines.append('')
blocks = self.FormatCodeBlocks()
for block in blocks:
lines += block
lines.append('SaveToFile()')
lines.append('')
txt = '\r\n'.join(lines)
lines = txt.replace('\r\n', '\n').replace('\r', '\n').split('\n')
for i in range(2, len(lines)):
if lines[i] != '':
lines[i] = ' %s' % lines[i]
lines.insert(2, 'def main():')
lines.append('Try(main)')
lines.append('')
if self.ChrName != None:
lines.insert(2, '# %s' % self.ChrName)
lines.insert(3, '')
fs = open(filename, 'wb')
fs.write(''.encode('utf_8_sig'))
fs.write('\r\n'.join(lines).encode('UTF8'))
| true | true |
f7319b025a15cca212a3758b38a253304ad2ede4 | 2,382 | py | Python | src/streamlink/plugins/huya.py | melmorabity/streamlink | 24c59a23103922977991acc28741a323d8efa7a1 | [
"BSD-2-Clause"
] | null | null | null | src/streamlink/plugins/huya.py | melmorabity/streamlink | 24c59a23103922977991acc28741a323d8efa7a1 | [
"BSD-2-Clause"
] | null | null | null | src/streamlink/plugins/huya.py | melmorabity/streamlink | 24c59a23103922977991acc28741a323d8efa7a1 | [
"BSD-2-Clause"
] | null | null | null | import base64
import logging
import re
from html import unescape as html_unescape
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream import HTTPStream
from streamlink.utils.parse import parse_json
log = logging.getLogger(__name__)
@pluginmatcher(re.compile(
    r'https?://(?:www\.)?huya\.com/(?P<channel>[^/]+)'
))
class Huya(Plugin):
    """Streamlink plugin for huya.com live channel pages."""

    # the channel page embeds a base64-encoded JSON blob: "stream": "<b64>"
    _re_stream = re.compile(r'"stream"\s?:\s?"([^"]+)"')
    _schema_data = validate.Schema(
        {
            # 'status': int,
            # 'msg': validate.any(None, str),
            'data': [{
                'gameStreamInfoList': [{
                    'sCdnType': str,
                    'sStreamName': str,
                    'sFlvUrl': str,
                    'sFlvUrlSuffix': str,
                    # anti-code is HTML-escaped inside the blob; unescape it
                    # (passing html_unescape directly -- the lambda wrapper
                    # in the original added nothing)
                    'sFlvAntiCode': validate.all(str, validate.transform(html_unescape)),
                    # 'sHlsUrl': str,
                    # 'sHlsUrlSuffix': str,
                    # 'sHlsAntiCode': validate.all(str, validate.transform(html_unescape)),
                    validate.optional('iIsMultiStream'): int,
                    'iPCPriorityRate': int,
                }]
            }],
            # 'vMultiStreamInfo': [{
            #     'sDisplayName': str,
            #     'iBitRate': int,
            # }],
        },
        validate.get('data'),
        validate.get(0),
        validate.get('gameStreamInfoList'),
    )
    # per-stream-name weights collected while parsing (_get_streams)
    QUALITY_WEIGHTS = {}

    @classmethod
    def stream_weight(cls, key):
        """Rank streams by the priority rates gathered at parse time."""
        weight = cls.QUALITY_WEIGHTS.get(key)
        if weight:
            return weight, 'huya'

        return Plugin.stream_weight(key)

    def _get_streams(self):
        """Yield one FLV HTTPStream per CDN listed in the page data."""
        res = self.session.http.get(self.url)
        data = self._re_stream.search(res.text)
        if not data:
            return

        data = parse_json(base64.b64decode(data.group(1)), schema=self._schema_data)

        for info in data:
            log.trace(f'{info!r}')
            flv_url = f'{info["sFlvUrl"]}/{info["sStreamName"]}.{info["sFlvUrlSuffix"]}?{info["sFlvAntiCode"]}'
            name = f'source_{info["sCdnType"].lower()}'
            self.QUALITY_WEIGHTS[name] = info['iPCPriorityRate']
            yield name, HTTPStream(self.session, flv_url)

        log.debug(f'QUALITY_WEIGHTS: {self.QUALITY_WEIGHTS!r}')
| 31.76 | 111 | 0.554156 | import base64
import logging
import re
from html import unescape as html_unescape
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream import HTTPStream
from streamlink.utils.parse import parse_json
log = logging.getLogger(__name__)
@pluginmatcher(re.compile(
r'https?://(?:www\.)?huya\.com/(?P<channel>[^/]+)'
))
class Huya(Plugin):
_re_stream = re.compile(r'"stream"\s?:\s?"([^"]+)"')
_schema_data = validate.Schema(
{
# 'status': int,
# 'msg': validate.any(None, str),
'data': [{
'gameStreamInfoList': [{
'sCdnType': str,
'sStreamName': str,
'sFlvUrl': str,
'sFlvUrlSuffix': str,
'sFlvAntiCode': validate.all(str, validate.transform(lambda v: html_unescape(v))),
# 'sHlsUrl': str,
# 'sHlsUrlSuffix': str,
# 'sHlsAntiCode': validate.all(str, validate.transform(lambda v: html_unescape(v))),
validate.optional('iIsMultiStream'): int,
'iPCPriorityRate': int,
}]
}],
# 'vMultiStreamInfo': [{
# 'sDisplayName': str,
# 'iBitRate': int,
# }],
},
validate.get('data'),
validate.get(0),
validate.get('gameStreamInfoList'),
)
QUALITY_WEIGHTS = {}
@classmethod
def stream_weight(cls, key):
weight = cls.QUALITY_WEIGHTS.get(key)
if weight:
return weight, 'huya'
return Plugin.stream_weight(key)
def _get_streams(self):
res = self.session.http.get(self.url)
data = self._re_stream.search(res.text)
if not data:
return
data = parse_json(base64.b64decode(data.group(1)), schema=self._schema_data)
for info in data:
log.trace(f'{info!r}')
flv_url = f'{info["sFlvUrl"]}/{info["sStreamName"]}.{info["sFlvUrlSuffix"]}?{info["sFlvAntiCode"]}'
name = f'source_{info["sCdnType"].lower()}'
self.QUALITY_WEIGHTS[name] = info['iPCPriorityRate']
yield name, HTTPStream(self.session, flv_url)
log.debug(f'QUALITY_WEIGHTS: {self.QUALITY_WEIGHTS!r}')
__plugin__ = Huya
| true | true |
f7319b3ce9b7c689f47a1cdeca8dd710ca242644 | 6,175 | py | Python | salt/netapi/rest_cherrypy/event_processor.py | styro/salt | d087d94dca02ca8bf53a6c21b94944bc7957522c | [
"Apache-2.0"
] | 3 | 2016-09-03T06:26:42.000Z | 2019-06-30T13:04:53.000Z | salt/netapi/rest_cherrypy/event_processor.py | styro/salt | d087d94dca02ca8bf53a6c21b94944bc7957522c | [
"Apache-2.0"
] | null | null | null | salt/netapi/rest_cherrypy/event_processor.py | styro/salt | d087d94dca02ca8bf53a6c21b94944bc7957522c | [
"Apache-2.0"
] | 1 | 2021-12-02T15:30:00.000Z | 2021-12-02T15:30:00.000Z | # encoding: utf-8
from __future__ import absolute_import
import json
import logging
import salt.ext.six as six
import salt.netapi
logger = logging.getLogger(__name__)
class SaltInfo(object):
    '''
    Class to handle processing and publishing of "real time" Salt updates.
    '''
def __init__(self, handler):
    '''
    handler is expected to be the server side end of a websocket
    connection (anything exposing send(data, binary)).
    '''
    self.handler = handler

    # These represent a "real time" view into Salt's jobs.
    self.jobs = {}  # jid -> job dict (minions, state, fun, ...)

    # This represents a "real time" view of minions connected to Salt.
    self.minions = {}  # minion id -> info dict (e.g. grains)
def publish_minions(self):
    '''
    Publishes minions as a list of dicts.
    '''
    payload = []
    for mid, info in self.minions.items():
        entry = dict(info)
        entry['id'] = mid
        payload.append(entry)

    self.handler.send(json.dumps({'minions': payload}), False)
def publish(self, key, data):
'''
Publishes the data to the event stream.
'''
publish_data = {key: data}
self.handler.send(json.dumps(publish_data), False)
def process_minion_update(self, event_data):
    '''
    Associate grains data with a minion and publish minion update
    '''
    # tag format: salt/job/<jid>/ret/<minion id>
    _, _, _, _, mid = event_data['tag'].split('/')
    record = self.minions.setdefault(mid, {})
    record['grains'] = event_data['data']['return']
    self.publish_minions()
def process_ret_job_event(self, event_data):
'''
Process a /ret event returned by Salt for a particular minion.
These events contain the returned results from a particular execution.
'''
tag = event_data['tag']
event_info = event_data['data']
_, _, jid, _, mid = tag.split('/')
job = self.jobs.setdefault(jid, {})
minion = job.setdefault('minions', {}).setdefault(mid, {})
minion.update({'return': event_info['return']})
minion.update({'retcode': event_info['retcode']})
minion.update({'success': event_info['success']})
job_complete = all([minion['success'] for mid, minion
in six.iteritems(job['minions'])])
if job_complete:
job['state'] = 'complete'
self.publish('jobs', self.jobs)
def process_new_job_event(self, event_data):
'''
Creates a new job with properties from the event data
like jid, function, args, timestamp.
Also sets the initial state to started.
Minions that are participating in this job are also noted.
'''
job = None
tag = event_data['tag']
event_info = event_data['data']
minions = {}
for mid in event_info['minions']:
minions[mid] = {'success': False}
job = {
'jid': event_info['jid'],
'start_time': event_info['_stamp'],
'minions': minions, # is a dictionary keyed by mids
'fun': event_info['fun'],
'tgt': event_info['tgt'],
'tgt_type': event_info['tgt_type'],
'state': 'running',
}
self.jobs[event_info['jid']] = job
self.publish('jobs', self.jobs)
def process_key_event(self, event_data):
    '''
    Tag: salt/key
    Data:
    {'_stamp': '2014-05-20T22:45:04.345583',
    'act': 'delete',
    'id': 'compute.home',
    'result': True}

    Fix: removed the unused ``tag`` local.
    '''
    event_info = event_data['data']
    # only accept/delete mutate the view; other acts (e.g. reject) are
    # ignored, matching the original behavior
    if event_info['act'] == 'delete':
        self.minions.pop(event_info['id'], None)
    elif event_info['act'] == 'accept':
        self.minions.setdefault(event_info['id'], {})
    self.publish_minions()
def process_presence_events(self, event_data, token, opts):
    '''
    Check if any minions have connected or dropped.
    Send a message to the client if they have.
    '''
    tag = event_data['tag']  # NOTE(review): unused; kept as-is
    event_info = event_data['data']

    minions_detected = event_info['present']
    curr_minions = self.minions.keys()

    changed = False

    # check if any connections were dropped
    dropped_minions = set(curr_minions) - set(minions_detected)

    for minion in dropped_minions:
        changed = True
        self.minions.pop(minion, None)

    # check if any new connections were made
    new_minions = set(minions_detected) - set(curr_minions)

    tgt = ','.join(new_minions)

    if tgt:
        changed = True
        # fire an async grains.items run at the new minions; the /ret
        # events it produces feed process_minion_update() later
        client = salt.netapi.NetapiClient(opts)
        client.run(
            {
                'fun': 'grains.items',
                'tgt': tgt,
                'expr_type': 'list',
                'mode': 'client',
                'client': 'local',
                'async': 'local_async',
                'token': token,
            })

    if changed:
        self.publish_minions()
def process(self, salt_data, token, opts):
    '''
    Process events and publish data.

    Dispatches on the event tag: salt/job/<jid>/new, salt/job/<jid>/ret/...,
    salt/key, and salt/presence events are handled; everything else is
    ignored.
    '''
    parts = salt_data['tag'].split('/')
    if len(parts) < 2:
        return

    # TBD: Simplify these conditional expressions
    if parts[1] == 'job':
        if parts[3] == 'new':
            self.process_new_job_event(salt_data)
            # a new grains.items run means a fresh minion sweep is
            # starting; reset the cached minion view
            if salt_data['data']['fun'] == 'grains.items':
                self.minions = {}
        elif parts[3] == 'ret':
            self.process_ret_job_event(salt_data)
            if salt_data['data']['fun'] == 'grains.items':
                self.process_minion_update(salt_data)
    if parts[1] == 'key':
        self.process_key_event(salt_data)
    if parts[1] == 'presence':
        self.process_presence_events(salt_data, token, opts)
| 29.830918 | 78 | 0.545263 |
from __future__ import absolute_import
import json
import logging
import salt.ext.six as six
import salt.netapi
logger = logging.getLogger(__name__)
class SaltInfo(object):
def __init__(self, handler):
self.handler = handler
self.jobs = {}
# This represents a "real time" view of minions connected to Salt.
self.minions = {}
def publish_minions(self):
minions = []
for minion, minion_info in six.iteritems(self.minions):
curr_minion = {}
curr_minion.update(minion_info)
curr_minion.update({'id': minion})
minions.append(curr_minion)
ret = {'minions': minions}
self.handler.send(json.dumps(ret), False)
def publish(self, key, data):
publish_data = {key: data}
self.handler.send(json.dumps(publish_data), False)
def process_minion_update(self, event_data):
tag = event_data['tag']
event_info = event_data['data']
_, _, _, _, mid = tag.split('/')
if not self.minions.get(mid, None):
self.minions[mid] = {}
minion = self.minions[mid]
minion.update({'grains': event_info['return']})
self.publish_minions()
def process_ret_job_event(self, event_data):
tag = event_data['tag']
event_info = event_data['data']
_, _, jid, _, mid = tag.split('/')
job = self.jobs.setdefault(jid, {})
minion = job.setdefault('minions', {}).setdefault(mid, {})
minion.update({'return': event_info['return']})
minion.update({'retcode': event_info['retcode']})
minion.update({'success': event_info['success']})
job_complete = all([minion['success'] for mid, minion
in six.iteritems(job['minions'])])
if job_complete:
job['state'] = 'complete'
self.publish('jobs', self.jobs)
def process_new_job_event(self, event_data):
job = None
tag = event_data['tag']
event_info = event_data['data']
minions = {}
for mid in event_info['minions']:
minions[mid] = {'success': False}
job = {
'jid': event_info['jid'],
'start_time': event_info['_stamp'],
'minions': minions, # is a dictionary keyed by mids
'fun': event_info['fun'],
'tgt': event_info['tgt'],
'tgt_type': event_info['tgt_type'],
'state': 'running',
}
self.jobs[event_info['jid']] = job
self.publish('jobs', self.jobs)
def process_key_event(self, event_data):
tag = event_data['tag']
event_info = event_data['data']
if event_info['act'] == 'delete':
self.minions.pop(event_info['id'], None)
elif event_info['act'] == 'accept':
self.minions.setdefault(event_info['id'], {})
self.publish_minions()
def process_presence_events(self, event_data, token, opts):
tag = event_data['tag']
event_info = event_data['data']
minions_detected = event_info['present']
curr_minions = self.minions.keys()
changed = False
# check if any connections were dropped
dropped_minions = set(curr_minions) - set(minions_detected)
for minion in dropped_minions:
changed = True
self.minions.pop(minion, None)
# check if any new connections were made
new_minions = set(minions_detected) - set(curr_minions)
tgt = ','.join(new_minions)
if tgt:
changed = True
client = salt.netapi.NetapiClient(opts)
client.run(
{
'fun': 'grains.items',
'tgt': tgt,
'expr_type': 'list',
'mode': 'client',
'client': 'local',
'async': 'local_async',
'token': token,
})
if changed:
self.publish_minions()
def process(self, salt_data, token, opts):
parts = salt_data['tag'].split('/')
if len(parts) < 2:
return
# TBD: Simplify these conditional expressions
if parts[1] == 'job':
if parts[3] == 'new':
self.process_new_job_event(salt_data)
if salt_data['data']['fun'] == 'grains.items':
self.minions = {}
elif parts[3] == 'ret':
self.process_ret_job_event(salt_data)
if salt_data['data']['fun'] == 'grains.items':
self.process_minion_update(salt_data)
if parts[1] == 'key':
self.process_key_event(salt_data)
if parts[1] == 'presence':
self.process_presence_events(salt_data, token, opts)
| true | true |
f7319b609eb62f710cfd393ed8f31297d6216369 | 2,636 | py | Python | equality/wallet/puzzles/prefarm/spend_prefarm.py | grayfallstown/equality-blockchain | 019425b703f6b013e441481ac43389a80415f2f1 | [
"Apache-2.0"
] | 10 | 2021-07-04T15:14:12.000Z | 2021-10-17T14:52:56.000Z | equality/wallet/puzzles/prefarm/spend_prefarm.py | grayfallstown/equality-blockchain | 019425b703f6b013e441481ac43389a80415f2f1 | [
"Apache-2.0"
] | 11 | 2021-07-04T19:31:36.000Z | 2022-01-11T02:46:23.000Z | equality/wallet/puzzles/prefarm/spend_prefarm.py | grayfallstown/equality-blockchain | 019425b703f6b013e441481ac43389a80415f2f1 | [
"Apache-2.0"
] | 11 | 2021-07-04T21:49:17.000Z | 2021-10-04T17:45:38.000Z | import asyncio
from blspy import G2Element
from clvm_tools import binutils
from equality.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from equality.rpc.full_node_rpc_client import FullNodeRpcClient
from equality.types.blockchain_format.program import Program
from equality.types.coin_solution import CoinSolution
from equality.types.spend_bundle import SpendBundle
from equality.util.bech32m import decode_puzzle_hash
from equality.util.config import load_config
from equality.util.default_root import DEFAULT_ROOT_PATH
from equality.util.ints import uint32, uint16
async def main() -> None:
    """Split each half of the two prefarm coins to two hard-coded addresses.

    Connects to a local full node over RPC, fetches the farmer/pool prefarm
    reward coins incorporated in block 1, builds spend bundles paying half of
    each coin to two fixed xeq addresses, and pushes the farmer bundle.
    """
    rpc_port: uint16 = uint16(8555)
    self_hostname = "localhost"
    path = DEFAULT_ROOT_PATH
    config = load_config(path, "config.yaml")
    client = await FullNodeRpcClient.create(self_hostname, rpc_port, path, config)
    try:
        # Block 1 incorporates the prefarm reward claims: index 1 = farmer, 0 = pool.
        farmer_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[1]
        pool_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[0]
        # Each destination receives half of the corresponding prefarm coin.
        pool_amounts = int(calculate_pool_reward(uint32(0)) / 2)
        farmer_amounts = int(calculate_base_farmer_reward(uint32(0)) / 2)
        print(farmer_prefarm.amount, farmer_amounts)
        # Sanity-check the computed halves against the on-chain coin amounts.
        assert farmer_amounts == farmer_prefarm.amount // 2
        assert pool_amounts == pool_prefarm.amount // 2
        address1 = "xeq1rdatypul5c642jkeh4yp933zu3hw8vv8tfup8ta6zfampnyhjnusxdgns6" # Key 1
        address2 = "xeq1duvy5ur5eyj7lp5geetfg84cj2d7xgpxt7pya3lr2y6ke3696w9qvda66e" # Key 2
        ph1 = decode_puzzle_hash(address1)
        ph2 = decode_puzzle_hash(address2)
        # CLVM programs creating two coins (opcode 51 = CREATE_COIN) per spend.
        p_farmer_2 = Program.to(
            binutils.assemble(f"(q . ((51 0x{ph1.hex()} {farmer_amounts}) (51 0x{ph2.hex()} {farmer_amounts})))")
        )
        p_pool_2 = Program.to(
            binutils.assemble(f"(q . ((51 0x{ph1.hex()} {pool_amounts}) (51 0x{ph2.hex()} {pool_amounts})))")
        )
        p_solution = Program.to(binutils.assemble("()"))
        # Empty G2Element: these spends carry no aggregated signature.
        sb_farmer = SpendBundle([CoinSolution(farmer_prefarm, p_farmer_2, p_solution)], G2Element())
        sb_pool = SpendBundle([CoinSolution(pool_prefarm, p_pool_2, p_solution)], G2Element())
        print(sb_pool, sb_farmer)
        # Only the farmer bundle is pushed; the pool push is left disabled.
        res = await client.push_tx(sb_farmer)
        # res = await client.push_tx(sb_pool)
        print(res)
        # Show coin records (including spent ones) for both prefarm puzzle hashes.
        up = await client.get_coin_records_by_puzzle_hash(farmer_prefarm.puzzle_hash, True)
        uf = await client.get_coin_records_by_puzzle_hash(pool_prefarm.puzzle_hash, True)
        print(up)
        print(uf)
    finally:
        client.close()
# Script entry point: runs unconditionally on import/execution.
asyncio.run(main())
| 41.1875 | 113 | 0.725721 | import asyncio
from blspy import G2Element
from clvm_tools import binutils
from equality.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
from equality.rpc.full_node_rpc_client import FullNodeRpcClient
from equality.types.blockchain_format.program import Program
from equality.types.coin_solution import CoinSolution
from equality.types.spend_bundle import SpendBundle
from equality.util.bech32m import decode_puzzle_hash
from equality.util.config import load_config
from equality.util.default_root import DEFAULT_ROOT_PATH
from equality.util.ints import uint32, uint16
async def main() -> None:
    """Split each half of the two prefarm coins to two hard-coded addresses.

    Connects to a local full node over RPC, fetches the farmer/pool prefarm
    reward coins incorporated in block 1, builds spend bundles paying half of
    each coin to two fixed xeq addresses, and pushes the farmer bundle.
    """
    rpc_port: uint16 = uint16(8555)
    self_hostname = "localhost"
    path = DEFAULT_ROOT_PATH
    config = load_config(path, "config.yaml")
    client = await FullNodeRpcClient.create(self_hostname, rpc_port, path, config)
    try:
        # Block 1 incorporates the prefarm reward claims: index 1 = farmer, 0 = pool.
        farmer_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[1]
        pool_prefarm = (await client.get_block_record_by_height(1)).reward_claims_incorporated[0]
        # Each destination receives half of the corresponding prefarm coin.
        pool_amounts = int(calculate_pool_reward(uint32(0)) / 2)
        farmer_amounts = int(calculate_base_farmer_reward(uint32(0)) / 2)
        print(farmer_prefarm.amount, farmer_amounts)
        # Sanity-check the computed halves against the on-chain coin amounts.
        assert farmer_amounts == farmer_prefarm.amount // 2
        assert pool_amounts == pool_prefarm.amount // 2
        address1 = "xeq1rdatypul5c642jkeh4yp933zu3hw8vv8tfup8ta6zfampnyhjnusxdgns6"
        address2 = "xeq1duvy5ur5eyj7lp5geetfg84cj2d7xgpxt7pya3lr2y6ke3696w9qvda66e"
        ph1 = decode_puzzle_hash(address1)
        ph2 = decode_puzzle_hash(address2)
        # CLVM programs creating two coins (opcode 51 = CREATE_COIN) per spend.
        p_farmer_2 = Program.to(
            binutils.assemble(f"(q . ((51 0x{ph1.hex()} {farmer_amounts}) (51 0x{ph2.hex()} {farmer_amounts})))")
        )
        p_pool_2 = Program.to(
            binutils.assemble(f"(q . ((51 0x{ph1.hex()} {pool_amounts}) (51 0x{ph2.hex()} {pool_amounts})))")
        )
        p_solution = Program.to(binutils.assemble("()"))
        # Empty G2Element: these spends carry no aggregated signature.
        sb_farmer = SpendBundle([CoinSolution(farmer_prefarm, p_farmer_2, p_solution)], G2Element())
        sb_pool = SpendBundle([CoinSolution(pool_prefarm, p_pool_2, p_solution)], G2Element())
        print(sb_pool, sb_farmer)
        # Only the farmer bundle is pushed here; sb_pool is built but not sent.
        res = await client.push_tx(sb_farmer)
        print(res)
        # Show coin records (including spent ones) for both prefarm puzzle hashes.
        up = await client.get_coin_records_by_puzzle_hash(farmer_prefarm.puzzle_hash, True)
        uf = await client.get_coin_records_by_puzzle_hash(pool_prefarm.puzzle_hash, True)
        print(up)
        print(uf)
    finally:
        client.close()
# Script entry point: runs unconditionally on import/execution.
asyncio.run(main())
| true | true |
f7319cdf9d302bcce6db295df0177ccd74b1ba86 | 457 | py | Python | server/src/views/tests/test_delete.py | budtmo/GIoT | 4cfdeacbe53b2bee10613519d86fa1bb0d9057b4 | [
"Apache-2.0"
] | null | null | null | server/src/views/tests/test_delete.py | budtmo/GIoT | 4cfdeacbe53b2bee10613519d86fa1bb0d9057b4 | [
"Apache-2.0"
] | null | null | null | server/src/views/tests/test_delete.py | budtmo/GIoT | 4cfdeacbe53b2bee10613519d86fa1bb0d9057b4 | [
"Apache-2.0"
] | null | null | null | from src.views.tests import BaseTest
class TestDeleteDevice(BaseTest):
"""Tests to delete device from the list."""
def test_delete_device(self):
self.register_device()
res = self.test_app.delete('/device/{id}'.format(id=1))
self.assertEqual(res.status_code, 204)
def test_delete_non_existing_device(self):
res = self.test_app.delete('/device/{id}'.format(id=5))
self.assertEqual(res.status_code, 404)
| 30.466667 | 63 | 0.682713 | from src.views.tests import BaseTest
class TestDeleteDevice(BaseTest):
def test_delete_device(self):
self.register_device()
res = self.test_app.delete('/device/{id}'.format(id=1))
self.assertEqual(res.status_code, 204)
def test_delete_non_existing_device(self):
res = self.test_app.delete('/device/{id}'.format(id=5))
self.assertEqual(res.status_code, 404)
| true | true |
f7319ce68a4eb20de0f2eff9b0d27c720c5108bd | 596 | py | Python | sherlockpipe/search_zones/SearchZone.py | LuisCerdenoMota/SHERLOCK | 5fb52795d3ab44e27bc7dbc6f2c2e6c214995ba1 | [
"MIT"
] | 1 | 2021-01-14T16:44:48.000Z | 2021-01-14T16:44:48.000Z | sherlockpipe/search_zones/SearchZone.py | martindevora/SHERLOCK | 5e7492552cbce29e960684a44fd6ad875c8cf60e | [
"MIT"
] | null | null | null | sherlockpipe/search_zones/SearchZone.py | martindevora/SHERLOCK | 5e7492552cbce29e960684a44fd6ad875c8cf60e | [
"MIT"
] | null | null | null | from abc import ABC, abstractmethod
from sherlockpipe.star.starinfo import StarInfo
class SearchZone(ABC):
"""
Abstract class to be implemented for calculating minimum and maximum search periods for an input star.
"""
def __init__(self):
pass
@abstractmethod
def calculate_period_range(self, star_info: StarInfo):
"""
Calculates the minimum and maximum periods for the given star_info
@param star_info: the star where the range should be calculated
@return: a tuple of minimum_period and maximum_period
"""
pass
| 28.380952 | 106 | 0.692953 | from abc import ABC, abstractmethod
from sherlockpipe.star.starinfo import StarInfo
class SearchZone(ABC):
def __init__(self):
pass
@abstractmethod
def calculate_period_range(self, star_info: StarInfo):
pass
| true | true |
f7319d88946b78f26293dd74a23f8a820010f76c | 4,775 | py | Python | .environment/lib/python3.8/site-packages/docplex/mp/engine_factory.py | LuisMi1245/QPath-and-Snakes | 48f784da67d9720c955890a28543c9863e02a455 | [
"MIT"
] | null | null | null | .environment/lib/python3.8/site-packages/docplex/mp/engine_factory.py | LuisMi1245/QPath-and-Snakes | 48f784da67d9720c955890a28543c9863e02a455 | [
"MIT"
] | null | null | null | .environment/lib/python3.8/site-packages/docplex/mp/engine_factory.py | LuisMi1245/QPath-and-Snakes | 48f784da67d9720c955890a28543c9863e02a455 | [
"MIT"
] | null | null | null | # --------------------------------------------------------------------------
# Source file provided under Apache License, Version 2.0, January 2004,
# http://www.apache.org/licenses/
# (c) Copyright IBM Corp. 2015, 2016
# --------------------------------------------------------------------------
# gendoc: ignore
from docplex.mp.engine import NoSolveEngine, ZeroSolveEngine
from docplex.mp.utils import is_string
from docplex.mp.error_handler import docplex_fatal
class EngineFactory(object):
    """ A factory class that manages creation of solver instances.

    Maps agent names ("nosolve", "zero", and "cplex" when the CPLEX runtime
    is importable) to engine classes and instantiates them for a model.
    """
    _default_engine_map = {"nosolve": NoSolveEngine,
                           "zero": ZeroSolveEngine
                           }
    def __init__(self, env=None):
        self._engine_types_by_agent = self._default_engine_map.copy()
        # no cplex engine type yet? resolve eagerly when an environment is given
        if env is not None:
            self._resolve_cplex(env)
    def _get_engine_type_from_agent(self, agent, default_engine, default_engine_name):
        """Resolve *agent* (None, name string, qualified path, or class) to an engine class.

        Falls back to *default_engine* for None, warns and substitutes the
        default when 'cplex' is requested but unavailable, and raises via
        docplex_fatal for unrecognized values.
        """
        if agent is None:
            return default_engine
        elif is_string(agent):
            agent_key = agent.lower()
            engine_type = self._engine_types_by_agent.get(agent_key)
            if engine_type:
                return engine_type
            elif 'cplex' == agent_key:
                # 'cplex' requested but not registered: the runtime is missing.
                print('* warning: CPLEX runtime not found in path, using {0} instead'.format(default_engine_name))
                return self._engine_types_by_agent.get(default_engine_name)
            elif '.' in agent:
                # assuming a qualified name, e.g. com.ibm.docplex.quantum.QuantumEngine
                from docplex.mp.internal.mloader import import_class
                try:
                    agent_class = import_class(agent)
                    return agent_class
                except ValueError as ve:
                    print(
                        "Cannot load agent class {0}, expecting 'cplex' or valid class path, error: {1}".format(
                            agent, str(ve)))
                    raise ve
            else:
                docplex_fatal("Unexpected agent name: {0}, expecting 'cplex' or valid class path", agent)
        else:
            # try a class type
            try:
                # noinspection PyUnresolvedReferences
                from inspect import isclass
                if isclass(agent):
                    return agent
            except ImportError:
                # fallback check kept for environments without inspect
                if type(agent) == type:
                    return agent
            # agent cannot be mapped to any class.
            docplex_fatal("* unexpected agent: {0!r} -expecting 'cplex', class or class name", agent)
    def _is_cplex_resolved(self):
        # Resolution is marked by the presence of the attribute itself,
        # regardless of whether its value is a class or None.
        return hasattr(self, "_cplex_engine_type")
    def _resolve_cplex(self, env):
        # INTERNAL: detect the CPLEX runtime once and register its engine type.
        if env is None:
            docplex_fatal("need an environment to resolve cplex, got None")
        if not self._is_cplex_resolved():
            if env.has_cplex:
                env.check_cplex_version()
                from docplex.mp.cplex_engine import CplexEngine
                self._cplex_engine_type = CplexEngine
                # noinspection PyTypeChecker
                self._engine_types_by_agent["cplex"] = CplexEngine
            else:
                self._cplex_engine_type = None
    def _ensure_cplex_resolved(self, env):
        # Lazily resolve CPLEX on first use if __init__ received no env.
        if not self._is_cplex_resolved():
            self._resolve_cplex(env)
        assert self._is_cplex_resolved()
    def new_engine(self, agent, env, model, context=None):
        """Create a new engine instance for *model*, honoring *agent* selection.

        The default engine is CPLEX when available, otherwise NoSolveEngine.
        An optional *context* is forwarded to the engine constructor.
        """
        self._ensure_cplex_resolved(env)
        # compute a default engine and kwargs to use..
        kwargs = {}
        if self._cplex_engine_type:
            # default is CPLEX if we have it
            default_engine_type = self._cplex_engine_type
            default_engine_name = 'cplex'
        else:
            default_engine_type = NoSolveEngine
            default_engine_name = 'nosolve'
        if context is not None:
            kwargs['context'] = context
        engine_type = self._get_engine_type_from_agent(agent=agent,
                                                       default_engine=default_engine_type,
                                                       default_engine_name=default_engine_name)
        assert engine_type is not None
        try:
            return engine_type(model, **kwargs)
        except TypeError:
            docplex_fatal("agent: {0!s} failed to create instance from model, kwargs.", agent)
    def extend(self, new_agent, new_engine):
        # INTERNAL: register an extra agent-name -> engine-class mapping.
        assert new_engine is not None
        self._engine_types_by_agent[new_agent] = new_engine
| 39.46281 | 115 | 0.553927 |
from docplex.mp.engine import NoSolveEngine, ZeroSolveEngine
from docplex.mp.utils import is_string
from docplex.mp.error_handler import docplex_fatal
class EngineFactory(object):
    """Factory that maps agent names ('nosolve', 'zero', and 'cplex' when the
    CPLEX runtime is importable) to engine classes and instantiates them."""
    _default_engine_map = {"nosolve": NoSolveEngine,
                           "zero": ZeroSolveEngine
                           }
    def __init__(self, env=None):
        self._engine_types_by_agent = self._default_engine_map.copy()
        # Resolve the CPLEX engine eagerly when an environment is supplied.
        if env is not None:
            self._resolve_cplex(env)
    def _get_engine_type_from_agent(self, agent, default_engine, default_engine_name):
        """Resolve *agent* (None, name string, dotted path, or class) to an
        engine class; warns and substitutes the default when 'cplex' is
        requested but unavailable, raises via docplex_fatal otherwise."""
        if agent is None:
            return default_engine
        elif is_string(agent):
            agent_key = agent.lower()
            engine_type = self._engine_types_by_agent.get(agent_key)
            if engine_type:
                return engine_type
            elif 'cplex' == agent_key:
                # 'cplex' requested but not registered: the runtime is missing.
                print('* warning: CPLEX runtime not found in path, using {0} instead'.format(default_engine_name))
                return self._engine_types_by_agent.get(default_engine_name)
            elif '.' in agent:
                # Dotted string: treat it as a fully qualified class path.
                from docplex.mp.internal.mloader import import_class
                try:
                    agent_class = import_class(agent)
                    return agent_class
                except ValueError as ve:
                    print(
                        "Cannot load agent class {0}, expecting 'cplex' or valid class path, error: {1}".format(
                            agent, str(ve)))
                    raise ve
            else:
                docplex_fatal("Unexpected agent name: {0}, expecting 'cplex' or valid class path", agent)
        else:
            # Non-string agent: accept it directly when it is a class.
            try:
                from inspect import isclass
                if isclass(agent):
                    return agent
            except ImportError:
                # Fallback check kept for environments without inspect.
                if type(agent) == type:
                    return agent
            docplex_fatal("* unexpected agent: {0!r} -expecting 'cplex', class or class name", agent)
    def _is_cplex_resolved(self):
        # Resolution is marked by the attribute's presence, not its value.
        return hasattr(self, "_cplex_engine_type")
    def _resolve_cplex(self, env):
        # INTERNAL: detect the CPLEX runtime once and register its engine type.
        if env is None:
            docplex_fatal("need an environment to resolve cplex, got None")
        if not self._is_cplex_resolved():
            if env.has_cplex:
                env.check_cplex_version()
                from docplex.mp.cplex_engine import CplexEngine
                self._cplex_engine_type = CplexEngine
                self._engine_types_by_agent["cplex"] = CplexEngine
            else:
                self._cplex_engine_type = None
    def _ensure_cplex_resolved(self, env):
        # Lazily resolve CPLEX on first use if __init__ received no env.
        if not self._is_cplex_resolved():
            self._resolve_cplex(env)
        assert self._is_cplex_resolved()
    def new_engine(self, agent, env, model, context=None):
        """Create an engine instance for *model*; defaults to CPLEX when
        available, else NoSolveEngine. *context* is forwarded when given."""
        self._ensure_cplex_resolved(env)
        kwargs = {}
        if self._cplex_engine_type:
            default_engine_type = self._cplex_engine_type
            default_engine_name = 'cplex'
        else:
            default_engine_type = NoSolveEngine
            default_engine_name = 'nosolve'
        if context is not None:
            kwargs['context'] = context
        engine_type = self._get_engine_type_from_agent(agent=agent,
                                                       default_engine=default_engine_type,
                                                       default_engine_name=default_engine_name)
        assert engine_type is not None
        try:
            return engine_type(model, **kwargs)
        except TypeError:
            docplex_fatal("agent: {0!s} failed to create instance from model, kwargs.", agent)
    def extend(self, new_agent, new_engine):
        # INTERNAL: register an extra agent-name -> engine-class mapping.
        assert new_engine is not None
        self._engine_types_by_agent[new_agent] = new_engine
| true | true |
f7319da39f77cefd4e939a349a31747373e82aa7 | 4,711 | py | Python | simple_api/django_object/django_object.py | karlosss/simple_api | 03f87035c648f161d5e7a59b24f4e04bd34399f1 | [
"MIT"
] | 2 | 2020-11-13T14:00:06.000Z | 2020-12-19T11:50:22.000Z | simple_api/django_object/django_object.py | karlosss/simple_api | 03f87035c648f161d5e7a59b24f4e04bd34399f1 | [
"MIT"
] | 5 | 2021-02-04T14:27:43.000Z | 2021-06-04T23:22:24.000Z | simple_api/django_object/django_object.py | karlosss/simple_api | 03f87035c648f161d5e7a59b24f4e04bd34399f1 | [
"MIT"
] | 1 | 2021-01-06T13:54:38.000Z | 2021-01-06T13:54:38.000Z | from copy import deepcopy
from simple_api.django_object.actions import DetailAction, ListAction, CreateAction, UpdateAction, DeleteAction
from simple_api.django_object.datatypes import create_associated_list_type
from simple_api.django_object.filters import generate_filters
from simple_api.django_object.converter import determine_simple_api_fields
from simple_api.django_object.utils import get_pk_field
from simple_api.object.datatypes import StringType
from simple_api.object.object import Object, ObjectMeta
from simple_api.object.registry import object_storage
from simple_api.django_object.registry import model_django_object_storage
from simple_api.utils import ClassStub
class DjangoObjectMeta(type):
    """Metaclass that turns each DjangoObject subclass into a simple_api Object.

    Builds an Object stub from the Django model's fields, attaches filters,
    a list type, and CRUD/custom actions, then stores the built Object on
    the class as ``_object``.
    """
    base_class = "simple_api.django_object.django_object.DjangoObject"
    def __new__(mcs, name, bases, attrs, **kwargs):
        cls = super().__new__(mcs, name, bases, attrs)
        # Skip generation for the abstract base class itself or when skip=True.
        if kwargs.get("skip", False) or object_storage.key_for_class(attrs["__module__"], name) == mcs.base_class:
            return cls
        object_stub = ClassStub(name=cls.__name__, bases=(Object,))
        # set the module of the generated Object class to match the module of the user class
        object_stub.add_attr("__module__", cls.__module__)
        assert cls.model is not None, "`model` must be set."
        # if the class is meant to resolve relations, store it for the particular model
        if cls.class_for_related:
            model_django_object_storage.store(cls.model, cls)
        cls.pk_field_name, cls.pk_field = get_pk_field(cls.model)
        object_stub.add_attr("pk_field", cls.pk_field_name)
        # make sure the primary key is included, otherwise `ModelObjectAction`s would just not work
        if cls.only_fields and cls.pk_field_name not in cls.only_fields:
            cls.only_fields = cls.only_fields + (cls.pk_field_name,)
        elif cls.exclude_fields and cls.pk_field_name in cls.exclude_fields:
            cls.exclude_fields = (f for f in cls.exclude_fields if f != cls.pk_field_name)
        fields, input_fields, output_fields, field_validators = determine_simple_api_fields(
            cls.model,
            cls.only_fields, cls.exclude_fields,
            cls.custom_fields, cls.input_custom_fields, cls.output_custom_fields,
        )
        # Expose the model's __str__ as an output-only string field.
        output_fields["__str__"] = StringType(resolver=lambda *a, **kw: kw["parent_val"]())
        # Custom input/output fields must not clash with the model's own fields.
        for f in input_fields:
            assert f not in fields, "Redefinition of `{}` field.".format(f)
        cls.in_fields = {**fields, **input_fields}
        for f in output_fields:
            assert f not in fields, "Redefinition of `{}` field.".format(f)
        cls.out_fields = {**fields, **output_fields}
        object_stub.add_attr("fields", fields)
        object_stub.add_attr("input_fields", input_fields)
        object_stub.add_attr("output_fields", output_fields)
        # create filters and List type for potential listing actions
        cls.filter_type = ObjectMeta("{}Filters".format(cls.__name__), (Object,), {"fields": generate_filters(cls),
                                                                                  "hidden": True})
        object_stub.add_attr("filter_type", cls.filter_type)
        create_associated_list_type(cls)
        # Collect the enabled CRUD actions (deepcopy so class-level defaults
        # are never shared between generated classes), then custom actions.
        actions = {}
        if cls.detail_action is not None:
            actions["detail"] = deepcopy(cls.detail_action)
        if cls.list_action is not None:
            actions["list"] = deepcopy(cls.list_action)
        if cls.create_action is not None:
            actions["create"] = deepcopy(cls.create_action)
        if cls.update_action is not None:
            actions["update"] = deepcopy(cls.update_action)
        if cls.delete_action is not None:
            actions["delete"] = deepcopy(cls.delete_action)
        actions.update(cls.custom_actions)
        converted_actions = {}
        for action_name, action in actions.items():
            action.set_parent_class(cls)
            action.set_name(action_name)
            converted_actions[action_name] = action.to_action()
        object_stub.add_attr("actions", converted_actions)
        cls._object = object_stub.build(ObjectMeta)
        return cls
class DjangoObject(metaclass=DjangoObjectMeta):
    """Base class for declaring a simple_api object backed by a Django model.

    Subclasses set ``model`` and optionally customize fields and actions;
    DjangoObjectMeta then generates the corresponding Object class.
    """
    # The Django model this object wraps; must be set by subclasses.
    model = None
    auto_pk = True
    # Whether this class is used to resolve relations pointing at `model`.
    class_for_related = True
    # Whitelist/blacklist of model fields to expose (pk is always kept).
    only_fields = None
    exclude_fields = None
    # Extra fields: shared, input-only, and output-only.
    custom_fields = {}
    input_custom_fields = {}
    output_custom_fields = {}
    # Default CRUD actions; set any to None to disable it.
    detail_action = DetailAction()
    list_action = ListAction()
    create_action = CreateAction()
    update_action = UpdateAction()
    delete_action = DeleteAction()
    custom_actions = {}
    @classmethod
    def to_object(cls):
        # Return the Object class generated by the metaclass.
        return cls._object
| 39.923729 | 115 | 0.68478 | from copy import deepcopy
from simple_api.django_object.actions import DetailAction, ListAction, CreateAction, UpdateAction, DeleteAction
from simple_api.django_object.datatypes import create_associated_list_type
from simple_api.django_object.filters import generate_filters
from simple_api.django_object.converter import determine_simple_api_fields
from simple_api.django_object.utils import get_pk_field
from simple_api.object.datatypes import StringType
from simple_api.object.object import Object, ObjectMeta
from simple_api.object.registry import object_storage
from simple_api.django_object.registry import model_django_object_storage
from simple_api.utils import ClassStub
class DjangoObjectMeta(type):
    """Metaclass that turns each DjangoObject subclass into a simple_api
    Object: derives fields from the Django model, attaches filters, a list
    type and CRUD/custom actions, and stores the result as ``_object``."""
    base_class = "simple_api.django_object.django_object.DjangoObject"
    def __new__(mcs, name, bases, attrs, **kwargs):
        cls = super().__new__(mcs, name, bases, attrs)
        # Skip generation for the abstract base class itself or when skip=True.
        if kwargs.get("skip", False) or object_storage.key_for_class(attrs["__module__"], name) == mcs.base_class:
            return cls
        object_stub = ClassStub(name=cls.__name__, bases=(Object,))
        # Generated Object reports the same module as the user class.
        object_stub.add_attr("__module__", cls.__module__)
        assert cls.model is not None, "`model` must be set."
        # Register this class as the resolver for relations to its model.
        if cls.class_for_related:
            model_django_object_storage.store(cls.model, cls)
        cls.pk_field_name, cls.pk_field = get_pk_field(cls.model)
        object_stub.add_attr("pk_field", cls.pk_field_name)
        # The primary key must always be exposed for object actions to work.
        if cls.only_fields and cls.pk_field_name not in cls.only_fields:
            cls.only_fields = cls.only_fields + (cls.pk_field_name,)
        elif cls.exclude_fields and cls.pk_field_name in cls.exclude_fields:
            cls.exclude_fields = (f for f in cls.exclude_fields if f != cls.pk_field_name)
        fields, input_fields, output_fields, field_validators = determine_simple_api_fields(
            cls.model,
            cls.only_fields, cls.exclude_fields,
            cls.custom_fields, cls.input_custom_fields, cls.output_custom_fields,
        )
        # Expose the model's __str__ as an output-only string field.
        output_fields["__str__"] = StringType(resolver=lambda *a, **kw: kw["parent_val"]())
        # Custom input/output fields must not clash with the model's fields.
        for f in input_fields:
            assert f not in fields, "Redefinition of `{}` field.".format(f)
        cls.in_fields = {**fields, **input_fields}
        for f in output_fields:
            assert f not in fields, "Redefinition of `{}` field.".format(f)
        cls.out_fields = {**fields, **output_fields}
        object_stub.add_attr("fields", fields)
        object_stub.add_attr("input_fields", input_fields)
        object_stub.add_attr("output_fields", output_fields)
        # Filters and the associated List type support listing actions.
        cls.filter_type = ObjectMeta("{}Filters".format(cls.__name__), (Object,), {"fields": generate_filters(cls),
                                                                                  "hidden": True})
        object_stub.add_attr("filter_type", cls.filter_type)
        create_associated_list_type(cls)
        # Collect enabled CRUD actions (deepcopied so class-level defaults are
        # not shared between generated classes), then custom actions.
        actions = {}
        if cls.detail_action is not None:
            actions["detail"] = deepcopy(cls.detail_action)
        if cls.list_action is not None:
            actions["list"] = deepcopy(cls.list_action)
        if cls.create_action is not None:
            actions["create"] = deepcopy(cls.create_action)
        if cls.update_action is not None:
            actions["update"] = deepcopy(cls.update_action)
        if cls.delete_action is not None:
            actions["delete"] = deepcopy(cls.delete_action)
        actions.update(cls.custom_actions)
        converted_actions = {}
        for action_name, action in actions.items():
            action.set_parent_class(cls)
            action.set_name(action_name)
            converted_actions[action_name] = action.to_action()
        object_stub.add_attr("actions", converted_actions)
        cls._object = object_stub.build(ObjectMeta)
        return cls
class DjangoObject(metaclass=DjangoObjectMeta):
    """Base class for declaring a simple_api object backed by a Django model;
    subclasses set ``model`` and optionally customize fields and actions."""
    # The Django model this object wraps; must be set by subclasses.
    model = None
    auto_pk = True
    # Whether this class is used to resolve relations pointing at `model`.
    class_for_related = True
    # Whitelist/blacklist of model fields to expose (pk is always kept).
    only_fields = None
    exclude_fields = None
    # Extra fields: shared, input-only, and output-only.
    custom_fields = {}
    input_custom_fields = {}
    output_custom_fields = {}
    # Default CRUD actions; set any to None to disable it.
    detail_action = DetailAction()
    list_action = ListAction()
    create_action = CreateAction()
    update_action = UpdateAction()
    delete_action = DeleteAction()
    custom_actions = {}
    @classmethod
    def to_object(cls):
        # Return the Object class generated by the metaclass.
        return cls._object
| true | true |
f7319df270d89691854f3a1b9a62c1231d3c7bfe | 8,177 | py | Python | python/paddle/fluid/tests/unittests/xpu/test_expand_v2_op_xpu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
] | 11 | 2016-08-29T07:43:26.000Z | 2016-08-29T07:51:24.000Z | python/paddle/fluid/tests/unittests/xpu/test_expand_v2_op_xpu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/tests/unittests/xpu/test_expand_v2_op_xpu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
] | 1 | 2021-12-09T08:59:17.000Z | 2021-12-09T08:59:17.000Z | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import sys
import numpy as np
sys.path.append("..")
from op_test import OpTest
from op_test_xpu import XPUOpTest
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard
import paddle
from xpu.get_test_cover_info import create_test_class, get_xpu_op_support_types, XPUOpTestWrapper
paddle.enable_static()
np.random.seed(10)
# CANN Op Support X: float32, int32, int64
# Situation 1: shape is a list(without tensor)
class XPUTestExpandV2Op(XPUOpTestWrapper):
    """XPU test wrapper for the expand_v2 op; nested classes cover the
    shape-as-attribute, shape-as-tensor-list, and shape-as-tensor cases."""
    def __init__(self):
        self.op_name = 'expand_v2'
        self.use_dynamic_create_class = False
    class TestExpandV2XPUOp(XPUOpTest):
        # Base case: target shape passed via the 'shape' attribute.
        def setUp(self):
            self.init_dtype()
            self.set_xpu()
            self.op_type = "expand_v2"
            self.place = paddle.XPUPlace(0)
            self.init_data()
            self.inputs = {
                'X': np.random.random(self.ori_shape).astype(self.dtype)
            }
            self.attrs = {'shape': self.shape}
            # Reference output: numpy tile with the equivalent repeat counts.
            output = np.tile(self.inputs['X'], self.expand_times)
            self.outputs = {'Out': output}
        def init_dtype(self):
            # in_type is injected per-dtype by create_test_class.
            self.dtype = self.in_type
        def set_xpu(self):
            self.__class__.use_xpu = True
            self.__class__.no_need_check_grad = True
        def init_data(self):
            # Overridden by subclasses to vary input shape / target shape.
            self.ori_shape = [100]
            self.shape = [100]
            self.expand_times = [1]
        def test_check_output(self):
            self.check_output_with_place(self.place)
    class TestExpandV2OpRank2_DimExpanding(TestExpandV2XPUOp):
        def init_data(self):
            self.ori_shape = [120]
            self.shape = [2, 120]
            self.expand_times = [2, 1]
    class TestExpandV2OpRank2(TestExpandV2XPUOp):
        def init_data(self):
            self.ori_shape = [1, 140]
            self.shape = [12, 140]
            self.expand_times = [12, 1]
    class TestExpandV2OpRank3_Corner(TestExpandV2XPUOp):
        def init_data(self):
            self.ori_shape = (2, 10, 5)
            self.shape = (2, 10, 5)
            self.expand_times = (1, 1, 1)
    class TestExpandV2OpRank4(TestExpandV2XPUOp):
        def init_data(self):
            # -1 in shape means "keep the corresponding input dimension".
            self.ori_shape = (2, 4, 5, 7)
            self.shape = (-1, -1, -1, -1)
            self.expand_times = (1, 1, 1, 1)
    class TestExpandV2OpRank5(TestExpandV2XPUOp):
        def init_data(self):
            self.ori_shape = (2, 4, 1, 15)
            self.shape = (2, -1, 4, -1)
            self.expand_times = (1, 1, 4, 1)
    class TestExpandV2OpRank6(TestExpandV2XPUOp):
        def init_data(self):
            self.ori_shape = (4, 1, 30)
            self.shape = (2, -1, 4, 30)
            self.expand_times = (2, 1, 4, 1)
    # Situation 2: shape is a list(with tensor)
    class TestExpandV2OpXPURank1_tensor_attr(TestExpandV2XPUOp):
        def setUp(self):
            self.set_xpu()
            self.place = paddle.XPUPlace(0)
            self.op_type = "expand_v2"
            self.init_data()
            self.dtype = np.float32
            # Each target dimension is supplied as a 1-element int32 tensor.
            expand_shapes_tensor = []
            for index, ele in enumerate(self.expand_shape):
                expand_shapes_tensor.append(("x" + str(index), np.ones(
                    (1)).astype('int32') * ele))
            self.inputs = {
                'X': np.random.random(self.ori_shape).astype(self.dtype),
                'expand_shapes_tensor': expand_shapes_tensor,
            }
            self.attrs = {"shape": self.infer_expand_shape}
            output = np.tile(self.inputs['X'], self.expand_times)
            self.outputs = {'Out': output}
        def init_data(self):
            self.ori_shape = [100]
            self.expand_times = [1]
            self.expand_shape = [100]
            self.infer_expand_shape = [-1]
    class TestExpandV2OpRank2_Corner_tensor_attr(
            TestExpandV2OpXPURank1_tensor_attr):
        def init_data(self):
            self.ori_shape = [12, 14]
            self.expand_times = [1, 1]
            self.expand_shape = [12, 14]
            self.infer_expand_shape = [12, -1]
    # Situation 3: shape is a tensor
    class TestExpandV2XPUOp_tensor(TestExpandV2XPUOp):
        def setUp(self):
            self.set_xpu()
            self.place = paddle.XPUPlace(0)
            self.op_type = "expand_v2"
            self.init_data()
            self.dtype = np.float32
            self.inputs = {
                'X': np.random.random(self.ori_shape).astype(self.dtype),
                'Shape': np.array(self.expand_shape).astype("int32"),
            }
            self.attrs = {}
            output = np.tile(self.inputs['X'], self.expand_times)
            self.outputs = {'Out': output}
        def init_data(self):
            self.ori_shape = [100]
            self.expand_times = [2, 1]
            self.expand_shape = [2, 100]
# Situation 5: input x is int32
# skip grad check for int32
class TestExpandV2OpInteger(XPUOpTest):
    """expand_v2 with int32 input; gradient checking is skipped for ints."""
    def init_type(self):
        self.dtype = 'int32'
    def setUp(self):
        self.set_xpu()
        self.init_type()
        self.place = paddle.XPUPlace(0)
        self.op_type = "expand_v2"
        self.inputs = {
            'X': np.random.randint(10, size=(2, 4, 20)).astype(self.dtype)
        }
        # Target shape equals the input shape, so the op is an identity copy.
        self.attrs = {'shape': [2, 4, 20]}
        output = np.tile(self.inputs['X'], (1, 1, 1))
        self.outputs = {'Out': output}
    def set_xpu(self):
        self.__class__.use_xpu = True
    def test_check_output(self):
        self.check_output_with_place(self.place)
    def test_check_grad(self):
        # Intentionally empty: no gradient check for integer inputs.
        pass
# Test python API
class TestExpandV2API(unittest.TestCase):
    """Static-graph API test for paddle.expand on XPU: the target shape is
    given as a list, as a list containing a tensor, and as a tensor."""
    def test_static(self):
        with fluid.program_guard(fluid.Program(), fluid.Program()):
            # NOTE: `input` shadows the builtin; kept as-is in this doc pass.
            input = np.random.random([12, 14]).astype("float32")
            x = fluid.layers.data(name='x',
                                  shape=[12, 14],
                                  append_batch_size=False,
                                  dtype="float32")
            positive_2 = fluid.layers.fill_constant([1], "int32", 12)
            expand_shape = fluid.layers.data(name="expand_shape",
                                             shape=[2],
                                             append_batch_size=False,
                                             dtype="int32")
            # Three equivalent ways to express the same [12, 14] target shape.
            out_1 = paddle.expand(x, shape=[12, 14])
            out_2 = paddle.expand(x, shape=[positive_2, 14])
            out_3 = paddle.expand(x, shape=expand_shape)
            # Also exercise the backward pass for the tensor-in-list variant.
            g0 = fluid.backward.calc_gradient(out_2, x)
            exe = fluid.Executor(place=paddle.XPUPlace(0))
            res_1, res_2, res_3 = exe.run(fluid.default_main_program(),
                                          feed={
                                              "x":
                                              input,
                                              "expand_shape":
                                              np.array([12, 14]).astype("int32")
                                          },
                                          fetch_list=[out_1, out_2, out_3])
            # Expanding to the input's own shape must leave the data unchanged.
            assert np.array_equal(res_1, np.tile(input, (1, 1)))
            assert np.array_equal(res_2, np.tile(input, (1, 1)))
            assert np.array_equal(res_3, np.tile(input, (1, 1)))
# Generate one concrete TestCase class per dtype the XPU expand_v2 kernel supports.
support_types = get_xpu_op_support_types('expand_v2')
for stype in support_types:
    create_test_class(globals(), XPUTestExpandV2Op, stype)
if __name__ == "__main__":
    unittest.main()
| 33.239837 | 97 | 0.559863 |
from __future__ import print_function
import unittest
import sys
import numpy as np
sys.path.append("..")
from op_test import OpTest
from op_test_xpu import XPUOpTest
import paddle.fluid as fluid
from paddle.fluid import Program, program_guard
import paddle
from xpu.get_test_cover_info import create_test_class, get_xpu_op_support_types, XPUOpTestWrapper
# Run in static-graph mode and fix the RNG seed so op tests are reproducible.
paddle.enable_static()
np.random.seed(10)
class XPUTestExpandV2Op(XPUOpTestWrapper):
def __init__(self):
self.op_name = 'expand_v2'
self.use_dynamic_create_class = False
class TestExpandV2XPUOp(XPUOpTest):
def setUp(self):
self.init_dtype()
self.set_xpu()
self.op_type = "expand_v2"
self.place = paddle.XPUPlace(0)
self.init_data()
self.inputs = {
'X': np.random.random(self.ori_shape).astype(self.dtype)
}
self.attrs = {'shape': self.shape}
output = np.tile(self.inputs['X'], self.expand_times)
self.outputs = {'Out': output}
def init_dtype(self):
self.dtype = self.in_type
def set_xpu(self):
self.__class__.use_xpu = True
self.__class__.no_need_check_grad = True
def init_data(self):
self.ori_shape = [100]
self.shape = [100]
self.expand_times = [1]
def test_check_output(self):
self.check_output_with_place(self.place)
class TestExpandV2OpRank2_DimExpanding(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = [120]
self.shape = [2, 120]
self.expand_times = [2, 1]
class TestExpandV2OpRank2(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = [1, 140]
self.shape = [12, 140]
self.expand_times = [12, 1]
class TestExpandV2OpRank3_Corner(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = (2, 10, 5)
self.shape = (2, 10, 5)
self.expand_times = (1, 1, 1)
class TestExpandV2OpRank4(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = (2, 4, 5, 7)
self.shape = (-1, -1, -1, -1)
self.expand_times = (1, 1, 1, 1)
class TestExpandV2OpRank5(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = (2, 4, 1, 15)
self.shape = (2, -1, 4, -1)
self.expand_times = (1, 1, 4, 1)
class TestExpandV2OpRank6(TestExpandV2XPUOp):
def init_data(self):
self.ori_shape = (4, 1, 30)
self.shape = (2, -1, 4, 30)
self.expand_times = (2, 1, 4, 1)
class TestExpandV2OpXPURank1_tensor_attr(TestExpandV2XPUOp):
    """Variant passing the target shape as a list of 1-element int32 tensors."""

    def setUp(self):
        self.set_xpu()
        self.place = paddle.XPUPlace(0)
        self.op_type = "expand_v2"
        self.init_data()
        self.dtype = np.float32
        # One ("xN", tensor) pair per target dimension.
        shape_tensors = [
            ("x" + str(i), np.ones((1)).astype('int32') * dim)
            for i, dim in enumerate(self.expand_shape)
        ]
        self.inputs = {
            'X': np.random.random(self.ori_shape).astype(self.dtype),
            'expand_shapes_tensor': shape_tensors,
        }
        self.attrs = {"shape": self.infer_expand_shape}
        self.outputs = {'Out': np.tile(self.inputs['X'], self.expand_times)}

    def init_data(self):
        self.ori_shape, self.expand_times = [100], [1]
        self.expand_shape, self.infer_expand_shape = [100], [-1]
class TestExpandV2OpRank2_Corner_tensor_attr(
        TestExpandV2OpXPURank1_tensor_attr):
    """Rank-2 corner case for the tensor-attr path: expansion is a no-op."""

    def init_data(self):
        self.ori_shape, self.expand_times = [12, 14], [1, 1]
        self.expand_shape, self.infer_expand_shape = [12, 14], [12, -1]
class TestExpandV2XPUOp_tensor(TestExpandV2XPUOp):
    """Variant feeding the whole target shape through the `Shape` input."""

    def setUp(self):
        self.set_xpu()
        self.place = paddle.XPUPlace(0)
        self.op_type = "expand_v2"
        self.init_data()
        self.dtype = np.float32
        source = np.random.random(self.ori_shape).astype(self.dtype)
        self.inputs = {
            'X': source,
            'Shape': np.array(self.expand_shape).astype("int32"),
        }
        self.attrs = {}
        self.outputs = {'Out': np.tile(source, self.expand_times)}

    def init_data(self):
        self.ori_shape, self.expand_times, self.expand_shape = [100], [2, 1], [2, 100]
class TestExpandV2OpInteger(XPUOpTest):
    """expand_v2 on integer data: a no-op expansion of a (2, 4, 20) tensor."""

    def init_type(self):
        self.dtype = 'int32'

    def setUp(self):
        self.set_xpu()
        self.init_type()
        self.place = paddle.XPUPlace(0)
        self.op_type = "expand_v2"
        self.inputs = {
            'X': np.random.randint(10, size=(2, 4, 20)).astype(self.dtype)
        }
        self.attrs = {'shape': [2, 4, 20]}
        # Target shape matches the input, so the reference output is an
        # identity tile of X.
        output = np.tile(self.inputs['X'], (1, 1, 1))
        self.outputs = {'Out': output}

    def set_xpu(self):
        self.__class__.use_xpu = True

    def test_check_output(self):
        self.check_output_with_place(self.place)

    def test_check_grad(self):
        # Integer inputs have no gradient; deliberately skipped.
        pass
class TestExpandV2API(unittest.TestCase):
    """Static-graph API checks for paddle.expand on XPU."""

    def test_static(self):
        with fluid.program_guard(fluid.Program(), fluid.Program()):
            input = np.random.random([12, 14]).astype("float32")
            x = fluid.layers.data(name='x',
                                  shape=[12, 14],
                                  append_batch_size=False,
                                  dtype="float32")
            # The target shape can be given three ways: literal ints, a
            # mix with a 1-element tensor, or a full shape tensor fed at
            # run time.
            positive_2 = fluid.layers.fill_constant([1], "int32", 12)
            expand_shape = fluid.layers.data(name="expand_shape",
                                             shape=[2],
                                             append_batch_size=False,
                                             dtype="int32")
            out_1 = paddle.expand(x, shape=[12, 14])
            out_2 = paddle.expand(x, shape=[positive_2, 14])
            out_3 = paddle.expand(x, shape=expand_shape)
            # Exercise the backward pass for the tensor-shape variant.
            g0 = fluid.backward.calc_gradient(out_2, x)
            exe = fluid.Executor(place=paddle.XPUPlace(0))
            res_1, res_2, res_3 = exe.run(fluid.default_main_program(),
                                          feed={
                                              "x":
                                              input,
                                              "expand_shape":
                                              np.array([12, 14]).astype("int32")
                                          },
                                          fetch_list=[out_1, out_2, out_3])
            # Expanding [12, 14] -> [12, 14] is the identity, hence the
            # np.tile(input, (1, 1)) reference in all three cases.
            assert np.array_equal(res_1, np.tile(input, (1, 1)))
            assert np.array_equal(res_2, np.tile(input, (1, 1)))
            assert np.array_equal(res_3, np.tile(input, (1, 1)))
# Generate one concrete TestCase subclass per dtype the XPU kernel supports.
support_types = get_xpu_op_support_types('expand_v2')
for stype in support_types:
    create_test_class(globals(), XPUTestExpandV2Op, stype)

if __name__ == "__main__":
    unittest.main()
| true | true |
f7319df5d2cfd54a36d3021fcea5ac6d789dee56 | 7,191 | py | Python | mapillary_tools/commands/process.py | kudarisenmon/mapillary_tools | ef3201847ece95a0bbeb95187e38a0a84309a2b7 | [
"BSD-2-Clause"
] | null | null | null | mapillary_tools/commands/process.py | kudarisenmon/mapillary_tools | ef3201847ece95a0bbeb95187e38a0a84309a2b7 | [
"BSD-2-Clause"
] | null | null | null | mapillary_tools/commands/process.py | kudarisenmon/mapillary_tools | ef3201847ece95a0bbeb95187e38a0a84309a2b7 | [
"BSD-2-Clause"
] | null | null | null | import inspect
from mapillary_tools.process_user_properties import process_user_properties
from mapillary_tools.process_import_meta_properties import process_import_meta_properties
from mapillary_tools.process_geotag_properties import process_geotag_properties
from mapillary_tools.process_sequence_properties import process_sequence_properties
from mapillary_tools.process_upload_params import process_upload_params
from mapillary_tools.insert_MAPJson import insert_MAPJson
class Command:
name = 'process'
help = 'Main tool : Process image meta data and insert it in image EXIF.'
    def add_basic_arguments(self, parser):
        """Register the basic (always-shown) CLI options on `parser`."""
        parser.add_argument(
            '--rerun', help='rerun the processing', action='store_true', required=False)
        # user properties
        # user name for the import
        parser.add_argument("--user_name", help="user name", required=True)
        # organization level parameters
        parser.add_argument(
            '--organization_username', help="Specify organization user name", default=None, required=False)
        parser.add_argument(
            '--organization_key', help="Specify organization key", default=None, required=False)
        parser.add_argument('--private',
                            help="Specify whether the import is private", action='store_true', default=False, required=False)
        parser.add_argument(
            '--skip_subfolders', help='Skip all subfolders and import only the images in the given directory path.', action='store_true', default=False, required=False)
    def add_advanced_arguments(self, parser):
        """Register the advanced CLI options on `parser`.

        Grouped by processing stage: master upload, import meta, geotagging,
        sequence handling and EXIF insertion.
        """
        # master upload
        parser.add_argument('--master_upload', help='Process images with a master key, note: only used by Mapillary employees',
                            action='store_true', default=False, required=False)
        # import meta
        parser.add_argument(
            "--device_make", help="Specify device manufacturer. Note this input has precedence over the input read from the import source file.", default=None, required=False)
        parser.add_argument(
            "--device_model", help="Specify device model. Note this input has precedence over the input read from the import source file.", default=None, required=False)
        parser.add_argument(
            '--add_file_name', help="Add original file name to EXIF. Note this input has precedence over the input read from the import source file.", action='store_true', required=False)
        parser.add_argument(
            '--add_import_date', help="Add import date.", action='store_true', required=False)
        parser.add_argument('--orientation', help='Specify the image orientation in degrees. Note this might result in image rotation. Note this input has precedence over the input read from the import source file.',
                            choices=[0, 90, 180, 270], type=int, default=None, required=False)
        parser.add_argument(
            "--GPS_accuracy", help="GPS accuracy in meters. Note this input has precedence over the input read from the import source file.", default=None, required=False)
        # geotagging
        parser.add_argument('--geotag_source', help='Provide the source of date/time and gps information needed for geotagging.', action='store',
                            choices=['exif', 'gpx', 'gopro_video', 'nmea'], default="exif", required=False)
        parser.add_argument(
            '--geotag_source_path', help='Provide the path to the file source of date/time and gps information needed for geotagging.', action='store',
            default=None, required=False)
        parser.add_argument(
            '--local_time', help='Assume image timestamps are in your local time', action='store_true', default=False, required=False)
        parser.add_argument('--sub_second_interval',
                            help='Sub second time between shots. Used to set image times with sub-second precision',
                            type=float, default=0.0, required=False)
        parser.add_argument('--offset_time', default=0., type=float,
                            help='time offset between the camera and the gps device, in seconds.', required=False)
        parser.add_argument('--offset_angle', default=0., type=float,
                            help='offset camera angle (90 for right facing, 180 for rear facing, -90 for left facing)', required=False)
        parser.add_argument("--use_gps_start_time",
                            help="Use GPS trace starting time in case of derivating timestamp from filename.", action="store_true", default=False, required=False)
        # sequence
        parser.add_argument('--cutoff_distance', default=600., type=float,
                            help='maximum gps distance in meters within a sequence', required=False)
        parser.add_argument('--cutoff_time', default=60., type=float,
                            help='maximum time interval in seconds within a sequence', required=False)
        parser.add_argument('--interpolate_directions',
                            help='perform interploation of directions', action='store_true', required=False)
        parser.add_argument('--flag_duplicates',
                            help='flag duplicates', action='store_true', required=False)
        parser.add_argument('--duplicate_distance',
                            help='max distance for two images to be considered duplicates in meters', type=float, default=0.1, required=False)
        parser.add_argument(
            '--duplicate_angle', help='max angle for two images to be considered duplicates in degrees', type=float, default=5, required=False)
        # EXIF insert
        parser.add_argument('--skip_EXIF_insert', help='Skip inserting the extracted data into image EXIF.',
                            action='store_true', default=False, required=False)
        parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory by adding suffix "_processed" to the import_path.',
                            action='store_true', default=False, required=False)
def run(self, args):
vars_args = vars(args)
process_user_properties(**({k: v for k, v in vars_args.iteritems()
if k in inspect.getargspec(process_user_properties).args}))
process_import_meta_properties(
**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_import_meta_properties).args}))
process_geotag_properties(
**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_geotag_properties).args}))
process_sequence_properties(
**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_sequence_properties).args}))
process_upload_params(**({k: v for k, v in vars_args.iteritems()
if k in inspect.getargspec(process_upload_params).args}))
insert_MAPJson(**({k: v for k, v in vars_args.iteritems()
if k in inspect.getargspec(insert_MAPJson).args}))
print("Process done.") | 67.205607 | 216 | 0.667779 | import inspect
from mapillary_tools.process_user_properties import process_user_properties
from mapillary_tools.process_import_meta_properties import process_import_meta_properties
from mapillary_tools.process_geotag_properties import process_geotag_properties
from mapillary_tools.process_sequence_properties import process_sequence_properties
from mapillary_tools.process_upload_params import process_upload_params
from mapillary_tools.insert_MAPJson import insert_MAPJson
class Command:
name = 'process'
help = 'Main tool : Process image meta data and insert it in image EXIF.'
def add_basic_arguments(self, parser):
parser.add_argument(
'--rerun', help='rerun the processing', action='store_true', required=False)
parser.add_argument("--user_name", help="user name", required=True)
parser.add_argument(
'--organization_username', help="Specify organization user name", default=None, required=False)
parser.add_argument(
'--organization_key', help="Specify organization key", default=None, required=False)
parser.add_argument('--private',
help="Specify whether the import is private", action='store_true', default=False, required=False)
parser.add_argument(
'--skip_subfolders', help='Skip all subfolders and import only the images in the given directory path.', action='store_true', default=False, required=False)
def add_advanced_arguments(self, parser):
parser.add_argument('--master_upload', help='Process images with a master key, note: only used by Mapillary employees',
action='store_true', default=False, required=False)
parser.add_argument(
"--device_make", help="Specify device manufacturer. Note this input has precedence over the input read from the import source file.", default=None, required=False)
parser.add_argument(
"--device_model", help="Specify device model. Note this input has precedence over the input read from the import source file.", default=None, required=False)
parser.add_argument(
'--add_file_name', help="Add original file name to EXIF. Note this input has precedence over the input read from the import source file.", action='store_true', required=False)
parser.add_argument(
'--add_import_date', help="Add import date.", action='store_true', required=False)
parser.add_argument('--orientation', help='Specify the image orientation in degrees. Note this might result in image rotation. Note this input has precedence over the input read from the import source file.',
choices=[0, 90, 180, 270], type=int, default=None, required=False)
parser.add_argument(
"--GPS_accuracy", help="GPS accuracy in meters. Note this input has precedence over the input read from the import source file.", default=None, required=False)
parser.add_argument('--geotag_source', help='Provide the source of date/time and gps information needed for geotagging.', action='store',
choices=['exif', 'gpx', 'gopro_video', 'nmea'], default="exif", required=False)
parser.add_argument(
'--geotag_source_path', help='Provide the path to the file source of date/time and gps information needed for geotagging.', action='store',
default=None, required=False)
parser.add_argument(
'--local_time', help='Assume image timestamps are in your local time', action='store_true', default=False, required=False)
parser.add_argument('--sub_second_interval',
help='Sub second time between shots. Used to set image times with sub-second precision',
type=float, default=0.0, required=False)
parser.add_argument('--offset_time', default=0., type=float,
help='time offset between the camera and the gps device, in seconds.', required=False)
parser.add_argument('--offset_angle', default=0., type=float,
help='offset camera angle (90 for right facing, 180 for rear facing, -90 for left facing)', required=False)
parser.add_argument("--use_gps_start_time",
help="Use GPS trace starting time in case of derivating timestamp from filename.", action="store_true", default=False, required=False)
parser.add_argument('--cutoff_distance', default=600., type=float,
help='maximum gps distance in meters within a sequence', required=False)
parser.add_argument('--cutoff_time', default=60., type=float,
help='maximum time interval in seconds within a sequence', required=False)
parser.add_argument('--interpolate_directions',
help='perform interploation of directions', action='store_true', required=False)
parser.add_argument('--flag_duplicates',
help='flag duplicates', action='store_true', required=False)
parser.add_argument('--duplicate_distance',
help='max distance for two images to be considered duplicates in meters', type=float, default=0.1, required=False)
parser.add_argument(
'--duplicate_angle', help='max angle for two images to be considered duplicates in degrees', type=float, default=5, required=False)
parser.add_argument('--skip_EXIF_insert', help='Skip inserting the extracted data into image EXIF.',
action='store_true', default=False, required=False)
parser.add_argument('--keep_original', help='Do not overwrite original images, instead save the processed images in a new directory by adding suffix "_processed" to the import_path.',
action='store_true', default=False, required=False)
def run(self, args):
vars_args = vars(args)
process_user_properties(**({k: v for k, v in vars_args.iteritems()
if k in inspect.getargspec(process_user_properties).args}))
process_import_meta_properties(
**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_import_meta_properties).args}))
process_geotag_properties(
**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_geotag_properties).args}))
process_sequence_properties(
**({k: v for k, v in vars_args.iteritems() if k in inspect.getargspec(process_sequence_properties).args}))
process_upload_params(**({k: v for k, v in vars_args.iteritems()
if k in inspect.getargspec(process_upload_params).args}))
insert_MAPJson(**({k: v for k, v in vars_args.iteritems()
if k in inspect.getargspec(insert_MAPJson).args}))
print("Process done.") | true | true |
f7319e3e7b4ea41930c29754506d03bd52eac137 | 1,166 | py | Python | envs/cell_place_gym/native/acp_state.py | Yvette1993/spinningup | 5094cf291fa24cf93d58b4507dab56dafe73dac1 | [
"MIT"
] | null | null | null | envs/cell_place_gym/native/acp_state.py | Yvette1993/spinningup | 5094cf291fa24cf93d58b4507dab56dafe73dac1 | [
"MIT"
] | null | null | null | envs/cell_place_gym/native/acp_state.py | Yvette1993/spinningup | 5094cf291fa24cf93d58b4507dab56dafe73dac1 | [
"MIT"
] | null | null | null | import numpy as np
from cell_place_gym.native.acp_placement import *
class acp_placement_state(object):
    """Connectivity snapshot of a placement.

    Per this file's own labels: c_matrix is the "Non-Placed Nets Matrix"
    (connection src->dst with at least one unplaced endpoint) and n_matrix is
    the "Placed Nets Matrix" (both endpoints placed).
    """

    def __init__(self, place):
        """Allocate both instance-by-instance connectivity matrices."""
        self.place = place
        self.design = place.design
        l_inst_count = len(self.design.instances)
        # Non-Placed Nets Matrix
        self.c_matrix = np.zeros(shape=(l_inst_count, l_inst_count))
        # Placed Nets Matrix
        self.n_matrix = np.zeros(shape=(l_inst_count, l_inst_count))

    def get_state(self):
        """Rebuild both matrices from the design's current net placement."""
        nets = self.design.nets
        self.c_matrix[:, :] = 0
        self.n_matrix[:, :] = 0
        for n in nets:
            src = n.net_source
            src_id = src.get_inst_id()
            src_placed = src.is_placed()
            src_position = src.get_position()
            for dst in n.net_dests:
                dst_id = dst.get_inst_id()
                dst_placed = dst.is_placed()
                dst_position = dst.get_position()
                if src_placed and dst_placed:
                    self.n_matrix[src_id][dst_id] = 1
                else:
                    # BUGFIX: connections with an unplaced endpoint belong in
                    # the non-placed matrix. Previously both branches wrote
                    # n_matrix, leaving c_matrix permanently zero.
                    self.c_matrix[src_id][dst_id] = 1
        return
| 34.294118 | 71 | 0.542882 | import numpy as np
from cell_place_gym.native.acp_placement import *
class acp_placement_state (object):
def __init__ (self, place):
self.place = place
self.design = place.design
l_inst_count = len (self.design.instances)
self.c_matrix = np.zeros (shape = (l_inst_count, l_inst_count))
self.n_matrix = np.zeros (shape = (l_inst_count, l_inst_count))
def get_state (self):
nets = self.design.nets
self.c_matrix [:,:] = 0
self.n_matrix [:,:] = 0
for n in nets:
src = n.net_source
src_id = src.get_inst_id ()
src_placed = src.is_placed ()
src_position = src.get_position ()
for dst in n.net_dests:
dst_id = dst.get_inst_id ()
dst_placed = dst.is_placed ()
dst_position = dst.get_position ()
if src_placed and dst_placed:
self.n_matrix [src_id][dst_id] = 1
else:
self.n_matrix [src_id][dst_id] = 1
return
| true | true |
f7319e483d3b8d072192a8a4e21ee30c26c57e8a | 2,638 | py | Python | homeassistant/components/sonos/entity.py | DavidDeSloovere/core | 909a20b36d4df6724c955c2ae28cb82fe6d50c2e | [
"Apache-2.0"
] | 1 | 2019-08-19T18:18:50.000Z | 2019-08-19T18:18:50.000Z | homeassistant/components/sonos/entity.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 70 | 2020-08-05T07:20:00.000Z | 2022-03-31T06:01:46.000Z | homeassistant/components/sonos/entity.py | jagadeeshvenkatesh/core | 1bd982668449815fee2105478569f8e4b5670add | [
"Apache-2.0"
] | 1 | 2021-10-29T22:10:26.000Z | 2021-10-29T22:10:26.000Z | """Entity representing a Sonos player."""
from __future__ import annotations
import logging
from pysonos.core import SoCo
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import DeviceInfo, Entity
from .const import (
DOMAIN,
SONOS_ENTITY_CREATED,
SONOS_ENTITY_UPDATE,
SONOS_STATE_UPDATED,
)
from .speaker import SonosSpeaker
_LOGGER = logging.getLogger(__name__)
class SonosEntity(Entity):
    """Common base for Home Assistant entities backed by a Sonos speaker."""

    def __init__(self, speaker: SonosSpeaker) -> None:
        """Keep a reference to the owning SonosSpeaker."""
        self.speaker = speaker

    async def async_added_to_hass(self) -> None:
        """Mark the speaker as seen and subscribe to its dispatcher signals."""
        await self.speaker.async_seen()
        uid = self.soco.uid
        subscriptions = (
            (f"{SONOS_ENTITY_UPDATE}-{uid}", self.async_update),  # pylint: disable=no-member
            (f"{SONOS_STATE_UPDATED}-{uid}", self.async_write_ha_state),
        )
        for signal, target in subscriptions:
            self.async_on_remove(
                async_dispatcher_connect(self.hass, signal, target)
            )

    @property
    def soco(self) -> SoCo:
        """Return the SoCo instance of the underlying speaker."""
        return self.speaker.soco

    @property
    def device_info(self) -> DeviceInfo:
        """Return device-registry information for this speaker."""
        speaker = self.speaker
        return {
            "identifiers": {(DOMAIN, self.soco.uid)},
            "name": speaker.zone_name,
            "model": speaker.model_name.replace("Sonos ", ""),
            "sw_version": speaker.version,
            "connections": {(dr.CONNECTION_NETWORK_MAC, speaker.mac_address)},
            "manufacturer": "Sonos",
            "suggested_area": speaker.zone_name,
        }

    @property
    def available(self) -> bool:
        """Return whether the backing speaker is available."""
        return self.speaker.available

    @property
    def should_poll(self) -> bool:
        """Updates are pushed via dispatcher signals; never poll."""
        return False
class SonosSensorEntity(SonosEntity):
    """Representation of a Sonos sensor entity."""

    async def async_added_to_hass(self) -> None:
        """Handle common setup when added to hass."""
        await super().async_added_to_hass()
        # Announce creation so per-speaker bookkeeping can track which
        # platforms have finished setting up.
        async_dispatcher_send(
            self.hass, f"{SONOS_ENTITY_CREATED}-{self.soco.uid}", self.platform.domain
        )
| 29.311111 | 86 | 0.628886 | from __future__ import annotations
import logging
from pysonos.core import SoCo
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import DeviceInfo, Entity
from .const import (
DOMAIN,
SONOS_ENTITY_CREATED,
SONOS_ENTITY_UPDATE,
SONOS_STATE_UPDATED,
)
from .speaker import SonosSpeaker
_LOGGER = logging.getLogger(__name__)
class SonosEntity(Entity):
def __init__(self, speaker: SonosSpeaker) -> None:
self.speaker = speaker
async def async_added_to_hass(self) -> None:
await self.speaker.async_seen()
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{SONOS_ENTITY_UPDATE}-{self.soco.uid}",
self.async_update,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{SONOS_STATE_UPDATED}-{self.soco.uid}",
self.async_write_ha_state,
)
)
@property
def soco(self) -> SoCo:
return self.speaker.soco
@property
def device_info(self) -> DeviceInfo:
return {
"identifiers": {(DOMAIN, self.soco.uid)},
"name": self.speaker.zone_name,
"model": self.speaker.model_name.replace("Sonos ", ""),
"sw_version": self.speaker.version,
"connections": {(dr.CONNECTION_NETWORK_MAC, self.speaker.mac_address)},
"manufacturer": "Sonos",
"suggested_area": self.speaker.zone_name,
}
@property
def available(self) -> bool:
return self.speaker.available
@property
def should_poll(self) -> bool:
return False
class SonosSensorEntity(SonosEntity):
async def async_added_to_hass(self) -> None:
await super().async_added_to_hass()
async_dispatcher_send(
self.hass, f"{SONOS_ENTITY_CREATED}-{self.soco.uid}", self.platform.domain
)
| true | true |
f7319eef72e8d23633c8d8b5b9c928a276dda952 | 912 | py | Python | vimfiles/bundle/vim-python/submodules/pylint/tests/functional/n/not_in_loop.py | ciskoinch8/vimrc | 5bf77a7e7bc70fac5173ab2e9ea05d7dda3e52b8 | [
"MIT"
] | 463 | 2015-01-15T08:17:42.000Z | 2022-03-28T15:10:20.000Z | vimfiles/bundle/vim-python/submodules/pylint/tests/functional/n/not_in_loop.py | ciskoinch8/vimrc | 5bf77a7e7bc70fac5173ab2e9ea05d7dda3e52b8 | [
"MIT"
] | 52 | 2015-01-06T02:43:59.000Z | 2022-03-14T11:15:21.000Z | vimfiles/bundle/vim-python/submodules/pylint/tests/functional/n/not_in_loop.py | ciskoinch8/vimrc | 5bf77a7e7bc70fac5173ab2e9ea05d7dda3e52b8 | [
"MIT"
] | 249 | 2015-01-07T22:49:49.000Z | 2022-03-18T02:32:06.000Z | """Test that not-in-loop is detected properly."""
# pylint: disable=missing-docstring, invalid-name, too-few-public-methods
# pylint: disable=useless-else-on-loop, using-constant-test, useless-object-inheritance
# pylint: disable=no-else-continue
while True:
def ala():
continue # [not-in-loop]
while True:
pass
else:
continue # [not-in-loop]
def lala():
continue # [not-in-loop]
while True:
class A(object):
continue # [not-in-loop]
for _ in range(10):
pass
else:
continue # [not-in-loop]
for _ in range(42):
pass
else:
break # [not-in-loop]
if True:
continue # [not-in-loop]
else:
break # [not-in-loop]
for _ in range(10):
for _ in range(20):
pass
else:
continue
while True:
while True:
break
else:
break
break
else:
pass
for _ in range(1):
continue
for _ in range(42):
break
| 16.285714 | 87 | 0.612939 |
while True:
def ala():
continue
while True:
pass
else:
continue
def lala():
continue
while True:
class A(object):
continue
for _ in range(10):
pass
else:
continue
for _ in range(42):
pass
else:
break
if True:
continue
else:
break
for _ in range(10):
for _ in range(20):
pass
else:
continue
while True:
while True:
break
else:
break
break
else:
pass
for _ in range(1):
continue
for _ in range(42):
break
| true | true |
f7319f7dedc4d4bfa733e08b15bd1217c7611f0d | 860 | py | Python | wagtail_localize/migrations/0004_one_source_per_objectlocale.py | KalobTaulien/wagtail-localize | e513d18dea5f76f6941f1acf55f945150de767eb | [
"BSD-3-Clause"
] | 123 | 2019-11-21T12:55:04.000Z | 2022-03-23T08:08:47.000Z | wagtail_localize/migrations/0004_one_source_per_objectlocale.py | KalobTaulien/wagtail-localize | e513d18dea5f76f6941f1acf55f945150de767eb | [
"BSD-3-Clause"
] | 334 | 2019-11-20T10:40:08.000Z | 2022-03-27T17:33:01.000Z | wagtail_localize/migrations/0004_one_source_per_objectlocale.py | KalobTaulien/wagtail-localize | e513d18dea5f76f6941f1acf55f945150de767eb | [
"BSD-3-Clause"
] | 41 | 2020-01-16T17:24:52.000Z | 2022-03-28T13:09:59.000Z | # Generated by Django 3.0.8 on 2020-08-05 09:33
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated (Django 3.0.8). Adds TranslationSource.last_updated_at,
    # makes created_at auto-set on insert, and enforces at most one
    # TranslationSource per (object, locale) pair.

    dependencies = [
        ('wagtail_localize', '0003_delete_translation_sources'),
    ]

    operations = [
        migrations.AddField(
            model_name='translationsource',
            name='last_updated_at',
            # Backfills existing rows with "now"; auto_now is not used, so
            # application code is responsible for keeping this current.
            field=models.DateTimeField(default=django.utils.timezone.now),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='translationsource',
            name='created_at',
            field=models.DateTimeField(auto_now_add=True),
        ),
        migrations.AlterUniqueTogether(
            name='translationsource',
            unique_together={('object', 'locale')},
        ),
    ]
| 27.741935 | 74 | 0.626744 |
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('wagtail_localize', '0003_delete_translation_sources'),
]
operations = [
migrations.AddField(
model_name='translationsource',
name='last_updated_at',
field=models.DateTimeField(default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AlterField(
model_name='translationsource',
name='created_at',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterUniqueTogether(
name='translationsource',
unique_together={('object', 'locale')},
),
]
| true | true |
f731a03ce0e6675d8bd15187040163106262826b | 23,980 | py | Python | awx/main/models/notifications.py | sganesh999/awx | 783a6dc5111e1757982bf7f5e1bb4bb197d4f199 | [
"Apache-2.0"
] | null | null | null | awx/main/models/notifications.py | sganesh999/awx | 783a6dc5111e1757982bf7f5e1bb4bb197d4f199 | [
"Apache-2.0"
] | null | null | null | awx/main/models/notifications.py | sganesh999/awx | 783a6dc5111e1757982bf7f5e1bb4bb197d4f199 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
from copy import deepcopy
import datetime
import logging
import json
from django.db import models
from django.conf import settings
from django.core.mail.message import EmailMessage
from django.db import connection
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_str, force_text
from jinja2 import sandbox
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
# AWX
from awx.api.versioning import reverse
from awx.main.models.base import CommonModelNameNotUnique, CreatedModifiedModel, prevent_search
from awx.main.utils import encrypt_field, decrypt_field, set_environ
from awx.main.notifications.email_backend import CustomEmailBackend
from awx.main.notifications.slack_backend import SlackBackend
from awx.main.notifications.twilio_backend import TwilioBackend
from awx.main.notifications.pagerduty_backend import PagerDutyBackend
from awx.main.notifications.hipchat_backend import HipChatBackend
from awx.main.notifications.webhook_backend import WebhookBackend
from awx.main.notifications.mattermost_backend import MattermostBackend
from awx.main.notifications.grafana_backend import GrafanaBackend
from awx.main.notifications.rocketchat_backend import RocketChatBackend
from awx.main.notifications.irc_backend import IrcBackend
from awx.main.fields import JSONField
logger = logging.getLogger('awx.main.models.notifications')
__all__ = ['NotificationTemplate', 'Notification']
class NotificationTemplate(CommonModelNameNotUnique):
    """Reusable configuration describing how and where to send notifications."""

    # (type key, display name, backend class) for each supported transport.
    NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend),
                          ('slack', _('Slack'), SlackBackend),
                          ('twilio', _('Twilio'), TwilioBackend),
                          ('pagerduty', _('Pagerduty'), PagerDutyBackend),
                          ('grafana', _('Grafana'), GrafanaBackend),
                          ('hipchat', _('HipChat'), HipChatBackend),
                          ('webhook', _('Webhook'), WebhookBackend),
                          ('mattermost', _('Mattermost'), MattermostBackend),
                          ('rocketchat', _('Rocket.Chat'), RocketChatBackend),
                          ('irc', _('IRC'), IrcBackend)]
    NOTIFICATION_TYPE_CHOICES = sorted([(x[0], x[1]) for x in NOTIFICATION_TYPES])
    CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES])

    class Meta:
        app_label = 'main'
        unique_together = ('organization', 'name')
        ordering = ("name",)

    organization = models.ForeignKey(
        'Organization',
        blank=False,
        null=True,
        on_delete=models.CASCADE,
        related_name='notification_templates',
    )

    notification_type = models.CharField(
        max_length = 32,
        choices=NOTIFICATION_TYPE_CHOICES,
    )

    # May contain secrets (tokens, passwords); prevent_search keeps it out of
    # API search/filter expressions.
    notification_configuration = prevent_search(JSONField(blank=False))

    def default_messages():
        # Plain function (no self): used only as the JSONField default below.
        return {'started': None, 'success': None, 'error': None, 'workflow_approval': None}

    messages = JSONField(
        null=True,
        blank=True,
        default=default_messages,
        help_text=_('Optional custom messages for notification template.'))

    def has_message(self, condition):
        # True only if a non-empty custom 'message' exists for this event.
        potential_template = self.messages.get(condition, {})
        if potential_template == {}:
            return False
        if potential_template.get('message', {}) == {}:
            return False
        return True

    def get_message(self, condition):
        # Raw custom-message dict for the event condition ({} if unset).
        return self.messages.get(condition, {})

    def get_absolute_url(self, request=None):
        return reverse('api:notification_template_detail', kwargs={'pk': self.pk}, request=request)

    @property
    def notification_class(self):
        # Backend class for this template's transport (e.g. SlackBackend).
        return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type]

    def save(self, *args, **kwargs):
        """Save the template, merging prior custom messages and handling secrets."""
        new_instance = not bool(self.pk)
        update_fields = kwargs.get('update_fields', [])

        # preserve existing notification messages if not overwritten by new messages
        if not new_instance:
            old_nt = NotificationTemplate.objects.get(pk=self.id)
            old_messages = old_nt.messages
            new_messages = self.messages

            def merge_messages(local_old_messages, local_new_messages, local_event):
                # Copy 'message'/'body' from the old event dict into the new
                # one wherever the new dict left them unset.
                if local_new_messages.get(local_event, {}) and local_old_messages.get(local_event, {}):
                    local_old_event_msgs = local_old_messages[local_event]
                    local_new_event_msgs = local_new_messages[local_event]
                    for msg_type in ['message', 'body']:
                        if msg_type not in local_new_event_msgs and local_old_event_msgs.get(msg_type, None):
                            local_new_event_msgs[msg_type] = local_old_event_msgs[msg_type]
            if old_messages is not None and new_messages is not None:
                for event in ('started', 'success', 'error', 'workflow_approval'):
                    if not new_messages.get(event, {}) and old_messages.get(event, {}):
                        new_messages[event] = old_messages[event]
                        continue
                    # workflow_approval nests one more level (per sub-state).
                    if event == 'workflow_approval' and old_messages.get('workflow_approval', None):
                        new_messages.setdefault('workflow_approval', {})
                        for subevent in ('running', 'approved', 'timed_out', 'denied'):
                            old_wfa_messages = old_messages['workflow_approval']
                            new_wfa_messages = new_messages['workflow_approval']
                            if not new_wfa_messages.get(subevent, {}) and old_wfa_messages.get(subevent, {}):
                                new_wfa_messages[subevent] = old_wfa_messages[subevent]
                                continue
                            if old_wfa_messages:
                                merge_messages(old_wfa_messages, new_wfa_messages, subevent)
                    else:
                        merge_messages(old_messages, new_messages, event)
                    new_messages.setdefault(event, None)

        # Password-typed fields: on first save, stash plaintext aside and save
        # blank (no pk exists yet for encryption); on update, encrypt in place.
        for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                            self.notification_class.init_parameters):
            if self.notification_configuration[field].startswith("$encrypted$"):
                continue
            if new_instance:
                value = self.notification_configuration[field]
                setattr(self, '_saved_{}_{}'.format("config", field), value)
                self.notification_configuration[field] = ''
            else:
                encrypted = encrypt_field(self, 'notification_configuration', subfield=field)
                self.notification_configuration[field] = encrypted
                if 'notification_configuration' not in update_fields:
                    update_fields.append('notification_configuration')
        super(NotificationTemplate, self).save(*args, **kwargs)
        # Second phase for new rows: restore the stashed plaintext and re-save,
        # which now takes the encryption branch above.
        if new_instance:
            update_fields = []
            for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                                self.notification_class.init_parameters):
                saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '')
                self.notification_configuration[field] = saved_value
                if 'notification_configuration' not in update_fields:
                    update_fields.append('notification_configuration')
            self.save(update_fields=update_fields)

    @property
    def recipients(self):
        # Backend-specific recipient value stored under the backend's
        # declared recipient_parameter key.
        return self.notification_configuration[self.notification_class.recipient_parameter]

    def generate_notification(self, msg, body):
        """Create and persist a Notification record tied to this template."""
        notification = Notification(notification_template=self,
                                    notification_type=self.notification_type,
                                    recipients=smart_str(self.recipients),
                                    subject=msg,
                                    body=body)
        notification.save()
        return notification

    def send(self, subject, body):
        """Decrypt the configuration and deliver the message via the backend.

        NOTE(review): this pops recipients/sender out of
        self.notification_configuration in memory, so the in-memory instance
        is consumed by sending.
        """
        for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
                            self.notification_class.init_parameters):
            if field in self.notification_configuration:
                self.notification_configuration[field] = decrypt_field(self,
                                                                       'notification_configuration',
                                                                       subfield=field)
        recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter)
        if not isinstance(recipients, list):
            recipients = [recipients]
        sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None)
        notification_configuration = deepcopy(self.notification_configuration)
        # Fill in backend defaults for any parameters the user left unset.
        for field, params in self.notification_class.init_parameters.items():
            if field not in notification_configuration:
                if 'default' in params:
                    notification_configuration[field] = params['default']
        backend_obj = self.notification_class(**notification_configuration)
        notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients)
        with set_environ(**settings.AWX_TASK_ENV):
            return backend_obj.send_messages([notification_obj])

    def display_notification_configuration(self):
        # Masked copy of the configuration that is safe to show in the API.
        field_val = self.notification_configuration.copy()
        for field in self.notification_class.init_parameters:
            if field in field_val and force_text(field_val[field]).startswith('$encrypted$'):
                field_val[field] = '$encrypted$'
        return field_val
class Notification(CreatedModifiedModel):
'''
A notification event emitted when a NotificationTemplate is run
'''
NOTIFICATION_STATE_CHOICES = [
('pending', _('Pending')),
('successful', _('Successful')),
('failed', _('Failed')),
]
class Meta:
app_label = 'main'
ordering = ('pk',)
notification_template = models.ForeignKey(
'NotificationTemplate',
related_name='notifications',
on_delete=models.CASCADE,
editable=False
)
status = models.CharField(
max_length=20,
choices=NOTIFICATION_STATE_CHOICES,
default='pending',
editable=False,
)
error = models.TextField(
blank=True,
default='',
editable=False,
)
notifications_sent = models.IntegerField(
default=0,
editable=False,
)
notification_type = models.CharField(
max_length = 32,
choices=NotificationTemplate.NOTIFICATION_TYPE_CHOICES,
)
recipients = models.TextField(
blank=True,
default='',
editable=False,
)
subject = models.TextField(
blank=True,
default='',
editable=False,
)
body = JSONField(blank=True)
def get_absolute_url(self, request=None):
return reverse('api:notification_detail', kwargs={'pk': self.pk}, request=request)
class JobNotificationMixin(object):
STATUS_TO_TEMPLATE_TYPE = {'succeeded': 'success',
'running': 'started',
'failed': 'error'}
# Tree of fields that can be safely referenced in a notification message
JOB_FIELDS_WHITELIST = ['id', 'type', 'url', 'created', 'modified', 'name', 'description', 'job_type', 'playbook',
'forks', 'limit', 'verbosity', 'job_tags', 'force_handlers', 'skip_tags', 'start_at_task',
'timeout', 'use_fact_cache', 'launch_type', 'status', 'failed', 'started', 'finished',
'elapsed', 'job_explanation', 'execution_node', 'controller_node', 'allow_simultaneous',
'scm_revision', 'diff_mode', 'job_slice_number', 'job_slice_count', 'custom_virtualenv',
'approval_status', 'approval_node_name', 'workflow_url',
{'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark'
'processed', 'rescued', 'ignored']},
{'summary_fields': [{'inventory': ['id', 'name', 'description', 'has_active_failures',
'total_hosts', 'hosts_with_active_failures', 'total_groups',
'has_inventory_sources',
'total_inventory_sources', 'inventory_sources_with_failures',
'organization_id', 'kind']},
{'project': ['id', 'name', 'description', 'status', 'scm_type']},
{'job_template': ['id', 'name', 'description']},
{'unified_job_template': ['id', 'name', 'description', 'unified_job_type']},
{'instance_group': ['name', 'id']},
{'created_by': ['id', 'username', 'first_name', 'last_name']},
{'labels': ['count', 'results']}]}]
@classmethod
def context_stub(cls):
"""Returns a stub context that can be used for validating notification messages.
Context has the same structure as the context that will actually be used to render
a notification message."""
context = {'job': {'allow_simultaneous': False,
'controller_node': 'foo_controller',
'created': datetime.datetime(2018, 11, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
'custom_virtualenv': 'my_venv',
'description': 'Sample job description',
'diff_mode': False,
'elapsed': 0.403018,
'execution_node': 'awx',
'failed': False,
'finished': False,
'force_handlers': False,
'forks': 0,
'host_status_counts': {'skipped': 1, 'ok': 5, 'changed': 3, 'failures': 0, 'dark': 0, 'failed': False, 'processed': 0, 'rescued': 0},
'id': 42,
'job_explanation': 'Sample job explanation',
'job_slice_count': 1,
'job_slice_number': 0,
'job_tags': '',
'job_type': 'run',
'launch_type': 'workflow',
'limit': 'bar_limit',
'modified': datetime.datetime(2018, 12, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
'name': 'Stub JobTemplate',
'playbook': 'ping.yml',
'scm_revision': '',
'skip_tags': '',
'start_at_task': '',
'started': '2019-07-29T17:38:14.137461Z',
'status': 'running',
'summary_fields': {'created_by': {'first_name': '',
'id': 1,
'last_name': '',
'username': 'admin'},
'instance_group': {'id': 1, 'name': 'tower'},
'inventory': {'description': 'Sample inventory description',
'has_active_failures': False,
'has_inventory_sources': False,
'hosts_with_active_failures': 0,
'id': 17,
'inventory_sources_with_failures': 0,
'kind': '',
'name': 'Stub Inventory',
'organization_id': 121,
'total_groups': 0,
'total_hosts': 1,
'total_inventory_sources': 0},
'job_template': {'description': 'Sample job template description',
'id': 39,
'name': 'Stub JobTemplate'},
'labels': {'count': 0, 'results': []},
'project': {'description': 'Sample project description',
'id': 38,
'name': 'Stub project',
'scm_type': 'git',
'status': 'successful'},
'unified_job_template': {'description': 'Sample unified job template description',
'id': 39,
'name': 'Stub Job Template',
'unified_job_type': 'job'}},
'timeout': 0,
'type': 'job',
'url': '/api/v2/jobs/13/',
'use_fact_cache': False,
'verbosity': 0},
'job_friendly_name': 'Job',
'url': 'https://towerhost/#/jobs/playbook/1010',
'approval_status': 'approved',
'approval_node_name': 'Approve Me',
'workflow_url': 'https://towerhost/#/workflows/1010',
'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
'traceback': '',
'status': 'running',
'started': '2019-08-07T21:46:38.362630+00:00',
'project': 'Stub project',
'playbook': 'ping.yml',
'name': 'Stub Job Template',
'limit': '',
'inventory': 'Stub Inventory',
'id': 42,
'hosts': {},
'friendly_name': 'Job',
'finished': False,
'credential': 'Stub credential',
'created_by': 'admin'}"""}
return context
def context(self, serialized_job):
"""Returns a dictionary that can be used for rendering notification messages.
The context will contain whitelisted content retrieved from a serialized job object
(see JobNotificationMixin.JOB_FIELDS_WHITELIST), the job's friendly name,
and a url to the job run."""
job_context = {'host_status_counts': {}}
summary = None
if hasattr(self, 'job_host_summaries'):
summary = self.job_host_summaries.first()
if summary:
from awx.api.serializers import JobHostSummarySerializer
summary_data = JobHostSummarySerializer(summary).to_representation(summary)
job_context['host_status_counts'] = summary_data
context = {
'job': job_context,
'job_friendly_name': self.get_notification_friendly_name(),
'url': self.get_ui_url(),
'job_metadata': json.dumps(self.notification_data(), indent=4)
}
def build_context(node, fields, whitelisted_fields):
for safe_field in whitelisted_fields:
if type(safe_field) is dict:
field, whitelist_subnode = safe_field.copy().popitem()
# ensure content present in job serialization
if field not in fields:
continue
subnode = fields[field]
node[field] = {}
build_context(node[field], subnode, whitelist_subnode)
else:
# ensure content present in job serialization
if safe_field not in fields:
continue
node[safe_field] = fields[safe_field]
build_context(context['job'], serialized_job, self.JOB_FIELDS_WHITELIST)
return context
def get_notification_templates(self):
raise RuntimeError("Define me")
def get_notification_friendly_name(self):
raise RuntimeError("Define me")
def notification_data(self):
raise RuntimeError("Define me")
def build_notification_message(self, nt, status):
env = sandbox.ImmutableSandboxedEnvironment()
from awx.api.serializers import UnifiedJobSerializer
job_serialization = UnifiedJobSerializer(self).to_representation(self)
context = self.context(job_serialization)
msg_template = body_template = None
msg = body = ''
# Use custom template if available
if nt.messages:
template = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
msg_template = template.get('message', None)
body_template = template.get('body', None)
# If custom template not provided, look up default template
default_template = nt.notification_class.default_messages[self.STATUS_TO_TEMPLATE_TYPE[status]]
if not msg_template:
msg_template = default_template.get('message', None)
if not body_template:
body_template = default_template.get('body', None)
if msg_template:
try:
msg = env.from_string(msg_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
msg = ''
if body_template:
try:
body = env.from_string(body_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
body = ''
return (msg, body)
def send_notification_templates(self, status):
from awx.main.tasks import send_notifications # avoid circular import
if status not in ['running', 'succeeded', 'failed']:
raise ValueError(_("status must be either running, succeeded or failed"))
try:
notification_templates = self.get_notification_templates()
except Exception:
logger.warn("No notification template defined for emitting notification")
return
if not notification_templates:
return
for nt in set(notification_templates.get(self.STATUS_TO_TEMPLATE_TYPE[status], [])):
(msg, body) = self.build_notification_message(nt, status)
# Use kwargs to force late-binding
# https://stackoverflow.com/a/3431699/10669572
def send_it(local_nt=nt, local_msg=msg, local_body=body):
def _func():
send_notifications.delay([local_nt.generate_notification(local_msg, local_body).id],
job_id=self.id)
return _func
connection.on_commit(send_it())
| 49.240246 | 160 | 0.544287 |
from copy import deepcopy
import datetime
import logging
import json
from django.db import models
from django.conf import settings
from django.core.mail.message import EmailMessage
from django.db import connection
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_str, force_text
from jinja2 import sandbox
from jinja2.exceptions import TemplateSyntaxError, UndefinedError, SecurityError
from awx.api.versioning import reverse
from awx.main.models.base import CommonModelNameNotUnique, CreatedModifiedModel, prevent_search
from awx.main.utils import encrypt_field, decrypt_field, set_environ
from awx.main.notifications.email_backend import CustomEmailBackend
from awx.main.notifications.slack_backend import SlackBackend
from awx.main.notifications.twilio_backend import TwilioBackend
from awx.main.notifications.pagerduty_backend import PagerDutyBackend
from awx.main.notifications.hipchat_backend import HipChatBackend
from awx.main.notifications.webhook_backend import WebhookBackend
from awx.main.notifications.mattermost_backend import MattermostBackend
from awx.main.notifications.grafana_backend import GrafanaBackend
from awx.main.notifications.rocketchat_backend import RocketChatBackend
from awx.main.notifications.irc_backend import IrcBackend
from awx.main.fields import JSONField
logger = logging.getLogger('awx.main.models.notifications')
__all__ = ['NotificationTemplate', 'Notification']
class NotificationTemplate(CommonModelNameNotUnique):
NOTIFICATION_TYPES = [('email', _('Email'), CustomEmailBackend),
('slack', _('Slack'), SlackBackend),
('twilio', _('Twilio'), TwilioBackend),
('pagerduty', _('Pagerduty'), PagerDutyBackend),
('grafana', _('Grafana'), GrafanaBackend),
('hipchat', _('HipChat'), HipChatBackend),
('webhook', _('Webhook'), WebhookBackend),
('mattermost', _('Mattermost'), MattermostBackend),
('rocketchat', _('Rocket.Chat'), RocketChatBackend),
('irc', _('IRC'), IrcBackend)]
NOTIFICATION_TYPE_CHOICES = sorted([(x[0], x[1]) for x in NOTIFICATION_TYPES])
CLASS_FOR_NOTIFICATION_TYPE = dict([(x[0], x[2]) for x in NOTIFICATION_TYPES])
class Meta:
app_label = 'main'
unique_together = ('organization', 'name')
ordering = ("name",)
organization = models.ForeignKey(
'Organization',
blank=False,
null=True,
on_delete=models.CASCADE,
related_name='notification_templates',
)
notification_type = models.CharField(
max_length = 32,
choices=NOTIFICATION_TYPE_CHOICES,
)
notification_configuration = prevent_search(JSONField(blank=False))
def default_messages():
return {'started': None, 'success': None, 'error': None, 'workflow_approval': None}
messages = JSONField(
null=True,
blank=True,
default=default_messages,
help_text=_('Optional custom messages for notification template.'))
def has_message(self, condition):
potential_template = self.messages.get(condition, {})
if potential_template == {}:
return False
if potential_template.get('message', {}) == {}:
return False
return True
def get_message(self, condition):
return self.messages.get(condition, {})
def get_absolute_url(self, request=None):
return reverse('api:notification_template_detail', kwargs={'pk': self.pk}, request=request)
@property
def notification_class(self):
return self.CLASS_FOR_NOTIFICATION_TYPE[self.notification_type]
def save(self, *args, **kwargs):
new_instance = not bool(self.pk)
update_fields = kwargs.get('update_fields', [])
if not new_instance:
old_nt = NotificationTemplate.objects.get(pk=self.id)
old_messages = old_nt.messages
new_messages = self.messages
def merge_messages(local_old_messages, local_new_messages, local_event):
if local_new_messages.get(local_event, {}) and local_old_messages.get(local_event, {}):
local_old_event_msgs = local_old_messages[local_event]
local_new_event_msgs = local_new_messages[local_event]
for msg_type in ['message', 'body']:
if msg_type not in local_new_event_msgs and local_old_event_msgs.get(msg_type, None):
local_new_event_msgs[msg_type] = local_old_event_msgs[msg_type]
if old_messages is not None and new_messages is not None:
for event in ('started', 'success', 'error', 'workflow_approval'):
if not new_messages.get(event, {}) and old_messages.get(event, {}):
new_messages[event] = old_messages[event]
continue
if event == 'workflow_approval' and old_messages.get('workflow_approval', None):
new_messages.setdefault('workflow_approval', {})
for subevent in ('running', 'approved', 'timed_out', 'denied'):
old_wfa_messages = old_messages['workflow_approval']
new_wfa_messages = new_messages['workflow_approval']
if not new_wfa_messages.get(subevent, {}) and old_wfa_messages.get(subevent, {}):
new_wfa_messages[subevent] = old_wfa_messages[subevent]
continue
if old_wfa_messages:
merge_messages(old_wfa_messages, new_wfa_messages, subevent)
else:
merge_messages(old_messages, new_messages, event)
new_messages.setdefault(event, None)
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
self.notification_class.init_parameters):
if self.notification_configuration[field].startswith("$encrypted$"):
continue
if new_instance:
value = self.notification_configuration[field]
setattr(self, '_saved_{}_{}'.format("config", field), value)
self.notification_configuration[field] = ''
else:
encrypted = encrypt_field(self, 'notification_configuration', subfield=field)
self.notification_configuration[field] = encrypted
if 'notification_configuration' not in update_fields:
update_fields.append('notification_configuration')
super(NotificationTemplate, self).save(*args, **kwargs)
if new_instance:
update_fields = []
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
self.notification_class.init_parameters):
saved_value = getattr(self, '_saved_{}_{}'.format("config", field), '')
self.notification_configuration[field] = saved_value
if 'notification_configuration' not in update_fields:
update_fields.append('notification_configuration')
self.save(update_fields=update_fields)
@property
def recipients(self):
return self.notification_configuration[self.notification_class.recipient_parameter]
def generate_notification(self, msg, body):
notification = Notification(notification_template=self,
notification_type=self.notification_type,
recipients=smart_str(self.recipients),
subject=msg,
body=body)
notification.save()
return notification
def send(self, subject, body):
for field in filter(lambda x: self.notification_class.init_parameters[x]['type'] == "password",
self.notification_class.init_parameters):
if field in self.notification_configuration:
self.notification_configuration[field] = decrypt_field(self,
'notification_configuration',
subfield=field)
recipients = self.notification_configuration.pop(self.notification_class.recipient_parameter)
if not isinstance(recipients, list):
recipients = [recipients]
sender = self.notification_configuration.pop(self.notification_class.sender_parameter, None)
notification_configuration = deepcopy(self.notification_configuration)
for field, params in self.notification_class.init_parameters.items():
if field not in notification_configuration:
if 'default' in params:
notification_configuration[field] = params['default']
backend_obj = self.notification_class(**notification_configuration)
notification_obj = EmailMessage(subject, backend_obj.format_body(body), sender, recipients)
with set_environ(**settings.AWX_TASK_ENV):
return backend_obj.send_messages([notification_obj])
def display_notification_configuration(self):
field_val = self.notification_configuration.copy()
for field in self.notification_class.init_parameters:
if field in field_val and force_text(field_val[field]).startswith('$encrypted$'):
field_val[field] = '$encrypted$'
return field_val
class Notification(CreatedModifiedModel):
NOTIFICATION_STATE_CHOICES = [
('pending', _('Pending')),
('successful', _('Successful')),
('failed', _('Failed')),
]
class Meta:
app_label = 'main'
ordering = ('pk',)
notification_template = models.ForeignKey(
'NotificationTemplate',
related_name='notifications',
on_delete=models.CASCADE,
editable=False
)
status = models.CharField(
max_length=20,
choices=NOTIFICATION_STATE_CHOICES,
default='pending',
editable=False,
)
error = models.TextField(
blank=True,
default='',
editable=False,
)
notifications_sent = models.IntegerField(
default=0,
editable=False,
)
notification_type = models.CharField(
max_length = 32,
choices=NotificationTemplate.NOTIFICATION_TYPE_CHOICES,
)
recipients = models.TextField(
blank=True,
default='',
editable=False,
)
subject = models.TextField(
blank=True,
default='',
editable=False,
)
body = JSONField(blank=True)
def get_absolute_url(self, request=None):
return reverse('api:notification_detail', kwargs={'pk': self.pk}, request=request)
class JobNotificationMixin(object):
STATUS_TO_TEMPLATE_TYPE = {'succeeded': 'success',
'running': 'started',
'failed': 'error'}
JOB_FIELDS_WHITELIST = ['id', 'type', 'url', 'created', 'modified', 'name', 'description', 'job_type', 'playbook',
'forks', 'limit', 'verbosity', 'job_tags', 'force_handlers', 'skip_tags', 'start_at_task',
'timeout', 'use_fact_cache', 'launch_type', 'status', 'failed', 'started', 'finished',
'elapsed', 'job_explanation', 'execution_node', 'controller_node', 'allow_simultaneous',
'scm_revision', 'diff_mode', 'job_slice_number', 'job_slice_count', 'custom_virtualenv',
'approval_status', 'approval_node_name', 'workflow_url',
{'host_status_counts': ['skipped', 'ok', 'changed', 'failed', 'failures', 'dark'
'processed', 'rescued', 'ignored']},
{'summary_fields': [{'inventory': ['id', 'name', 'description', 'has_active_failures',
'total_hosts', 'hosts_with_active_failures', 'total_groups',
'has_inventory_sources',
'total_inventory_sources', 'inventory_sources_with_failures',
'organization_id', 'kind']},
{'project': ['id', 'name', 'description', 'status', 'scm_type']},
{'job_template': ['id', 'name', 'description']},
{'unified_job_template': ['id', 'name', 'description', 'unified_job_type']},
{'instance_group': ['name', 'id']},
{'created_by': ['id', 'username', 'first_name', 'last_name']},
{'labels': ['count', 'results']}]}]
@classmethod
def context_stub(cls):
context = {'job': {'allow_simultaneous': False,
'controller_node': 'foo_controller',
'created': datetime.datetime(2018, 11, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
'custom_virtualenv': 'my_venv',
'description': 'Sample job description',
'diff_mode': False,
'elapsed': 0.403018,
'execution_node': 'awx',
'failed': False,
'finished': False,
'force_handlers': False,
'forks': 0,
'host_status_counts': {'skipped': 1, 'ok': 5, 'changed': 3, 'failures': 0, 'dark': 0, 'failed': False, 'processed': 0, 'rescued': 0},
'id': 42,
'job_explanation': 'Sample job explanation',
'job_slice_count': 1,
'job_slice_number': 0,
'job_tags': '',
'job_type': 'run',
'launch_type': 'workflow',
'limit': 'bar_limit',
'modified': datetime.datetime(2018, 12, 13, 6, 4, 0, 0, tzinfo=datetime.timezone.utc),
'name': 'Stub JobTemplate',
'playbook': 'ping.yml',
'scm_revision': '',
'skip_tags': '',
'start_at_task': '',
'started': '2019-07-29T17:38:14.137461Z',
'status': 'running',
'summary_fields': {'created_by': {'first_name': '',
'id': 1,
'last_name': '',
'username': 'admin'},
'instance_group': {'id': 1, 'name': 'tower'},
'inventory': {'description': 'Sample inventory description',
'has_active_failures': False,
'has_inventory_sources': False,
'hosts_with_active_failures': 0,
'id': 17,
'inventory_sources_with_failures': 0,
'kind': '',
'name': 'Stub Inventory',
'organization_id': 121,
'total_groups': 0,
'total_hosts': 1,
'total_inventory_sources': 0},
'job_template': {'description': 'Sample job template description',
'id': 39,
'name': 'Stub JobTemplate'},
'labels': {'count': 0, 'results': []},
'project': {'description': 'Sample project description',
'id': 38,
'name': 'Stub project',
'scm_type': 'git',
'status': 'successful'},
'unified_job_template': {'description': 'Sample unified job template description',
'id': 39,
'name': 'Stub Job Template',
'unified_job_type': 'job'}},
'timeout': 0,
'type': 'job',
'url': '/api/v2/jobs/13/',
'use_fact_cache': False,
'verbosity': 0},
'job_friendly_name': 'Job',
'url': 'https://towerhost/#/jobs/playbook/1010',
'approval_status': 'approved',
'approval_node_name': 'Approve Me',
'workflow_url': 'https://towerhost/#/workflows/1010',
'job_metadata': """{'url': 'https://towerhost/$/jobs/playbook/13',
'traceback': '',
'status': 'running',
'started': '2019-08-07T21:46:38.362630+00:00',
'project': 'Stub project',
'playbook': 'ping.yml',
'name': 'Stub Job Template',
'limit': '',
'inventory': 'Stub Inventory',
'id': 42,
'hosts': {},
'friendly_name': 'Job',
'finished': False,
'credential': 'Stub credential',
'created_by': 'admin'}"""}
return context
def context(self, serialized_job):
job_context = {'host_status_counts': {}}
summary = None
if hasattr(self, 'job_host_summaries'):
summary = self.job_host_summaries.first()
if summary:
from awx.api.serializers import JobHostSummarySerializer
summary_data = JobHostSummarySerializer(summary).to_representation(summary)
job_context['host_status_counts'] = summary_data
context = {
'job': job_context,
'job_friendly_name': self.get_notification_friendly_name(),
'url': self.get_ui_url(),
'job_metadata': json.dumps(self.notification_data(), indent=4)
}
def build_context(node, fields, whitelisted_fields):
for safe_field in whitelisted_fields:
if type(safe_field) is dict:
field, whitelist_subnode = safe_field.copy().popitem()
if field not in fields:
continue
subnode = fields[field]
node[field] = {}
build_context(node[field], subnode, whitelist_subnode)
else:
if safe_field not in fields:
continue
node[safe_field] = fields[safe_field]
build_context(context['job'], serialized_job, self.JOB_FIELDS_WHITELIST)
return context
def get_notification_templates(self):
raise RuntimeError("Define me")
def get_notification_friendly_name(self):
raise RuntimeError("Define me")
def notification_data(self):
raise RuntimeError("Define me")
def build_notification_message(self, nt, status):
env = sandbox.ImmutableSandboxedEnvironment()
from awx.api.serializers import UnifiedJobSerializer
job_serialization = UnifiedJobSerializer(self).to_representation(self)
context = self.context(job_serialization)
msg_template = body_template = None
msg = body = ''
if nt.messages:
template = nt.messages.get(self.STATUS_TO_TEMPLATE_TYPE[status], {}) or {}
msg_template = template.get('message', None)
body_template = template.get('body', None)
default_template = nt.notification_class.default_messages[self.STATUS_TO_TEMPLATE_TYPE[status]]
if not msg_template:
msg_template = default_template.get('message', None)
if not body_template:
body_template = default_template.get('body', None)
if msg_template:
try:
msg = env.from_string(msg_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
msg = ''
if body_template:
try:
body = env.from_string(body_template).render(**context)
except (TemplateSyntaxError, UndefinedError, SecurityError):
body = ''
return (msg, body)
def send_notification_templates(self, status):
from awx.main.tasks import send_notifications
if status not in ['running', 'succeeded', 'failed']:
raise ValueError(_("status must be either running, succeeded or failed"))
try:
notification_templates = self.get_notification_templates()
except Exception:
logger.warn("No notification template defined for emitting notification")
return
if not notification_templates:
return
for nt in set(notification_templates.get(self.STATUS_TO_TEMPLATE_TYPE[status], [])):
(msg, body) = self.build_notification_message(nt, status)
def send_it(local_nt=nt, local_msg=msg, local_body=body):
def _func():
send_notifications.delay([local_nt.generate_notification(local_msg, local_body).id],
job_id=self.id)
return _func
connection.on_commit(send_it())
| true | true |
f731a055821f656efbcb5c83ad2f7ff8e59dd572 | 818 | py | Python | manage.py | neosergio/WisdomBox | f41bd828f5e264c7ad05262b29c8f02cf904b54a | [
"MIT"
] | null | null | null | manage.py | neosergio/WisdomBox | f41bd828f5e264c7ad05262b29c8f02cf904b54a | [
"MIT"
] | 2 | 2017-02-09T14:52:43.000Z | 2017-02-10T19:31:38.000Z | manage.py | neosergio/WisdomBox | f41bd828f5e264c7ad05262b29c8f02cf904b54a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wisdom.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django # noqa: F401
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| 35.565217 | 77 | 0.640587 |
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wisdom.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| true | true |
f731a0e7d3248ef87117117d791e52aa967f0a12 | 2,465 | py | Python | forecaster.py | Adri1bo/HEMS_API | dca26e55696f9f2e36f29968a8c3a90871d6bc16 | [
"MIT"
] | 1 | 2021-02-17T08:47:47.000Z | 2021-02-17T08:47:47.000Z | forecaster.py | Adri1bo/HEMS_API | dca26e55696f9f2e36f29968a8c3a90871d6bc16 | [
"MIT"
] | null | null | null | forecaster.py | Adri1bo/HEMS_API | dca26e55696f9f2e36f29968a8c3a90871d6bc16 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Sun Jun 7 17:58:57 2020
@author: adria.bove
"""
from BBDD import BBDD
import analytics
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import datetime as dt
class consumption:
def __init__(self, nom):
self.nom_BBDD=nom
def forecaster(self,timestamp):
self.timestamp=str(timestamp)
# get the df
this_BBDD=BBDD(self.nom_BBDD)
a=pd.Series(dtype='float64')
k=0
list_weekdays=[(dt.datetime.now()+dt.timedelta(days=i+k)).weekday() for i in range(5)]
for weekday in list_weekdays:
df=this_BBDD.extract_weekday(weekday)
#send it to the mean_day function que farà el dia tipus
a=a.append(self.mean_day(df,k))
k=k+1
del(a[0])
a['dt']=a.index
a = a.reset_index(drop=True)
self.store_data(a,'loads_forecast')
return a
def mean_day(self,df,k): #amb el weekday he de fer millor de date_range i fer-lo sobre el valor del timer
df['timer'] = df.dt.apply(self.massive_rounder,groupsize=1,groups=int(60))
df = df.rename(columns={'P_load [kW]': 'P_load'})
df.P_load=pd.to_numeric(df.P_load)
mean_DAY=df.groupby('timer').P_load.mean()
mean_DAY=mean_DAY.to_frame()
start_date=dt.datetime.combine(dt.date.today(), dt.datetime.min.time())#+dt.timedelta(days=1)
mean_DAY['dta']=mean_DAY.index
mean_DAY.dta=mean_DAY.dta.apply(lambda x: dt.timedelta(minutes=x) + start_date + dt.timedelta(days=k))
mean_DAY.index=mean_DAY.dta
del(mean_DAY['dta'])
new_mean_DAY=mean_DAY.resample(self.timestamp+'T').pad()
return new_mean_DAY
def massive_rounder(self, element, groupsize, groups):
for i in range(groups):
if element.time().minute < (groupsize*(range(groups)[i]+1)):
return range(groups)[i] + element.time().hour*groups
def store_data(self,data,name):
this_BBDD=BBDD(name)
this_BBDD.store_data(data)
if __name__ == '__main__':
consumption_forecast=consumption('grid_BBDD')
b=consumption_forecast.forecaster(timestamp=15)
plt.plot(-b.P_load) | 30.8125 | 111 | 0.577688 |
from BBDD import BBDD
import analytics
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
import datetime as dt
class consumption:
    """Forecast electrical load from historical per-weekday consumption data.

    Historical samples are read from a BBDD database, averaged per minute of
    the day for each of the next 5 weekdays, resampled to the requested
    timestamp resolution and stored back as 'loads_forecast'.
    """
    def __init__(self, nom):
        # Name of the BBDD database holding the historical grid data
        self.nom_BBDD=nom
    def forecaster(self,timestamp):
        """Build a 5-day forecast at *timestamp*-minute resolution.

        :param timestamp: resampling period in minutes (int or str)
        :return: pandas object with forecast P_load values and a 'dt' column
        """
        self.timestamp=str(timestamp)
        this_BBDD=BBDD(self.nom_BBDD)
        a=pd.Series(dtype='float64')
        k=0
        # Weekday numbers for today and the next 4 days
        # NOTE(review): k is always 0 in this comprehension, so `days=i+k`
        # is equivalent to `days=i` — confirm the offset was intentional.
        list_weekdays=[(dt.datetime.now()+dt.timedelta(days=i+k)).weekday() for i in range(5)]
        for weekday in list_weekdays:
            df=this_BBDD.extract_weekday(weekday)
            # Append each day's averaged profile, shifted k days forward
            a=a.append(self.mean_day(df,k))
            k=k+1
        # Drop the leftover element at label 0 from the initial empty Series
        del(a[0])
        a['dt']=a.index
        a = a.reset_index(drop=True)
        self.store_data(a,'loads_forecast')
        return a
    def mean_day(self,df,k):
        """Average *df*'s P_load per minute-of-day and resample it.

        :param df: historical rows for one weekday; expects a 'dt' datetime
            column and a 'P_load [kW]' column (renamed to 'P_load' here)
        :param k: day offset (0 = today) applied to the resulting index
        :return: DataFrame indexed by datetime at self.timestamp resolution
        """
        # Bucket every sample into its minute-of-day (60 buckets per hour)
        df['timer'] = df.dt.apply(self.massive_rounder,groupsize=1,groups=int(60))
        df = df.rename(columns={'P_load [kW]': 'P_load'})
        df.P_load=pd.to_numeric(df.P_load)
        mean_DAY=df.groupby('timer').P_load.mean()
        mean_DAY=mean_DAY.to_frame()
        # Midnight of today, used as the base for the forecast timestamps
        start_date=dt.datetime.combine(dt.date.today(), dt.datetime.min.time())
        mean_DAY['dta']=mean_DAY.index
        mean_DAY.dta=mean_DAY.dta.apply(lambda x: dt.timedelta(minutes=x) + start_date + dt.timedelta(days=k))
        mean_DAY.index=mean_DAY.dta
        del(mean_DAY['dta'])
        # Forward-fill onto the requested minute grid ('T' = minutes)
        new_mean_DAY=mean_DAY.resample(self.timestamp+'T').pad()
        return new_mean_DAY
    def massive_rounder(self, element, groupsize, groups):
        """Return the intra-day bucket index of *element* (a datetime).

        First bucket i with minute < groupsize*(i+1), offset by hour*groups;
        returns None when the minute exceeds every bucket bound.
        """
        for i in range(groups):
            if element.time().minute < (groupsize*(range(groups)[i]+1)):
                return range(groups)[i] + element.time().hour*groups
    def store_data(self,data,name):
        """Persist *data* through a BBDD wrapper opened on database *name*."""
        this_BBDD=BBDD(name)
        this_BBDD.store_data(data)
if __name__ == '__main__':
    # Build a 5-day load forecast at 15-minute resolution from the grid DB,
    # then plot the forecast negated so consumption shows as negative power.
    consumption_forecast=consumption('grid_BBDD')
    b=consumption_forecast.forecaster(timestamp=15)
    plt.plot(-b.P_load)
f731a16e0fc215d4c3817488810ef1839361e054 | 49,894 | py | Python | cogs/TwitchAlert.py | SnowyJaguar1034/KoalaBot | 840a5e30476492c60157687804a2445903279207 | [
"MIT"
] | null | null | null | cogs/TwitchAlert.py | SnowyJaguar1034/KoalaBot | 840a5e30476492c60157687804a2445903279207 | [
"MIT"
] | 13 | 2021-07-02T04:11:14.000Z | 2022-03-25T04:08:55.000Z | cogs/TwitchAlert.py | SnowyJaguar1034/KoalaBot | 840a5e30476492c60157687804a2445903279207 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Koala Bot Base Cog code and additional base cog functions
Commented using reStructuredText (reST)
"""
# Futures
# Built-in/Generic Imports
import os
import time
import re
import aiohttp
import logging
from concurrent.futures import ThreadPoolExecutor
logging.basicConfig(filename='TwitchAlert.log')
# Own modules
import KoalaBot
from utils.KoalaColours import *
from utils.KoalaUtils import error_embed, is_channel_in_guild, extract_id
from utils import KoalaDBManager
# Libs
from discord.ext import commands, tasks
from dotenv import load_dotenv
import asyncio
# Constants
load_dotenv()
DEFAULT_MESSAGE = ""
TWITCH_ICON = "https://cdn3.iconfinder.com/data/icons/social-messaging-ui-color-shapes-2-free" \
"/128/social-twitch-circle-512.png"
TWITCH_CLIENT_ID = os.environ.get('TWITCH_TOKEN')
TWITCH_SECRET = os.environ.get('TWITCH_SECRET')
TWITCH_USERNAME_REGEX = "^[a-z0-9][a-z0-9_]{3,24}$"
LOOP_CHECK_LIVE_DELAY = 1
TEAMS_LOOP_CHECK_LIVE_DELAY = 1
REFRESH_TEAMS_DELAY = 5
# Variables
def twitch_is_enabled(ctx):
    """
    A command check used to verify the guild has enabled the TwitchAlert
    extension, e.g. @commands.check(twitch_is_enabled)
    :param ctx: The context of the message
    :return: True when enabled (or in test mode), False otherwise
    """
    try:
        return KoalaBot.check_guild_has_ext(ctx, "TwitchAlert")
    except PermissionError:
        return False
class TwitchAlert(commands.Cog):
"""
A discord.py cog for alerting when someone goes live on twitch
"""
    def __init__(self, bot, database_manager=None):
        """
        Initialises local variables
        :param bot: The bot client for this cog
        :param database_manager: Koala DB manager; defaults to the global
            KoalaBot.database_manager when not supplied (eases testing)
        """
        if not database_manager:
            database_manager = KoalaBot.database_manager
        self.bot = bot
        database_manager.create_base_tables()
        # NOTE(review): insert_extension arguments are presumably
        # (name, subscription, available, enabled) — confirm in KoalaDBManager
        database_manager.insert_extension("TwitchAlert", 0, True, True)
        self.ta_database_manager = TwitchAlertDBManager(database_manager, bot)
        self.ta_database_manager.create_tables()
        self.loop_thread = None
        self.loop_team_thread = None
        # running marks whether the periodic alert loops have been started
        self.running = False
        self.stop_loop = False
    @commands.command(name="twitchEditMsg", aliases=["edit_default_message"])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def edit_default_message(self, ctx, raw_channel_id, *default_live_message):
        """
        Edit the default message put in a Twitch Alert Notification
        :param ctx: The discord context of the command
        :param raw_channel_id: The channel ID where the twitch alert is being used
        :param default_live_message: The default live message of users within this Twitch Alert,
        leave empty for program default
        :return:
        """
        # When raw_channel_id is not a valid ID, treat it as the first word of
        # the message and default to the channel the command was used in.
        try:
            channel_id = extract_id(raw_channel_id)
        except TypeError:
            channel_id = ctx.message.channel.id
            default_live_message = (raw_channel_id,) + default_live_message
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        # Assigning default message if provided
        if default_live_message is not None and default_live_message != (None,):
            default_message = " ".join(default_live_message)
            if len(default_message) > 1000:
                await ctx.send(embed=error_embed(
                    "custom_message is too long, try something with less than 1000 characters"))
                return
        else:
            default_message = None
        # Creates a new Twitch Alert with the used guild ID and default message if provided
        # (replace=True overwrites any existing default for this channel)
        default_message = self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id, default_message,
                                                          replace=True)
        # Returns an embed with information altered
        new_embed = discord.Embed(title="Default Message Edited", colour=KOALA_GREEN,
                                  description=f"Guild: {ctx.message.guild.id}\n"
                                              f"Channel: {channel_id}\n"
                                              f"Default Message: {default_message}")
        await ctx.send(embed=new_embed)
    @commands.command(name="twitchViewMsg", aliases=["view_default_message"])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def view_default_message(self, ctx, raw_channel_id=None):
        """
        Shows the current default message for Twitch Alerts
        :param ctx: The discord context of the command
        :param raw_channel_id: The channel ID where the twitch alert is being used
        leave empty for program default
        :return:
        """
        # Default to the channel the command was used in when no ID is given
        if raw_channel_id is None:
            channel_id = ctx.message.channel.id
        else:
            channel_id = extract_id(raw_channel_id)
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        # Creates a new Twitch Alert with the used guild ID and default message if provided
        default_message = self.ta_database_manager.get_default_message(channel_id)[0][0]
        # Returns an embed with information altered
        new_embed = discord.Embed(title="Default Message", colour=KOALA_GREEN,
                                  description=f"Guild: {ctx.message.guild.id}\n"
                                              f"Channel: {channel_id}\n"
                                              f"Default Message: {default_message}")
        # new_embed.set_footer(text=f"Twitch Alert ID: {new_id}")
        await ctx.send(embed=new_embed)
    @commands.command(name="twitchAdd", aliases=['add_user_to_twitch_alert'])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def add_user_to_twitch_alert(self, ctx, raw_channel_id, twitch_username=None, *custom_live_message):
        """
        Add a Twitch user to a Twitch Alert
        :param ctx: The discord context of the command
        :param raw_channel_id: The channel ID where the twitch alert is being used
        :param twitch_username: The Twitch Username of the user being added (lowercase)
        :param custom_live_message: the custom live message for this user's alert
        :return:
        """
        # When raw_channel_id is not a valid ID, it is actually the username;
        # shift arguments along and default to the current channel.
        try:
            channel_id = extract_id(raw_channel_id)
        except TypeError:
            custom_live_message = (twitch_username,) + custom_live_message
            twitch_username = raw_channel_id
            channel_id = ctx.message.channel.id
        if twitch_username is None:
            raise discord.errors.InvalidArgument("twitch_username is a required argument that is missing.")
        elif not re.search(TWITCH_USERNAME_REGEX, twitch_username):
            raise discord.errors.InvalidArgument(
                "The given twitch_username is not a valid username (please use lowercase)")
        # Check the channel specified is in this guild
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        # Ensures the TwitchAlert row exists; returns the channel's default message
        default_message = self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id)
        # Setting the custom message as required
        if custom_live_message is not None and custom_live_message != (None,):
            custom_message = " ".join(custom_live_message)
            default_message = custom_message
            if len(default_message) > 1000:
                await ctx.send(embed=error_embed(
                    "custom_message is too long, try something with less than 1000 characters"))
                return
        else:
            # None stores NULL, so the channel default is used at alert time
            custom_message = None
        self.ta_database_manager.add_user_to_ta(channel_id, twitch_username, custom_message, ctx.message.guild.id)
        # Response Message
        new_embed = discord.Embed(title="Added User to Twitch Alert", colour=KOALA_GREEN,
                                  description=f"Channel: {channel_id}\n"
                                              f"User: {twitch_username}\n"
                                              f"Message: {default_message}")
        await ctx.send(embed=new_embed)
    @commands.command(name="twitchRemove", aliases=['remove_user_from_twitch_alert'])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def remove_user_from_twitch_alert(self, ctx, raw_channel_id, twitch_username=None):
        """
        Removes a user from a Twitch Alert
        :param ctx: the discord context
        :param raw_channel_id: The discord channel ID of the Twitch Alert
        :param twitch_username: The username of the user to be removed
        :return:
        """
        # When raw_channel_id is not a valid ID, it is actually the username;
        # default to the channel the command was used in.
        try:
            channel_id = extract_id(raw_channel_id)
        except TypeError:
            twitch_username = raw_channel_id
            channel_id = ctx.message.channel.id
        if twitch_username is None:
            raise discord.errors.InvalidArgument("twitch_username is a required argument that is missing.")
        # Check the channel specified is in this guild
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        # Deletes the DB row and any live-notification message still posted
        await self.ta_database_manager.remove_user_from_ta(channel_id, twitch_username)
        # Response Message
        new_embed = discord.Embed(title="Removed User from Twitch Alert", colour=KOALA_GREEN,
                                  description=f"Channel: {channel_id}\n"
                                              f"User: {twitch_username}")
        await ctx.send(embed=new_embed)
@commands.command(name="twitchAddTeam", aliases=["add_team_to_twitch_alert"])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def add_team_to_twitch_alert(self, ctx, raw_channel_id, team_name=None, *custom_live_message):
"""
Add a Twitch team to a Twitch Alert
:param ctx: The discord context of the command
:param raw_channel_id: The channel ID where the twitch alert is being used
:param team_name: The Twitch team being added (lowercase)
:param custom_live_message: the custom live message for this team's alert
:return:
"""
try:
channel_id = extract_id(raw_channel_id)
except TypeError:
custom_live_message = (team_name,) + custom_live_message
team_name = raw_channel_id
channel_id = ctx.message.channel.id
if team_name is None:
raise discord.errors.InvalidArgument("team_name is a required argument that is missing.")
elif not re.search(TWITCH_USERNAME_REGEX, team_name):
raise discord.errors.InvalidArgument(
"The given team_name is not a valid twitch team name (please use lowercase)")
# Check the channel specified is in this guild
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id)
# Setting the custom message as required
if custom_live_message is not None and custom_live_message != (None,):
default_message = " ".join(custom_live_message)
if len(default_message) > 1000:
await ctx.send(embed=error_embed(
"custom_message is too long, try something with less than 1000 characters"))
return
else:
default_message = DEFAULT_MESSAGE
self.ta_database_manager.add_team_to_ta(channel_id, team_name, default_message, ctx.message.guild.id)
# Response Message
new_embed = discord.Embed(title="Added Team to Twitch Alert", colour=KOALA_GREEN,
description=f"Channel: {channel_id}\n"
f"Team: {team_name}\n"
f"Message: {default_message}")
# new_embed.set_footer(text=f"Twitch Alert ID: {channel_id}")
await ctx.send(embed=new_embed)
    @commands.command(name="twitchRemoveTeam", aliases=["remove_team_from_twitch_alert"])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def remove_team_from_twitch_alert(self, ctx, raw_channel_id, team_name=None):
        """
        Removes a team from a Twitch Alert
        :param ctx: the discord context
        :param raw_channel_id: The discord channel ID of the Twitch Alert
        :param team_name: The Twitch team being removed (lowercase)
        :return:
        """
        # When raw_channel_id is not a valid ID, it is actually the team name;
        # default to the channel the command was used in.
        try:
            channel_id = extract_id(raw_channel_id)
        except TypeError:
            team_name = raw_channel_id
            channel_id = ctx.message.channel.id
        if team_name is None:
            raise discord.errors.InvalidArgument("team_name is a required argument that is missing.")
        # Check the channel specified is in this guild
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        # Deletes the team, its member rows, and any posted alert messages
        await self.ta_database_manager.remove_team_from_ta(channel_id, team_name)
        # Response Message
        new_embed = discord.Embed(title="Removed Team from Twitch Alert", colour=KOALA_GREEN,
                                  description=f"Channel: {channel_id}\n"
                                              f"Team: {team_name}")
        await ctx.send(embed=new_embed)
    @commands.command(name="twitchList", aliases=["list_twitch_alert"])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def list_twitch_alert(self, ctx, raw_channel_id=None):
        """
        Shows all current TwitchAlert users and teams in a channel
        :param ctx: the discord context
        :param raw_channel_id: The discord channel ID of the Twitch Alert;
            defaults to the channel the command was used in
        :return:
        """
        if raw_channel_id is None:
            channel_id = ctx.message.channel.id
        else:
            channel_id = extract_id(raw_channel_id)
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        embed = discord.Embed()
        embed.title = "Twitch Alerts"
        embed.colour = KOALA_GREEN
        embed.set_footer(text=f"Channel ID: {channel_id}")
        # One field listing individual users, one listing teams
        results = self.ta_database_manager.get_users_in_ta(channel_id)
        if results:
            users = ""
            for result in results:
                users += f"{result[0]}\n"
            embed.add_field(name=":bust_in_silhouette: Users", value=users)
        else:
            embed.add_field(name=":bust_in_silhouette: Users", value="None")
        results = self.ta_database_manager.get_teams_in_ta(channel_id)
        if results:
            teams = ""
            for result in results:
                teams += f"{result[0]}\n"
            embed.add_field(name=":busts_in_silhouette: Teams", value=teams)
        else:
            embed.add_field(name=":busts_in_silhouette: Teams", value="None")
        await ctx.send(embed=embed)
    @commands.Cog.listener()
    async def on_ready(self):
        """
        When the bot is started up, the loop begins
        :return:
        """
        # Guard so reconnects (which also fire on_ready) don't restart loops
        if not self.running:
            self.start_loops()
    def start_loops(self):
        """Start all periodic background tasks and mark the cog as running."""
        self.loop_update_teams.start()
        self.loop_check_team_live.start()
        self.loop_check_live.start()
        self.running = True
    def end_loops(self):
        """Cancel all periodic background tasks and mark the cog as stopped."""
        self.loop_update_teams.cancel()
        self.loop_check_team_live.cancel()
        self.loop_check_live.cancel()
        self.running = False
    @tasks.loop(minutes=LOOP_CHECK_LIVE_DELAY)
    async def loop_check_live(self):
        """
        A loop that continually checks the live status of users and
        sends alerts when online, removing them when offline
        :return:
        """
        start = time.time()
        # logging.info("TwitchAlert: User Loop Started")
        # Only fetch users whose guild has the extension enabled
        sql_find_users = "SELECT twitch_username " \
                         "FROM UserInTwitchAlert " \
                         "JOIN TwitchAlerts TA on UserInTwitchAlert.channel_id = TA.channel_id " \
                         "JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
                         "WHERE extension_id = 'TwitchAlert' OR extension_id = 'All') GE on TA.guild_id = GE.guild_id;"
        users = self.ta_database_manager.database_manager.db_execute_select(sql_find_users)
        usernames = []
        # Purge usernames that can never match the Twitch API; keep the rest
        for user in users:
            if not re.search(TWITCH_USERNAME_REGEX, user[0]):
                sql_remove_invalid_user = "DELETE FROM UserInTwitchAlert WHERE twitch_username = ?"
                self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_user, args=[user[0]])
            else:
                usernames.append(user[0])
        # user_streams = self.ta_database_manager.twitch_handler.get_streams_data(usernames)
        if not usernames:
            return
        user_streams = await self.ta_database_manager.twitch_handler.get_streams_data(usernames)
        if user_streams is None:
            return
        # Deals with online streams
        for streams_details in user_streams:
            try:
                if streams_details.get('type') == "live":
                    current_username = str.lower(streams_details.get("user_name"))
                    # Anyone left in usernames afterwards is offline
                    usernames.remove(current_username)
                    sql_find_message_id = \
                        "SELECT UserInTwitchAlert.channel_id, message_id, custom_message, default_message " \
                        "FROM UserInTwitchAlert " \
                        "JOIN TwitchAlerts TA on UserInTwitchAlert.channel_id = TA.channel_id " \
                        "JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
                        "WHERE extension_id = 'TwitchAlert' " \
                        "   OR extension_id = 'All') GE on TA.guild_id = GE.guild_id " \
                        "WHERE twitch_username = ?;"
                    results = self.ta_database_manager.database_manager.db_execute_select(
                        sql_find_message_id, args=[current_username])
                    # The embed is built lazily once and reused for every channel
                    new_message_embed = None
                    for result in results:
                        channel_id = result[0]
                        message_id = result[1]
                        custom_message = result[2]
                        channel_default_message = result[3]
                        channel = self.bot.get_channel(id=channel_id)
                        try:
                            # If no Alert is posted
                            if message_id is None:
                                if new_message_embed is None:
                                    if custom_message is not None:
                                        message = custom_message
                                    else:
                                        message = channel_default_message
                                    new_message_embed = await self.create_alert_embed(streams_details, message)
                                if new_message_embed is not None and channel is not None:
                                    new_message = await channel.send(embed=new_message_embed)
                                    # Remember the message so it can be deleted on offline
                                    sql_update_message_id = """
                                    UPDATE UserInTwitchAlert
                                    SET message_id = ?
                                    WHERE channel_id = ?
                                    AND twitch_username = ?"""
                                    self.ta_database_manager.database_manager.db_execute_commit(
                                        sql_update_message_id, args=[new_message.id, result[0], current_username])
                        except discord.errors.Forbidden as err:
                            # Bot lost access to this channel: drop the whole alert
                            logging.warning(f"TwitchAlert: {err} Name: {channel} ID: {channel.id}")
                            sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
                            self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_channel,
                                                                                        args=[channel.id])
            except Exception as err:
                logging.error(f"TwitchAlert: User Loop error {err}")
        # Deals with remaining offline streams
        await self.ta_database_manager.delete_all_offline_streams(False, usernames)
        time_diff = time.time() - start
        if time_diff > 5:
            logging.warning(f"TwitchAlert: User Loop Finished in > 5s | {time_diff}s")
    async def create_alert_embed(self, stream_data, message):
        """
        Creates an alert embed for a live stream
        :param stream_data: The twitch stream data to have in the message
        :param message: The custom message to be added as a description
        :return: The embed to post for this stream going live
        """
        # Two extra API calls: streamer profile (thumbnail) and game name
        user_details = await self.ta_database_manager.twitch_handler.get_user_data(
            stream_data.get("user_name"))
        game_details = await self.ta_database_manager.twitch_handler.get_game_data(
            stream_data.get("game_id"))
        return create_live_embed(stream_data, user_details, game_details, message)
    @tasks.loop(minutes=REFRESH_TEAMS_DELAY)
    async def loop_update_teams(self):
        """Periodically refresh the member lists of all tracked Twitch teams."""
        start = time.time()
        # logging.info("TwitchAlert: Started Update Teams")
        await self.ta_database_manager.update_all_teams_members()
        time_diff = time.time() - start
        if time_diff > 5:
            logging.warning(f"TwitchAlert: Teams updated in > 5s | {time_diff}s")
    @tasks.loop(minutes=TEAMS_LOOP_CHECK_LIVE_DELAY)
    async def loop_check_team_live(self):
        """
        A loop to repeatedly send messages if a member of a team is live, and remove it when they are not
        :return:
        """
        start = time.time()
        # logging.info("TwitchAlert: Team Loop Started")
        # Only fetch team members whose guild has the extension enabled
        sql_select_team_users = "SELECT twitch_username, twitch_team_name " \
                                "FROM UserInTwitchTeam " \
                                "JOIN TeamInTwitchAlert TITA " \
                                "  ON UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id " \
                                "JOIN TwitchAlerts TA on TITA.channel_id = TA.channel_id " \
                                "JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
                                "WHERE extension_id = 'TwitchAlert' " \
                                "  OR extension_id = 'All') GE on TA.guild_id = GE.guild_id "
        users_and_teams = self.ta_database_manager.database_manager.db_execute_select(sql_select_team_users)
        usernames = []
        # Purge teams with invalid names; collect valid member usernames
        for user in users_and_teams:
            if not re.search(TWITCH_USERNAME_REGEX, user[1]):
                sql_remove_invalid_user = "DELETE FROM TeamInTwitchAlert WHERE twitch_team_name = ?"
                self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_user, args=[user[1]])
            else:
                usernames.append(user[0])
        if not usernames:
            return
        streams_data = await self.ta_database_manager.twitch_handler.get_streams_data(usernames)
        if streams_data is None:
            return
        # Deals with online streams
        for stream_data in streams_data:
            try:
                if stream_data.get('type') == "live":
                    current_username = str.lower(stream_data.get("user_name"))
                    # Anyone left in usernames afterwards is offline
                    usernames.remove(current_username)
                    sql_find_message_id = """
                    SELECT TITA.channel_id, UserInTwitchTeam.message_id, TITA.team_twitch_alert_id, custom_message,
                    default_message
                    FROM UserInTwitchTeam
                    JOIN TeamInTwitchAlert TITA on UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id
                    JOIN TwitchAlerts TA on TITA.channel_id = TA.channel_id
                    JOIN (SELECT extension_id, guild_id
                          FROM GuildExtensions
                          WHERE extension_id = 'TwitchAlert' OR extension_id = 'All') GE ON TA.guild_id = GE.guild_id
                    WHERE twitch_username = ?"""
                    results = self.ta_database_manager.database_manager.db_execute_select(
                        sql_find_message_id, args=[current_username])
                    # The embed is built lazily once and reused for every channel
                    new_message_embed = None
                    for result in results:
                        channel_id = result[0]
                        message_id = result[1]
                        team_twitch_alert_id = result[2]
                        custom_message = result[3]
                        channel_default_message = result[4]
                        channel = self.bot.get_channel(id=channel_id)
                        try:
                            # If no Alert is posted
                            if message_id is None:
                                if new_message_embed is None:
                                    if custom_message is not None:
                                        message = custom_message
                                    else:
                                        message = channel_default_message
                                    new_message_embed = await self.create_alert_embed(stream_data, message)
                                if new_message_embed is not None and channel is not None:
                                    new_message = await channel.send(embed=new_message_embed)
                                    # Remember the message so it can be deleted on offline
                                    sql_update_message_id = """
                                    UPDATE UserInTwitchTeam
                                    SET message_id = ?
                                    WHERE team_twitch_alert_id = ?
                                    AND twitch_username = ?"""
                                    self.ta_database_manager.database_manager.db_execute_commit(
                                        sql_update_message_id,
                                        args=[new_message.id, team_twitch_alert_id, current_username])
                        except discord.errors.Forbidden as err:
                            # Bot lost access to this channel: drop the whole alert
                            logging.warning(f"TwitchAlert: {err} Name: {channel} ID: {channel.id}")
                            sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
                            self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_channel,
                                                                                        args=[channel.id])
            except Exception as err:
                logging.error(f"TwitchAlert: Team Loop error {err}")
        # Deals with remaining offline streams
        await self.ta_database_manager.delete_all_offline_streams(True, usernames)
        time_diff = time.time() - start
        if time_diff > 5:
            logging.warning(f"TwitchAlert: Teams Loop Finished in > 5s | {time_diff}s")
def create_live_embed(stream_info, user_info, game_info, message):
    """
    Build the go-live announcement embed for a stream.
    :param stream_info: The stream data from the Twitch API
    :param user_info: The user data for this streamer from the Twitch API
    :param game_info: The game data for this game from the Twitch API (may be None)
    :param message: The custom message to be added as a description
    :return: The embed created
    """
    streamer = stream_info.get("user_name")
    embed = discord.Embed(colour=KOALA_GREEN)
    # Only attach a description when a non-empty message was configured
    if message is not None and message != "":
        embed.description = message
    embed.set_author(name=streamer + " is now streaming!", icon_url=TWITCH_ICON)
    embed.title = "https://twitch.tv/" + str.lower(streamer)
    embed.add_field(name="Stream Title", value=stream_info.get("title"))
    playing = "No Category" if game_info is None else game_info.get("name")
    embed.add_field(name="Playing", value=playing)
    embed.set_thumbnail(url=user_info.get("profile_image_url"))
    return embed
class TwitchAPIHandler:
    """
    A wrapper to interact with the twitch API
    """
    def __init__(self, client_id: str, client_secret: str):
        """
        :param client_id: The Twitch application client ID
        :param client_secret: The Twitch application client secret
        """
        self.client_id = client_id
        self.client_secret = client_secret
        # Parameters for the OAuth2 client-credentials grant
        self.params = {'client_id': self.client_id,
                       'client_secret': self.client_secret,
                       'grant_type': 'client_credentials'}
        # Cached token dict; 'expires_in' is rewritten to an absolute epoch
        # timestamp by get_new_twitch_oauth
        self.token = {}
    @property
    def base_headers(self):
        # Standard auth headers required by the Helix API
        return {
            'Authorization': f'Bearer {self.token.get("access_token")}',
            'Client-ID': self.client_id
        }
    async def get_new_twitch_oauth(self):
        """
        Get a new OAuth2 token from twitch using client_id and client_secret
        :return: The new OAuth2 token
        """
        async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(60)) as client:
            async with client.post('https://id.twitch.tv/oauth2/token', params=self.params) as response:
                if response.status > 399:
                    logging.critical(f'TwitchAlert: Error {response.status} while getting Oauth token')
                    self.token = {}
                response_json = await response.json()
                try:
                    # Convert the relative 'expires_in' (seconds) into an
                    # absolute expiry time used by requests_get
                    response_json['expires_in'] += time.time()
                except KeyError:
                    # probably shouldn't need this, but catch just in case
                    logging.warning('TwitchAlert: Failed to set token expiration time')
                self.token = response_json
        return self.token
    async def requests_get(self, url, headers=None, params=None):
        """
        Gets a response from a curl get request to the given url using headers of this object
        :param headers: the Headers required for the request, will use self.headers by default
        :param url: The URL to send the request to
        :param params: The parameters of the request
        :return: The response of the request
        """
        # Refresh the token pre-emptively when missing or about to expire
        if self.token.get('expires_in', 0) <= time.time() + 1 or not self.token:
            await self.get_new_twitch_oauth()
        async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(60)) as client:
            async with client.get(url=url, headers=headers if headers else self.base_headers, params=params) as \
                    response:
                if response.status == 401:
                    # Token was rejected: refresh and retry the same request once
                    logging.info(f"TwitchAlert: {response.status}, getting new oauth and retrying")
                    await self.get_new_twitch_oauth()
                    return await self.requests_get(url, headers, params)
                elif response.status > 399:
                    logging.warning(f'TwitchAlert: {response.status} while getting requesting URL:{url}')
                return await response.json()
    async def get_streams_data(self, usernames):
        """
        Gets all stream information from a list of given usernames
        :param usernames: The list of usernames
        :return: The JSON data of the request
        """
        url = 'https://api.twitch.tv/helix/streams?'
        # Helix accepts at most 100 user_login values per request, so page
        # through the list in chunks of 100
        next_hundred_users = usernames[:100]
        usernames = usernames[100:]
        result = (await self.requests_get(url + "user_login=" + "&user_login=".join(next_hundred_users))).get("data")
        while usernames:
            next_hundred_users = usernames[:100]
            usernames = usernames[100:]
            result += (await self.requests_get(url + "user_login=" + "&user_login=".join(next_hundred_users))).get(
                "data")
        return result
    async def get_user_data(self, username):
        """
        Gets the user information of a given user
        :param username: The display twitch username of the user
        :return: The JSON information of the user's data
        """
        url = 'https://api.twitch.tv/helix/users?login=' + username
        return (await self.requests_get(url)).get("data")[0]
    async def get_game_data(self, game_id):
        """
        Gets the game information of a given game
        :param game_id: The twitch game ID of a game; may be "" when the
            stream has no category
        :return: The JSON information of the game's data, or None for ""
        """
        if game_id != "":
            url = 'https://api.twitch.tv/helix/games?id=' + game_id
            game_data = await self.requests_get(url)
            return game_data.get("data")[0]
        else:
            return None
    async def get_team_users(self, team_id):
        """
        Gets the users data about a given team
        :param team_id: The team name of the twitch team
        :return: the JSON information of the users
        """
        url = 'https://api.twitch.tv/helix/teams?name=' + team_id
        return (
            await self.requests_get(url)).get("data")[0].get("users")
class TwitchAlertDBManager:
"""
A class for interacting with the Koala twitch database
"""
    def __init__(self, database_manager: KoalaDBManager.KoalaDBManager, bot_client: discord.client):
        """
        Initialises local variables
        :param database_manager: The underlying Koala database manager
        :param bot_client: The Discord bot client (used to fetch channels/messages)
        """
        self.database_manager = database_manager
        self.twitch_handler = TwitchAPIHandler(TWITCH_CLIENT_ID, TWITCH_SECRET)
        self.bot = bot_client
    def get_parent_database_manager(self):
        """
        A getter for the database manager of this object
        :return: the KoalaDBManager backing this Twitch Alert manager
        """
        return self.database_manager
    def create_tables(self):
        """
        Creates all the tables associated with the twitch alert extension
        :return:
        """
        # TwitchAlerts: one row per (guild, channel) with its default message
        sql_create_twitch_alerts_table = """
        CREATE TABLE IF NOT EXISTS TwitchAlerts (
        guild_id integer NOT NULL,
        channel_id integer NOT NULL,
        default_message text NOT NULL,
        PRIMARY KEY (guild_id, channel_id),
        CONSTRAINT fk_guild
            FOREIGN KEY (guild_id)
            REFERENCES GuildExtensions (guild_id)
            ON DELETE CASCADE
        );"""
        # UserInTwitchAlert: tracked streamers per channel; message_id holds
        # the posted live-notification message (NULL when offline)
        sql_create_user_in_twitch_alert_table = """
        CREATE TABLE IF NOT EXISTS UserInTwitchAlert (
        channel_id integer NOT NULL,
        twitch_username text NOT NULL,
        custom_message text,
        message_id integer,
        PRIMARY KEY (channel_id, twitch_username),
        CONSTRAINT fk_channel
            FOREIGN KEY (channel_id)
            REFERENCES TwitchAlerts (channel_id)
            ON DELETE CASCADE
        );"""
        # TeamInTwitchAlert: tracked Twitch teams per channel
        sql_create_team_in_twitch_alert_table = """
        CREATE TABLE IF NOT EXISTS TeamInTwitchAlert (
        team_twitch_alert_id integer PRIMARY KEY AUTOINCREMENT,
        channel_id integer NOT NULL,
        twitch_team_name text NOT NULL,
        custom_message text,
        CONSTRAINT fk_channel
            FOREIGN KEY (channel_id)
            REFERENCES TwitchAlerts (channel_id)
            ON DELETE CASCADE
        );"""
        # UserInTwitchTeam: refreshed membership of each tracked team
        sql_create_user_in_twitch_team_table = """
        CREATE TABLE IF NOT EXISTS UserInTwitchTeam (
        team_twitch_alert_id text NOT NULL,
        twitch_username text NOT NULL,
        message_id integer,
        PRIMARY KEY (team_twitch_alert_id, twitch_username),
        CONSTRAINT fk_twitch_team_alert
            FOREIGN KEY (team_twitch_alert_id)
            REFERENCES TeamInTwitchAlert (team_twitch_alert_id)
            ON DELETE CASCADE
        );"""
        # Create Tables
        self.database_manager.db_execute_commit(sql_create_twitch_alerts_table)
        self.database_manager.db_execute_commit(sql_create_user_in_twitch_alert_table)
        self.database_manager.db_execute_commit(sql_create_team_in_twitch_alert_table)
        self.database_manager.db_execute_commit(sql_create_user_in_twitch_team_table)
    def new_ta(self, guild_id, channel_id, default_message=None, replace=False):
        """
        Creates a new Twitch Alert and gives the ID associated with it
        :param guild_id: The discord guild ID where the Twitch Alert is located
        :param channel_id: The discord channel ID of the twitch Alert
        :param default_message: The default message of users in the Twitch Alert
        :param replace: True if the new ta should replace the current if exists
        :return: The new default_message
        """
        # If an alert already exists for this channel and we are not replacing
        # it, return its existing default message unchanged
        sql_find_ta = "SELECT default_message FROM TwitchAlerts WHERE channel_id=?"
        message = self.database_manager.db_execute_select(sql_find_ta, args=[channel_id])
        if message and not replace:
            return message[0][0]
        # Sets the default message if not provided
        if default_message is None:
            default_message = DEFAULT_MESSAGE
        # Insert new Twitch Alert to database
        if replace:
            sql_insert_twitch_alert = """
            REPLACE INTO TwitchAlerts(guild_id, channel_id, default_message)
            VALUES(?,?,?)
            """
        else:
            sql_insert_twitch_alert = """
            INSERT INTO TwitchAlerts(guild_id, channel_id, default_message)
            VALUES(?,?,?)
            """
        self.database_manager.db_execute_commit(sql_insert_twitch_alert, args=[guild_id, channel_id, default_message])
        return default_message
    def get_default_message(self, channel_id):
        """
        Get the set default message for the twitch alert
        :param channel_id: The discord channel ID of the twitch Alert
        :return: The current default_message as raw select results
            (empty when no alert exists for the channel)
        """
        sql_find_ta = "SELECT default_message FROM TwitchAlerts WHERE channel_id= ?"
        return self.database_manager.db_execute_select(sql_find_ta, args=[channel_id])
def add_user_to_ta(self, channel_id, twitch_username, custom_message, guild_id=None):
"""
Add a twitch user to a given Twitch Alert
:param channel_id: The discord channel ID of the twitch Alert
:param twitch_username: The Twitch username of the user to be added
:param custom_message: The custom Message of the user's live notification.
None = use default Twitch Alert message
:param guild_id: The guild ID of the channel
:return:
:raises: KeyError if channel ID is not defined in TwitchAlerts and guild_id is not provided
"""
self.new_ta(guild_id, channel_id)
if custom_message:
sql_insert_user_twitch_alert = """
INSERT INTO UserInTwitchAlert(channel_id, twitch_username, custom_message)
VALUES(?, ?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_user_twitch_alert, args=[channel_id, str.lower(twitch_username), custom_message])
else:
sql_insert_user_twitch_alert = """
INSERT INTO UserInTwitchAlert(channel_id, twitch_username)
VALUES(?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_user_twitch_alert, args=[channel_id, str.lower(twitch_username)])
async def remove_user_from_ta(self, channel_id, twitch_username):
"""
Removes a user from a given Twitch Alert
:param channel_id: The discord channel ID of the twitch Alert
:param twitch_username: The Twitch username of the user to be added
:return:
"""
sql_get_message_id = "SELECT message_id " \
"FROM UserInTwitchAlert " \
"WHERE twitch_username = ? " \
"AND channel_id = ? "
message_id = self.database_manager.db_execute_select(sql_get_message_id,
args=[twitch_username, channel_id])[0][0]
if message_id is not None:
await self.delete_message(message_id, channel_id)
sql_remove_entry = """DELETE FROM UserInTwitchAlert
WHERE twitch_username = ? AND channel_id = ?"""
self.database_manager.db_execute_commit(sql_remove_entry, args=[twitch_username, channel_id])
    async def delete_message(self, message_id, channel_id):
        """
        Deletes a given discord message

        :param message_id: discord message ID of the message to delete
        :param channel_id: discord channel ID which has the message
        :return:
        """
        try:
            channel = self.bot.get_channel(int(channel_id))
            if channel is None:
                # Channel no longer exists: prune every alert registered on it.
                logging.warning(f"TwitchAlert: Channel ID {channel_id} does not exist, removing from database")
                sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
                self.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel_id])
                return
            message = await channel.fetch_message(message_id)
            await message.delete()
        except discord.errors.NotFound as err:
            # Message was already deleted; nothing left to do.
            logging.warning(f"TwitchAlert: Message ID {message_id} does not exist, skipping \nError: {err}")
        except discord.errors.Forbidden as err:
            # Bot lacks permission in this channel: drop its alerts so we stop retrying.
            logging.warning(f"TwitchAlert: {err} Channel ID: {channel_id}")
            sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
            self.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel_id])
def get_users_in_ta(self, channel_id):
"""
Returns all users in a given Twitch Alert
:param channel_id: The channel ID of the Twitch Alert
:return: The sql results of the users
"""
sql_get_users = "SELECT twitch_username FROM UserInTwitchAlert WHERE channel_id = ?"
return self.database_manager.db_execute_select(sql_get_users, args=[channel_id])
def get_teams_in_ta(self, channel_id):
"""
Returns all teams in a given Twitch Alert
:param channel_id: The channel ID of the Twitch Alert
:return: The sql results of the teams
"""
sql_get_teams = "SELECT twitch_team_name FROM TeamInTwitchAlert WHERE channel_id = ?"
return self.database_manager.db_execute_select(sql_get_teams, args=[channel_id])
def add_team_to_ta(self, channel_id, twitch_team, custom_message, guild_id=None):
"""
Add a twitch team to a given Twitch Alert
:param channel_id: The discord channel ID of the twitch Alert
:param twitch_team: The Twitch team to be added
:param custom_message: The custom Message of the team's live notification.
None = use default Twitch Alert message
:param guild_id: The guild ID of the channel
:return:
:raises: KeyError if channel ID is not defined in TwitchAlerts and guild_id is not provided
"""
self.new_ta(guild_id, channel_id)
if custom_message:
sql_insert_team_twitch_alert = """
INSERT INTO TeamInTwitchAlert(channel_id, twitch_team_name, custom_message)
VALUES(?, ?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_team_twitch_alert, args=[channel_id, str.lower(twitch_team), custom_message])
else:
sql_insert_team_twitch_alert = """
INSERT INTO TeamInTwitchAlert(channel_id, twitch_team_name)
VALUES(?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_team_twitch_alert, args=[channel_id, str.lower(twitch_team)])
    async def remove_team_from_ta(self, channel_id, team_name):
        """
        Removes a team from a given twitch alert

        :param channel_id: The channel ID of the Twitch Alert
        :param team_name: The team name of the team to be removed
        :return:
        :raises: AttributeError if the team is not registered on this channel
        """
        sql_get_team_alert_id = "SELECT team_twitch_alert_id " \
                                "FROM TeamInTwitchAlert " \
                                "WHERE twitch_team_name = ? " \
                                " AND channel_id = ?"
        result = self.database_manager.db_execute_select(sql_get_team_alert_id, args=[team_name, channel_id])
        if not result:
            raise AttributeError("Team name not found")
        team_alert_id = result[0][0]
        sql_get_message_id = """SELECT UserInTwitchTeam.message_id
                                FROM UserInTwitchTeam
                                WHERE team_twitch_alert_id = ?"""
        message_ids = self.database_manager.db_execute_select(sql_get_message_id, args=[team_alert_id])
        # Delete any outstanding live-notification messages before dropping rows.
        if message_ids is not None:
            for message_id in message_ids:
                if message_id[0] is not None:
                    await self.delete_message(message_id[0], channel_id)
        # Remove the member rows first, then the team row itself.
        sql_remove_users = """DELETE FROM UserInTwitchTeam WHERE team_twitch_alert_id = ?"""
        sql_remove_team = """DELETE FROM TeamInTwitchAlert WHERE team_twitch_alert_id = ?"""
        self.database_manager.db_execute_commit(sql_remove_users, args=[team_alert_id])
        self.database_manager.db_execute_commit(sql_remove_team, args=[team_alert_id])
async def update_team_members(self, twitch_team_id, team_name):
"""
Users in a team are updated to ensure they are assigned to the correct team
:param twitch_team_id: the team twitch alert id
:param team_name: the name of the team
:return:
"""
if re.search(TWITCH_USERNAME_REGEX, team_name):
users = await self.twitch_handler.get_team_users(team_name)
for user in users:
sql_add_user = """INSERT OR IGNORE INTO UserInTwitchTeam(team_twitch_alert_id, twitch_username)
VALUES(?, ?)"""
try:
self.database_manager.db_execute_commit(sql_add_user, args=[twitch_team_id, user.get("user_login")],
pass_errors=True)
except KoalaDBManager.sqlite3.IntegrityError as err:
logging.error(f"Twitch Alert: 1034: {err}")
pass
async def update_all_teams_members(self):
"""
Updates all teams with the current team members
:return:
"""
sql_get_teams = """SELECT team_twitch_alert_id, twitch_team_name FROM TeamInTwitchAlert"""
teams_info = self.database_manager.db_execute_select(sql_get_teams)
for team_info in teams_info:
await self.update_team_members(team_info[0], team_info[1])
async def delete_all_offline_streams(self, team: bool, usernames):
"""
A method that deletes all currently offline streams
:param team: True if the users are from teams, false if individuals
:param usernames: The usernames of the team members
:return:
"""
if team:
sql_select_offline_streams_with_message_ids = f"""
SELECT channel_id, message_id
FROM UserInTwitchTeam
JOIN TeamInTwitchAlert TITA on UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id
WHERE message_id NOT NULL
AND twitch_username in ({','.join(['?'] * len(usernames))})"""
sql_update_offline_streams = f"""
UPDATE UserInTwitchTeam
SET message_id = NULL
WHERE twitch_username in ({','.join(['?'] * len(usernames))})"""
else:
sql_select_offline_streams_with_message_ids = f"""
SELECT channel_id, message_id
FROM UserInTwitchAlert
WHERE message_id NOT NULL
AND twitch_username in ({','.join(['?'] * len(usernames))})"""
sql_update_offline_streams = f"""
UPDATE UserInTwitchAlert
SET message_id = NULL
WHERE twitch_username in ({','.join(['?'] * len(usernames))})"""
results = self.database_manager.db_execute_select(
sql_select_offline_streams_with_message_ids, usernames)
for result in results:
await self.delete_message(result[1], result[0])
self.database_manager.db_execute_commit(sql_update_offline_streams, usernames)
def setup(bot: KoalaBot) -> None:
    """
    Loads the TwitchAlert cog onto the given KoalaBot client.

    :param bot: the bot client for KoalaBot
    """
    if TWITCH_SECRET is None or TWITCH_CLIENT_ID is None:
        # Without API credentials the cog cannot poll Twitch; register it disabled.
        logging.error("TwitchAlert not started. API keys not found in environment.")
        print("TwitchAlert not started. API keys not found in environment.")
        KoalaBot.database_manager.insert_extension("TwitchAlert", 0, False, False)
        return
    bot.add_cog(TwitchAlert(bot))
    logging.info("TwitchAlert is ready.")
    print("TwitchAlert is ready.")
| 44.94955 | 120 | 0.615445 |
# Libraries
import os
import time
import re
import aiohttp
import logging
from concurrent.futures import ThreadPoolExecutor
# Log to a dedicated file for this extension.
logging.basicConfig(filename='TwitchAlert.log')
# Own modules
import KoalaBot
from utils.KoalaColours import *
from utils.KoalaUtils import error_embed, is_channel_in_guild, extract_id
from utils import KoalaDBManager
from discord.ext import commands, tasks
from dotenv import load_dotenv
import asyncio
# Pull credentials from a local .env file into the process environment.
load_dotenv()
# Constants
DEFAULT_MESSAGE = ""
TWITCH_ICON = "https://cdn3.iconfinder.com/data/icons/social-messaging-ui-color-shapes-2-free" \
              "/128/social-twitch-circle-512.png"
# NOTE(review): the client ID is read from an env var named TWITCH_TOKEN — confirm the naming is intentional.
TWITCH_CLIENT_ID = os.environ.get('TWITCH_TOKEN')
TWITCH_SECRET = os.environ.get('TWITCH_SECRET')
# Twitch login names: 4-25 chars, lowercase alphanumerics/underscore, not starting with "_".
TWITCH_USERNAME_REGEX = "^[a-z0-9][a-z0-9_]{3,24}$"
# Background-loop intervals, in minutes (used by the tasks.loop decorators below).
LOOP_CHECK_LIVE_DELAY = 1
TEAMS_LOOP_CHECK_LIVE_DELAY = 1
REFRESH_TEAMS_DELAY = 5
def twitch_is_enabled(ctx):
    """
    Command check: whether the TwitchAlert extension is enabled in this guild.

    :param ctx: The invocation context of the command
    :return: True if enabled (or the check raised PermissionError -> False)
    """
    try:
        return KoalaBot.check_guild_has_ext(ctx, "TwitchAlert")
    except PermissionError:
        return False
class TwitchAlert(commands.Cog):
    def __init__(self, bot, database_manager=None):
        """
        Initialises the TwitchAlert cog.

        :param bot: The discord bot client this cog is attached to
        :param database_manager: Optional database manager override; defaults to
            KoalaBot's global database manager
        """
        if not database_manager:
            database_manager = KoalaBot.database_manager
        self.bot = bot
        # Ensure the core tables exist and register this extension with the bot.
        database_manager.create_base_tables()
        database_manager.insert_extension("TwitchAlert", 0, True, True)
        self.ta_database_manager = TwitchAlertDBManager(database_manager, bot)
        self.ta_database_manager.create_tables()
        self.loop_thread = None
        self.loop_team_thread = None
        # True once the background loops have been started (see on_ready/start_loops).
        self.running = False
        self.stop_loop = False
@commands.command(name="twitchEditMsg", aliases=["edit_default_message"])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def edit_default_message(self, ctx, raw_channel_id, *default_live_message):
try:
channel_id = extract_id(raw_channel_id)
except TypeError:
channel_id = ctx.message.channel.id
default_live_message = (raw_channel_id,) + default_live_message
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
if default_live_message is not None and default_live_message != (None,):
default_message = " ".join(default_live_message)
if len(default_message) > 1000:
await ctx.send(embed=error_embed(
"custom_message is too long, try something with less than 1000 characters"))
return
else:
default_message = None
default_message = self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id, default_message,
replace=True)
new_embed = discord.Embed(title="Default Message Edited", colour=KOALA_GREEN,
description=f"Guild: {ctx.message.guild.id}\n"
f"Channel: {channel_id}\n"
f"Default Message: {default_message}")
await ctx.send(embed=new_embed)
    @commands.command(name="twitchViewMsg", aliases=["view_default_message"])
    @commands.check(KoalaBot.is_admin)
    @commands.check(twitch_is_enabled)
    async def view_default_message(self, ctx, raw_channel_id=None):
        """
        Shows the default live-notification message of a Twitch Alert channel.

        :param ctx: The discord context of the command
        :param raw_channel_id: The channel ID of the Twitch Alert
            (defaults to the channel the command was sent in)
        :return:
        """
        if raw_channel_id is None:
            channel_id = ctx.message.channel.id
        else:
            channel_id = extract_id(raw_channel_id)
        if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
            await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
            return
        # NOTE(review): [0][0] raises IndexError when no alert exists for this channel — confirm intended.
        default_message = self.ta_database_manager.get_default_message(channel_id)[0][0]
        new_embed = discord.Embed(title="Default Message", colour=KOALA_GREEN,
                                  description=f"Guild: {ctx.message.guild.id}\n"
                                              f"Channel: {channel_id}\n"
                                              f"Default Message: {default_message}")
        await ctx.send(embed=new_embed)
@commands.command(name="twitchAdd", aliases=['add_user_to_twitch_alert'])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def add_user_to_twitch_alert(self, ctx, raw_channel_id, twitch_username=None, *custom_live_message):
try:
channel_id = extract_id(raw_channel_id)
except TypeError:
custom_live_message = (twitch_username,) + custom_live_message
twitch_username = raw_channel_id
channel_id = ctx.message.channel.id
if twitch_username is None:
raise discord.errors.InvalidArgument("twitch_username is a required argument that is missing.")
elif not re.search(TWITCH_USERNAME_REGEX, twitch_username):
raise discord.errors.InvalidArgument(
"The given twitch_username is not a valid username (please use lowercase)")
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
default_message = self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id)
if custom_live_message is not None and custom_live_message != (None,):
custom_message = " ".join(custom_live_message)
default_message = custom_message
if len(default_message) > 1000:
await ctx.send(embed=error_embed(
"custom_message is too long, try something with less than 1000 characters"))
return
else:
custom_message = None
self.ta_database_manager.add_user_to_ta(channel_id, twitch_username, custom_message, ctx.message.guild.id)
new_embed = discord.Embed(title="Added User to Twitch Alert", colour=KOALA_GREEN,
description=f"Channel: {channel_id}\n"
f"User: {twitch_username}\n"
f"Message: {default_message}")
await ctx.send(embed=new_embed)
@commands.command(name="twitchRemove", aliases=['remove_user_from_twitch_alert'])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def remove_user_from_twitch_alert(self, ctx, raw_channel_id, twitch_username=None):
try:
channel_id = extract_id(raw_channel_id)
except TypeError:
twitch_username = raw_channel_id
channel_id = ctx.message.channel.id
if twitch_username is None:
raise discord.errors.InvalidArgument("twitch_username is a required argument that is missing.")
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
await self.ta_database_manager.remove_user_from_ta(channel_id, twitch_username)
new_embed = discord.Embed(title="Removed User from Twitch Alert", colour=KOALA_GREEN,
description=f"Channel: {channel_id}\n"
f"User: {twitch_username}")
await ctx.send(embed=new_embed)
@commands.command(name="twitchAddTeam", aliases=["add_team_to_twitch_alert"])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def add_team_to_twitch_alert(self, ctx, raw_channel_id, team_name=None, *custom_live_message):
try:
channel_id = extract_id(raw_channel_id)
except TypeError:
custom_live_message = (team_name,) + custom_live_message
team_name = raw_channel_id
channel_id = ctx.message.channel.id
if team_name is None:
raise discord.errors.InvalidArgument("team_name is a required argument that is missing.")
elif not re.search(TWITCH_USERNAME_REGEX, team_name):
raise discord.errors.InvalidArgument(
"The given team_name is not a valid twitch team name (please use lowercase)")
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
self.ta_database_manager.new_ta(ctx.message.guild.id, channel_id)
if custom_live_message is not None and custom_live_message != (None,):
default_message = " ".join(custom_live_message)
if len(default_message) > 1000:
await ctx.send(embed=error_embed(
"custom_message is too long, try something with less than 1000 characters"))
return
else:
default_message = DEFAULT_MESSAGE
self.ta_database_manager.add_team_to_ta(channel_id, team_name, default_message, ctx.message.guild.id)
new_embed = discord.Embed(title="Added Team to Twitch Alert", colour=KOALA_GREEN,
description=f"Channel: {channel_id}\n"
f"Team: {team_name}\n"
f"Message: {default_message}")
await ctx.send(embed=new_embed)
@commands.command(name="twitchRemoveTeam", aliases=["remove_team_from_twitch_alert"])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def remove_team_from_twitch_alert(self, ctx, raw_channel_id, team_name=None):
try:
channel_id = extract_id(raw_channel_id)
except TypeError:
team_name = raw_channel_id
channel_id = ctx.message.channel.id
if team_name is None:
raise discord.errors.InvalidArgument("team_name is a required argument that is missing.")
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
await self.ta_database_manager.remove_team_from_ta(channel_id, team_name)
new_embed = discord.Embed(title="Removed Team from Twitch Alert", colour=KOALA_GREEN,
description=f"Channel: {channel_id}\n"
f"Team: {team_name}")
await ctx.send(embed=new_embed)
@commands.command(name="twitchList", aliases=["list_twitch_alert"])
@commands.check(KoalaBot.is_admin)
@commands.check(twitch_is_enabled)
async def list_twitch_alert(self, ctx, raw_channel_id=None):
if raw_channel_id is None:
channel_id = ctx.message.channel.id
else:
channel_id = extract_id(raw_channel_id)
if not is_channel_in_guild(self.bot, ctx.message.guild.id, channel_id):
await ctx.send(embed=error_embed("The channel ID provided is either invalid, or not in this server."))
return
embed = discord.Embed()
embed.title = "Twitch Alerts"
embed.colour = KOALA_GREEN
embed.set_footer(text=f"Channel ID: {channel_id}")
results = self.ta_database_manager.get_users_in_ta(channel_id)
if results:
users = ""
for result in results:
users += f"{result[0]}\n"
embed.add_field(name=":bust_in_silhouette: Users", value=users)
else:
embed.add_field(name=":bust_in_silhouette: Users", value="None")
results = self.ta_database_manager.get_teams_in_ta(channel_id)
if results:
teams = ""
for result in results:
teams += f"{result[0]}\n"
embed.add_field(name=":busts_in_silhouette: Teams", value=teams)
else:
embed.add_field(name=":busts_in_silhouette: Teams", value="None")
await ctx.send(embed=embed)
    @commands.Cog.listener()
    async def on_ready(self):
        """Starts the background alert loops once the bot connects (only once)."""
        if not self.running:
            self.start_loops()
    def start_loops(self):
        """Starts all background task loops and marks the cog as running."""
        self.loop_update_teams.start()
        self.loop_check_team_live.start()
        self.loop_check_live.start()
        self.running = True
    def end_loops(self):
        """Cancels all background task loops and marks the cog as stopped."""
        self.loop_update_teams.cancel()
        self.loop_check_team_live.cancel()
        self.loop_check_live.cancel()
        self.running = False
@tasks.loop(minutes=LOOP_CHECK_LIVE_DELAY)
async def loop_check_live(self):
start = time.time()
sql_find_users = "SELECT twitch_username " \
"FROM UserInTwitchAlert " \
"JOIN TwitchAlerts TA on UserInTwitchAlert.channel_id = TA.channel_id " \
"JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
"WHERE extension_id = 'TwitchAlert' OR extension_id = 'All') GE on TA.guild_id = GE.guild_id;"
users = self.ta_database_manager.database_manager.db_execute_select(sql_find_users)
usernames = []
for user in users:
if not re.search(TWITCH_USERNAME_REGEX, user[0]):
sql_remove_invalid_user = "DELETE FROM UserInTwitchAlert WHERE twitch_username = ?"
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_user, args=[user[0]])
else:
usernames.append(user[0])
if not usernames:
return
user_streams = await self.ta_database_manager.twitch_handler.get_streams_data(usernames)
if user_streams is None:
return
for streams_details in user_streams:
try:
if streams_details.get('type') == "live":
current_username = str.lower(streams_details.get("user_name"))
usernames.remove(current_username)
sql_find_message_id = \
"SELECT UserInTwitchAlert.channel_id, message_id, custom_message, default_message " \
"FROM UserInTwitchAlert " \
"JOIN TwitchAlerts TA on UserInTwitchAlert.channel_id = TA.channel_id " \
"JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
"WHERE extension_id = 'TwitchAlert' " \
" OR extension_id = 'All') GE on TA.guild_id = GE.guild_id " \
"WHERE twitch_username = ?;"
results = self.ta_database_manager.database_manager.db_execute_select(
sql_find_message_id, args=[current_username])
new_message_embed = None
for result in results:
channel_id = result[0]
message_id = result[1]
custom_message = result[2]
channel_default_message = result[3]
channel = self.bot.get_channel(id=channel_id)
try:
if message_id is None:
if new_message_embed is None:
if custom_message is not None:
message = custom_message
else:
message = channel_default_message
new_message_embed = await self.create_alert_embed(streams_details, message)
if new_message_embed is not None and channel is not None:
new_message = await channel.send(embed=new_message_embed)
sql_update_message_id = """
UPDATE UserInTwitchAlert
SET message_id = ?
WHERE channel_id = ?
AND twitch_username = ?"""
self.ta_database_manager.database_manager.db_execute_commit(
sql_update_message_id, args=[new_message.id, result[0], current_username])
except discord.errors.Forbidden as err:
logging.warning(f"TwitchAlert: {err} Name: {channel} ID: {channel.id}")
sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_channel,
args=[channel.id])
except Exception as err:
logging.error(f"TwitchAlert: User Loop error {err}")
await self.ta_database_manager.delete_all_offline_streams(False, usernames)
time_diff = time.time() - start
if time_diff > 5:
logging.warning(f"TwitchAlert: User Loop Finished in > 5s | {time_diff}s")
    async def create_alert_embed(self, stream_data, message):
        """
        Creates the live-notification embed for a stream, fetching the streamer's
        user and game details from the Twitch API.

        :param stream_data: The stream payload returned by the Twitch API
        :param message: The (custom or default) notification text
        :return: The created embed
        """
        user_details = await self.ta_database_manager.twitch_handler.get_user_data(
            stream_data.get("user_name"))
        game_details = await self.ta_database_manager.twitch_handler.get_game_data(
            stream_data.get("game_id"))
        return create_live_embed(stream_data, user_details, game_details, message)
    @tasks.loop(minutes=REFRESH_TEAMS_DELAY)
    async def loop_update_teams(self):
        """Background task: periodically refreshes stored team member lists."""
        start = time.time()
        await self.ta_database_manager.update_all_teams_members()
        time_diff = time.time() - start
        if time_diff > 5:
            # Flag unusually slow refreshes for operators.
            logging.warning(f"TwitchAlert: Teams updated in > 5s | {time_diff}s")
@tasks.loop(minutes=TEAMS_LOOP_CHECK_LIVE_DELAY)
async def loop_check_team_live(self):
start = time.time()
sql_select_team_users = "SELECT twitch_username, twitch_team_name " \
"FROM UserInTwitchTeam " \
"JOIN TeamInTwitchAlert TITA " \
" ON UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id " \
"JOIN TwitchAlerts TA on TITA.channel_id = TA.channel_id " \
"JOIN (SELECT extension_id, guild_id FROM GuildExtensions " \
"WHERE extension_id = 'TwitchAlert' " \
" OR extension_id = 'All') GE on TA.guild_id = GE.guild_id "
users_and_teams = self.ta_database_manager.database_manager.db_execute_select(sql_select_team_users)
usernames = []
for user in users_and_teams:
if not re.search(TWITCH_USERNAME_REGEX, user[1]):
sql_remove_invalid_user = "DELETE FROM TeamInTwitchAlert WHERE twitch_team_name = ?"
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_user, args=[user[1]])
else:
usernames.append(user[0])
if not usernames:
return
streams_data = await self.ta_database_manager.twitch_handler.get_streams_data(usernames)
if streams_data is None:
return
for stream_data in streams_data:
try:
if stream_data.get('type') == "live":
current_username = str.lower(stream_data.get("user_name"))
usernames.remove(current_username)
sql_find_message_id = """
SELECT TITA.channel_id, UserInTwitchTeam.message_id, TITA.team_twitch_alert_id, custom_message,
default_message
FROM UserInTwitchTeam
JOIN TeamInTwitchAlert TITA on UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id
JOIN TwitchAlerts TA on TITA.channel_id = TA.channel_id
JOIN (SELECT extension_id, guild_id
FROM GuildExtensions
WHERE extension_id = 'TwitchAlert' OR extension_id = 'All') GE ON TA.guild_id = GE.guild_id
WHERE twitch_username = ?"""
results = self.ta_database_manager.database_manager.db_execute_select(
sql_find_message_id, args=[current_username])
new_message_embed = None
for result in results:
channel_id = result[0]
message_id = result[1]
team_twitch_alert_id = result[2]
custom_message = result[3]
channel_default_message = result[4]
channel = self.bot.get_channel(id=channel_id)
try:
if message_id is None:
if new_message_embed is None:
if custom_message is not None:
message = custom_message
else:
message = channel_default_message
new_message_embed = await self.create_alert_embed(stream_data, message)
if new_message_embed is not None and channel is not None:
new_message = await channel.send(embed=new_message_embed)
sql_update_message_id = """
UPDATE UserInTwitchTeam
SET message_id = ?
WHERE team_twitch_alert_id = ?
AND twitch_username = ?"""
self.ta_database_manager.database_manager.db_execute_commit(
sql_update_message_id,
args=[new_message.id, team_twitch_alert_id, current_username])
except discord.errors.Forbidden as err:
logging.warning(f"TwitchAlert: {err} Name: {channel} ID: {channel.id}")
sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
self.ta_database_manager.database_manager.db_execute_commit(sql_remove_invalid_channel,
args=[channel.id])
except Exception as err:
logging.error(f"TwitchAlert: Team Loop error {err}")
await self.ta_database_manager.delete_all_offline_streams(True, usernames)
time_diff = time.time() - start
if time_diff > 5:
logging.warning(f"TwitchAlert: Teams Loop Finished in > 5s | {time_diff}s")
def create_live_embed(stream_info, user_info, game_info, message):
    """
    Builds the "now streaming" notification embed for a live Twitch stream.

    :param stream_info: stream payload from the Twitch API
    :param user_info: user payload from the Twitch API
    :param game_info: game payload from the Twitch API, or None
    :param message: optional custom text shown as the embed description
    :return: the populated discord.Embed
    """
    streamer = stream_info.get("user_name")
    live_embed = discord.Embed(colour=KOALA_GREEN)
    # Only attach a description when a non-empty message is supplied.
    if message not in (None, ""):
        live_embed.description = message
    live_embed.set_author(name=streamer + " is now streaming!",
                          icon_url=TWITCH_ICON)
    live_embed.title = "https://twitch.tv/" + str.lower(streamer)
    live_embed.add_field(name="Stream Title", value=stream_info.get("title"))
    category = "No Category" if game_info is None else game_info.get("name")
    live_embed.add_field(name="Playing", value=category)
    live_embed.set_thumbnail(url=user_info.get("profile_image_url"))
    return live_embed
class TwitchAPIHandler:
    """Thin async wrapper around the Twitch Helix API with OAuth token management."""

    def __init__(self, client_id: str, client_secret: str):
        self.client_id = client_id
        self.client_secret = client_secret
        # Body for the client-credentials OAuth token request.
        self.params = {'client_id': self.client_id,
                       'client_secret': self.client_secret,
                       'grant_type': 'client_credentials'}
        # Latest token response; 'expires_in' is rewritten to an absolute time.
        self.token = {}

    @property
    def base_headers(self):
        """Default Helix request headers carrying the current bearer token."""
        return {
            'Authorization': f'Bearer {self.token.get("access_token")}',
            'Client-ID': self.client_id
        }

    async def get_new_twitch_oauth(self):
        """Requests a fresh app access token and stores it in self.token."""
        async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(60)) as client:
            async with client.post('https://id.twitch.tv/oauth2/token', params=self.params) as response:
                if response.status > 399:
                    # NOTE(review): self.token is cleared here but immediately
                    # overwritten with the (error) JSON body below — confirm intended.
                    logging.critical(f'TwitchAlert: Error {response.status} while getting Oauth token')
                    self.token = {}
                response_json = await response.json()
                try:
                    # Convert relative lifetime to an absolute expiry timestamp.
                    response_json['expires_in'] += time.time()
                except KeyError:
                    logging.warning('TwitchAlert: Failed to set token expiration time')
                self.token = response_json
        return self.token

    async def requests_get(self, url, headers=None, params=None):
        """
        Performs a GET against the Twitch API, refreshing the token when it is
        missing/expired and retrying once-per-401 response.

        :param url: The URL to request
        :param headers: Optional header override (defaults to base_headers)
        :param params: Optional query parameters
        :return: The decoded JSON response body
        """
        # Refresh pre-emptively when the token is absent or about to expire.
        if self.token.get('expires_in', 0) <= time.time() + 1 or not self.token:
            await self.get_new_twitch_oauth()
        async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(60)) as client:
            async with client.get(url=url, headers=headers if headers else self.base_headers, params=params) as \
                    response:
                if response.status == 401:
                    # NOTE(review): recursion has no retry cap; repeated 401s
                    # would recurse indefinitely — confirm acceptable.
                    logging.info(f"TwitchAlert: {response.status}, getting new oauth and retrying")
                    await self.get_new_twitch_oauth()
                    return await self.requests_get(url, headers, params)
                elif response.status > 399:
                    logging.warning(f'TwitchAlert: {response.status} while getting requesting URL:{url}')
                return await response.json()

    async def get_streams_data(self, usernames):
        """
        Fetches stream data for the given logins, batching 100 per request
        (the Helix per-request maximum).

        :param usernames: list of Twitch login names
        :return: combined "data" lists from all batches
        """
        url = 'https://api.twitch.tv/helix/streams?'
        next_hundred_users = usernames[:100]
        usernames = usernames[100:]
        result = (await self.requests_get(url + "user_login=" + "&user_login=".join(next_hundred_users))).get("data")
        while usernames:
            next_hundred_users = usernames[:100]
            usernames = usernames[100:]
            result += (await self.requests_get(url + "user_login=" + "&user_login=".join(next_hundred_users))).get(
                "data")
        return result

    async def get_user_data(self, username):
        """Returns the Helix user object for a single login name."""
        url = 'https://api.twitch.tv/helix/users?login=' + username
        return (await self.requests_get(url)).get("data")[0]

    async def get_game_data(self, game_id):
        """Returns the Helix game object for a game ID, or None for an empty ID."""
        if game_id != "":
            url = 'https://api.twitch.tv/helix/games?id=' + game_id
            game_data = await self.requests_get(url)
            return game_data.get("data")[0]
        else:
            return None

    async def get_team_users(self, team_id):
        """Returns the member list ("users") of a Twitch team by team name."""
        url = 'https://api.twitch.tv/helix/teams?name=' + team_id
        return (
            await self.requests_get(url)).get("data")[0].get("users")
class TwitchAlertDBManager:
def __init__(self, database_manager: KoalaDBManager.KoalaDBManager, bot_client: discord.client):
self.database_manager = database_manager
self.twitch_handler = TwitchAPIHandler(TWITCH_CLIENT_ID, TWITCH_SECRET)
self.bot = bot_client
def get_parent_database_manager(self):
return self.database_manager
def create_tables(self):
# TwitchAlerts
sql_create_twitch_alerts_table = """
CREATE TABLE IF NOT EXISTS TwitchAlerts (
guild_id integer NOT NULL,
channel_id integer NOT NULL,
default_message text NOT NULL,
PRIMARY KEY (guild_id, channel_id),
CONSTRAINT fk_guild
FOREIGN KEY (guild_id)
REFERENCES GuildExtensions (guild_id)
ON DELETE CASCADE
);"""
# UserInTwitchAlert
sql_create_user_in_twitch_alert_table = """
CREATE TABLE IF NOT EXISTS UserInTwitchAlert (
channel_id integer NOT NULL,
twitch_username text NOT NULL,
custom_message text,
message_id integer,
PRIMARY KEY (channel_id, twitch_username),
CONSTRAINT fk_channel
FOREIGN KEY (channel_id)
REFERENCES TwitchAlerts (channel_id)
ON DELETE CASCADE
);"""
# TeamInTwitchAlert
sql_create_team_in_twitch_alert_table = """
CREATE TABLE IF NOT EXISTS TeamInTwitchAlert (
team_twitch_alert_id integer PRIMARY KEY AUTOINCREMENT,
channel_id integer NOT NULL,
twitch_team_name text NOT NULL,
custom_message text,
CONSTRAINT fk_channel
FOREIGN KEY (channel_id)
REFERENCES TwitchAlerts (channel_id)
ON DELETE CASCADE
);"""
# UserInTwitchTeam
sql_create_user_in_twitch_team_table = """
CREATE TABLE IF NOT EXISTS UserInTwitchTeam (
team_twitch_alert_id text NOT NULL,
twitch_username text NOT NULL,
message_id integer,
PRIMARY KEY (team_twitch_alert_id, twitch_username),
CONSTRAINT fk_twitch_team_alert
FOREIGN KEY (team_twitch_alert_id)
REFERENCES TeamInTwitchAlert (team_twitch_alert_id)
ON DELETE CASCADE
);"""
# Create Tables
self.database_manager.db_execute_commit(sql_create_twitch_alerts_table)
self.database_manager.db_execute_commit(sql_create_user_in_twitch_alert_table)
self.database_manager.db_execute_commit(sql_create_team_in_twitch_alert_table)
self.database_manager.db_execute_commit(sql_create_user_in_twitch_team_table)
def new_ta(self, guild_id, channel_id, default_message=None, replace=False):
sql_find_ta = "SELECT default_message FROM TwitchAlerts WHERE channel_id=?"
message = self.database_manager.db_execute_select(sql_find_ta, args=[channel_id])
if message and not replace:
return message[0][0]
# Sets the default message if not provided
if default_message is None:
default_message = DEFAULT_MESSAGE
# Insert new Twitch Alert to database
if replace:
sql_insert_twitch_alert = """
REPLACE INTO TwitchAlerts(guild_id, channel_id, default_message)
VALUES(?,?,?)
"""
else:
sql_insert_twitch_alert = """
INSERT INTO TwitchAlerts(guild_id, channel_id, default_message)
VALUES(?,?,?)
"""
self.database_manager.db_execute_commit(sql_insert_twitch_alert, args=[guild_id, channel_id, default_message])
return default_message
def get_default_message(self, channel_id):
sql_find_ta = "SELECT default_message FROM TwitchAlerts WHERE channel_id= ?"
return self.database_manager.db_execute_select(sql_find_ta, args=[channel_id])
def add_user_to_ta(self, channel_id, twitch_username, custom_message, guild_id=None):
self.new_ta(guild_id, channel_id)
if custom_message:
sql_insert_user_twitch_alert = """
INSERT INTO UserInTwitchAlert(channel_id, twitch_username, custom_message)
VALUES(?, ?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_user_twitch_alert, args=[channel_id, str.lower(twitch_username), custom_message])
else:
sql_insert_user_twitch_alert = """
INSERT INTO UserInTwitchAlert(channel_id, twitch_username)
VALUES(?, ?)
"""
self.database_manager.db_execute_commit(
sql_insert_user_twitch_alert, args=[channel_id, str.lower(twitch_username)])
async def remove_user_from_ta(self, channel_id, twitch_username):
sql_get_message_id = "SELECT message_id " \
"FROM UserInTwitchAlert " \
"WHERE twitch_username = ? " \
"AND channel_id = ? "
message_id = self.database_manager.db_execute_select(sql_get_message_id,
args=[twitch_username, channel_id])[0][0]
if message_id is not None:
await self.delete_message(message_id, channel_id)
sql_remove_entry = """DELETE FROM UserInTwitchAlert
WHERE twitch_username = ? AND channel_id = ?"""
self.database_manager.db_execute_commit(sql_remove_entry, args=[twitch_username, channel_id])
async def delete_message(self, message_id, channel_id):
try:
channel = self.bot.get_channel(int(channel_id))
if channel is None:
logging.warning(f"TwitchAlert: Channel ID {channel_id} does not exist, removing from database")
sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
self.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel_id])
return
message = await channel.fetch_message(message_id)
await message.delete()
except discord.errors.NotFound as err:
logging.warning(f"TwitchAlert: Message ID {message_id} does not exist, skipping \nError: {err}")
except discord.errors.Forbidden as err:
logging.warning(f"TwitchAlert: {err} Channel ID: {channel_id}")
sql_remove_invalid_channel = "DELETE FROM TwitchAlerts WHERE channel_id = ?"
self.database_manager.db_execute_commit(sql_remove_invalid_channel, args=[channel_id])
def get_users_in_ta(self, channel_id):
    """Return the Twitch usernames tracked individually in a channel.

    :param channel_id: Discord channel id of the twitch alert
    :return: rows from the DB driver, each containing (twitch_username,)
    """
    sql_get_users = "SELECT twitch_username FROM UserInTwitchAlert WHERE channel_id = ?"
    return self.database_manager.db_execute_select(sql_get_users, args=[channel_id])
def get_teams_in_ta(self, channel_id):
    """Return the Twitch team names tracked in a channel.

    :param channel_id: Discord channel id of the twitch alert
    :return: rows from the DB driver, each containing (twitch_team_name,)
    """
    sql_get_teams = "SELECT twitch_team_name FROM TeamInTwitchAlert WHERE channel_id = ?"
    return self.database_manager.db_execute_select(sql_get_teams, args=[channel_id])
def add_team_to_ta(self, channel_id, twitch_team, custom_message, guild_id=None):
    """Start tracking a Twitch team's members in the given channel.

    Ensures the channel has a TwitchAlerts row first, then inserts the
    team name (lower-cased) with an optional custom message template.

    :param channel_id: Discord channel id to post notifications in
    :param twitch_team: Twitch team name
    :param custom_message: per-team message template; falsy uses default
    :param guild_id: Discord guild id used when creating the alert row
    """
    self.new_ta(guild_id, channel_id)
    if custom_message:
        sql_insert_team_twitch_alert = """
        INSERT INTO TeamInTwitchAlert(channel_id, twitch_team_name, custom_message)
        VALUES(?, ?, ?)
        """
        self.database_manager.db_execute_commit(
            sql_insert_team_twitch_alert, args=[channel_id, str.lower(twitch_team), custom_message])
    else:
        sql_insert_team_twitch_alert = """
        INSERT INTO TeamInTwitchAlert(channel_id, twitch_team_name)
        VALUES(?, ?)
        """
        self.database_manager.db_execute_commit(
            sql_insert_team_twitch_alert, args=[channel_id, str.lower(twitch_team)])
async def remove_team_from_ta(self, channel_id, team_name):
    """Stop tracking a Twitch team in a channel.

    Deletes any live-notification messages posted for the team's
    members, then removes the member rows and the team row itself.

    :param channel_id: Discord channel id of the alert
    :param team_name: Twitch team name to remove
    :raises AttributeError: if the team is not tracked in this channel
    """
    sql_get_team_alert_id = "SELECT team_twitch_alert_id " \
                            "FROM TeamInTwitchAlert " \
                            "WHERE twitch_team_name = ? " \
                            " AND channel_id = ?"
    result = self.database_manager.db_execute_select(sql_get_team_alert_id, args=[team_name, channel_id])
    if not result:
        raise AttributeError("Team name not found")
    team_alert_id = result[0][0]
    sql_get_message_id = """SELECT UserInTwitchTeam.message_id
                            FROM UserInTwitchTeam
                            WHERE team_twitch_alert_id = ?"""
    message_ids = self.database_manager.db_execute_select(sql_get_message_id, args=[team_alert_id])
    if message_ids is not None:
        for message_id in message_ids:
            if message_id[0] is not None:
                # A live notification is currently posted -- remove it.
                await self.delete_message(message_id[0], channel_id)
    sql_remove_users = """DELETE FROM UserInTwitchTeam WHERE team_twitch_alert_id = ?"""
    sql_remove_team = """DELETE FROM TeamInTwitchAlert WHERE team_twitch_alert_id = ?"""
    self.database_manager.db_execute_commit(sql_remove_users, args=[team_alert_id])
    self.database_manager.db_execute_commit(sql_remove_team, args=[team_alert_id])
async def update_team_members(self, twitch_team_id, team_name):
    """Refresh the stored member list for one tracked Twitch team.

    Fetches the team's members from the Twitch API and inserts any that
    are not yet recorded (INSERT OR IGNORE keeps existing rows intact).
    Team names that do not match TWITCH_USERNAME_REGEX are skipped.

    :param twitch_team_id: TeamInTwitchAlert primary key of the team
    :param team_name: Twitch team name to look up
    """
    if not re.search(TWITCH_USERNAME_REGEX, team_name):
        # Invalid-looking team name: silently skip (matches previous behaviour).
        return
    users = await self.twitch_handler.get_team_users(team_name)
    # Loop-invariant SQL hoisted out of the loop.
    sql_add_user = """INSERT OR IGNORE INTO UserInTwitchTeam(team_twitch_alert_id, twitch_username)
                      VALUES(?, ?)"""
    for user in users:
        try:
            self.database_manager.db_execute_commit(sql_add_user, args=[twitch_team_id, user.get("user_login")],
                                                    pass_errors=True)
        except KoalaDBManager.sqlite3.IntegrityError as err:
            # Log and continue with the next member (dead trailing `pass` removed).
            logging.error(f"Twitch Alert: 1034: {err}")
async def update_all_teams_members(self):
    """Refresh the member lists of every tracked Twitch team."""
    sql_get_teams = """SELECT team_twitch_alert_id, twitch_team_name FROM TeamInTwitchAlert"""
    teams_info = self.database_manager.db_execute_select(sql_get_teams)
    for team_info in teams_info:
        # team_info = (team_twitch_alert_id, twitch_team_name)
        await self.update_team_members(team_info[0], team_info[1])
async def delete_all_offline_streams(self, team: bool, usernames):
    """Clear the live notifications of streamers that went offline.

    Deletes each posted notification message and then NULLs the stored
    message_id, for either team-tracked members (UserInTwitchTeam) or
    individually tracked users (UserInTwitchAlert).

    :param team: True to operate on team members, False on single users
    :param usernames: sequence of twitch usernames that are now offline
    """
    # The '?' placeholder list is sized to len(usernames) so the whole
    # batch is handled by a single IN (...) query.
    if team:
        sql_select_offline_streams_with_message_ids = f"""
        SELECT channel_id, message_id
        FROM UserInTwitchTeam
        JOIN TeamInTwitchAlert TITA on UserInTwitchTeam.team_twitch_alert_id = TITA.team_twitch_alert_id
        WHERE message_id NOT NULL
        AND twitch_username in ({','.join(['?'] * len(usernames))})"""
        sql_update_offline_streams = f"""
        UPDATE UserInTwitchTeam
        SET message_id = NULL
        WHERE twitch_username in ({','.join(['?'] * len(usernames))})"""
    else:
        sql_select_offline_streams_with_message_ids = f"""
        SELECT channel_id, message_id
        FROM UserInTwitchAlert
        WHERE message_id NOT NULL
        AND twitch_username in ({','.join(['?'] * len(usernames))})"""
        sql_update_offline_streams = f"""
        UPDATE UserInTwitchAlert
        SET message_id = NULL
        WHERE twitch_username in ({','.join(['?'] * len(usernames))})"""
    # NOTE(review): usernames is passed positionally here (not via
    # args=...) -- presumably db_execute_select accepts the parameter
    # list as its second positional argument; confirm.
    results = self.database_manager.db_execute_select(
        sql_select_offline_streams_with_message_ids, usernames)
    for result in results:
        # result = (channel_id, message_id)
        await self.delete_message(result[1], result[0])
    self.database_manager.db_execute_commit(sql_update_offline_streams, usernames)
def setup(bot: KoalaBot) -> None:
    """Load the TwitchAlert cog into the bot.

    When the Twitch API credentials are missing from the environment the
    cog is not loaded and the extension is recorded as unavailable.

    :param bot: the bot instance to attach the cog to
    """
    if TWITCH_SECRET is None or TWITCH_CLIENT_ID is None:
        logging.error("TwitchAlert not started. API keys not found in environment.")
        print("TwitchAlert not started. API keys not found in environment.")
        # Register the extension as present but disabled.
        KoalaBot.database_manager.insert_extension("TwitchAlert", 0, False, False)
    else:
        bot.add_cog(TwitchAlert(bot))
        logging.info("TwitchAlert is ready.")
        print("TwitchAlert is ready.")
| true | true |
f731a1ae7561e1b6b771c407eeb69465e2d4177b | 3,929 | py | Python | openstack/tests/unit/cloud/test_limits.py | NeCTAR-RC/openstacksdk | 60a24f6c4717a1f9a0e545c9a07e68afaedc5a27 | [
"Apache-2.0"
] | 99 | 2018-03-28T15:41:45.000Z | 2022-01-23T17:22:13.000Z | openstack/tests/unit/cloud/test_limits.py | NeCTAR-RC/openstacksdk | 60a24f6c4717a1f9a0e545c9a07e68afaedc5a27 | [
"Apache-2.0"
] | 5 | 2018-05-25T16:54:23.000Z | 2021-11-21T02:27:16.000Z | openstack/tests/unit/cloud/test_limits.py | NeCTAR-RC/openstacksdk | 60a24f6c4717a1f9a0e545c9a07e68afaedc5a27 | [
"Apache-2.0"
] | 104 | 2018-04-06T14:33:54.000Z | 2022-03-01T01:58:09.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack.tests.unit import base
class TestLimits(base.TestCase):
    """Tests for ``get_compute_limits`` against a mocked Nova API."""

    # Canned body returned by the mocked ``GET /limits`` endpoint.  The
    # two tests previously duplicated this literal verbatim; sharing it
    # prevents the payloads from drifting apart.
    _LIMITS_BODY = {
        "limits": {
            "absolute": {
                "maxImageMeta": 128,
                "maxPersonality": 5,
                "maxPersonalitySize": 10240,
                "maxSecurityGroupRules": 20,
                "maxSecurityGroups": 10,
                "maxServerMeta": 128,
                "maxTotalCores": 20,
                "maxTotalFloatingIps": 10,
                "maxTotalInstances": 10,
                "maxTotalKeypairs": 100,
                "maxTotalRAMSize": 51200,
                "maxServerGroups": 10,
                "maxServerGroupMembers": 10,
                "totalCoresUsed": 0,
                "totalInstancesUsed": 0,
                "totalRAMUsed": 0,
                "totalSecurityGroupsUsed": 0,
                "totalFloatingIpsUsed": 0,
                "totalServerGroupsUsed": 0
            },
            "rate": []
        }
    }

    def _register_limits(self, qs_elements=None):
        """Register a mocked compute /limits endpoint.

        :param qs_elements: optional query-string elements; omitted
            entirely (not passed as None) to preserve the original call.
        """
        if qs_elements is None:
            uri = self.get_mock_url('compute', 'public', append=['limits'])
        else:
            uri = self.get_mock_url('compute', 'public', append=['limits'],
                                    qs_elements=qs_elements)
        self.register_uris([dict(method='GET', uri=uri, json=self._LIMITS_BODY)])

    def test_get_compute_limits(self):
        """Limits for the current project need no query parameters."""
        self._register_limits()
        self.cloud.get_compute_limits()
        self.assert_calls()

    def test_other_get_compute_limits(self):
        """Limits for another project pass ``tenant_id`` in the query."""
        project = self.mock_for_keystone_projects(project_count=1,
                                                  list_get=True)[0]
        self._register_limits(
            qs_elements=['tenant_id={id}'.format(id=project.project_id)])
        self.cloud.get_compute_limits(project.project_id)
        self.assert_calls()
| 40.927083 | 75 | 0.423772 |
from openstack.tests.unit import base
class TestLimits(base.TestCase):
def test_get_compute_limits(self):
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'compute', 'public', append=['limits']),
json={
"limits": {
"absolute": {
"maxImageMeta": 128,
"maxPersonality": 5,
"maxPersonalitySize": 10240,
"maxSecurityGroupRules": 20,
"maxSecurityGroups": 10,
"maxServerMeta": 128,
"maxTotalCores": 20,
"maxTotalFloatingIps": 10,
"maxTotalInstances": 10,
"maxTotalKeypairs": 100,
"maxTotalRAMSize": 51200,
"maxServerGroups": 10,
"maxServerGroupMembers": 10,
"totalCoresUsed": 0,
"totalInstancesUsed": 0,
"totalRAMUsed": 0,
"totalSecurityGroupsUsed": 0,
"totalFloatingIpsUsed": 0,
"totalServerGroupsUsed": 0
},
"rate": []
}
}),
])
self.cloud.get_compute_limits()
self.assert_calls()
def test_other_get_compute_limits(self):
project = self.mock_for_keystone_projects(project_count=1,
list_get=True)[0]
self.register_uris([
dict(method='GET',
uri=self.get_mock_url(
'compute', 'public', append=['limits'],
qs_elements=[
'tenant_id={id}'.format(id=project.project_id)
]),
json={
"limits": {
"absolute": {
"maxImageMeta": 128,
"maxPersonality": 5,
"maxPersonalitySize": 10240,
"maxSecurityGroupRules": 20,
"maxSecurityGroups": 10,
"maxServerMeta": 128,
"maxTotalCores": 20,
"maxTotalFloatingIps": 10,
"maxTotalInstances": 10,
"maxTotalKeypairs": 100,
"maxTotalRAMSize": 51200,
"maxServerGroups": 10,
"maxServerGroupMembers": 10,
"totalCoresUsed": 0,
"totalInstancesUsed": 0,
"totalRAMUsed": 0,
"totalSecurityGroupsUsed": 0,
"totalFloatingIpsUsed": 0,
"totalServerGroupsUsed": 0
},
"rate": []
}
}),
])
self.cloud.get_compute_limits(project.project_id)
self.assert_calls()
| true | true |
f731a2121c3590ca3b044f3b321708cb81eb5991 | 9,878 | py | Python | GlutWrapper.py | kosystem/PythonGlutWrapper | e619b62927d6875f1bd3d2da8b5f2291487c8920 | [
"MIT"
] | null | null | null | GlutWrapper.py | kosystem/PythonGlutWrapper | e619b62927d6875f1bd3d2da8b5f2291487c8920 | [
"MIT"
] | null | null | null | GlutWrapper.py | kosystem/PythonGlutWrapper | e619b62927d6875f1bd3d2da8b5f2291487c8920 | [
"MIT"
] | null | null | null | from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
import sys
from math import *
import time
ESCAPE = '\033'
class Camera(object):
    """Orbit-style camera state: a look-at point plus spherical offsets."""

    def __init__(self):
        # Point the camera orbits around and looks at.
        self.lock_x = 0
        self.lock_y = 0
        self.lock_z = 0
        # Distance from the look-at point to the eye.
        self.distance = 300
        # Azimuth / elevation angles (radians) used by setCamera().
        self.pan = 0.0
        self.tilt = 0.0
class MouseState(object):
    """Last observed mouse button and cursor position."""

    def __init__(self):
        self.button = 0   # last button id reported by GLUT
        self.pressed = 0  # whether a button is currently held
        self.x = 0        # cursor x in window coordinates
        self.y = 0        # cursor y in window coordinates
class GlutWrapper(object):
    """Small GLUT application framework.

    Subclasses override display()/idle() and the input handlers;
    startFramework() opens the window and enters the GLUT main loop.
    """

    def __init__(self):
        """Set default window geometry, camera and frame-timing state."""
        self.windowWidth = 640
        self.windowHeight = 480
        self.windowPositionX = 100
        self.windowPositionY = 100
        self.title = b"Glut Wrapper"  # GLUT expects a bytes title
        self.camera = Camera()
        self.mouseState = MouseState()
        # Timestamps of the last idle/display ticks (0.0 = not started).
        self.frameElapsed = 0.0
        self.displayElapsed = 0.0
        self.elapsedTime = 0.0
        # Target frame period: redisplay at most 20 times per second.
        self.frameTime = 1.0/20.0
def startFramework(self):
    """Create the GLUT window, register callbacks and enter the main loop.

    Does not return: glutMainLoop() runs until the process exits.
    """
    glutInit(sys.argv)
    glutInitDisplayMode(GLUT_RGB | GLUT_DEPTH)
    glutInitWindowPosition(self.windowPositionX, self.windowPositionY)
    glutInitWindowSize(self.windowWidth, self.windowHeight)
    glutCreateWindow(self.title)
    # Rendering / window callbacks.
    glutDisplayFunc(self.displayFramework)
    glutReshapeFunc(self.reshape)
    glutIdleFunc(self.idle)
    # Input callbacks.
    glutMouseFunc(self.mouse)
    glutMotionFunc(self.motion)
    glutPassiveMotionFunc(self.passiveMotion)
    # glutMouseWheelFunc(self.mouseWheel)
    glutKeyboardFunc(self.keyboard)
    glutKeyboardUpFunc(self.keyboardUp)
    glutSpecialFunc(self.special)
    glutSpecialUpFunc(self.specialUp)
    self.initialize()
    self.load()
    glutMainLoop()
def displayFramework(self):
    """Per-frame driver: clear, set lights/camera, draw, swap buffers."""
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    self.setLights()
    self.setCamera()
    # display() receives the seconds elapsed since the previous frame.
    self.display(self.getDisplayElapsed())
    glutSwapBuffers()
def setCamera(self):
    """Apply the perspective projection and the orbiting view transform."""
    width = glutGet(GLUT_WINDOW_WIDTH)
    height = glutGet(GLUT_WINDOW_HEIGHT)
    glViewport(0, 0, width, height)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    # 30-degree vertical FOV, near/far planes at 0.5 / 10000.
    gluPerspective(30.0, float(width) / height, 0.5, 10000)
    glMatrixMode(GL_MODELVIEW)
    glLoadIdentity()
    distance = self.camera.distance
    tilt = self.camera.tilt
    pan = self.camera.pan
    lock_x = self.camera.lock_x
    lock_y = self.camera.lock_y
    lock_z = self.camera.lock_z
    # Eye on a sphere of radius `distance` around the lock point,
    # parameterized by pan (azimuth) and tilt (elevation); +y is up.
    gluLookAt(
        distance*sin(pan)*cos(tilt) + lock_x,
        distance*sin(tilt) + lock_y,
        distance*cos(pan)*cos(tilt) + lock_z,
        lock_x, lock_y, lock_z,
        0.0,
        1.0,
        0.0)
def setLights(self):
    """Configure two directional lights: a white key and a dim fill."""
    light1_position = (0.0, 1.0, 1.0, 0.0)   # w == 0 -> directional light
    light2_position = (0.0, -1.0, -1.0, 0.0)
    white_light = (1.0, 1.0, 1.0, 1.0)
    lmodel_ambient = (0.2, 0.2, 0.2, 1.0)
    ambient_light = (0.4, 0.4, 0.4, 1.0)
    # LIGHT0: bright key light from above/front.
    glLight(GL_LIGHT0, GL_POSITION, light1_position)
    glLight(GL_LIGHT0, GL_AMBIENT, ambient_light)
    glLight(GL_LIGHT0, GL_DIFFUSE, white_light)
    glLight(GL_LIGHT0, GL_SPECULAR, white_light)
    # LIGHT1: dim fill light from the opposite direction.
    glLight(GL_LIGHT1, GL_POSITION, light2_position)
    glLight(GL_LIGHT1, GL_AMBIENT, lmodel_ambient)
    glLight(GL_LIGHT1, GL_DIFFUSE, ambient_light)
    glLight(GL_LIGHT1, GL_SPECULAR, lmodel_ambient)
    # glLightModel(GL_LIGHT_MODEL_AMBIENT, lmodel_ambient)
def getFrameElapsed(self):
now = time.time()
if self.frameElapsed == 0.0:
self.frameElapsed = now
elapsed = now - self.frameElapsed
self.frameElapsed = now
return elapsed
def getDisplayElapsed(self):
now = time.time()
if self.displayElapsed == 0.0:
self.displayElapsed = now
elapsed = now - self.displayElapsed
self.displayElapsed = now
return elapsed
# User overrides ---------------------------------------
def display(self, deltaTime):
    """Draw the scene; subclasses override. Default renders a teapot.

    :param deltaTime: seconds since the previous display pass
    """
    glMaterial(GL_FRONT, GL_AMBIENT, (0.8, 0.6, 0.5, 1.0))
    glMaterial(GL_FRONT, GL_DIFFUSE, (0.8, 0.6, 0.5, 1.0))
    glutSolidTeapot(50)
def idle(self):
    """Accumulate idle time and request a redraw every frameTime seconds."""
    self.elapsedTime += self.getFrameElapsed()
    if self.elapsedTime >= self.frameTime:
        glutPostRedisplay()
        # Keep the remainder so the frame rate stays stable on average.
        self.elapsedTime -= self.frameTime
def reshape(self, w, h):
    """Window-resize callback: match the viewport to the new size."""
    glViewport(0, 0, w, h)
def initialize(self):
    """One-time GL setup: clear color, lighting, smooth shading, depth test."""
    glClearColor(0.4, 0.5, 0.5, 1.0)
    glEnable(GL_LIGHTING)
    glEnable(GL_LIGHT0)
    glEnable(GL_LIGHT1)
    glShadeModel(GL_SMOOTH)
    glEnable(GL_DEPTH_TEST)
def load(self):
    """Hook for loading assets before the main loop; default does nothing."""
    # NOTE: model data load
    pass
# User interface -----------------------------------
def mouse(self, button, state, x, y):
    """Mouse button callback; no-op by default (override as needed)."""
    #print "MousePress: button: %d, x: %d, y:%d" % (button, x, y)
    pass
def motion(self, x, y):
    """Dragged-mouse callback; no-op by default (override as needed)."""
    #print "MouseMove: x: %d, y: %d" % (x, y)
    pass
def passiveMotion(self, x, y):
self.mouseState.x = x
self.mouseState.y = y
def keyboard(self, key, x, y):
    """Key-press callback: ESC exits the program, other keys are ignored."""
    #print "KeyboardPress: %s" % key
    if key == ESCAPE:
        sys.exit()
def keyboardUp(self, key, x, y):
    """Key-release callback; no-op by default (override as needed)."""
    #print "KeyboardUp: %s" % key
    pass
def special(self, key, x, y):
    """Special-key (arrows, F-keys) press callback; no-op by default."""
    #print "SpecialKeyPress: %s" % key
    pass
def specialUp(self, key, x, y):
    """Special-key release callback; no-op by default."""
    #print "SpecialKeyUp: %s" % key
    pass
# Basic Draw ----------------------------------------------
def drawAxis(self, length):
    """Draw x/y/z axis lines (red/green/blue) of the given length.

    Saves and restores lighting, depth-test and color state so the
    caller's GL state is unchanged afterwards.
    """
    # Snapshot the state this helper is about to change.
    lighting = glGetBoolean(GL_LIGHTING)
    light0 = glGetBoolean(GL_LIGHT0)
    light1 = glGetBoolean(GL_LIGHT1)
    color = glGetFloatv(GL_CURRENT_COLOR)
    depth = glGetBoolean(GL_DEPTH_TEST)
    glEnable(GL_DEPTH_TEST)
    glDisable(GL_LIGHTING)
    glDisable(GL_LIGHT0)
    glLineWidth(1.0)
    glBegin(GL_LINES)
    # x axis: red.
    glColor(1.0, 0.0, 0.0, 1.0)
    glVertex(0.0, 0.0, 0.0)
    glVertex(length, 0.0, 0.0)
    # y axis: green.
    glColor(0.0, 1.0, 0.0, 1.0)
    glVertex(0.0, 0.0, 0.0)
    glVertex(0.0, length, 0.0)
    # z axis: blue.
    glColor(0.0, 0.0, 1.0, 1.0)
    glVertex(0.0, 0.0, 0.0)
    glVertex(0.0, 0.0, length)
    glEnd()
    # Restore the saved state.
    if lighting:
        glEnable(GL_LIGHTING)
    if light0:
        glEnable(GL_LIGHT0)
    if light1:
        glEnable(GL_LIGHT1)
    if not depth:
        glDisable(GL_DEPTH_TEST)
    glColor(color)
def drawHorizon(self, x, y, xTick, yTick):
    """Draw a light-gray grid on the y=0 plane over [-x, x] x [-y, y].

    :param x: half-extent along the world x axis
    :param y: half-extent along the world z axis
    :param xTick: grid spacing along x
    :param yTick: grid spacing along z
    """
    # Snapshot the state this helper is about to change.
    lighting = glGetBoolean(GL_LIGHTING)
    light0 = glGetBoolean(GL_LIGHT0)
    light1 = glGetBoolean(GL_LIGHT1)
    color = glGetFloatv(GL_CURRENT_COLOR)
    depth = glGetBoolean(GL_DEPTH_TEST)
    glEnable(GL_DEPTH_TEST)
    glDisable(GL_LIGHTING)
    glDisable(GL_LIGHT0)
    glLineWidth(1.0)
    glBegin(GL_LINES)
    glColor(0.7, 0.7, 0.7, 1.0)
    # Lines parallel to z, then lines parallel to x.
    for xi in range(-x, x+xTick, xTick):
        glVertex(xi, 0.0, -y)
        glVertex(xi, 0.0, y)
    for yi in range(-y, y+yTick, yTick):
        glVertex(-x, 0.0, yi)
        glVertex(x, 0.0, yi)
    glEnd()
    # Restore the saved state.
    if lighting:
        glEnable(GL_LIGHTING)
    if light0:
        glEnable(GL_LIGHT0)
    if light1:
        glEnable(GL_LIGHT1)
    if not depth:
        glDisable(GL_DEPTH_TEST)
    glColor(color)
def overlayString(self, string, x, y, color=(1, 1, 1)):
    """Draw 2-D text on top of the scene at window position (x, y).

    Negative x/y anchor the text from the right/bottom edge instead of
    the left/top.  Lighting, depth and color state are restored, but the
    projection/modelview matrices are left overwritten (the push/pop
    calls are commented out) -- setCamera() rebuilds them each frame.

    :param string: text to render (GLUT Helvetica 12 bitmap font)
    :param x: pixel x offset (negative = measured from the right edge)
    :param y: pixel y offset (negative = measured from the bottom edge)
    :param color: RGB text color
    """
    # Snapshot the state this helper is about to change.
    lighting = glGetBoolean(GL_LIGHTING)
    light0 = glGetBoolean(GL_LIGHT0)
    light1 = glGetBoolean(GL_LIGHT1)
    currentcolor = glGetFloatv(GL_CURRENT_COLOR)
    depth = glGetBoolean(GL_DEPTH_TEST)
    glEnable(GL_DEPTH_TEST)
    glDisable(GL_LIGHTING)
    glDisable(GL_LIGHT0)
    glDisable(GL_LIGHT1)
    glLineWidth(1.0)
    # Switch to a 2-D orthographic projection spanning [0,2] x [0,2].
    glMatrixMode(GL_PROJECTION)
    # glPushMatrix()
    glLoadIdentity()
    glOrtho(0.0, 2.0, 2.0, 0.0, -1.0, 1.0)
    glMatrixMode(GL_MODELVIEW)
    # glPushMatrix()
    glLoadIdentity()
    # glPushAttrib(GL_ENABLE_BIT)
    glDisable(GL_DEPTH_TEST)
    glDisable(GL_CULL_FACE)
    width = glutGet(GLUT_WINDOW_WIDTH)
    height = glutGet(GLUT_WINDOW_HEIGHT)/2
    glColor(color)
    # Convert pixel offsets to the [0,2] ortho coordinate space.
    if x >= 0:
        positionX = x/width*2.0
    else:
        positionX = (width + x)/width*2.0
    if y >= 0:
        positionY = (y + 10.0)/height*2.0
    else:
        positionY = (height + y)/height*2.0
    glRasterPos3f(positionX, positionY, 0.0)
    for x in string:
        glutBitmapCharacter(GLUT_BITMAP_HELVETICA_12, ord(x))
    # glPopAttrib()
    # glPopMatrix()
    glMatrixMode(GL_PROJECTION)
    # glPopMatrix()
    glMatrixMode(GL_MODELVIEW)
    # Restore the saved state.
    if lighting:
        glEnable(GL_LIGHTING)
    if light0:
        glEnable(GL_LIGHT0)
    if light1:
        glEnable(GL_LIGHT1)
    if depth:
        glEnable(GL_DEPTH_TEST)
    glColor(currentcolor)
def drawBlock(self, w, h, d):
    """Draw a solid axis-aligned box of size w x h x d centered at the origin."""
    glPushMatrix()
    # Scale a unit-100 cube to the requested dimensions.
    glScale(w/100, h/100, d/100 )
    glutSolidCube(100)
    glPopMatrix()
def drawSquer(self, w, h):
    """Draw a 2w x 2h quad centered at the origin in the z=0 plane.

    (Misspelled name kept as-is to avoid breaking callers.)
    """
    glPushMatrix()
    glBegin(GL_QUADS)
    glVertex(w, h, 0)
    glVertex(-w, h, 0)
    glVertex(-w, -h, 0)
    glVertex(w, -h, 0)
    glEnd()
    glPopMatrix()
def setColor(self, color):
    """Set the current color and the front-face material to `color` (RGB)."""
    glColor(color[0], color[1], color[2])
    glMaterial(GL_FRONT, GL_AMBIENT, color)
    glMaterial(GL_FRONT, GL_DIFFUSE, color)
if __name__ == '__main__':
    #print "Hit ESC key to quit."
    # Run the framework directly as a demo (press ESC to exit).
    gl = GlutWrapper()
    gl.title = b"Tracer"
    gl.startFramework()
| 27.983003 | 74 | 0.571877 | from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
import sys
from math import *
import time
ESCAPE = '\033'
class Camera(object):
def __init__(self):
self.lock_x = 0
self.lock_y = 0
self.lock_z = 0
self.distance = 300
self.pan = 0.0
self.tilt = 0.0
class MouseState(object):
def __init__(self):
self.button = 0
self.pressed = 0
self.x = 0
self.y = 0
class GlutWrapper(object):
def __init__(self):
self.windowWidth = 640
self.windowHeight = 480
self.windowPositionX = 100
self.windowPositionY = 100
self.title = b"Glut Wrapper"
self.camera = Camera()
self.mouseState = MouseState()
self.frameElapsed = 0.0
self.displayElapsed = 0.0
self.elapsedTime = 0.0
self.frameTime = 1.0/20.0
def startFramework(self):
glutInit(sys.argv)
glutInitDisplayMode(GLUT_RGB | GLUT_DEPTH)
glutInitWindowPosition(self.windowPositionX, self.windowPositionY)
glutInitWindowSize(self.windowWidth, self.windowHeight)
glutCreateWindow(self.title)
glutDisplayFunc(self.displayFramework)
glutReshapeFunc(self.reshape)
glutIdleFunc(self.idle)
glutMouseFunc(self.mouse)
glutMotionFunc(self.motion)
glutPassiveMotionFunc(self.passiveMotion)
glutKeyboardFunc(self.keyboard)
glutKeyboardUpFunc(self.keyboardUp)
glutSpecialFunc(self.special)
glutSpecialUpFunc(self.specialUp)
self.initialize()
self.load()
glutMainLoop()
def displayFramework(self):
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
self.setLights()
self.setCamera()
self.display(self.getDisplayElapsed())
glutSwapBuffers()
def setCamera(self):
width = glutGet(GLUT_WINDOW_WIDTH)
height = glutGet(GLUT_WINDOW_HEIGHT)
glViewport(0, 0, width, height)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(30.0, float(width) / height, 0.5, 10000)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
distance = self.camera.distance
tilt = self.camera.tilt
pan = self.camera.pan
lock_x = self.camera.lock_x
lock_y = self.camera.lock_y
lock_z = self.camera.lock_z
gluLookAt(
distance*sin(pan)*cos(tilt) + lock_x,
distance*sin(tilt) + lock_y,
distance*cos(pan)*cos(tilt) + lock_z,
lock_x, lock_y, lock_z,
0.0,
1.0,
0.0)
def setLights(self):
light1_position = (0.0, 1.0, 1.0, 0.0)
light2_position = (0.0, -1.0, -1.0, 0.0)
white_light = (1.0, 1.0, 1.0, 1.0)
lmodel_ambient = (0.2, 0.2, 0.2, 1.0)
ambient_light = (0.4, 0.4, 0.4, 1.0)
glLight(GL_LIGHT0, GL_POSITION, light1_position)
glLight(GL_LIGHT0, GL_AMBIENT, ambient_light)
glLight(GL_LIGHT0, GL_DIFFUSE, white_light)
glLight(GL_LIGHT0, GL_SPECULAR, white_light)
glLight(GL_LIGHT1, GL_POSITION, light2_position)
glLight(GL_LIGHT1, GL_AMBIENT, lmodel_ambient)
glLight(GL_LIGHT1, GL_DIFFUSE, ambient_light)
glLight(GL_LIGHT1, GL_SPECULAR, lmodel_ambient)
def getFrameElapsed(self):
now = time.time()
if self.frameElapsed == 0.0:
self.frameElapsed = now
elapsed = now - self.frameElapsed
self.frameElapsed = now
return elapsed
def getDisplayElapsed(self):
now = time.time()
if self.displayElapsed == 0.0:
self.displayElapsed = now
elapsed = now - self.displayElapsed
self.displayElapsed = now
return elapsed
def display(self, deltaTime):
glMaterial(GL_FRONT, GL_AMBIENT, (0.8, 0.6, 0.5, 1.0))
glMaterial(GL_FRONT, GL_DIFFUSE, (0.8, 0.6, 0.5, 1.0))
glutSolidTeapot(50)
def idle(self):
self.elapsedTime += self.getFrameElapsed()
if self.elapsedTime >= self.frameTime:
glutPostRedisplay()
self.elapsedTime -= self.frameTime
def reshape(self, w, h):
glViewport(0, 0, w, h)
def initialize(self):
glClearColor(0.4, 0.5, 0.5, 1.0)
glEnable(GL_LIGHTING)
glEnable(GL_LIGHT0)
glEnable(GL_LIGHT1)
glShadeModel(GL_SMOOTH)
glEnable(GL_DEPTH_TEST)
def load(self):
pass
def mouse(self, button, state, x, y):
pass
def motion(self, x, y):
pass
def passiveMotion(self, x, y):
self.mouseState.x = x
self.mouseState.y = y
def keyboard(self, key, x, y):
if key == ESCAPE:
sys.exit()
def keyboardUp(self, key, x, y):
pass
def special(self, key, x, y):
pass
def specialUp(self, key, x, y):
pass
def drawAxis(self, length):
lighting = glGetBoolean(GL_LIGHTING)
light0 = glGetBoolean(GL_LIGHT0)
light1 = glGetBoolean(GL_LIGHT1)
color = glGetFloatv(GL_CURRENT_COLOR)
depth = glGetBoolean(GL_DEPTH_TEST)
glEnable(GL_DEPTH_TEST)
glDisable(GL_LIGHTING)
glDisable(GL_LIGHT0)
glLineWidth(1.0)
glBegin(GL_LINES)
glColor(1.0, 0.0, 0.0, 1.0)
glVertex(0.0, 0.0, 0.0)
glVertex(length, 0.0, 0.0)
glColor(0.0, 1.0, 0.0, 1.0)
glVertex(0.0, 0.0, 0.0)
glVertex(0.0, length, 0.0)
glColor(0.0, 0.0, 1.0, 1.0)
glVertex(0.0, 0.0, 0.0)
glVertex(0.0, 0.0, length)
glEnd()
if lighting:
glEnable(GL_LIGHTING)
if light0:
glEnable(GL_LIGHT0)
if light1:
glEnable(GL_LIGHT1)
if not depth:
glDisable(GL_DEPTH_TEST)
glColor(color)
def drawHorizon(self, x, y, xTick, yTick):
lighting = glGetBoolean(GL_LIGHTING)
light0 = glGetBoolean(GL_LIGHT0)
light1 = glGetBoolean(GL_LIGHT1)
color = glGetFloatv(GL_CURRENT_COLOR)
depth = glGetBoolean(GL_DEPTH_TEST)
glEnable(GL_DEPTH_TEST)
glDisable(GL_LIGHTING)
glDisable(GL_LIGHT0)
glLineWidth(1.0)
glBegin(GL_LINES)
glColor(0.7, 0.7, 0.7, 1.0)
for xi in range(-x, x+xTick, xTick):
glVertex(xi, 0.0, -y)
glVertex(xi, 0.0, y)
for yi in range(-y, y+yTick, yTick):
glVertex(-x, 0.0, yi)
glVertex(x, 0.0, yi)
glEnd()
if lighting:
glEnable(GL_LIGHTING)
if light0:
glEnable(GL_LIGHT0)
if light1:
glEnable(GL_LIGHT1)
if not depth:
glDisable(GL_DEPTH_TEST)
glColor(color)
def overlayString(self, string, x, y, color=(1, 1, 1)):
lighting = glGetBoolean(GL_LIGHTING)
light0 = glGetBoolean(GL_LIGHT0)
light1 = glGetBoolean(GL_LIGHT1)
currentcolor = glGetFloatv(GL_CURRENT_COLOR)
depth = glGetBoolean(GL_DEPTH_TEST)
glEnable(GL_DEPTH_TEST)
glDisable(GL_LIGHTING)
glDisable(GL_LIGHT0)
glDisable(GL_LIGHT1)
glLineWidth(1.0)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0.0, 2.0, 2.0, 0.0, -1.0, 1.0)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glDisable(GL_DEPTH_TEST)
glDisable(GL_CULL_FACE)
width = glutGet(GLUT_WINDOW_WIDTH)
height = glutGet(GLUT_WINDOW_HEIGHT)/2
glColor(color)
if x >= 0:
positionX = x/width*2.0
else:
positionX = (width + x)/width*2.0
if y >= 0:
positionY = (y + 10.0)/height*2.0
else:
positionY = (height + y)/height*2.0
glRasterPos3f(positionX, positionY, 0.0)
for x in string:
glutBitmapCharacter(GLUT_BITMAP_HELVETICA_12, ord(x))
glMatrixMode(GL_PROJECTION)
glMatrixMode(GL_MODELVIEW)
if lighting:
glEnable(GL_LIGHTING)
if light0:
glEnable(GL_LIGHT0)
if light1:
glEnable(GL_LIGHT1)
if depth:
glEnable(GL_DEPTH_TEST)
glColor(currentcolor)
def drawBlock(self, w, h, d):
glPushMatrix()
glScale(w/100, h/100, d/100 )
glutSolidCube(100)
glPopMatrix()
def drawSquer(self, w, h):
glPushMatrix()
glBegin(GL_QUADS)
glVertex(w, h, 0)
glVertex(-w, h, 0)
glVertex(-w, -h, 0)
glVertex(w, -h, 0)
glEnd()
glPopMatrix()
def setColor(self, color):
glColor(color[0], color[1], color[2])
glMaterial(GL_FRONT, GL_AMBIENT, color)
glMaterial(GL_FRONT, GL_DIFFUSE, color)
if __name__ == '__main__':
gl = GlutWrapper()
gl.title = b"Tracer"
gl.startFramework()
| true | true |
f731a2862378f2a67c66b8effa16a59cf7ae26f1 | 867 | py | Python | py/pe/pe8.py | kittttttan/pe | 6f87e4527793198c393700fedbdd52274fec5b44 | [
"MIT"
] | null | null | null | py/pe/pe8.py | kittttttan/pe | 6f87e4527793198c393700fedbdd52274fec5b44 | [
"MIT"
] | null | null | null | py/pe/pe8.py | kittttttan/pe | 6f87e4527793198c393700fedbdd52274fec5b44 | [
"MIT"
] | 1 | 2016-09-01T22:47:28.000Z | 2016-09-01T22:47:28.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Discover the largest product of five consecutive digits
in the 1000-digit number.
"""
def pe8(fname="../../res/pe8.txt", n=5):
    """
    Discover the largest product of n consecutive digits
    in the 1000-digit number read from fname.

    :param fname: path of a text file containing the digit string
    :param n: window size (number of consecutive digits)
    :raises ValueError: if the digit string is shorter than n
    >>> pe8()
    40824
    """
    from math import prod  # local import keeps module-level deps unchanged

    with open(fname, 'r') as f:
        s = f.read().replace('\n', '')
    if len(s) < n:
        raise ValueError("digit string is shorter than the window size n")
    # Convert once instead of re-parsing each digit for every window.
    digits = [int(c) for c in s]
    # Slide an n-wide window across the digits, taking the max product.
    return max(prod(digits[i:i + n]) for i in range(len(digits) - n + 1))
if __name__ == "__main__":
    import doctest
    doctest.testmod()
    # Simple REPL: read a window size and print the maximum product of
    # that many consecutive digits; EOF/Ctrl-C ends the loop silently.
    # NOTE(review): int(s) raises ValueError on non-numeric input, which
    # is not in the caught tuple and would crash the loop -- confirm.
    try:
        while True:
            s = input('> ')
            n = int(s)
            print(pe8(n=n))
    except (SyntaxError, EOFError, KeyboardInterrupt, NameError):
        pass
| 20.642857 | 65 | 0.500577 |
def pe8(fname="../../res/pe8.txt", n=5):
with open(fname, 'r') as f:
s = f.read()
s = s.replace('\n', '')
ls = len(s)
if ls < n:
raise ValueError
m = 0
for x in range(ls - n + 1):
t = 1
for y in range(n):
t *= int(s[x + y])
if m < t:
m = t
return(m)
if __name__ == "__main__":
import doctest
doctest.testmod()
try:
while True:
s = input('> ')
n = int(s)
print(pe8(n=n))
except (SyntaxError, EOFError, KeyboardInterrupt, NameError):
pass
| true | true |
f731a3593f56de78afad7e6ff95b896326f0909e | 2,885 | py | Python | ExcelPixelator.py | nibble-4bits/excel-pixel-art | 793c04c69f5035034dc42e26948b3e6390c81668 | [
"MIT"
] | 2 | 2021-01-02T02:46:30.000Z | 2021-06-30T00:37:06.000Z | ExcelPixelator.py | nibble-4bits/excel-pixel-art | 793c04c69f5035034dc42e26948b3e6390c81668 | [
"MIT"
] | null | null | null | ExcelPixelator.py | nibble-4bits/excel-pixel-art | 793c04c69f5035034dc42e26948b3e6390c81668 | [
"MIT"
] | 1 | 2021-06-30T00:37:13.000Z | 2021-06-30T00:37:13.000Z | from openpyxl import Workbook
from openpyxl.utils import get_column_letter
from openpyxl.styles import PatternFill
from PIL import Image
class ExcelPixelator:
    """Render an image as pixel art made of colored Excel cells.

    The input image is divided into pixel_size x pixel_size squares;
    each square's average RGB color becomes the fill of one worksheet
    cell, producing a mosaic when the workbook is opened.
    """

    def __init__(self, input_path, output_path, file_name, cell_size, pixel_size):
        """Load the image and validate the chosen pixelation size.

        :param input_path: path of the source image (converted to RGB)
        :param output_path: directory for the generated workbook
        :param file_name: workbook name without extension
        :param cell_size: on-screen size of each Excel cell, in pixels
        :param pixel_size: side length (image pixels) of each mosaic square
        """
        self.image = Image.open(input_path).convert('RGB')
        self.output_path = output_path
        self.file_name = file_name
        self.cell_size = cell_size  # size of the cell in pixels
        self.pixel_size = pixel_size
        self.__is_pixelsize_common_factor()

    def create_pixel_art(self):
        """Build the workbook: one filled cell per averaged image square."""
        default_excel_font_size = 16
        wb = Workbook()
        ws = wb.create_sheet('Pixel-Art')
        wb.remove(wb['Sheet'])  # remove default worksheet
        width, height = self.image.size
        pixel_map = self.get_pixel_map(width, height)
        for row in range(len(pixel_map)):
            # Row height is in points, column width in character units;
            # the divisors below convert cell_size pixels approximately.
            ws.row_dimensions[row + 1].height = self.cell_size * 10 / default_excel_font_size
            for col in range(len(pixel_map[row])):
                curr_col = get_column_letter(col + 1)
                ws.column_dimensions[curr_col].width = self.cell_size / 9
                rgbTuple = pixel_map[row][col]
                fill_color = self.__rgbToHex(rgbTuple)
                ws[f'{curr_col}{row + 1}'].fill = PatternFill(start_color=fill_color, end_color=fill_color, fill_type='solid')
        wb.save(f'{self.output_path}/{self.file_name}.xlsx')
        wb.close()

    def get_pixel_map(self, w, h):
        """Return a 2-D list of (r, g, b) averages, one per mosaic square.

        :param w: image width in pixels (must be a multiple of pixel_size)
        :param h: image height in pixels (must be a multiple of pixel_size)
        """
        pixel_map = [[0 for x in range(w // self.pixel_size)] for y in range(h // self.pixel_size)]
        squares = w * h // self.pixel_size ** 2
        i, j = 0, 0
        for sq in range(squares):
            rAvg, gAvg, bAvg = 0, 0, 0
            # Pixel bounds of the sq-th square, scanning left-to-right.
            # NOTE(review): the row formula divides by h; it matches the
            # column scan only when w == h -- confirm for non-square images.
            row_start = (sq * self.pixel_size // h) * self.pixel_size
            row_end = row_start + self.pixel_size
            col_start = sq * self.pixel_size % w
            col_end = col_start + self.pixel_size
            for row in range(row_start, row_end):
                for col in range(col_start, col_end):
                    r, g, b = self.image.getpixel((col, row))
                    rAvg += r
                    gAvg += g
                    bAvg += b
            # Integer-average each channel over the square's pixels.
            rAvg //= self.pixel_size ** 2
            gAvg //= self.pixel_size ** 2
            bAvg //= self.pixel_size ** 2
            pixel_map[i][j] = (rAvg, gAvg, bAvg)
            # Advance (i, j) through the map row-major, wrapping j.
            i = i + 1 if j >= (w // self.pixel_size) - 1 else i
            j = (j + 1) % (w // self.pixel_size)
        return pixel_map

    def __rgbToHex(self, rgbTuple):
        """Convert an (r, g, b) tuple to an uppercase RRGGBB hex string."""
        return ('%02x%02x%02x' % rgbTuple).upper()

    def __is_pixelsize_common_factor(self):
        """Exit with an error if pixel_size does not tile the image exactly."""
        width, height = self.image.size
        if width % self.pixel_size != 0 or height % self.pixel_size != 0:
            print('ERROR: Pixel size must be a number divisible exactly by both the image width and height')
            exit(1)
| 38.986486 | 126 | 0.586828 | from openpyxl import Workbook
from openpyxl.utils import get_column_letter
from openpyxl.styles import PatternFill
from PIL import Image
class ExcelPixelator:
def __init__(self, input_path, output_path, file_name, cell_size, pixel_size):
self.image = Image.open(input_path).convert('RGB')
self.output_path = output_path
self.file_name = file_name
self.cell_size = cell_size
self.pixel_size = pixel_size
self.__is_pixelsize_common_factor()
def create_pixel_art(self):
default_excel_font_size = 16
wb = Workbook()
ws = wb.create_sheet('Pixel-Art')
wb.remove(wb['Sheet'])
width, height = self.image.size
pixel_map = self.get_pixel_map(width, height)
for row in range(len(pixel_map)):
ws.row_dimensions[row + 1].height = self.cell_size * 10 / default_excel_font_size
for col in range(len(pixel_map[row])):
curr_col = get_column_letter(col + 1)
ws.column_dimensions[curr_col].width = self.cell_size / 9
rgbTuple = pixel_map[row][col]
fill_color = self.__rgbToHex(rgbTuple)
ws[f'{curr_col}{row + 1}'].fill = PatternFill(start_color=fill_color, end_color=fill_color, fill_type='solid')
wb.save(f'{self.output_path}/{self.file_name}.xlsx')
wb.close()
def get_pixel_map(self, w, h):
pixel_map = [[0 for x in range(w // self.pixel_size)] for y in range(h // self.pixel_size)]
squares = w * h // self.pixel_size ** 2
i, j = 0, 0
for sq in range(squares):
rAvg, gAvg, bAvg = 0, 0, 0
row_start = (sq * self.pixel_size // h) * self.pixel_size
row_end = row_start + self.pixel_size
col_start = sq * self.pixel_size % w
col_end = col_start + self.pixel_size
for row in range(row_start, row_end):
for col in range(col_start, col_end):
r, g, b = self.image.getpixel((col, row))
rAvg += r
gAvg += g
bAvg += b
rAvg //= self.pixel_size ** 2
gAvg //= self.pixel_size ** 2
bAvg //= self.pixel_size ** 2
pixel_map[i][j] = (rAvg, gAvg, bAvg)
i = i + 1 if j >= (w // self.pixel_size) - 1 else i
j = (j + 1) % (w // self.pixel_size)
return pixel_map
def __rgbToHex(self, rgbTuple):
return ('%02x%02x%02x' % rgbTuple).upper()
def __is_pixelsize_common_factor(self):
width, height = self.image.size
if width % self.pixel_size != 0 or height % self.pixel_size != 0:
print('ERROR: Pixel size must be a number divisible exactly by both the image width and height')
exit(1)
| true | true |
f731a40b7f102fd430b7e6b6d58a594d1b5f7c7b | 3,806 | py | Python | scripts/spectral_clustering_demo.py | vipavlovic/pyprobml | 59a2edc682d0163955db5e2f27491ad772b60141 | [
"MIT"
] | 4,895 | 2016-08-17T22:28:34.000Z | 2022-03-31T17:07:15.000Z | scripts/spectral_clustering_demo.py | vipavlovic/pyprobml | 59a2edc682d0163955db5e2f27491ad772b60141 | [
"MIT"
] | 446 | 2016-09-17T14:35:29.000Z | 2022-03-31T19:59:33.000Z | scripts/spectral_clustering_demo.py | vipavlovic/pyprobml | 59a2edc682d0163955db5e2f27491ad772b60141 | [
"MIT"
] | 1,160 | 2016-08-18T23:19:27.000Z | 2022-03-31T12:44:07.000Z | import superimport
import itertools
import matplotlib.pyplot as plt
import numpy as np
from scipy.linalg import eigh
from sklearn.cluster import KMeans
from sklearn.metrics.pairwise import rbf_kernel
import pyprobml_utils as pml
plt.style.use('classic')
def spectral_clustering_demo():
    """Compare k-means and spectral clustering on two toy datasets.

    For concentric circles and a two-armed spiral, runs (a) k-means on
    the raw coordinates and (b) k-means on the top eigenvectors of the
    symmetrically normalized RBF affinity matrix (spectral clustering),
    plotting the cluster assignments of each.
    """
    np.random.seed(0)
    num_clusters = 2
    for data_type, data in (('circle', sample_circle(num_clusters)),
                            ('spiral', sample_spiral())):
        # Baseline: k-means directly on the 2-D points.
        kmeans = KMeans(n_clusters=num_clusters, random_state=0)
        kmeans.fit(data)
        assignments = kmeans.predict(data)
        plot_data(data, assignments, 'k-means clustering', data_type)
        # RBF affinity matrix with bandwidth sigma (gamma = 1 / 2 sigma^2).
        sigma = 0.1
        gamma = 1 / (2 * sigma ** 2)
        W = rbf_kernel(data, gamma=gamma)
        # Symmetric normalization D^{-1/2} W D^{-1/2}, computed
        # elementwise via the row/column degree vectors.
        d = np.sum(W, 1, keepdims=True)
        sqrt_d = np.sqrt(d)
        normalized_W = (W / sqrt_d) / sqrt_d.T
        paranoid_assert(W, normalized_W, False)
        # We select the largest eigen values of normalized_W, rather
        # than the smallest eigenvalues of I - normalized_W. The two
        # problems are equivalent. The eigen values can be converted
        # between the two problems via `1 - eigen_values`. The eigen
        # vectors are the same between both problems.
        eigen_values, eigen_vectors = eigh(normalized_W,
                                           # Get only the top num_clusters eigenvalues
                                           eigvals=(data.shape[0] - num_clusters, data.shape[0]-1))
        # Row-normalize the spectral embedding, then cluster it.
        eigen_vectors = eigen_vectors / np.linalg.norm(eigen_vectors, axis=1, keepdims=True)
        kmeans.fit(eigen_vectors)
        assignments = kmeans.predict(eigen_vectors)
        plot_data(data, assignments, 'spectral clustering', data_type)
    plt.show()
def paranoid_assert(W, normalized_W, enable):
    """Sanity-check the normalized affinity matrix against its definition.

    Verifies (1) that rows of the unnormalized graph Laplacian L = D - W
    sum to zero and (2) that ``normalized_W`` equals D^{-1/2} W D^{-1/2}.

    Args:
        W: (n, n) symmetric affinity matrix.
        normalized_W: (n, n) symmetrically normalized affinity matrix.
        enable: when False, the (relatively expensive) checks are skipped.

    Raises:
        AssertionError: if either identity fails to hold.
    """
    if not enable:
        return
    D = np.diag(np.sum(W, 1))
    L = D - W
    D_inv_sqrt = np.diag(1 / np.diag(np.sqrt(D)))
    np.testing.assert_almost_equal(np.sum(L, 1), 0, err_msg="Rows of Laplacian must sum to 0.")
    # Bug fix: `*` is elementwise for ndarrays, which zeroes every
    # off-diagonal entry of the product with a diagonal matrix; the
    # definition requires matrix products. With the correct product the
    # tolerance can be tight instead of the vacuous atol=1.
    np.testing.assert_allclose(normalized_W, D_inv_sqrt @ W @ D_inv_sqrt, rtol=0, atol=1e-8)
def sample_circle(num_clusters):
    """Draw noisy points on concentric circles, one ring per cluster.

    Ring k sits at radius k + 1 with Gaussian radial noise of scale 0.1.

    Args:
        num_clusters: number of concentric rings to sample.

    Returns:
        Array of shape (num_clusters * 500, 2) holding (x, y) coordinates.
    """
    points_per_cluster = 500
    bandwidth = 0.1
    data = np.zeros((num_clusters * points_per_cluster, 2))
    for ring in range(num_clusters):
        for j in range(points_per_cluster):
            angle = 2 * np.pi * np.random.uniform()
            radius = ring + 1 + np.random.randn() * bandwidth
            row = ring * points_per_cluster + j
            data[row, 0], data[row, 1] = pol2cart(angle, radius)
    return data.reshape((num_clusters * points_per_cluster, 2))
def pol2cart(theta, rho):
    """Convert polar coordinates (angle ``theta``, radius ``rho``) to Cartesian.

    Returns:
        Tuple ``(x, y)`` of Cartesian coordinates.
    """
    return rho * np.cos(theta), rho * np.sin(theta)
def sample_spiral():
    """Sample two interleaved noisy spiral arms (exactly two clusters).

    The second arm is the point reflection of the first through the origin.

    Returns:
        Array of shape (1000, 2) holding (x, y) coordinates.
    """
    n = 500
    noise = 0.1
    # Parameter running over (0, 1]; drives both angle and radius.
    t = np.arange(1, n + 1).astype(np.float32) / n
    radius = 4 * t + 1
    angle = 2 * np.pi * t
    arm = np.empty((n, 2))
    arm[:, 0] = radius * np.cos(angle) + np.random.randn(n) * noise
    arm[:, 1] = radius * np.sin(angle) + np.random.randn(n) * noise
    return np.vstack((arm, -arm))
def plot_data(data, assignments, title, data_type):
    """Scatter-plot a two-cluster assignment and save the figure as a PDF.

    Args:
        data: (n, 2) array of point coordinates.
        assignments: length-n array of cluster labels (0 or 1).
        title: plot title, also embedded in the output file name.
        data_type: dataset tag ('circle' or 'spiral') used in the file name.
    """
    fig = plt.figure()
    ax = fig.add_subplot(1,1,1)
    # Cluster 0 drawn in red, cluster 1 in blue.
    ax.plot(data[assignments == 0, 0], data[assignments == 0, 1], 'o', color='r')
    ax.plot(data[assignments == 1, 0], data[assignments == 1, 1], 'o', color='b')
    ax.set_xlabel('x')
    ax.set_ylabel('y')
    ax.axis('square')
    ax.grid(True)
    ax.set_title(title)
    plt.tight_layout()
    pml.savefig(f"{data_type}_{title.replace(' ', '_')}.pdf")
if __name__ == '__main__':
spectral_clustering_demo()
| 35.90566 | 99 | 0.633211 | import superimport
import itertools
import matplotlib.pyplot as plt
import numpy as np
from scipy.linalg import eigh
from sklearn.cluster import KMeans
from sklearn.metrics.pairwise import rbf_kernel
import pyprobml_utils as pml
plt.style.use('classic')
def spectral_clustering_demo():
np.random.seed(0)
num_clusters = 2
for data_type, data in (('circle', sample_circle(num_clusters)),
('spiral', sample_spiral())):
kmeans = KMeans(n_clusters=num_clusters, random_state=0)
kmeans.fit(data)
assignments = kmeans.predict(data)
plot_data(data, assignments, 'k-means clustering', data_type)
sigma = 0.1
gamma = 1 / (2 * sigma ** 2)
W = rbf_kernel(data, gamma=gamma)
d = np.sum(W, 1, keepdims=True)
sqrt_d = np.sqrt(d)
normalized_W = (W / sqrt_d) / sqrt_d.T
paranoid_assert(W, normalized_W, False)
eigen_values, eigen_vectors = eigh(normalized_W,
eigvals=(data.shape[0] - num_clusters, data.shape[0]-1))
eigen_vectors = eigen_vectors / np.linalg.norm(eigen_vectors, axis=1, keepdims=True)
kmeans.fit(eigen_vectors)
assignments = kmeans.predict(eigen_vectors)
plot_data(data, assignments, 'spectral clustering', data_type)
plt.show()
def paranoid_assert(W, normalized_W, enable):
    """Sanity-check the normalized affinity matrix against its definition.

    Verifies that rows of the Laplacian L = D - W sum to zero and that
    ``normalized_W`` equals D^{-1/2} W D^{-1/2}.

    Args:
        W: (n, n) symmetric affinity matrix.
        normalized_W: (n, n) symmetrically normalized affinity matrix.
        enable: when False, the checks are skipped.

    Raises:
        AssertionError: if either identity fails to hold.
    """
    if not enable:
        return
    D = np.diag(np.sum(W, 1))
    L = D - W
    D_inv_sqrt = np.diag(1 / np.diag(np.sqrt(D)))
    np.testing.assert_almost_equal(np.sum(L, 1), 0, err_msg="Rows of Laplacian must sum to 0.")
    # Bug fix: `*` is elementwise for ndarrays (zeroes off-diagonal terms);
    # the definition requires matrix products, which also allows a tight
    # tolerance instead of the vacuous atol=1.
    np.testing.assert_allclose(normalized_W, D_inv_sqrt @ W @ D_inv_sqrt, rtol=0, atol=1e-8)
def sample_circle(num_clusters):
points_per_cluster = 500
bandwidth = 0.1
data = np.zeros((num_clusters * points_per_cluster, 2))
for k, n in itertools.product(range(num_clusters), range(points_per_cluster)):
theta = 2 * np.pi * np.random.uniform()
rho = k + 1 + np.random.randn() * bandwidth
x, y = pol2cart(theta, rho)
idx = k * points_per_cluster + n
data[idx, 0] = x
data[idx, 1] = y
data = data.reshape((num_clusters * points_per_cluster, 2))
return data
def pol2cart(theta, rho):
x = rho * np.cos(theta)
y = rho * np.sin(theta)
return(x, y)
def sample_spiral():
points_per_cluster = 500
bandwidth = 0.1
data = np.empty((points_per_cluster, 2))
w = np.arange(1, points_per_cluster + 1).astype(np.float32) / points_per_cluster
data[:,0] = (4 * w + 1) * np.cos(2*np.pi * w) + np.random.randn(points_per_cluster) * bandwidth
data[:,1] = (4 * w + 1) * np.sin(2*np.pi * w) + np.random.randn(points_per_cluster) * bandwidth
data = np.vstack((data, -data))
return data
def plot_data(data, assignments, title, data_type):
fig = plt.figure()
ax = fig.add_subplot(1,1,1)
ax.plot(data[assignments == 0, 0], data[assignments == 0, 1], 'o', color='r')
ax.plot(data[assignments == 1, 0], data[assignments == 1, 1], 'o', color='b')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.axis('square')
ax.grid(True)
ax.set_title(title)
plt.tight_layout()
pml.savefig(f"{data_type}_{title.replace(' ', '_')}.pdf")
if __name__ == '__main__':
spectral_clustering_demo()
| true | true |
f731a4104b7ef8d6f39fe8aa8ecf2aad84c9136c | 5,770 | py | Python | run_fits.py | ruizca/pseudoXspec | a8ad2ca3744fe73e5ace78aedbabc49e32e2202a | [
"MIT"
] | null | null | null | run_fits.py | ruizca/pseudoXspec | a8ad2ca3744fe73e5ace78aedbabc49e32e2202a | [
"MIT"
] | null | null | null | run_fits.py | ruizca/pseudoXspec | a8ad2ca3744fe73e5ace78aedbabc49e32e2202a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""
Created on Fri Jun 19 10:46:32 2015
@author: ruizca
"""
import argparse
import logging
import subprocess
from itertools import count
from pathlib import Path
from astropy.coordinates.sky_coordinate import SkyCoord
from astropy.table import Table
from astropy.units import UnitTypeError
from gdpyc import GasMap
from tqdm.contrib import tzip
logging.basicConfig(level=logging.INFO)
def get_last_source_fit(last_source_file):
    """Read the index of the next source to fit from a checkpoint file.

    Args:
        last_source_file: Path of the checkpoint file.

    Returns:
        The stored integer index, or 0 when no checkpoint exists yet.
    """
    try:
        with last_source_file.open("r") as checkpoint:
            return int(checkpoint.readline())
    except FileNotFoundError:
        return 0
def update_last_source_fit(last_source, last_source_file):
    """Persist the index of the next source to fit.

    Args:
        last_source: integer index to store.
        last_source_file: Path of the checkpoint file to (over)write.
    """
    last_source_file.write_text(str(last_source))
def check_results_folder(results_folder):
    """Create ``results_folder``, refusing to clobber previous results.

    Args:
        results_folder: Path of the folder to create.

    Raises:
        FileExistsError: if the folder is already present.
    """
    if not results_folder.exists():
        results_folder.mkdir()
    else:
        raise FileExistsError(f"results_folder '{results_folder}' already exists!")
def _get_redshift(sample, zcol):
if not zcol:
zcol = "DEFAULT_REDSHIFT"
sample[zcol] = 0.0
return sample[zcol]
def _set_coords(ra, dec, unit):
    """Build a SkyCoord, falling back to an explicit unit for bare numbers.

    The first attempt relies on the inputs carrying their own astropy
    units; when they are plain numbers astropy raises UnitTypeError and
    the given ``unit`` is applied instead.
    """
    try:
        coords = SkyCoord(ra, dec)
    except UnitTypeError:
        coords = SkyCoord(ra, dec, unit=unit)
    return coords
def _get_nhgal(sample, nhcol, racol, deccol, unit="deg"):
    """Return the Galactic NH column, computing it from sky maps if absent.

    When ``nhcol`` is falsy, an NHGAL column is added to ``sample``
    (mutating it), looked up from the LAB HI survey map at each source
    position via gdpyc's GasMap.
    """
    if not nhcol:
        nhcol = "NHGAL"
        coords = _set_coords(sample[racol], sample[deccol], unit)
        sample[nhcol] = GasMap.nh(coords, nhmap="LAB")
    return sample[nhcol]
def get_sources_data(sample_file, racol, deccol, zcol=None, nhcol=None, first_source=0):
    """Load the detection catalogue and derive per-source fit inputs.

    Args:
        sample_file: catalogue path readable by astropy ``Table.read``.
        racol: name of the RA column.
        deccol: name of the Dec column.
        zcol: redshift column name; when None, redshift 0 is assumed.
        nhcol: Galactic NH column name; when None, NH is looked up from
            the LAB HI map at each source position.
        first_source: number of leading rows to skip (resume support).

    Returns:
        Tuple of columns (obsid, detid, z, nhgal), aligned row by row.
    """
    sample = Table.read(sample_file)
    sample = sample[first_source:]
    obsid = sample["OBS_ID"]
    detid = sample["DETID"]
    z = _get_redshift(sample, zcol)
    nhgal = _get_nhgal(sample, nhcol, racol, deccol)
    return obsid, detid, z, nhgal
def stack_spectra(obsid, detid, spec_folder):
    """Write a list file with absolute paths of all spectra of a detection.

    Args:
        obsid: XMM observation identifier (sub-folder of ``spec_folder``).
        detid: detection identifier used to match spectrum files.
        spec_folder: root Path containing per-observation spectra.

    Returns:
        Path of the created ``spec_<detid>.lis`` stack file (in the cwd).
    """
    # Collect the detection's spectra within this observation.
    spectra = spec_folder.joinpath(obsid).glob(f"{detid}_SRSPEC_*.pha")
    stack_file = Path(f"spec_{detid}.lis")
    lines = [spec.resolve().as_posix() + "\n" for spec in spectra]
    with stack_file.open("w") as out:
        out.writelines(lines)
    return stack_file
def remove_stack_spectra(stack_file):
    """Delete the stack file created by ``stack_spectra``.

    A missing file is tolerated: a warning is logged instead of raising.
    """
    try:
        stack_file.unlink()
    except FileNotFoundError:
        logging.warning("No stack file!")
def fit_detection(z, nh, obsid, detid, results_folder, spectra_folder, fixgamma=True):
    """Run the external fit_Xspec.py script for one detection.

    Args:
        z: source redshift.
        nh: Galactic HI column density towards the source.
        obsid: XMM observation identifier.
        detid: detection identifier.
        results_folder: Path where fit results are written.
        spectra_folder: Path containing the pseudospectra.
        fixgamma: when True, pass --fixGamma to freeze the photon index.

    Raises:
        subprocess.CalledProcessError: if the fitting script exits non-zero.
    """
    task = "./fit_Xspec.py"
    args = [
        "--redshift",
        f"{z:f}",
        "--nh",
        str(nh),
        "--obsid",
        obsid,
        "--detid",
        str(detid),
        "--output_folder",
        results_folder.as_posix(),
        "--spectra_folder",
        spectra_folder.as_posix(),
    ]
    if fixgamma:
        args += ["--fixGamma"]
    logging.debug(" ".join([task] + args))
    # check_output raises on non-zero exit; stderr is folded into stdout.
    subprocess.check_output([task] + args, stderr=subprocess.STDOUT)
def main(args):
    """Fit every detection in the catalogue, resuming after interruptions.

    The index of the last successfully fitted source is checkpointed to
    ``args.file_lastsource`` so a rerun continues where it stopped.
    """
    spec_folder = Path(args.spec_folder)
    results_folder = Path(args.results_folder)
    lastsource_file = Path(args.file_lastsource)
    first_source = get_last_source_fit(lastsource_file)
    if first_source == 0:
        # Fresh run: refuse to overwrite an existing results folder.
        check_results_folder(results_folder)
    obsids, detids, redshifts, nhgals = get_sources_data(
        args.sources_table, args.racol, args.deccol, args.zcol, args.nhcol, first_source
    )
    for obsid, detid, z, nh, current_source in tzip(
        obsids, detids, redshifts, nhgals, count(first_source)
    ):
        try:
            fit_detection(z, nh, obsid, detid, results_folder, spec_folder, args.fixgamma)
            update_last_source_fit(current_source + 1, lastsource_file)
        except Exception as e:
            # Keep going on per-detection failures; the checkpoint is not
            # advanced, so the failed source is retried on the next run.
            logging.error(e)
            logging.error(f"Something went wrong fitting detection {detid}")
if __name__ == "__main__":
    # Command-line interface; the parsed namespace is forwarded to main().
    parser = argparse.ArgumentParser(description="Fitting X-ray pseudospectra")
    parser.add_argument(
        "--catalogue",
        dest="sources_table",
        action="store",
        default=None,
        help="Full route to the detections catalogue.",
    )
    parser.add_argument(
        "--spec_folder",
        dest="spec_folder",
        action="store",
        default="./data/spectra/",
        help="Folder of the pseudospectra.",
    )
    parser.add_argument(
        "--results_folder",
        dest="results_folder",
        action="store",
        default="./fit_results/",
        help="Folder for saving the fit results.",
    )
    parser.add_argument(
        "--racol",
        dest="racol",
        action="store",
        default="XMM_RA",
        help="Name of the RA column in the catalogue.",
    )
    parser.add_argument(
        "--deccol",
        dest="deccol",
        action="store",
        default="XMM_DEC",
        help="Name of the Dec column in the catalogue.",
    )
    parser.add_argument(
        "--zcol",
        dest="zcol",
        action="store",
        default=None,
        help="Name of the redshift column in the catalogue.",
    )
    parser.add_argument(
        "--nhcol",
        dest="nhcol",
        action="store",
        default=None,
        help="Name of the Galactic NH column in the catalogue.",
    )
    parser.add_argument(
        "--lsf",
        dest="file_lastsource",
        action="store",
        default="last_source.dat",
        help="File to store the last fitted source.",
    )
    parser.add_argument(
        "--fixGamma",
        dest="fixgamma",
        action="store_true",
        default=False,
        help="Fit with a fixed photon index (1.9).",
    )
    main(parser.parse_args())
| 25.874439 | 90 | 0.629116 |
import argparse
import logging
import subprocess
from itertools import count
from pathlib import Path
from astropy.coordinates.sky_coordinate import SkyCoord
from astropy.table import Table
from astropy.units import UnitTypeError
from gdpyc import GasMap
from tqdm.contrib import tzip
logging.basicConfig(level=logging.INFO)
def get_last_source_fit(last_source_file):
try:
with last_source_file.open("r") as fp:
first_source = int(fp.readline())
except FileNotFoundError:
first_source = 0
return first_source
def update_last_source_fit(last_source, last_source_file):
with last_source_file.open("w") as fp:
fp.write(str(last_source))
def check_results_folder(results_folder):
if results_folder.exists():
raise FileExistsError(f"results_folder '{results_folder}' already exists!")
else:
results_folder.mkdir()
def _get_redshift(sample, zcol):
if not zcol:
zcol = "DEFAULT_REDSHIFT"
sample[zcol] = 0.0
return sample[zcol]
def _set_coords(ra, dec, unit):
try:
coords = SkyCoord(ra, dec)
except UnitTypeError:
coords = SkyCoord(ra, dec, unit=unit)
return coords
def _get_nhgal(sample, nhcol, racol, deccol, unit="deg"):
if not nhcol:
nhcol = "NHGAL"
coords = _set_coords(sample[racol], sample[deccol], unit)
sample[nhcol] = GasMap.nh(coords, nhmap="LAB")
return sample[nhcol]
def get_sources_data(sample_file, racol, deccol, zcol=None, nhcol=None, first_source=0):
sample = Table.read(sample_file)
sample = sample[first_source:]
obsid = sample["OBS_ID"]
detid = sample["DETID"]
z = _get_redshift(sample, zcol)
nhgal = _get_nhgal(sample, nhcol, racol, deccol)
return obsid, detid, z, nhgal
def stack_spectra(obsid, detid, spec_folder):
obs_path = spec_folder.joinpath(obsid)
spec_files = obs_path.glob(f"{detid}_SRSPEC_*.pha")
stack_file = Path(f"spec_{detid}.lis")
with stack_file.open("w") as fp:
for spec in spec_files:
fp.write(spec.resolve().as_posix() + "\n")
return stack_file
def remove_stack_spectra(stack_file):
try:
stack_file.unlink()
except FileNotFoundError:
logging.warning("No stack file!")
def fit_detection(z, nh, obsid, detid, results_folder, spectra_folder, fixgamma=True):
task = "./fit_Xspec.py"
args = [
"--redshift",
f"{z:f}",
"--nh",
str(nh),
"--obsid",
obsid,
"--detid",
str(detid),
"--output_folder",
results_folder.as_posix(),
"--spectra_folder",
spectra_folder.as_posix(),
]
if fixgamma:
args += ["--fixGamma"]
logging.debug(" ".join([task] + args))
subprocess.check_output([task] + args, stderr=subprocess.STDOUT)
def main(args):
spec_folder = Path(args.spec_folder)
results_folder = Path(args.results_folder)
lastsource_file = Path(args.file_lastsource)
first_source = get_last_source_fit(lastsource_file)
if first_source == 0:
check_results_folder(results_folder)
obsids, detids, redshifts, nhgals = get_sources_data(
args.sources_table, args.racol, args.deccol, args.zcol, args.nhcol, first_source
)
for obsid, detid, z, nh, current_source in tzip(
obsids, detids, redshifts, nhgals, count(first_source)
):
try:
fit_detection(z, nh, obsid, detid, results_folder, spec_folder, args.fixgamma)
update_last_source_fit(current_source + 1, lastsource_file)
except Exception as e:
logging.error(e)
logging.error(f"Something went wrong fitting detection {detid}")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Fitting X-ray pseudospectra")
parser.add_argument(
"--catalogue",
dest="sources_table",
action="store",
default=None,
help="Full route to the detections catalogue.",
)
parser.add_argument(
"--spec_folder",
dest="spec_folder",
action="store",
default="./data/spectra/",
help="Folder of the pseudospectra.",
)
parser.add_argument(
"--results_folder",
dest="results_folder",
action="store",
default="./fit_results/",
help="Folder for saving the fit results.",
)
parser.add_argument(
"--racol",
dest="racol",
action="store",
default="XMM_RA",
help="Name of the RA column in the catalogue.",
)
parser.add_argument(
"--deccol",
dest="deccol",
action="store",
default="XMM_DEC",
help="Name of the Dec column in the catalogue.",
)
parser.add_argument(
"--zcol",
dest="zcol",
action="store",
default=None,
help="Name of the redshift column in the catalogue.",
)
parser.add_argument(
"--nhcol",
dest="nhcol",
action="store",
default=None,
help="Name of the Galactic NH column in the catalogue.",
)
parser.add_argument(
"--lsf",
dest="file_lastsource",
action="store",
default="last_source.dat",
help="File to store the last fitted source.",
)
parser.add_argument(
"--fixGamma",
dest="fixgamma",
action="store_true",
default=False,
help="Fit with a fixed photon index (1.9).",
)
main(parser.parse_args())
| true | true |
f731a56cb0c8590006223c1be11722be665011fc | 1,998 | py | Python | nitro-python/nssrc/com/citrix/netscaler/nitro/resource/config/dns/dnssoarec_args.py | culbertm/NSttyPython | ff9f6aedae3fb8495342cd0fc4247c819cf47397 | [
"Apache-2.0"
] | null | null | null | nitro-python/nssrc/com/citrix/netscaler/nitro/resource/config/dns/dnssoarec_args.py | culbertm/NSttyPython | ff9f6aedae3fb8495342cd0fc4247c819cf47397 | [
"Apache-2.0"
] | null | null | null | nitro-python/nssrc/com/citrix/netscaler/nitro/resource/config/dns/dnssoarec_args.py | culbertm/NSttyPython | ff9f6aedae3fb8495342cd0fc4247c819cf47397 | [
"Apache-2.0"
] | null | null | null | #
# Copyright (c) 2008-2016 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class dnssoarec_args :
    r""" Provides additional arguments required for fetching the dnssoarec resource.
    """
    def __init__(self) :
        # Backing fields for the 'type' and 'nodeid' filter arguments.
        self._type = None
        self._nodeid = None
    @property
    def type(self) :
        r"""Type of records to display. Available settings function as follows:
        * ADNS - Display all authoritative address records.
        * PROXY - Display all proxy address records.
        * ALL - Display all address records.<br/>Possible values = ALL, ADNS, PROXY.
        """
        try :
            return self._type
        except Exception as e:
            raise e
    @type.setter
    def type(self, type) :
        r"""Type of records to display. Available settings function as follows:
        * ADNS - Display all authoritative address records.
        * PROXY - Display all proxy address records.
        * ALL - Display all address records.<br/>Possible values = ALL, ADNS, PROXY
        """
        try :
            self._type = type
        except Exception as e:
            raise e
    @property
    def nodeid(self) :
        r"""Unique number that identifies the cluster node.<br/>Minimum value = 0<br/>Maximum value = 31.
        """
        try :
            return self._nodeid
        except Exception as e:
            raise e
    @nodeid.setter
    def nodeid(self, nodeid) :
        r"""Unique number that identifies the cluster node.<br/>Minimum value = 0<br/>Maximum value = 31
        """
        try :
            self._nodeid = nodeid
        except Exception as e:
            raise e
    # Allowed values for the 'type' argument (generated SDK enum-style class).
    class Type:
        ALL = "ALL"
        ADNS = "ADNS"
        PROXY = "PROXY"
| 27.75 | 101 | 0.698699 |
class dnssoarec_args :
def __init__(self) :
self._type = None
self._nodeid = None
@property
def type(self) :
try :
return self._type
except Exception as e:
raise e
@type.setter
def type(self, type) :
try :
self._type = type
except Exception as e:
raise e
@property
def nodeid(self) :
try :
return self._nodeid
except Exception as e:
raise e
@nodeid.setter
def nodeid(self, nodeid) :
try :
self._nodeid = nodeid
except Exception as e:
raise e
class Type:
ALL = "ALL"
ADNS = "ADNS"
PROXY = "PROXY"
| true | true |
f731a5771e4fe79a045785785375a014bc97a2e2 | 717 | py | Python | bot_v_bot.py | garybake/man_alphagb | e69c954e3f42dd556f9b5b7b9edcbf2856389d2f | [
"MIT"
] | null | null | null | bot_v_bot.py | garybake/man_alphagb | e69c954e3f42dd556f9b5b7b9edcbf2856389d2f | [
"MIT"
] | null | null | null | bot_v_bot.py | garybake/man_alphagb | e69c954e3f42dd556f9b5b7b9edcbf2856389d2f | [
"MIT"
] | null | null | null | import time
# from dlgo.agent import naive
from dlgo.agent import naive
from dlgo import gotypes
from dlgo import goboard_slow as goboard
from dlgo.utils import print_board, print_move
def main():
    """Play a full Go game between two random bots, printing each move."""
    board_size = 9
    game = goboard.GameState.new_game(board_size)
    # One independent random bot per color.
    bots = {
        gotypes.Player.black: naive.RandomBot(),
        gotypes.Player.white: naive.RandomBot()
    }
    while not game.is_over():
        # Slow the loop down so moves are readable in the terminal.
        time.sleep(0.3)
        # print(chr(27) + "[2J")
        # print_board(game.board)
        bot_move = bots[game.next_player].select_move(game)
        print_move(game.next_player, bot_move)
        game = game.apply_move(bot_move)
if __name__ == '__main__':
main()
# x = naive.RandomBot() | 25.607143 | 59 | 0.666667 | import time
from dlgo.agent import naive
from dlgo import gotypes
from dlgo import goboard_slow as goboard
from dlgo.utils import print_board, print_move
def main():
board_size = 9
game = goboard.GameState.new_game(board_size)
bots = {
gotypes.Player.black: naive.RandomBot(),
gotypes.Player.white: naive.RandomBot()
}
while not game.is_over():
time.sleep(0.3)
bot_move = bots[game.next_player].select_move(game)
print_move(game.next_player, bot_move)
game = game.apply_move(bot_move)
if __name__ == '__main__':
main()
| true | true |
f731a5baeb0a5f98871e45535782937905736879 | 187,564 | bzl | Python | go_third_party.bzl | enp0s3/project-infra | a53795f5df0dbb22c94ba04da47e9ba2fdfe60ed | [
"Apache-2.0"
] | 16 | 2017-07-30T10:51:51.000Z | 2021-09-02T17:58:03.000Z | go_third_party.bzl | enp0s3/project-infra | a53795f5df0dbb22c94ba04da47e9ba2fdfe60ed | [
"Apache-2.0"
] | 1,946 | 2017-07-03T13:00:30.000Z | 2022-03-31T18:55:19.000Z | go_third_party.bzl | enp0s3/project-infra | a53795f5df0dbb22c94ba04da47e9ba2fdfe60ed | [
"Apache-2.0"
] | 103 | 2017-07-03T09:53:36.000Z | 2022-03-22T20:12:00.000Z | load("@bazel_gazelle//:deps.bzl", "go_repository")
def go_deps():
go_repository(
name = "ag_pack_amqp",
build_file_proto_mode = "disable",
importpath = "pack.ag/amqp",
sum = "h1:cuNDWLUTbKRtEZwhB0WQBXf9pGbm87pUBXQhvcFxBWg=",
version = "v0.11.2",
)
go_repository(
name = "cc_mvdan_interfacer",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/interfacer",
sum = "h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I=",
version = "v0.0.0-20180901003855-c20040233aed",
)
go_repository(
name = "cc_mvdan_lint",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/lint",
sum = "h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo=",
version = "v0.0.0-20170908181259-adc824a0674b",
)
go_repository(
name = "cc_mvdan_unparam",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/unparam",
sum = "h1:kAREL6MPwpsk1/PQPFD3Eg7WAQR5mPTWZJaBiG5LDbY=",
version = "v0.0.0-20200501210554-b37ab49443f7",
)
go_repository(
name = "cc_mvdan_xurls_v2",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/xurls/v2",
sum = "h1:r1zSOSNS/kqtpmATyMMMvaZ4/djsesbYz5kr0+qMRWc=",
version = "v2.0.0",
)
go_repository(
name = "co_honnef_go_tools",
build_file_proto_mode = "disable",
importpath = "honnef.co/go/tools",
sum = "h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8=",
version = "v0.0.1-2020.1.4",
)
go_repository(
name = "com_github_agnivade_levenshtein",
build_file_proto_mode = "disable",
importpath = "github.com/agnivade/levenshtein",
sum = "h1:3oJU7J3FGFmyhn8KHjmVaZCN5hxTr7GxgRue+sxIXdQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_alcortesm_tgz",
build_file_proto_mode = "disable",
importpath = "github.com/alcortesm/tgz",
sum = "h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=",
version = "v0.0.0-20161220082320-9c5fe88206d7",
)
go_repository(
name = "com_github_alecthomas_kingpin",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/kingpin",
sum = "h1:5svnBTFgJjZvGKyYBtMB0+m5wvrbUHiqye8wRJMlnYI=",
version = "v2.2.6+incompatible",
)
go_repository(
name = "com_github_alecthomas_template",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/template",
sum = "h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM=",
version = "v0.0.0-20190718012654-fb15b899a751",
)
go_repository(
name = "com_github_alecthomas_units",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/units",
sum = "h1:UQZhZ2O0vMHr2cI+DC1Mbh0TJxzA3RcLoMsFw+aXw7E=",
version = "v0.0.0-20190924025748-f65c72e2690d",
)
go_repository(
name = "com_github_andreyvit_diff",
build_file_proto_mode = "disable",
importpath = "github.com/andreyvit/diff",
sum = "h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=",
version = "v0.0.0-20170406064948-c7f18ee00883",
)
go_repository(
name = "com_github_andybalholm_brotli",
build_file_proto_mode = "disable",
importpath = "github.com/andybalholm/brotli",
sum = "h1:bZ28Hqta7TFAK3Q08CMvv8y3/8ATaEqv2nGoc6yff6c=",
version = "v0.0.0-20190621154722-5f990b63d2d6",
)
go_repository(
name = "com_github_andygrunwald_go_gerrit",
build_file_proto_mode = "disable",
importpath = "github.com/andygrunwald/go-gerrit",
sum = "h1:7gokoTWteZhP1t2f0OzrFFXlyL8o0+b0r4ZaRV9PXOs=",
version = "v0.0.0-20210709065208-9d38b0be0268",
)
go_repository(
name = "com_github_andygrunwald_go_jira",
build_file_proto_mode = "disable",
importpath = "github.com/andygrunwald/go-jira",
sum = "h1:vvIImGgX32bHfoiyUwkNo+/YrPnRczNarvhLOncP6dE=",
version = "v1.13.0",
)
go_repository(
name = "com_github_anmitsu_go_shlex",
build_file_proto_mode = "disable",
importpath = "github.com/anmitsu/go-shlex",
sum = "h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=",
version = "v0.0.0-20161002113705-648efa622239",
)
go_repository(
name = "com_github_antihax_optional",
build_file_proto_mode = "disable",
importpath = "github.com/antihax/optional",
sum = "h1:uZuxRZCz65cG1o6K/xUqImNcYKtmk9ylqaH0itMSvzA=",
version = "v0.0.0-20180407024304-ca021399b1a6",
)
go_repository(
name = "com_github_apache_thrift",
build_file_proto_mode = "disable",
importpath = "github.com/apache/thrift",
sum = "h1:pODnxUFNcjP9UTLZGTdeh+j16A8lJbRvD3rOtrk/7bs=",
version = "v0.12.0",
)
go_repository(
name = "com_github_apex_log",
build_file_proto_mode = "disable",
importpath = "github.com/apex/log",
sum = "h1:1fyfbPvUwD10nMoh3hY6MXzvZShJQn9/ck7ATgAt5pA=",
version = "v1.3.0",
)
go_repository(
name = "com_github_apex_logs",
build_file_proto_mode = "disable",
importpath = "github.com/apex/logs",
sum = "h1:KmEBVwfDUOTFcBO8cfkJYwdQ5487UZSN+GteOGPmiro=",
version = "v0.0.4",
)
go_repository(
name = "com_github_aphistic_golf",
build_file_proto_mode = "disable",
importpath = "github.com/aphistic/golf",
sum = "h1:2KLQMJ8msqoPHIPDufkxVcoTtcmE5+1sL9950m4R9Pk=",
version = "v0.0.0-20180712155816-02c07f170c5a",
)
go_repository(
name = "com_github_aphistic_sweet",
build_file_proto_mode = "disable",
importpath = "github.com/aphistic/sweet",
sum = "h1:I4z+fAUqvKfvZV/CHi5dV0QuwbmIvYYFDjG0Ss5QpAs=",
version = "v0.2.0",
)
go_repository(
name = "com_github_armon_circbuf",
build_file_proto_mode = "disable",
importpath = "github.com/armon/circbuf",
sum = "h1:QEF07wC0T1rKkctt1RINW/+RMTVmiwxETico2l3gxJA=",
version = "v0.0.0-20150827004946-bbbad097214e",
)
go_repository(
name = "com_github_armon_consul_api",
build_file_proto_mode = "disable",
importpath = "github.com/armon/consul-api",
sum = "h1:G1bPvciwNyF7IUmKXNt9Ak3m6u9DE1rF+RmtIkBpVdA=",
version = "v0.0.0-20180202201655-eb2c6b5be1b6",
)
go_repository(
name = "com_github_armon_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-metrics",
sum = "h1:8GUt8eRujhVEGZFFEjBj46YV4rDjvGrNxb0KMWYkL2I=",
version = "v0.0.0-20180917152333-f0300d1749da",
)
go_repository(
name = "com_github_armon_go_radix",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-radix",
sum = "h1:BUAU3CGlLvorLI26FmByPp2eC2qla6E1Tw+scpcg/to=",
version = "v0.0.0-20180808171621-7fddfc383310",
)
go_repository(
name = "com_github_armon_go_socks5",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-socks5",
sum = "h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=",
version = "v0.0.0-20160902184237-e75332964ef5",
)
go_repository(
name = "com_github_asaskevich_govalidator",
build_file_proto_mode = "disable",
importpath = "github.com/asaskevich/govalidator",
sum = "h1:zV3ejI06GQ59hwDQAvmK1qxOQGB3WuVTRoY0okPTAv0=",
version = "v0.0.0-20200108200545-475eaeb16496",
)
go_repository(
name = "com_github_aws_aws_k8s_tester",
build_file_proto_mode = "disable",
importpath = "github.com/aws/aws-k8s-tester",
sum = "h1:Zr5NWiRK5fhmRIlhrsTwrY8yB488FyN6iulci2D7VaI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_aws_aws_sdk_go",
build_file_proto_mode = "disable",
importpath = "github.com/aws/aws-sdk-go",
sum = "h1:cyZp8TvUbH9rrShdrwULtCj4pB5szddrw9aKHUsw1Ic=",
version = "v1.37.22",
)
go_repository(
name = "com_github_aybabtme_rgbterm",
build_file_proto_mode = "disable",
importpath = "github.com/aybabtme/rgbterm",
sum = "h1:WWB576BN5zNSZc/M9d/10pqEx5VHNhaQ/yOVAkmj5Yo=",
version = "v0.0.0-20170906152045-cc83f3b3ce59",
)
go_repository(
name = "com_github_azure_azure_amqp_common_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-amqp-common-go/v2",
sum = "h1:+QbFgmWCnPzdaRMfsI0Yb6GrRdBj5jVL8N3EXuEUcBQ=",
version = "v2.1.0",
)
go_repository(
name = "com_github_azure_azure_pipeline_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-pipeline-go",
sum = "h1:6oiIS9yaG6XCCzhgAgKFfIWyo4LLCiDhZot6ltoThhY=",
version = "v0.2.2",
)
go_repository(
name = "com_github_azure_azure_sdk_for_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-sdk-for-go",
sum = "h1:m4oQOm3HXtQh2Ipata+pLSS1kGUD/7ikkvNq81XM/7s=",
version = "v46.3.0+incompatible",
)
go_repository(
name = "com_github_azure_azure_service_bus_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-service-bus-go",
sum = "h1:G1qBLQvHCFDv9pcpgwgFkspzvnGknJRR0PYJ9ytY/JA=",
version = "v0.9.1",
)
go_repository(
name = "com_github_azure_azure_storage_blob_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-storage-blob-go",
sum = "h1:53qhf0Oxa0nOjgbDeeYPUeyiNmafAFEY95rZLK0Tj6o=",
version = "v0.8.0",
)
go_repository(
name = "com_github_azure_go_ansiterm",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-ansiterm",
sum = "h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8=",
version = "v0.0.0-20170929234023-d6e3b3328b78",
)
go_repository(
name = "com_github_azure_go_autorest",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest",
replace = "github.com/Azure/go-autorest",
sum = "h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs=",
version = "v14.2.0+incompatible",
)
go_repository(
name = "com_github_azure_go_autorest_autorest",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest",
sum = "h1:gI8ytXbxMfI+IVbI9mP2JGCTXIuhHLgRlvQ9X4PsnHE=",
version = "v0.11.12",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_adal",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/adal",
sum = "h1:Y3bBUV4rTuxenJJs41HU3qmqsb+auo+a3Lz+PlJPpL0=",
version = "v0.9.5",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_azure_auth",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/azure/auth",
sum = "h1:iM6UAvjR97ZIeR93qTcwpKNMpV+/FTWjwEbuPD495Tk=",
version = "v0.4.2",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_azure_cli",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/azure/cli",
sum = "h1:LXl088ZQlP0SBppGFsRZonW6hSvwgL5gRByMbvUbx8U=",
version = "v0.3.1",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_date",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/date",
sum = "h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw=",
version = "v0.3.0",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_mocks",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/mocks",
sum = "h1:K0laFcLE6VLTOwNgSxaGbUcLPuGXlNkbVvq4cW4nIHk=",
version = "v0.4.1",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_to",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/to",
sum = "h1:oXVqrxakqqV1UZdSazDOPOLvOIz+XA683u8EctwboHk=",
version = "v0.4.0",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_validation",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/validation",
sum = "h1:3I9AAI63HfcLtphd9g39ruUwRI+Ca+z/f36KHPFRUss=",
version = "v0.3.0",
)
go_repository(
name = "com_github_azure_go_autorest_logger",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/logger",
sum = "h1:e4RVHVZKC5p6UANLJHkM4OfR1UKZPj8Wt8Pcx+3oqrE=",
version = "v0.2.0",
)
go_repository(
name = "com_github_azure_go_autorest_tracing",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/tracing",
sum = "h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo=",
version = "v0.6.0",
)
go_repository(
name = "com_github_beorn7_perks",
build_file_proto_mode = "disable",
importpath = "github.com/beorn7/perks",
sum = "h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=",
version = "v1.0.1",
)
go_repository(
name = "com_github_bgentry_speakeasy",
build_file_proto_mode = "disable",
importpath = "github.com/bgentry/speakeasy",
sum = "h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=",
version = "v0.1.0",
)
go_repository(
name = "com_github_bitly_go_simplejson",
build_file_proto_mode = "disable",
importpath = "github.com/bitly/go-simplejson",
sum = "h1:6IH+V8/tVMab511d5bn4M7EwGXZf9Hj6i2xSwkNEM+Y=",
version = "v0.5.0",
)
go_repository(
name = "com_github_bketelsen_crypt",
build_file_proto_mode = "disable",
importpath = "github.com/bketelsen/crypt",
sum = "h1:+0HFd5KSZ/mm3JmhmrDukiId5iR6w4+BdFtfSy4yWIc=",
version = "v0.0.3-0.20200106085610-5cbc8cc4026c",
)
go_repository(
name = "com_github_blakesmith_ar",
build_file_proto_mode = "disable",
importpath = "github.com/blakesmith/ar",
sum = "h1:m935MPodAbYS46DG4pJSv7WO+VECIWUQ7OJYSoTrMh4=",
version = "v0.0.0-20190502131153-809d4375e1fb",
)
go_repository(
name = "com_github_blang_semver",
build_file_proto_mode = "disable",
importpath = "github.com/blang/semver",
sum = "h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ=",
version = "v3.5.1+incompatible",
)
go_repository(
name = "com_github_bmizerany_assert",
build_file_proto_mode = "disable",
importpath = "github.com/bmizerany/assert",
sum = "h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY=",
version = "v0.0.0-20160611221934-b7ed37b82869",
)
go_repository(
name = "com_github_bmizerany_perks",
build_file_proto_mode = "disable",
importpath = "github.com/bmizerany/perks",
sum = "h1:AP/Y7sqYicnjGDfD5VcY4CIfh1hRXBUavxrvELjTiOE=",
version = "v0.0.0-20141205001514-d9a9656a3a4b",
)
go_repository(
name = "com_github_bombsimon_wsl_v2",
build_file_proto_mode = "disable",
importpath = "github.com/bombsimon/wsl/v2",
sum = "h1:/DdSteYCq4lPX+LqDg7mdoxm14UxzZPoDT0taYc3DTU=",
version = "v2.2.0",
)
go_repository(
name = "com_github_bombsimon_wsl_v3",
build_file_proto_mode = "disable",
importpath = "github.com/bombsimon/wsl/v3",
sum = "h1:E5SRssoBgtVFPcYWUOFJEcgaySgdtTNYzsSKDOY7ss8=",
version = "v3.1.0",
)
go_repository(
name = "com_github_bshuster_repo_logrus_logstash_hook",
build_file_proto_mode = "disable",
importpath = "github.com/bshuster-repo/logrus-logstash-hook",
sum = "h1:pgAtgj+A31JBVtEHu2uHuEx0n+2ukqUJnS2vVe5pQNA=",
version = "v0.4.1",
)
go_repository(
name = "com_github_bugsnag_bugsnag_go",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/bugsnag-go",
sum = "h1:rFt+Y/IK1aEZkEHchZRSq9OQbsSzIT/OrI8YFFmRIng=",
version = "v0.0.0-20141110184014-b1d153021fcd",
)
go_repository(
name = "com_github_bugsnag_osext",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/osext",
sum = "h1:otBG+dV+YK+Soembjv71DPz3uX/V/6MMlSyD9JBQ6kQ=",
version = "v0.0.0-20130617224835-0dd3f918b21b",
)
go_repository(
name = "com_github_bugsnag_panicwrap",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/panicwrap",
sum = "h1:nvj0OLI3YqYXer/kZD8Ri1aaunCxIEsOst1BVJswV0o=",
version = "v0.0.0-20151223152923-e2c28503fcd0",
)
go_repository(
name = "com_github_burntsushi_toml",
build_file_proto_mode = "disable",
importpath = "github.com/BurntSushi/toml",
sum = "h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=",
version = "v0.3.1",
)
go_repository(
name = "com_github_burntsushi_xgb",
build_file_proto_mode = "disable",
importpath = "github.com/BurntSushi/xgb",
sum = "h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc=",
version = "v0.0.0-20160522181843-27f122750802",
)
go_repository(
name = "com_github_bwmarrin_snowflake",
build_file_proto_mode = "disable",
importpath = "github.com/bwmarrin/snowflake",
sum = "h1:dRbqXFjM10uA3wdrVZ8Kh19uhciRMOroUYJ7qAqDLhY=",
version = "v0.0.0",
)
go_repository(
name = "com_github_caarlos0_ctrlc",
build_file_proto_mode = "disable",
importpath = "github.com/caarlos0/ctrlc",
sum = "h1:2DtF8GSIcajgffDFJzyG15vO+1PuBWOMUdFut7NnXhw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_campoy_unique",
build_file_proto_mode = "disable",
importpath = "github.com/campoy/unique",
sum = "h1:V9a67dfYqPLAvzk5hMQOXYJlZ4SLIXgyKIE+ZiHzgGQ=",
version = "v0.0.0-20180121183637-88950e537e7e",
)
go_repository(
name = "com_github_cavaliercoder_go_cpio",
build_file_proto_mode = "disable",
importpath = "github.com/cavaliercoder/go-cpio",
sum = "h1:hHg27A0RSSp2Om9lubZpiMgVbvn39bsUmW9U5h0twqc=",
version = "v0.0.0-20180626203310-925f9528c45e",
)
go_repository(
name = "com_github_census_instrumentation_opencensus_proto",
build_file_proto_mode = "disable",
importpath = "github.com/census-instrumentation/opencensus-proto",
sum = "h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk=",
version = "v0.2.1",
)
go_repository(
name = "com_github_cespare_xxhash",
build_file_proto_mode = "disable",
importpath = "github.com/cespare/xxhash",
sum = "h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=",
version = "v1.1.0",
)
go_repository(
name = "com_github_cespare_xxhash_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cespare/xxhash/v2",
sum = "h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=",
version = "v2.1.1",
)
go_repository(
name = "com_github_chai2010_gettext_go",
build_file_proto_mode = "disable",
importpath = "github.com/chai2010/gettext-go",
sum = "h1:7aWHqerlJ41y6FOsEUvknqgXnGmJyJSbjhAWq5pO4F8=",
version = "v0.0.0-20160711120539-c6fed771bfd5",
)
go_repository(
name = "com_github_chzyer_logex",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/logex",
sum = "h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=",
version = "v1.1.10",
)
go_repository(
name = "com_github_chzyer_readline",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/readline",
sum = "h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8=",
version = "v0.0.0-20180603132655-2972be24d48e",
)
go_repository(
name = "com_github_chzyer_test",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/test",
sum = "h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8=",
version = "v0.0.0-20180213035817-a1ea475d72b1",
)
go_repository(
name = "com_github_cihub_seelog",
build_file_proto_mode = "disable",
importpath = "github.com/cihub/seelog",
sum = "h1:kHaBemcxl8o/pQ5VM1c8PVE1PubbNx3mjUr09OqWGCs=",
version = "v0.0.0-20170130134532-f561c5e57575",
)
go_repository(
name = "com_github_clarketm_json",
build_file_proto_mode = "disable",
importpath = "github.com/clarketm/json",
sum = "h1:0JketcMdLC16WGnRGJiNmTXuQznDEQaiknxSPRBxg+k=",
version = "v1.13.4",
)
go_repository(
name = "com_github_client9_misspell",
build_file_proto_mode = "disable",
importpath = "github.com/client9/misspell",
sum = "h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI=",
version = "v0.3.4",
)
go_repository(
name = "com_github_cloudevents_sdk_go",
build_file_proto_mode = "disable",
importpath = "github.com/cloudevents/sdk-go",
sum = "h1:gS5I0s2qPmdc4GBPlUmzZU7RH30BaiOdcRJ1RkXnPrc=",
version = "v1.0.0",
)
go_repository(
name = "com_github_cloudevents_sdk_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cloudevents/sdk-go/v2",
sum = "h1:AUdGJwaSUnA+VvepKqgjy6XDkPcf0hf/3L7icEs1ibs=",
version = "v2.0.0",
)
go_repository(
name = "com_github_cloudflare_cloudflare_go",
build_file_proto_mode = "disable",
importpath = "github.com/cloudflare/cloudflare-go",
sum = "h1:bhMGoNhAg21DuqJjU9jQepRRft6vYfo6pejT3NN4V6A=",
version = "v0.13.2",
)
go_repository(
name = "com_github_cncf_udpa_go",
build_file_proto_mode = "disable",
importpath = "github.com/cncf/udpa/go",
sum = "h1:WBZRG4aNOuI15bLRrCgN8fCq8E5Xuty6jGbmSNEvSsU=",
version = "v0.0.0-20191209042840-269d4d468f6f",
)
go_repository(
name = "com_github_cockroachdb_datadriven",
build_file_proto_mode = "disable",
importpath = "github.com/cockroachdb/datadriven",
sum = "h1:OaNxuTZr7kxeODyLWsRMC+OD03aFUH+mW6r2d+MWa5Y=",
version = "v0.0.0-20190809214429-80d97fb3cbaa",
)
go_repository(
name = "com_github_containerd_cgroups",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/cgroups",
sum = "h1:tSNMc+rJDfmYntojat8lljbt1mgKNpTxUZJsSzJ9Y1s=",
version = "v0.0.0-20190919134610-bf292b21730f",
)
go_repository(
name = "com_github_containerd_console",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/console",
sum = "h1:uict5mhHFTzKLUCufdSLym7z/J0CbBJT59lYbP9wtbg=",
version = "v0.0.0-20180822173158-c12b1e7919c1",
)
go_repository(
name = "com_github_containerd_containerd",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/containerd",
sum = "h1:LoIzb5y9x5l8VKAlyrbusNPXqBY0+kviRloxFUMFwKc=",
version = "v1.3.3",
)
go_repository(
name = "com_github_containerd_continuity",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/continuity",
sum = "h1:kIFnQBO7rQ0XkMe6xEwbybYHBEaWmh/f++laI6Emt7M=",
version = "v0.0.0-20200107194136-26c1120b8d41",
)
go_repository(
name = "com_github_containerd_fifo",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/fifo",
sum = "h1:PUD50EuOMkXVcpBIA/R95d56duJR9VxhwncsFbNnxW4=",
version = "v0.0.0-20190226154929-a9fb20d87448",
)
go_repository(
name = "com_github_containerd_go_runc",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/go-runc",
sum = "h1:esQOJREg8nw8aXj6uCN5dfW5cKUBiEJ/+nni1Q/D/sw=",
version = "v0.0.0-20180907222934-5a6d9f37cfa3",
)
go_repository(
name = "com_github_containerd_ttrpc",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/ttrpc",
sum = "h1:dlfGmNcE3jDAecLqwKPMNX6nk2qh1c1Vg1/YTzpOOF4=",
version = "v0.0.0-20190828154514-0e0f228740de",
)
go_repository(
name = "com_github_containerd_typeurl",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/typeurl",
sum = "h1:JNn81o/xG+8NEo3bC/vx9pbi/g2WI8mtP2/nXzu297Y=",
version = "v0.0.0-20180627222232-a93fcdb778cd",
)
go_repository(
name = "com_github_coreos_bbolt",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/bbolt",
sum = "h1:n6AiVyVRKQFNb6mJlwESEvvLoDyiTzXX7ORAUlkeBdY=",
version = "v1.3.3",
)
go_repository(
name = "com_github_coreos_etcd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/etcd",
sum = "h1:f/Z3EoDSx1yjaIjLQGo1diYUlQYSBrrAQ5vP8NjwXwo=",
version = "v3.3.17+incompatible",
)
go_repository(
name = "com_github_coreos_go_etcd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-etcd",
sum = "h1:bXhRBIXoTm9BYHS3gE0TtQuyNZyeEMux2sDi4oo5YOo=",
version = "v2.0.0+incompatible",
)
go_repository(
name = "com_github_coreos_go_oidc",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-oidc",
sum = "h1:sdJrfw8akMnCuUlaZU3tE/uYXFgfqom8DBE9so9EBsM=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_coreos_go_semver",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-semver",
sum = "h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM=",
version = "v0.3.0",
)
go_repository(
name = "com_github_coreos_go_systemd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-systemd",
sum = "h1:Wf6HqHfScWJN9/ZjdUKyjop4mf3Qdd+1TvvltAvM3m8=",
version = "v0.0.0-20190321100706-95778dfbb74e",
)
go_repository(
name = "com_github_coreos_pkg",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/pkg",
sum = "h1:lBNOc5arjvs8E5mO2tbpBpLoyyu8B6e44T7hJy6potg=",
version = "v0.0.0-20180928190104-399ea9e2e55f",
)
go_repository(
name = "com_github_cpu_goacmedns",
build_file_proto_mode = "disable",
importpath = "github.com/cpu/goacmedns",
sum = "h1:QOeMpIEsIdm1LSASSswjaTf8CXmzcrgy5OeCfHjppA4=",
version = "v0.0.3",
)
go_repository(
name = "com_github_cpuguy83_go_md2man",
build_file_proto_mode = "disable",
importpath = "github.com/cpuguy83/go-md2man",
sum = "h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=",
version = "v1.0.10",
)
go_repository(
name = "com_github_cpuguy83_go_md2man_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cpuguy83/go-md2man/v2",
sum = "h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=",
version = "v2.0.0",
)
go_repository(
name = "com_github_creack_pty",
build_file_proto_mode = "disable",
importpath = "github.com/creack/pty",
sum = "h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw=",
version = "v1.1.11",
)
go_repository(
name = "com_github_cyphar_filepath_securejoin",
build_file_proto_mode = "disable",
importpath = "github.com/cyphar/filepath-securejoin",
sum = "h1:jCwT2GTP+PY5nBz3c/YL5PAIbusElVrPujOBSCj8xRg=",
version = "v0.2.2",
)
go_repository(
name = "com_github_datadog_zstd",
build_file_proto_mode = "disable",
importpath = "github.com/DataDog/zstd",
sum = "h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM=",
version = "v1.4.1",
)
go_repository(
name = "com_github_davecgh_go_spew",
build_file_proto_mode = "disable",
importpath = "github.com/davecgh/go-spew",
sum = "h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=",
version = "v1.1.1",
)
go_repository(
name = "com_github_daviddengcn_go_colortext",
build_file_proto_mode = "disable",
importpath = "github.com/daviddengcn/go-colortext",
sum = "h1:uVsMphB1eRx7xB1njzL3fuMdWRN8HtVzoUOItHMwv5c=",
version = "v0.0.0-20160507010035-511bcaf42ccd",
)
go_repository(
name = "com_github_deislabs_oras",
build_file_proto_mode = "disable",
importpath = "github.com/deislabs/oras",
sum = "h1:If674KraJVpujYR00rzdi0QAmW4BxzMJPVAZJKuhQ0c=",
version = "v0.8.1",
)
go_repository(
name = "com_github_denisenkom_go_mssqldb",
build_file_proto_mode = "disable",
importpath = "github.com/denisenkom/go-mssqldb",
sum = "h1:83Wprp6ROGeiHFAP8WJdI2RoxALQYgdllERc3N5N2DM=",
version = "v0.0.0-20191124224453-732737034ffd",
)
go_repository(
name = "com_github_denverdino_aliyungo",
build_file_proto_mode = "disable",
importpath = "github.com/denverdino/aliyungo",
sum = "h1:p6poVbjHDkKa+wtC8frBMwQtT3BmqGYBjzMwJ63tuR4=",
version = "v0.0.0-20190125010748-a747050bb1ba",
)
go_repository(
name = "com_github_devigned_tab",
build_file_proto_mode = "disable",
importpath = "github.com/devigned/tab",
sum = "h1:3mD6Kb1mUOYeLpJvTVSDwSg5ZsfSxfvxGRTxRsJsITA=",
version = "v0.1.1",
)
go_repository(
name = "com_github_dgrijalva_jwt_go",
build_file_proto_mode = "disable",
importpath = "github.com/dgrijalva/jwt-go",
sum = "h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=",
version = "v3.2.0+incompatible",
)
go_repository(
name = "com_github_dgrijalva_jwt_go_v4",
build_file_proto_mode = "disable",
importpath = "github.com/dgrijalva/jwt-go/v4",
sum = "h1:CaO/zOnF8VvUfEbhRatPcwKVWamvbYd8tQGRWacE9kU=",
version = "v4.0.0-preview1",
)
go_repository(
name = "com_github_dgryski_go_gk",
build_file_proto_mode = "disable",
importpath = "github.com/dgryski/go-gk",
sum = "h1:XOPLOMn/zT4jIgxfxSsoXPxkrzz0FaCHwp33x5POJ+Q=",
version = "v0.0.0-20200319235926-a69029f61654",
)
go_repository(
name = "com_github_dgryski_go_sip13",
build_file_proto_mode = "disable",
importpath = "github.com/dgryski/go-sip13",
sum = "h1:RMLoZVzv4GliuWafOuPuQDKSm1SJph7uCRnnS61JAn4=",
version = "v0.0.0-20181026042036-e10d5fee7954",
)
go_repository(
name = "com_github_digitalocean_godo",
build_file_proto_mode = "disable",
importpath = "github.com/digitalocean/godo",
sum = "h1:IMElzMUpO1dVR8qjSg53+5vDkOLzMbhJt4yTAq7NGCQ=",
version = "v1.44.0",
)
go_repository(
name = "com_github_dimchansky_utfbom",
build_file_proto_mode = "disable",
importpath = "github.com/dimchansky/utfbom",
sum = "h1:FcM3g+nofKgUteL8dm/UpdRXNC9KmADgTpLKsu0TRo4=",
version = "v1.1.0",
)
go_repository(
name = "com_github_djarvur_go_err113",
build_file_proto_mode = "disable",
importpath = "github.com/Djarvur/go-err113",
sum = "h1:uCRZZOdMQ0TZPHYTdYpoC0bLYJKPEHPUJ8MeAa51lNU=",
version = "v0.1.0",
)
go_repository(
name = "com_github_djherbis_atime",
build_file_proto_mode = "disable",
importpath = "github.com/djherbis/atime",
sum = "h1:ySLvBAM0EvOGaX7TI4dAM5lWj+RdJUCKtGSEHN8SGBg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_dnaeon_go_vcr",
build_file_proto_mode = "disable",
importpath = "github.com/dnaeon/go-vcr",
sum = "h1:r8L/HqC0Hje5AXMu1ooW8oyQyOFv4GxqpL0nRP7SLLY=",
version = "v1.0.1",
)
go_repository(
name = "com_github_docker_cli",
build_file_proto_mode = "disable",
importpath = "github.com/docker/cli",
sum = "h1:AbI1uj9w4yt6TvfKHfRu7G55KuQe7NCvWPQRKDoXggE=",
version = "v0.0.0-20200210162036-a4bedce16568",
)
go_repository(
name = "com_github_docker_distribution",
build_file_proto_mode = "disable",
importpath = "github.com/docker/distribution",
sum = "h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug=",
version = "v2.7.1+incompatible",
)
go_repository(
name = "com_github_docker_docker",
build_file_proto_mode = "disable",
importpath = "github.com/docker/docker",
sum = "h1:IkZjBSIc8hBjLpqeAbeE5mca5mNgeatLHBy3GO78BWo=",
version = "v1.13.1",
)
go_repository(
name = "com_github_docker_docker_credential_helpers",
build_file_proto_mode = "disable",
importpath = "github.com/docker/docker-credential-helpers",
sum = "h1:zI2p9+1NQYdnG6sMU26EX4aVGlqbInSQxQXLvzJ4RPQ=",
version = "v0.6.3",
)
go_repository(
name = "com_github_docker_go_connections",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-connections",
sum = "h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=",
version = "v0.4.0",
)
go_repository(
name = "com_github_docker_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-metrics",
sum = "h1:yWHOI+vFjEsAakUTSrtqc/SAHrhSkmn48pqjidZX3QA=",
version = "v0.0.0-20180209012529-399ea8c73916",
)
go_repository(
name = "com_github_docker_go_units",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-units",
sum = "h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=",
version = "v0.4.0",
)
go_repository(
name = "com_github_docker_libtrust",
build_file_proto_mode = "disable",
importpath = "github.com/docker/libtrust",
sum = "h1:ZClxb8laGDf5arXfYcAtECDFgAgHklGI8CxgjHnXKJ4=",
version = "v0.0.0-20150114040149-fa567046d9b1",
)
go_repository(
name = "com_github_docker_spdystream",
build_file_proto_mode = "disable",
importpath = "github.com/docker/spdystream",
sum = "h1:cenwrSVm+Z7QLSV/BsnenAOcDXdX4cMv4wP0B/5QbPg=",
version = "v0.0.0-20160310174837-449fdfce4d96",
)
go_repository(
name = "com_github_docopt_docopt_go",
build_file_proto_mode = "disable",
importpath = "github.com/docopt/docopt-go",
sum = "h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=",
version = "v0.0.0-20180111231733-ee0de3bc6815",
)
go_repository(
name = "com_github_dsnet_compress",
build_file_proto_mode = "disable",
importpath = "github.com/dsnet/compress",
sum = "h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=",
version = "v0.0.1",
)
go_repository(
name = "com_github_dsnet_golib",
build_file_proto_mode = "disable",
importpath = "github.com/dsnet/golib",
sum = "h1:tFh1tRc4CA31yP6qDcu+Trax5wW5GuMxvkIba07qVLY=",
version = "v0.0.0-20171103203638-1ea166775780",
)
go_repository(
name = "com_github_dustin_go_humanize",
build_file_proto_mode = "disable",
importpath = "github.com/dustin/go-humanize",
sum = "h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_eapache_go_resiliency",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/go-resiliency",
sum = "h1:v7g92e/KSN71Rq7vSThKaWIq68fL4YHvWyiUKorFR1Q=",
version = "v1.2.0",
)
go_repository(
name = "com_github_eapache_go_xerial_snappy",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/go-xerial-snappy",
sum = "h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw=",
version = "v0.0.0-20180814174437-776d5712da21",
)
go_repository(
name = "com_github_eapache_queue",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/queue",
sum = "h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc=",
version = "v1.1.0",
)
go_repository(
name = "com_github_elazarl_goproxy",
build_file_proto_mode = "disable",
importpath = "github.com/elazarl/goproxy",
sum = "h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc=",
version = "v0.0.0-20180725130230-947c36da3153",
)
go_repository(
name = "com_github_emicklei_go_restful",
build_file_proto_mode = "disable",
importpath = "github.com/emicklei/go-restful",
sum = "h1:spTtZBk5DYEvbxMVutUuTyh1Ao2r4iyvLdACqsl/Ljk=",
version = "v2.9.5+incompatible",
)
go_repository(
name = "com_github_emirpasic_gods",
build_file_proto_mode = "disable",
importpath = "github.com/emirpasic/gods",
sum = "h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=",
version = "v1.12.0",
)
go_repository(
name = "com_github_envoyproxy_go_control_plane",
build_file_proto_mode = "disable",
importpath = "github.com/envoyproxy/go-control-plane",
sum = "h1:rEvIZUSZ3fx39WIi3JkQqQBitGwpELBIYWeBVh6wn+E=",
version = "v0.9.4",
)
go_repository(
name = "com_github_envoyproxy_protoc_gen_validate",
build_file_proto_mode = "disable",
importpath = "github.com/envoyproxy/protoc-gen-validate",
sum = "h1:EQciDnbrYxy13PgWoY8AqoxGiPrpgBZ1R8UNe3ddc+A=",
version = "v0.1.0",
)
go_repository(
name = "com_github_erikstmartin_go_testdb",
build_file_proto_mode = "disable",
importpath = "github.com/erikstmartin/go-testdb",
sum = "h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y=",
version = "v0.0.0-20160219214506-8d10e4a1bae5",
)
go_repository(
name = "com_github_evanphx_json_patch",
build_file_proto_mode = "disable",
importpath = "github.com/evanphx/json-patch",
sum = "h1:glyUF9yIYtMHzn8xaKw5rMhdWcwsYV8dZHIq5567/xs=",
version = "v4.11.0+incompatible",
)
go_repository(
name = "com_github_exponent_io_jsonpath",
build_file_proto_mode = "disable",
importpath = "github.com/exponent-io/jsonpath",
sum = "h1:105gxyaGwCFad8crR9dcMQWvV9Hvulu6hwUh4tWPJnM=",
version = "v0.0.0-20151013193312-d6023ce2651d",
)
go_repository(
name = "com_github_fatih_camelcase",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/camelcase",
sum = "h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_fatih_color",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/color",
sum = "h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=",
version = "v1.9.0",
)
go_repository(
name = "com_github_fatih_structs",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/structs",
sum = "h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=",
version = "v1.1.0",
)
go_repository(
name = "com_github_felixge_fgprof",
build_file_proto_mode = "disable",
importpath = "github.com/felixge/fgprof",
sum = "h1:E6FUJ2Mlv043ipLOCFqo8+cHo9MhQ203E2cdEK/isEs=",
version = "v0.9.1",
)
go_repository(
name = "com_github_flynn_go_shlex",
build_file_proto_mode = "disable",
importpath = "github.com/flynn/go-shlex",
sum = "h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=",
version = "v0.0.0-20150515145356-3f9db97f8568",
)
go_repository(
name = "com_github_form3tech_oss_jwt_go",
build_file_proto_mode = "disable",
importpath = "github.com/form3tech-oss/jwt-go",
sum = "h1:TcekIExNqud5crz4xD2pavyTgWiPvpYe4Xau31I0PRk=",
version = "v3.2.2+incompatible",
)
go_repository(
name = "com_github_fortytw2_leaktest",
build_file_proto_mode = "disable",
importpath = "github.com/fortytw2/leaktest",
sum = "h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=",
version = "v1.3.0",
)
go_repository(
name = "com_github_frankban_quicktest",
build_file_proto_mode = "disable",
importpath = "github.com/frankban/quicktest",
sum = "h1:PvpJR0Uq8SdX+zagCMsarBMlhz6ysGTf1+pRmCsRXqY=",
version = "v1.8.1",
)
go_repository(
name = "com_github_fsnotify_fsnotify",
build_file_proto_mode = "disable",
importpath = "github.com/fsnotify/fsnotify",
sum = "h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=",
version = "v1.4.9",
)
go_repository(
name = "com_github_fsouza_fake_gcs_server",
build_file_proto_mode = "disable",
importpath = "github.com/fsouza/fake-gcs-server",
sum = "h1:3bRRh/rQnB2XbrMolHAj9oX/PFiWVQFVVfPR5y2pxb8=",
version = "v1.19.4",
)
go_repository(
name = "com_github_fvbommel_sortorder",
build_file_proto_mode = "disable",
importpath = "github.com/fvbommel/sortorder",
sum = "h1:dSnXLt4mJYH25uDDGa3biZNQsozaUWDSWeKJ0qqFfzE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_garyburd_redigo",
build_file_proto_mode = "disable",
importpath = "github.com/garyburd/redigo",
sum = "h1:LofdAjjjqCSXMwLGgOgnE+rdPuvX9DxCqaHwKy7i/ko=",
version = "v0.0.0-20150301180006-535138d7bcd7",
)
go_repository(
name = "com_github_ghodss_yaml",
build_file_proto_mode = "disable",
importpath = "github.com/ghodss/yaml",
sum = "h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=",
version = "v1.0.0",
)
go_repository(
name = "com_github_gliderlabs_ssh",
build_file_proto_mode = "disable",
importpath = "github.com/gliderlabs/ssh",
sum = "h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0=",
version = "v0.2.2",
)
go_repository(
name = "com_github_globalsign_mgo",
build_file_proto_mode = "disable",
importpath = "github.com/globalsign/mgo",
sum = "h1:DujepqpGd1hyOd7aW59XpK7Qymp8iy83xq74fLr21is=",
version = "v0.0.0-20181015135952-eeefdecb41b8",
)
go_repository(
name = "com_github_go_bindata_go_bindata_v3",
build_file_proto_mode = "disable",
importpath = "github.com/go-bindata/go-bindata/v3",
sum = "h1:F0nVttLC3ws0ojc7p60veTurcOm//D4QBODNM7EGrCI=",
version = "v3.1.3",
)
go_repository(
name = "com_github_go_critic_go_critic",
build_file_proto_mode = "disable",
importpath = "github.com/go-critic/go-critic",
sum = "h1:sGEEdiuvLV0OC7/yC6MnK3K6LCPBplspK45B0XVdFAc=",
version = "v0.4.3",
)
go_repository(
name = "com_github_go_git_gcfg",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/gcfg",
sum = "h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=",
version = "v1.5.0",
)
go_repository(
name = "com_github_go_git_go_billy_v5",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-billy/v5",
sum = "h1:4pl5BV4o7ZG/lterP4S6WzJ6xr49Ba5ET9ygheTYahk=",
version = "v5.1.0",
)
go_repository(
name = "com_github_go_git_go_git_fixtures_v4",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-git-fixtures/v4",
sum = "h1:PbKy9zOy4aAKrJ5pibIRpVO2BXnK1Tlcg+caKI7Ox5M=",
version = "v4.0.2-0.20200613231340-f56387b50c12",
)
go_repository(
name = "com_github_go_git_go_git_v5",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-git/v5",
sum = "h1:8WKMtJR2j8RntEXR/uvTKagfEt4GYlwQ7mntE4+0GWc=",
version = "v5.3.0",
)
go_repository(
name = "com_github_go_gl_glfw",
build_file_proto_mode = "disable",
importpath = "github.com/go-gl/glfw",
sum = "h1:QbL/5oDUmRBzO9/Z7Seo6zf912W/a6Sr4Eu0G/3Jho0=",
version = "v0.0.0-20190409004039-e6da0acd62b1",
)
go_repository(
name = "com_github_go_gl_glfw_v3_3_glfw",
build_file_proto_mode = "disable",
importpath = "github.com/go-gl/glfw/v3.3/glfw",
sum = "h1:WtGNWLvXpe6ZudgnXrq0barxBImvnnJoMEhXAzcbM0I=",
version = "v0.0.0-20200222043503-6f7a984d4dc4",
)
go_repository(
name = "com_github_go_ini_ini",
build_file_proto_mode = "disable",
importpath = "github.com/go-ini/ini",
sum = "h1:0wVcG9udk2C3TGgmdIGKK9ScOZHZB5nbG+gwji9fhhc=",
version = "v1.55.0",
)
go_repository(
name = "com_github_go_kit_kit",
build_file_proto_mode = "disable",
importpath = "github.com/go-kit/kit",
sum = "h1:wDJmvq38kDhkVxi50ni9ykkdUr1PKgqKOoi01fa0Mdk=",
version = "v0.9.0",
)
go_repository(
name = "com_github_go_kit_log",
build_file_proto_mode = "disable",
importpath = "github.com/go-kit/log",
sum = "h1:DGJh0Sm43HbOeYDNnVZFl8BvcYVvjD5bqYJvp0REbwQ=",
version = "v0.1.0",
)
go_repository(
name = "com_github_go_ldap_ldap",
build_file_proto_mode = "disable",
importpath = "github.com/go-ldap/ldap",
sum = "h1:kD5HQcAzlQ7yrhfn+h+MSABeAy/jAJhvIJ/QDllP44g=",
version = "v3.0.2+incompatible",
)
go_repository(
name = "com_github_go_lintpack_lintpack",
build_file_proto_mode = "disable",
importpath = "github.com/go-lintpack/lintpack",
sum = "h1:DI5mA3+eKdWeJ40nU4d6Wc26qmdG8RCi/btYq0TuRN0=",
version = "v0.5.2",
)
go_repository(
name = "com_github_go_logfmt_logfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-logfmt/logfmt",
sum = "h1:TrB8swr/68K7m9CcGut2g3UOihhbcbiMAYiuTXdEih4=",
version = "v0.5.0",
)
go_repository(
name = "com_github_go_logr_logr",
build_file_proto_mode = "disable",
importpath = "github.com/go-logr/logr",
sum = "h1:K7/B1jt6fIBQVd4Owv2MqGQClcgf0R266+7C/QjRcLc=",
version = "v0.4.0",
)
go_repository(
name = "com_github_go_logr_zapr",
build_file_proto_mode = "disable",
importpath = "github.com/go-logr/zapr",
sum = "h1:uc1uML3hRYL9/ZZPdgHS/n8Nzo+eaYL/Efxkkamf7OM=",
version = "v0.4.0",
)
go_repository(
name = "com_github_go_ole_go_ole",
build_file_proto_mode = "disable",
importpath = "github.com/go-ole/go-ole",
sum = "h1:2lOsA72HgjxAuMlKpFiCbHTvu44PIVkZ5hqm3RSdI/E=",
version = "v1.2.1",
)
go_repository(
name = "com_github_go_openapi_analysis",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/analysis",
sum = "h1:8b2ZgKfKIUTVQpTb77MoRDIMEIwvDVw40o3aOXdfYzI=",
version = "v0.19.5",
)
go_repository(
name = "com_github_go_openapi_errors",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/errors",
sum = "h1:a2kIyV3w+OS3S97zxUndRVD46+FhGOUBDFY7nmu4CsY=",
version = "v0.19.2",
)
go_repository(
name = "com_github_go_openapi_jsonpointer",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/jsonpointer",
sum = "h1:gihV7YNZK1iK6Tgwwsxo2rJbD1GTbdm72325Bq8FI3w=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_jsonreference",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/jsonreference",
sum = "h1:5cxNfTy0UVC3X8JL5ymxzyoUZmo8iZb+jeTWn7tUa8o=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_loads",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/loads",
sum = "h1:5I4CCSqoWzT+82bBkNIvmLc0UOsoKKQ4Fz+3VxOB7SY=",
version = "v0.19.4",
)
go_repository(
name = "com_github_go_openapi_runtime",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/runtime",
sum = "h1:csnOgcgAiuGoM/Po7PEpKDoNulCcF3FGbSnbHfxgjMI=",
version = "v0.19.4",
)
go_repository(
name = "com_github_go_openapi_spec",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/spec",
sum = "h1:rMMMj8cV38KVXK7SFc+I2MWClbEfbK705+j+dyqun5g=",
version = "v0.19.6",
)
go_repository(
name = "com_github_go_openapi_strfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/strfmt",
sum = "h1:eRfyY5SkaNJCAwmmMcADjY31ow9+N7MCLW7oRkbsINA=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_swag",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/swag",
sum = "h1:VRuXN2EnMSsZdauzdss6JBC29YotDqG59BZ+tdlIL1s=",
version = "v0.19.7",
)
go_repository(
name = "com_github_go_openapi_validate",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/validate",
sum = "h1:QhCBKRYqZR+SKo4gl1lPhPahope8/RLt6EVgY8X80w0=",
version = "v0.19.5",
)
go_repository(
name = "com_github_go_sql_driver_mysql",
build_file_proto_mode = "disable",
importpath = "github.com/go-sql-driver/mysql",
sum = "h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=",
version = "v1.5.0",
)
go_repository(
name = "com_github_go_stack_stack",
build_file_proto_mode = "disable",
importpath = "github.com/go-stack/stack",
sum = "h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=",
version = "v1.8.0",
)
go_repository(
name = "com_github_go_task_slim_sprig",
build_file_proto_mode = "disable",
importpath = "github.com/go-task/slim-sprig",
sum = "h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I=",
version = "v0.0.0-20210107165309-348f09dbbbc0",
)
go_repository(
name = "com_github_go_test_deep",
build_file_proto_mode = "disable",
importpath = "github.com/go-test/deep",
sum = "h1:/VSMRlnY/JSyqxQUzQLKVMAskpY/NZKFA5j2P+0pP2M=",
version = "v1.0.7",
)
go_repository(
name = "com_github_go_toolsmith_astcast",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astcast",
sum = "h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR6jE7g=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astcopy",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astcopy",
sum = "h1:OMgl1b1MEpjFQ1m5ztEO06rz5CUd3oBv9RF7+DyvdG8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astequal",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astequal",
sum = "h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astfmt",
sum = "h1:A0vDDXt+vsvLEdbMFJAUBI/uTbRw1ffOPnxsILnFL6k=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astinfo",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astinfo",
sum = "h1:wP6mXeB2V/d1P1K7bZ5vDUO3YqEzcvOREOxZPEu3gVI=",
version = "v0.0.0-20180906194353-9809ff7efb21",
)
go_repository(
name = "com_github_go_toolsmith_astp",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astp",
sum = "h1:alXE75TXgcmupDsMK1fRAy0YUzLzqPVvBKoyWV+KPXg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_pkgload",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/pkgload",
sum = "h1:4DFWWMXVfbcN5So1sBNW9+yeiMqLFGl1wFLTL5R0Tgg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_strparse",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/strparse",
sum = "h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_typep",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/typep",
sum = "h1:8xdsa1+FSIH/RhEkgnD1j2CJOy5mNllW1Q9tRiYwvlk=",
version = "v1.0.2",
)
go_repository(
name = "com_github_go_xmlfmt_xmlfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-xmlfmt/xmlfmt",
sum = "h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo=",
version = "v0.0.0-20191208150333-d5b6f63a941b",
)
go_repository(
name = "com_github_go_yaml_yaml",
build_file_proto_mode = "disable",
importpath = "github.com/go-yaml/yaml",
sum = "h1:RYi2hDdss1u4YE7GwixGzWwVo47T8UQwnTLB6vQiq+o=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_gobuffalo_envy",
build_file_proto_mode = "disable",
importpath = "github.com/gobuffalo/envy",
sum = "h1:OQl5ys5MBea7OGCdvPbBJWRgnhC/fGona6QKfvFeau8=",
version = "v1.7.1",
)
go_repository(
name = "com_github_gobuffalo_flect",
build_file_proto_mode = "disable",
importpath = "github.com/gobuffalo/flect",
sum = "h1:EWCvMGGxOjsgwlWaP+f4+Hh6yrrte7JeFL2S6b+0hdM=",
version = "v0.2.0",
)
go_repository(
name = "com_github_gobwas_glob",
build_file_proto_mode = "disable",
importpath = "github.com/gobwas/glob",
sum = "h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=",
version = "v0.2.3",
)
go_repository(
name = "com_github_godbus_dbus",
build_file_proto_mode = "disable",
importpath = "github.com/godbus/dbus",
sum = "h1:BWhy2j3IXJhjCbC68FptL43tDKIq8FladmaTs3Xs7Z8=",
version = "v0.0.0-20190422162347-ade71ed3457e",
)
go_repository(
name = "com_github_gofrs_flock",
build_file_proto_mode = "disable",
importpath = "github.com/gofrs/flock",
sum = "h1:DP+LD/t0njgoPBvT5MJLeliUIVQR03hiKR6vezdwHlc=",
version = "v0.7.1",
)
go_repository(
name = "com_github_gogo_googleapis",
build_file_proto_mode = "disable",
importpath = "github.com/gogo/googleapis",
sum = "h1:kFkMAZBNAn4j7K0GiZr8cRYzejq68VbheufiV3YuyFI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_gogo_protobuf",
build_file_proto_mode = "disable",
importpath = "github.com/gogo/protobuf",
sum = "h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=",
version = "v1.3.2",
)
go_repository(
name = "com_github_golang_gddo",
build_file_proto_mode = "disable",
importpath = "github.com/golang/gddo",
sum = "h1:KRMr9A3qfbVM7iV/WcLY/rL5LICqwMHLhwRXKu99fXw=",
version = "v0.0.0-20190419222130-af0f2af80721",
)
go_repository(
name = "com_github_golang_glog",
build_file_proto_mode = "disable",
importpath = "github.com/golang/glog",
sum = "h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=",
version = "v0.0.0-20160126235308-23def4e6c14b",
)
go_repository(
name = "com_github_golang_groupcache",
build_file_proto_mode = "disable",
importpath = "github.com/golang/groupcache",
sum = "h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=",
version = "v0.0.0-20200121045136-8c9f03a8e57e",
)
go_repository(
name = "com_github_golang_lint",
build_file_proto_mode = "disable",
importpath = "github.com/golang/lint",
sum = "h1:2hRPrmiwPrp3fQX967rNJIhQPtiGXdlQWAxKbKw3VHA=",
version = "v0.0.0-20180702182130-06c8688daad7",
)
go_repository(
name = "com_github_golang_mock",
build_file_proto_mode = "disable",
importpath = "github.com/golang/mock",
sum = "h1:jlYHihg//f7RRwuPfptm04yp4s7O6Kw8EZiVYIGcH0g=",
version = "v1.5.0",
)
go_repository(
name = "com_github_golang_protobuf",
build_file_proto_mode = "disable",
importpath = "github.com/golang/protobuf",
sum = "h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=",
version = "v1.5.2",
)
go_repository(
name = "com_github_golang_snappy",
build_file_proto_mode = "disable",
importpath = "github.com/golang/snappy",
sum = "h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=",
version = "v0.0.1",
)
go_repository(
name = "com_github_golang_sql_civil",
build_file_proto_mode = "disable",
importpath = "github.com/golang-sql/civil",
sum = "h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY=",
version = "v0.0.0-20190719163853-cb61b32ac6fe",
)
go_repository(
name = "com_github_golangci_check",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/check",
sum = "h1:23T5iq8rbUYlhpt5DB4XJkc6BU31uODLD1o1gKvZmD0=",
version = "v0.0.0-20180506172741-cfe4005ccda2",
)
go_repository(
name = "com_github_golangci_dupl",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/dupl",
sum = "h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM=",
version = "v0.0.0-20180902072040-3e9179ac440a",
)
go_repository(
name = "com_github_golangci_errcheck",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/errcheck",
sum = "h1:YYWNAGTKWhKpcLLt7aSj/odlKrSrelQwlovBpDuf19w=",
version = "v0.0.0-20181223084120-ef45e06d44b6",
)
go_repository(
name = "com_github_golangci_go_misc",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/go-misc",
sum = "h1:9kfjN3AdxcbsZBf8NjltjWihK2QfBBBZuv91cMFfDHw=",
version = "v0.0.0-20180628070357-927a3d87b613",
)
go_repository(
name = "com_github_golangci_goconst",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/goconst",
sum = "h1:pe9JHs3cHHDQgOFXJJdYkK6fLz2PWyYtP4hthoCMvs8=",
version = "v0.0.0-20180610141641-041c5f2b40f3",
)
go_repository(
name = "com_github_golangci_gocyclo",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/gocyclo",
sum = "h1:pXTK/gkVNs7Zyy7WKgLXmpQ5bHTrq5GDsp8R9Qs67g0=",
version = "v0.0.0-20180528144436-0a533e8fa43d",
)
go_repository(
name = "com_github_golangci_gofmt",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/gofmt",
sum = "h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks=",
version = "v0.0.0-20190930125516-244bba706f1a",
)
go_repository(
name = "com_github_golangci_golangci_lint",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/golangci-lint",
sum = "h1:VYLx63qb+XJsHdZ27PMS2w5JZacN0XG8ffUwe7yQomo=",
version = "v1.27.0",
)
go_repository(
name = "com_github_golangci_ineffassign",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/ineffassign",
sum = "h1:gLLhTLMk2/SutryVJ6D4VZCU3CUqr8YloG7FPIBWFpI=",
version = "v0.0.0-20190609212857-42439a7714cc",
)
go_repository(
name = "com_github_golangci_lint_1",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/lint-1",
sum = "h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA=",
version = "v0.0.0-20191013205115-297bf364a8e0",
)
go_repository(
name = "com_github_golangci_maligned",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/maligned",
sum = "h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA=",
version = "v0.0.0-20180506175553-b1d89398deca",
)
go_repository(
name = "com_github_golangci_misspell",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/misspell",
sum = "h1:pLzmVdl3VxTOncgzHcvLOKirdvcx/TydsClUQXTehjo=",
version = "v0.3.5",
)
go_repository(
name = "com_github_golangci_prealloc",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/prealloc",
sum = "h1:leSNB7iYzLYSSx3J/s5sVf4Drkc68W2wm4Ixh/mr0us=",
version = "v0.0.0-20180630174525-215b22d4de21",
)
go_repository(
name = "com_github_golangci_revgrep",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/revgrep",
sum = "h1:XQKc8IYQOeRwVs36tDrEmTgDgP88d5iEURwpmtiAlOM=",
version = "v0.0.0-20180812185044-276a5c0a1039",
)
go_repository(
name = "com_github_golangci_unconvert",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/unconvert",
sum = "h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys=",
version = "v0.0.0-20180507085042-28b1c447d1f4",
)
go_repository(
name = "com_github_golangplus_bytes",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/bytes",
sum = "h1:7xqw01UYS+KCI25bMrPxwNYkSns2Db1ziQPpVq99FpE=",
version = "v0.0.0-20160111154220-45c989fe5450",
)
go_repository(
name = "com_github_golangplus_fmt",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/fmt",
sum = "h1:f5gsjBiF9tRRVomCvrkGMMWI8W1f2OBFar2c5oakAP0=",
version = "v0.0.0-20150411045040-2a5d6d7d2995",
)
go_repository(
name = "com_github_golangplus_testing",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/testing",
sum = "h1:KhcknUwkWHKZPbFy2P7jH5LKJ3La+0ZeknkkmrSgqb0=",
version = "v0.0.0-20180327235837-af21d9c3145e",
)
go_repository(
name = "com_github_gomodule_redigo",
build_file_proto_mode = "disable",
importpath = "github.com/gomodule/redigo",
sum = "h1:nRAxCa+SVsyjSBrtZmG/cqb6VbTmuRzpg/PoTFlpumc=",
version = "v1.8.5",
)
go_repository(
name = "com_github_google_btree",
build_file_proto_mode = "disable",
importpath = "github.com/google/btree",
sum = "h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_google_go_cmp",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-cmp",
sum = "h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=",
version = "v0.5.5",
)
go_repository(
name = "com_github_google_go_containerregistry",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-containerregistry",
sum = "h1:AG8FSAfXglim2l5qSrqp5VK2Xl03PiBf25NiTGGamws=",
version = "v0.1.1",
)
go_repository(
name = "com_github_google_go_github",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github",
sum = "h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY=",
version = "v17.0.0+incompatible",
)
go_repository(
name = "com_github_google_go_github_v27",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v27",
sum = "h1:oiOZuBmGHvrGM1X9uNUAUlLgp5r1UUO/M/KnbHnLRlQ=",
version = "v27.0.6",
)
go_repository(
name = "com_github_google_go_github_v28",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v28",
sum = "h1:kORf5ekX5qwXO2mGzXXOjMe/g6ap8ahVe0sBEulhSxo=",
version = "v28.1.1",
)
go_repository(
name = "com_github_google_go_github_v29",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v29",
sum = "h1:IktKCTwU//aFHnpA+2SLIi7Oo9uhAzgsdZNbcAqhgdc=",
version = "v29.0.3",
)
go_repository(
name = "com_github_google_go_github_v32",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v32",
sum = "h1:q74KVb22spUq0U5HqZ9VCYqQz8YRuOtL/39ZnfwO+NM=",
version = "v32.0.0",
)
go_repository(
name = "com_github_google_go_licenses",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-licenses",
sum = "h1:eVR9gT5gBPTHXeyGAcA8OF/SKNUFFg+a0BJqfx4z5eE=",
version = "v0.0.0-20200227160636-0fa8c766a591",
)
go_repository(
name = "com_github_google_go_querystring",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-querystring",
sum = "h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=",
version = "v1.1.0",
)
go_repository(
name = "com_github_google_go_replayers_grpcreplay",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-replayers/grpcreplay",
sum = "h1:eNb1y9rZFmY4ax45uEEECSa8fsxGRU+8Bil52ASAwic=",
version = "v0.1.0",
)
go_repository(
name = "com_github_google_go_replayers_httpreplay",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-replayers/httpreplay",
sum = "h1:AX7FUb4BjrrzNvblr/OlgwrmFiep6soj5K2QSDW7BGk=",
version = "v0.1.0",
)
go_repository(
name = "com_github_google_gofuzz",
build_file_proto_mode = "disable_global",
importpath = "github.com/google/gofuzz",
sum = "h1:VcIYpAGBae3Z6BVncE0OnTE/ZjlDXqtYhOZky88neLM=",
version = "v1.2.1-0.20210504230335-f78f29fc09ea",
)
go_repository(
name = "com_github_google_licenseclassifier",
build_file_proto_mode = "disable",
importpath = "github.com/google/licenseclassifier",
sum = "h1:OggOMmdI0JLwg1FkOKH9S7fVHF0oEm8PX6S8kAdpOps=",
version = "v0.0.0-20200402202327-879cb1424de0",
)
go_repository(
name = "com_github_google_mako",
build_file_proto_mode = "disable",
importpath = "github.com/google/mako",
sum = "h1:/o5e44nTD/QEEiWPGSFT3bSqcq3Qg7q27N9bv4gKh5M=",
version = "v0.0.0-20190821191249-122f8dcef9e3",
)
go_repository(
name = "com_github_google_martian",
build_file_proto_mode = "disable",
importpath = "github.com/google/martian",
sum = "h1:xmapqc1AyLoB+ddYT6r04bD9lIjlOqGaREovi0SzFaE=",
version = "v2.1.1-0.20190517191504-25dcb96d9e51+incompatible",
)
go_repository(
name = "com_github_google_martian_v3",
build_file_proto_mode = "disable",
importpath = "github.com/google/martian/v3",
sum = "h1:pMen7vLs8nvgEYhywH3KDWJIJTeEr2ULsVWHWYHQyBs=",
version = "v3.0.0",
)
go_repository(
name = "com_github_google_pprof",
build_file_proto_mode = "disable",
importpath = "github.com/google/pprof",
sum = "h1:k+KkMRk8mGOu1xG38StS7dQ+Z6oW1i9n3dgrAVU9Q/E=",
version = "v0.0.0-20200905233945-acf8798be1f7",
)
go_repository(
name = "com_github_google_renameio",
build_file_proto_mode = "disable",
importpath = "github.com/google/renameio",
sum = "h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA=",
version = "v0.1.0",
)
go_repository(
name = "com_github_google_rpmpack",
build_file_proto_mode = "disable",
importpath = "github.com/google/rpmpack",
sum = "h1:BW6OvS3kpT5UEPbCZ+KyX/OB4Ks9/MNMhWjqPPkZxsE=",
version = "v0.0.0-20191226140753-aa36bfddb3a0",
)
go_repository(
name = "com_github_google_subcommands",
build_file_proto_mode = "disable",
importpath = "github.com/google/subcommands",
sum = "h1:/eqq+otEXm5vhfBrbREPCSVQbvofip6kIz+mX5TUH7k=",
version = "v1.0.1",
)
go_repository(
name = "com_github_google_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/google/uuid",
sum = "h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs=",
version = "v1.2.0",
)
go_repository(
name = "com_github_google_wire",
build_file_proto_mode = "disable",
importpath = "github.com/google/wire",
sum = "h1:kXcsA/rIGzJImVqPdhfnr6q0xsS9gU0515q1EPpJ9fE=",
version = "v0.4.0",
)
go_repository(
name = "com_github_googleapis_gax_go",
build_file_proto_mode = "disable",
importpath = "github.com/googleapis/gax-go",
sum = "h1:silFMLAnr330+NRuag/VjIGF7TLp/LBrV2CJKFLWEww=",
version = "v2.0.2+incompatible",
)
go_repository(
name = "com_github_googleapis_gax_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/googleapis/gax-go/v2",
sum = "h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=",
version = "v2.0.5",
)
go_repository(
name = "com_github_googleapis_gnostic",
build_file_generation = "on",
build_file_proto_mode = "disable",
importpath = "github.com/googleapis/gnostic",
replace = "github.com/googleapis/gnostic",
sum = "h1:DLJCy1n/vrD4HPjOvYcT8aYQXpPIzoRZONaYwyycI+I=",
version = "v0.4.1",
)
go_repository(
name = "com_github_googlecloudplatform_cloud_builders_gcs_fetcher",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/cloud-builders/gcs-fetcher",
sum = "h1:Pjo3SOZigEnIGevhFqcbFndnqyCH8WimcREd3hRM9vU=",
version = "v0.0.0-20191203181535-308b93ad1f39",
)
go_repository(
name = "com_github_googlecloudplatform_cloudsql_proxy",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/cloudsql-proxy",
sum = "h1:sTOp2Ajiew5XIH92YSdwhYc+bgpUX5j5TKK/Ac8Saw8=",
version = "v0.0.0-20191009163259-e802c2cb94ae",
)
go_repository(
name = "com_github_googlecloudplatform_k8s_cloud_provider",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/k8s-cloud-provider",
sum = "h1:N7lSsF+R7wSulUADi36SInSQA3RvfO/XclHQfedr0qk=",
version = "v0.0.0-20190822182118-27a4ced34534",
)
go_repository(
name = "com_github_googlecloudplatform_testgrid",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/testgrid",
sum = "h1:qs3/BQpz3j3qsgnfjV8aVBfPopkGxp/TnWjjiboUVf8=",
version = "v0.0.68",
)
go_repository(
name = "com_github_gookit_color",
build_file_proto_mode = "disable",
importpath = "github.com/gookit/color",
sum = "h1:xOYBan3Fwlrqj1M1UN2TlHOCRiek3bGzWf/vPnJ1roE=",
version = "v1.2.4",
)
go_repository(
name = "com_github_gophercloud_gophercloud",
build_file_proto_mode = "disable",
importpath = "github.com/gophercloud/gophercloud",
sum = "h1:P/nh25+rzXouhytV2pUHBb65fnds26Ghl8/391+sT5o=",
version = "v0.1.0",
)
go_repository(
name = "com_github_gopherjs_gopherjs",
build_file_proto_mode = "disable",
importpath = "github.com/gopherjs/gopherjs",
sum = "h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=",
version = "v0.0.0-20181017120253-0766667cb4d1",
)
go_repository(
name = "com_github_goreleaser_goreleaser",
build_file_proto_mode = "disable",
importpath = "github.com/goreleaser/goreleaser",
sum = "h1:Z+7XPrfGK11s/Sp+a06sx2FzGuCjTBdxN2ubpGvQbjY=",
version = "v0.136.0",
)
go_repository(
name = "com_github_goreleaser_nfpm",
build_file_proto_mode = "disable",
importpath = "github.com/goreleaser/nfpm",
sum = "h1:BPwIomC+e+yuDX9poJowzV7JFVcYA0+LwGSkbAPs2Hw=",
version = "v1.3.0",
)
go_repository(
name = "com_github_gorilla_context",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/context",
sum = "h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=",
version = "v1.1.1",
)
go_repository(
name = "com_github_gorilla_csrf",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/csrf",
sum = "h1:QqQ/OWwuFp4jMKgBFAzJVW3FMULdyUW7JoM4pEWuqKg=",
version = "v1.6.2",
)
go_repository(
name = "com_github_gorilla_handlers",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/handlers",
sum = "h1:0QniY0USkHQ1RGCLfKxeNHK9bkDHGRYGNDFBCS+YARg=",
version = "v1.4.2",
)
go_repository(
name = "com_github_gorilla_mux",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/mux",
sum = "h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=",
version = "v1.8.0",
)
go_repository(
name = "com_github_gorilla_securecookie",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/securecookie",
sum = "h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=",
version = "v1.1.1",
)
go_repository(
name = "com_github_gorilla_sessions",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/sessions",
sum = "h1:S7P+1Hm5V/AT9cjEcUD5uDaQSX0OE577aCXgoaKpYbQ=",
version = "v1.2.0",
)
go_repository(
name = "com_github_gorilla_websocket",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/websocket",
sum = "h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=",
version = "v1.4.2",
)
go_repository(
name = "com_github_gostaticanalysis_analysisutil",
build_file_proto_mode = "disable",
importpath = "github.com/gostaticanalysis/analysisutil",
sum = "h1:iwp+5/UAyzQSFgQ4uR2sni99sJ8Eo9DEacKWM5pekIg=",
version = "v0.0.3",
)
go_repository(
name = "com_github_gosuri_uitable",
build_file_proto_mode = "disable",
importpath = "github.com/gosuri/uitable",
sum = "h1:IG2xLKRvErL3uhY6e1BylFzG+aJiwQviDDTfOKeKTpY=",
version = "v0.0.4",
)
go_repository(
name = "com_github_gotestyourself_gotestyourself",
build_file_proto_mode = "disable",
importpath = "github.com/gotestyourself/gotestyourself",
sum = "h1:AQwinXlbQR2HvPjQZOmDhRqsv5mZf+Jb1RnSLxcqZcI=",
version = "v2.2.0+incompatible",
)
go_repository(
name = "com_github_gregjones_httpcache",
build_file_proto_mode = "disable",
importpath = "github.com/gregjones/httpcache",
sum = "h1:f8eY6cV/x1x+HLjOp4r72s/31/V2aTUtg5oKRRPf8/Q=",
version = "v0.0.0-20190212212710-3befbb6ad0cc",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_middleware",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/go-grpc-middleware",
sum = "h1:z53tR0945TRRQO/fLEVPI6SMv7ZflF0TEaTAoU7tOzg=",
version = "v1.0.1-0.20190118093823-f849b5445de4",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_prometheus",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/go-grpc-prometheus",
sum = "h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=",
version = "v1.2.0",
)
go_repository(
name = "com_github_grpc_ecosystem_grpc_gateway",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/grpc-gateway",
sum = "h1:D0EVSTwQoQOyfY35QNSuPJA4jpZRtkoGYWQMB7XNg5o=",
version = "v1.12.2",
)
go_repository(
name = "com_github_h2non_gock",
build_file_proto_mode = "disable",
importpath = "github.com/h2non/gock",
sum = "h1:17gCehSo8ZOgEsFKpQgqHiR7VLyjxdAG3lkhVvO9QZU=",
version = "v1.0.9",
)
go_repository(
name = "com_github_hashicorp_consul_api",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/consul/api",
sum = "h1:BNQPM9ytxj6jbjjdRPioQ94T6YXriSopn0i8COv6SRA=",
version = "v1.1.0",
)
go_repository(
name = "com_github_hashicorp_consul_sdk",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/consul/sdk",
sum = "h1:LnuDWGNsoajlhGyHJvuWW6FVqRl8JOTPqS6CPTsYjhY=",
version = "v0.1.1",
)
go_repository(
name = "com_github_hashicorp_errwrap",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/errwrap",
sum = "h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_cleanhttp",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-cleanhttp",
sum = "h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=",
version = "v0.5.1",
)
go_repository(
name = "com_github_hashicorp_go_hclog",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-hclog",
sum = "h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI=",
version = "v0.9.2",
)
go_repository(
name = "com_github_hashicorp_go_immutable_radix",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-immutable-radix",
sum = "h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_msgpack",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-msgpack",
sum = "h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4=",
version = "v0.5.3",
)
go_repository(
name = "com_github_hashicorp_go_multierror",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-multierror",
sum = "h1:B9UzwGQJehnUY1yNrnwREHc3fGbC2xefo8g4TbElacI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_hashicorp_go_net",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go.net",
sum = "h1:sNCoNyDEvN1xa+X0baata4RdcpKwcMS6DH+xwfqPgjw=",
version = "v0.0.1",
)
go_repository(
name = "com_github_hashicorp_go_plugin",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-plugin",
sum = "h1:4OtAfUGbnKC6yS48p0CtMX2oFYtzFZVv6rok3cRWgnE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_retryablehttp",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-retryablehttp",
sum = "h1:HJunrbHTDDbBb/ay4kxa1n+dLmttUlnP3V9oNE4hmsM=",
version = "v0.6.6",
)
go_repository(
name = "com_github_hashicorp_go_rootcerts",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-rootcerts",
sum = "h1:DMo4fmknnz0E0evoNYnV48RjWndOsmd6OW+09R3cEP8=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_sockaddr",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-sockaddr",
sum = "h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc=",
version = "v1.0.2",
)
go_repository(
name = "com_github_hashicorp_go_syslog",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-syslog",
sum = "h1:KaodqZuhUoZereWVIYmpUgZysurB1kBLX2j0MwMrUAE=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-uuid",
sum = "h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_version",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-version",
sum = "h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+dIzX/E=",
version = "v1.2.0",
)
go_repository(
name = "com_github_hashicorp_golang_lru",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/golang-lru",
sum = "h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=",
version = "v0.5.4",
)
go_repository(
name = "com_github_hashicorp_hcl",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/hcl",
sum = "h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_logutils",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/logutils",
sum = "h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_mdns",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/mdns",
sum = "h1:WhIgCr5a7AaVH6jPUwjtRuuE7/RDufnUvzIr48smyxs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_memberlist",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/memberlist",
sum = "h1:EmmoJme1matNzb+hMpDuR/0sbJSUisxyqBGG676r31M=",
version = "v0.1.3",
)
go_repository(
name = "com_github_hashicorp_serf",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/serf",
sum = "h1:YZ7UKsJv+hKjqGVUUbtE3HNj79Eln2oQ75tniF6iPt0=",
version = "v0.8.2",
)
go_repository(
name = "com_github_hashicorp_vault_api",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/vault/api",
sum = "h1:j08Or/wryXT4AcHj1oCbMd7IijXcKzYUGw59LGu9onU=",
version = "v1.0.4",
)
go_repository(
name = "com_github_hashicorp_vault_sdk",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/vault/sdk",
sum = "h1:mOEPeOhT7jl0J4AMl1E705+BcmeRs1VmKNb9F0sMLy8=",
version = "v0.1.13",
)
go_repository(
name = "com_github_hashicorp_yamux",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/yamux",
sum = "h1:kJCB4vdITiW1eC1vq2e6IsrXKrZit1bv/TDYFGMp4BQ=",
version = "v0.0.0-20181012175058-2f1d1f20f75d",
)
go_repository(
name = "com_github_howeyc_gopass",
build_file_proto_mode = "disable",
importpath = "github.com/howeyc/gopass",
sum = "h1:kQWxfPIHVLbgLzphqk3QUflDy9QdksZR4ygR807bpy0=",
version = "v0.0.0-20170109162249-bf9dde6d0d2c",
)
go_repository(
name = "com_github_hpcloud_tail",
build_file_proto_mode = "disable",
importpath = "github.com/hpcloud/tail",
sum = "h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_huandu_xstrings",
build_file_proto_mode = "disable",
importpath = "github.com/huandu/xstrings",
sum = "h1:yPeWdRnmynF7p+lLYz0H2tthW9lqhMJrQV/U7yy4wX0=",
version = "v1.2.0",
)
go_repository(
name = "com_github_ianlancetaylor_demangle",
build_file_proto_mode = "disable",
importpath = "github.com/ianlancetaylor/demangle",
sum = "h1:UDMh68UUwekSh5iP2OMhRRZJiiBccgV7axzUG8vi56c=",
version = "v0.0.0-20181102032728-5e5cf60278f6",
)
go_repository(
name = "com_github_imdario_mergo",
build_file_proto_mode = "disable",
importpath = "github.com/imdario/mergo",
sum = "h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=",
version = "v0.3.12",
)
go_repository(
name = "com_github_inconshreveable_mousetrap",
build_file_proto_mode = "disable",
importpath = "github.com/inconshreveable/mousetrap",
sum = "h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=",
version = "v1.0.0",
)
go_repository(
name = "com_github_influxdata_influxdb",
build_file_proto_mode = "disable",
importpath = "github.com/influxdata/influxdb",
sum = "h1:AciJ2ei/llFRundm7CtqwF6B2aOds1A7QG3sMW8QiaQ=",
version = "v0.0.0-20161215172503-049f9b42e9a5",
)
go_repository(
name = "com_github_influxdata_tdigest",
build_file_proto_mode = "disable",
importpath = "github.com/influxdata/tdigest",
sum = "h1:MHTrDWmQpHq/hkq+7cw9oYAt2PqUw52TZazRA0N7PGE=",
version = "v0.0.0-20181121200506-bf2b5ad3c0a9",
)
go_repository(
name = "com_github_jarcoal_httpmock",
build_file_proto_mode = "disable",
importpath = "github.com/jarcoal/httpmock",
sum = "h1:cHtVEcTxRSX4J0je7mWPfc9BpDpqzXSJ5HbymZmyHck=",
version = "v1.0.5",
)
go_repository(
name = "com_github_jbenet_go_context",
build_file_proto_mode = "disable",
importpath = "github.com/jbenet/go-context",
sum = "h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=",
version = "v0.0.0-20150711004518-d14ea06fba99",
)
go_repository(
name = "com_github_jcmturner_gofork",
build_file_proto_mode = "disable",
importpath = "github.com/jcmturner/gofork",
sum = "h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_jenkins_x_go_scm",
build_file_proto_mode = "disable",
importpath = "github.com/jenkins-x/go-scm",
sum = "h1:D7d1sDWUU+xocCNLQVoYKpMjVKnQvsPva+hPzruchbM=",
version = "v1.5.117",
)
go_repository(
name = "com_github_jessevdk_go_flags",
build_file_proto_mode = "disable",
importpath = "github.com/jessevdk/go-flags",
sum = "h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc=",
version = "v1.5.0",
)
go_repository(
name = "com_github_jetstack_cert_manager",
build_file_proto_mode = "disable",
importpath = "github.com/jetstack/cert-manager",
sum = "h1:gEhBV9I83m+kpQShDhNO4+J8O2qfNDjvAEL27pThGmg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_jingyugao_rowserrcheck",
build_file_proto_mode = "disable",
importpath = "github.com/jingyugao/rowserrcheck",
sum = "h1:GmsqmapfzSJkm28dhRoHz2tLRbJmqhU86IPgBtN3mmk=",
version = "v0.0.0-20191204022205-72ab7603b68a",
)
go_repository(
name = "com_github_jinzhu_gorm",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/gorm",
sum = "h1:Drgk1clyWT9t9ERbzHza6Mj/8FY/CqMyVzOiHviMo6Q=",
version = "v1.9.12",
)
go_repository(
name = "com_github_jinzhu_inflection",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/inflection",
sum = "h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=",
version = "v1.0.0",
)
go_repository(
name = "com_github_jinzhu_now",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/now",
sum = "h1:g39TucaRWyV3dwDO++eEc6qf8TVIQ/Da48WmqjZ3i7E=",
version = "v1.1.1",
)
go_repository(
name = "com_github_jirfag_go_printf_func_name",
build_file_proto_mode = "disable",
importpath = "github.com/jirfag/go-printf-func-name",
sum = "h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48=",
version = "v0.0.0-20200119135958-7558a9eaa5af",
)
go_repository(
name = "com_github_jmespath_go_jmespath",
build_file_proto_mode = "disable",
importpath = "github.com/jmespath/go-jmespath",
sum = "h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=",
version = "v0.4.0",
)
go_repository(
name = "com_github_jmespath_go_jmespath_internal_testify",
build_file_proto_mode = "disable",
importpath = "github.com/jmespath/go-jmespath/internal/testify",
sum = "h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=",
version = "v1.5.1",
)
go_repository(
name = "com_github_jmoiron_sqlx",
build_file_proto_mode = "disable",
importpath = "github.com/jmoiron/sqlx",
sum = "h1:lrdPtrORjGv1HbbEvKWDUAy97mPpFm4B8hp77tcCUJY=",
version = "v1.2.1-0.20190826204134-d7d95172beb5",
)
go_repository(
name = "com_github_joefitzgerald_rainbow_reporter",
build_file_proto_mode = "disable",
importpath = "github.com/joefitzgerald/rainbow-reporter",
sum = "h1:AuMG652zjdzI0YCCnXAqATtRBpGXMcAnrajcaTrSeuo=",
version = "v0.1.0",
)
go_repository(
name = "com_github_joho_godotenv",
build_file_proto_mode = "disable",
importpath = "github.com/joho/godotenv",
sum = "h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=",
version = "v1.3.0",
)
go_repository(
name = "com_github_jonboulle_clockwork",
build_file_proto_mode = "disable",
importpath = "github.com/jonboulle/clockwork",
sum = "h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo=",
version = "v0.1.0",
)
go_repository(
name = "com_github_joshdk_go_junit",
build_file_proto_mode = "disable",
importpath = "github.com/joshdk/go-junit",
sum = "h1:Bp5LAZasx/ev9wUmIIC74+MsXgwD99VjV1JmDVbpJm8=",
version = "v0.0.0-20190428045703-ad7e11aa49ff",
)
go_repository(
name = "com_github_jpillora_backoff",
build_file_proto_mode = "disable",
importpath = "github.com/jpillora/backoff",
sum = "h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA=",
version = "v1.0.0",
)
go_repository(
name = "com_github_json_iterator_go",
build_file_proto_mode = "disable",
importpath = "github.com/json-iterator/go",
sum = "h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ=",
version = "v1.1.11",
)
go_repository(
name = "com_github_jstemmer_go_junit_report",
build_file_proto_mode = "disable",
importpath = "github.com/jstemmer/go-junit-report",
sum = "h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=",
version = "v0.9.1",
)
go_repository(
name = "com_github_jtolds_gls",
build_file_proto_mode = "disable",
importpath = "github.com/jtolds/gls",
sum = "h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=",
version = "v4.20.0+incompatible",
)
go_repository(
name = "com_github_julienschmidt_httprouter",
build_file_proto_mode = "disable",
importpath = "github.com/julienschmidt/httprouter",
sum = "h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U=",
version = "v1.3.0",
)
go_repository(
name = "com_github_kballard_go_shellquote",
build_file_proto_mode = "disable",
importpath = "github.com/kballard/go-shellquote",
sum = "h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=",
version = "v0.0.0-20180428030007-95032a82bc51",
)
go_repository(
name = "com_github_kelseyhightower_envconfig",
build_file_proto_mode = "disable",
importpath = "github.com/kelseyhightower/envconfig",
sum = "h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8=",
version = "v1.4.0",
)
go_repository(
name = "com_github_kevinburke_ssh_config",
build_file_proto_mode = "disable",
importpath = "github.com/kevinburke/ssh_config",
sum = "h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck=",
version = "v0.0.0-20201106050909-4977a11b4351",
)
go_repository(
name = "com_github_kisielk_errcheck",
build_file_proto_mode = "disable",
importpath = "github.com/kisielk/errcheck",
sum = "h1:e8esj/e4R+SAOwFwN+n3zr0nYeCyeweozKfO23MvHzY=",
version = "v1.5.0",
)
go_repository(
name = "com_github_kisielk_gotool",
build_file_proto_mode = "disable",
importpath = "github.com/kisielk/gotool",
sum = "h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_klauspost_compress",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/compress",
sum = "h1:Znfn6hXZAHaLPNnlqUYRrBSReFHYybslgv4PTiyz6P0=",
version = "v1.10.2",
)
go_repository(
name = "com_github_klauspost_cpuid",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/cpuid",
sum = "h1:1xAgYebNnsb9LKCdLOvFWtAxGU/33mjJtyOVbmUa0Us=",
version = "v1.2.2",
)
go_repository(
name = "com_github_klauspost_pgzip",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/pgzip",
sum = "h1:oIPZROsWuPHpOdMVWLuJZXwgjhrW8r1yEX8UqMyeNHM=",
version = "v1.2.1",
)
go_repository(
name = "com_github_knative_build",
build_file_proto_mode = "disable",
importpath = "github.com/knative/build",
sum = "h1:o/VYWA3HKyZlNqdU2hDE5LHpanBe8gazgPKL97XJ6bo=",
version = "v0.1.2",
)
go_repository(
name = "com_github_konsorten_go_windows_terminal_sequences",
build_file_proto_mode = "disable",
importpath = "github.com/konsorten/go-windows-terminal-sequences",
sum = "h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=",
version = "v1.0.3",
)
go_repository(
name = "com_github_kr_logfmt",
build_file_proto_mode = "disable",
importpath = "github.com/kr/logfmt",
sum = "h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY=",
version = "v0.0.0-20140226030751-b84e30acd515",
)
go_repository(
name = "com_github_kr_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/kr/pretty",
sum = "h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=",
version = "v0.2.1",
)
go_repository(
name = "com_github_kr_pty",
build_file_proto_mode = "disable",
importpath = "github.com/kr/pty",
sum = "h1:AkaSdXYQOWeaO3neb8EM634ahkXXe3jYbVh/F9lq+GI=",
version = "v1.1.8",
)
go_repository(
name = "com_github_kr_text",
build_file_proto_mode = "disable",
importpath = "github.com/kr/text",
sum = "h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=",
version = "v0.2.0",
)
go_repository(
name = "com_github_lib_pq",
build_file_proto_mode = "disable",
importpath = "github.com/lib/pq",
sum = "h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=",
version = "v1.2.0",
)
go_repository(
name = "com_github_liggitt_tabwriter",
build_file_proto_mode = "disable",
importpath = "github.com/liggitt/tabwriter",
sum = "h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0=",
version = "v0.0.0-20181228230101-89fcab3d43de",
)
go_repository(
name = "com_github_lightstep_tracecontext_go",
build_file_proto_mode = "disable",
importpath = "github.com/lightstep/tracecontext.go",
sum = "h1:+2b6iGRJe3hvV/yVXrd41yVEjxuFHxasJqDhkIjS4gk=",
version = "v0.0.0-20181129014701-1757c391b1ac",
)
go_repository(
name = "com_github_lithammer_dedent",
build_file_proto_mode = "disable",
importpath = "github.com/lithammer/dedent",
sum = "h1:VNzHMVCBNG1j0fh3OrsFRkVUwStdDArbgBWoPAffktY=",
version = "v1.1.0",
)
go_repository(
name = "com_github_logrusorgru_aurora",
build_file_proto_mode = "disable",
importpath = "github.com/logrusorgru/aurora",
sum = "h1:9MlwzLdW7QSDrhDjFlsEYmxpFyIoXmYRon3dt0io31k=",
version = "v0.0.0-20181002194514-a7b3b318ed4e",
)
go_repository(
name = "com_github_lyft_protoc_gen_validate",
build_file_proto_mode = "disable",
importpath = "github.com/lyft/protoc-gen-validate",
sum = "h1:KNt/RhmQTOLr7Aj8PsJ7mTronaFyx80mRTT9qF261dA=",
version = "v0.0.13",
)
go_repository(
name = "com_github_magiconair_properties",
build_file_proto_mode = "disable",
importpath = "github.com/magiconair/properties",
sum = "h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=",
version = "v1.8.1",
)
go_repository(
name = "com_github_mailru_easyjson",
build_file_proto_mode = "disable",
importpath = "github.com/mailru/easyjson",
sum = "h1:jcoUdG1TzY/M/eM5BLFLP8DJeMximx5NQYSlLL9YeWc=",
version = "v0.7.1-0.20191009090205-6c0755d89d1e",
)
go_repository(
name = "com_github_makenowjust_heredoc",
build_file_proto_mode = "disable",
importpath = "github.com/MakeNowJust/heredoc",
sum = "h1:sjQovDkwrZp8u+gxLtPgKGjk5hCxuy2hrRejBTA9xFU=",
version = "v0.0.0-20170808103936-bb23615498cd",
)
go_repository(
name = "com_github_maratori_testpackage",
build_file_proto_mode = "disable",
importpath = "github.com/maratori/testpackage",
sum = "h1:QtJ5ZjqapShm0w5DosRjg0PRlSdAdlx+W6cCKoALdbQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_markbates_inflect",
build_file_proto_mode = "disable",
importpath = "github.com/markbates/inflect",
sum = "h1:5fh1gzTFhfae06u3hzHYO9xe3l3v3nW5Pwt3naLTP5g=",
version = "v1.0.4",
)
go_repository(
name = "com_github_marstr_guid",
build_file_proto_mode = "disable",
importpath = "github.com/marstr/guid",
sum = "h1:/M4H/1G4avsieL6BbUwCOBzulmoeKVP5ux/3mQNnbyI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_masterminds_goutils",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/goutils",
sum = "h1:zukEsf/1JZwCMgHiK3GZftabmxiCw4apj3a28RPBiVg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_masterminds_semver",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/semver",
sum = "h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=",
version = "v1.5.0",
)
go_repository(
name = "com_github_masterminds_semver_v3",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/semver/v3",
sum = "h1:Y2lUDsFKVRSYGojLJ1yLxSXdMmMYTYls0rCvoqmMUQk=",
version = "v3.1.0",
)
go_repository(
name = "com_github_masterminds_sprig_v3",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/sprig/v3",
sum = "h1:wz22D0CiSctrliXiI9ZO3HoNApweeRGftyDN+BQa3B8=",
version = "v3.0.2",
)
go_repository(
name = "com_github_masterminds_vcs",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/vcs",
sum = "h1:NL3G1X7/7xduQtA2sJLpVpfHTNBALVNSjob6KEjPXNQ=",
version = "v1.13.1",
)
go_repository(
name = "com_github_matoous_godox",
build_file_proto_mode = "disable",
importpath = "github.com/matoous/godox",
sum = "h1:RHba4YImhrUVQDHUCe2BNSOz4tVy2yGyXhvYDvxGgeE=",
version = "v0.0.0-20190911065817-5d6d842e92eb",
)
go_repository(
name = "com_github_mattbaird_jsonpatch",
build_file_proto_mode = "disable",
importpath = "github.com/mattbaird/jsonpatch",
sum = "h1:+J2gw7Bw77w/fbK7wnNJJDKmw1IbWft2Ul5BzrG1Qm8=",
version = "v0.0.0-20171005235357-81af80346b1a",
)
go_repository(
name = "com_github_mattn_go_colorable",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-colorable",
sum = "h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=",
version = "v0.1.6",
)
go_repository(
name = "com_github_mattn_go_ieproxy",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-ieproxy",
sum = "h1:qiyop7gCflfhwCzGyeT0gro3sF9AIg9HU98JORTkqfI=",
version = "v0.0.1",
)
go_repository(
name = "com_github_mattn_go_isatty",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-isatty",
sum = "h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=",
version = "v0.0.12",
)
go_repository(
name = "com_github_mattn_go_runewidth",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-runewidth",
sum = "h1:3tS41NlGYSmhhe/8fhGRzc+z3AYCw1Fe1WAyLuujKs0=",
version = "v0.0.8",
)
go_repository(
name = "com_github_mattn_go_shellwords",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-shellwords",
sum = "h1:Y7Xqm8piKOO3v10Thp7Z36h4FYFjt5xB//6XvOrs2Gw=",
version = "v1.0.10",
)
go_repository(
name = "com_github_mattn_go_sqlite3",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-sqlite3",
sum = "h1:xQ15muvnzGBHpIpdrNi1DA5x0+TcBZzsIDwmw9uTHzw=",
version = "v2.0.1+incompatible",
)
go_repository(
name = "com_github_mattn_go_zglob",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-zglob",
sum = "h1:0qT24o2wsZ8cOXQAERwBX6s+rPMs/bJTKxLVVtgfDXc=",
version = "v0.0.2",
)
go_repository(
name = "com_github_mattn_goveralls",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/goveralls",
sum = "h1:7eJB6EqsPhRVxvwEXGnqdO2sJI0PTsrWoTMXEk9/OQc=",
version = "v0.0.2",
)
go_repository(
name = "com_github_matttproud_golang_protobuf_extensions",
build_file_proto_mode = "disable",
importpath = "github.com/matttproud/golang_protobuf_extensions",
sum = "h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI=",
version = "v1.0.2-0.20181231171920-c182affec369",
)
go_repository(
name = "com_github_maxbrunsfeld_counterfeiter_v6",
build_file_proto_mode = "disable",
importpath = "github.com/maxbrunsfeld/counterfeiter/v6",
sum = "h1:g+4J5sZg6osfvEfkRZxJ1em0VT95/UOZgi/l7zi1/oE=",
version = "v6.2.2",
)
go_repository(
name = "com_github_mgutz_ansi",
build_file_proto_mode = "disable",
importpath = "github.com/mgutz/ansi",
sum = "h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=",
version = "v0.0.0-20170206155736-9520e82c474b",
)
go_repository(
name = "com_github_mholt_archiver_v3",
build_file_proto_mode = "disable",
importpath = "github.com/mholt/archiver/v3",
sum = "h1:vWjhY8SQp5yzM9P6OJ/eZEkmi3UAbRrxCq48MxjAzig=",
version = "v3.3.0",
)
go_repository(
name = "com_github_microsoft_go_winio",
build_file_proto_mode = "disable",
importpath = "github.com/Microsoft/go-winio",
sum = "h1:FtSW/jqD+l4ba5iPBj9CODVtgfYAD8w2wS923g/cFDk=",
version = "v0.4.16",
)
go_repository(
name = "com_github_microsoft_hcsshim",
build_file_proto_mode = "disable",
importpath = "github.com/Microsoft/hcsshim",
sum = "h1:ptnOoufxGSzauVTsdE+wMYnCWA301PdoN4xg5oRdZpg=",
version = "v0.8.7",
)
go_repository(
name = "com_github_miekg_dns",
build_file_proto_mode = "disable",
importpath = "github.com/miekg/dns",
sum = "h1:sJFOl9BgwbYAWOGEwr61FU28pqsBNdpRBnhGXtO06Oo=",
version = "v1.1.31",
)
go_repository(
name = "com_github_mitchellh_cli",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/cli",
sum = "h1:iGBIsUe3+HZ/AD/Vd7DErOt5sU9fa8Uj7A2s1aggv1Y=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_copystructure",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/copystructure",
sum = "h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_go_homedir",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-homedir",
sum = "h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=",
version = "v1.1.0",
)
go_repository(
name = "com_github_mitchellh_go_ps",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-ps",
sum = "h1:9+ke9YJ9KGWw5ANXK6ozjoK47uI3uNbXv4YVINBnGm8=",
version = "v0.0.0-20190716172923-621e5597135b",
)
go_repository(
name = "com_github_mitchellh_go_testing_interface",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-testing-interface",
sum = "h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_go_wordwrap",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-wordwrap",
sum = "h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_gox",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/gox",
sum = "h1:lfGJxY7ToLJQjHHwi0EX6uYBdK78egf954SQl13PQJc=",
version = "v0.4.0",
)
go_repository(
name = "com_github_mitchellh_iochan",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/iochan",
sum = "h1:C+X3KsSTLFVBr/tK1eYN/vs4rJcvsiLU338UhYPJWeY=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_ioprogress",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/ioprogress",
sum = "h1:Qa6dnn8DlasdXRnacluu8HzPts0S1I9zvvUPDbBnXFI=",
version = "v0.0.0-20180201004757-6a23b12fa88e",
)
go_repository(
name = "com_github_mitchellh_mapstructure",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/mapstructure",
sum = "h1:cCBH2gTD2K0OtLlv/Y5H01VQCqmlDxz30kS5Y5bqfLA=",
version = "v1.3.1",
)
go_repository(
name = "com_github_mitchellh_osext",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/osext",
sum = "h1:2+myh5ml7lgEU/51gbeLHfKGNfgEQQIWrlbdaOsidbQ=",
version = "v0.0.0-20151018003038-5e2d6d41470f",
)
go_repository(
name = "com_github_mitchellh_reflectwalk",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/reflectwalk",
sum = "h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY=",
version = "v1.0.0",
)
go_repository(
name = "com_github_moby_spdystream",
build_file_proto_mode = "disable",
importpath = "github.com/moby/spdystream",
sum = "h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8=",
version = "v0.2.0",
)
go_repository(
name = "com_github_moby_term",
build_file_proto_mode = "disable",
importpath = "github.com/moby/term",
sum = "h1:rzf0wL0CHVc8CEsgyygG0Mn9CNCCPZqOPaz8RiiHYQk=",
version = "v0.0.0-20201216013528-df9cb8a40635",
)
go_repository(
name = "com_github_modern_go_concurrent",
build_file_proto_mode = "disable",
importpath = "github.com/modern-go/concurrent",
sum = "h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=",
version = "v0.0.0-20180306012644-bacd9c7ef1dd",
)
go_repository(
name = "com_github_modern_go_reflect2",
build_file_proto_mode = "disable",
importpath = "github.com/modern-go/reflect2",
sum = "h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=",
version = "v1.0.1",
)
go_repository(
name = "com_github_mohae_deepcopy",
build_file_proto_mode = "disable",
importpath = "github.com/mohae/deepcopy",
sum = "h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw=",
version = "v0.0.0-20170929034955-c48cc78d4826",
)
go_repository(
name = "com_github_morikuni_aec",
build_file_proto_mode = "disable",
importpath = "github.com/morikuni/aec",
sum = "h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mozilla_tls_observatory",
build_file_proto_mode = "disable",
importpath = "github.com/mozilla/tls-observatory",
sum = "h1:1xJ+Xi9lYWLaaP4yB67ah0+548CD3110mCPWhVVjFkI=",
version = "v0.0.0-20200317151703-4fa42e1c2dee",
)
go_repository(
name = "com_github_munnerz_crd_schema_fuzz",
build_file_proto_mode = "disable",
importpath = "github.com/munnerz/crd-schema-fuzz",
sum = "h1:8erI9yzEnOGw9K5O+a8zZdoo8N/OwrFi7c7SjBtkHAs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_munnerz_goautoneg",
build_file_proto_mode = "disable",
importpath = "github.com/munnerz/goautoneg",
sum = "h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=",
version = "v0.0.0-20191010083416-a7dc8b61c822",
)
go_repository(
name = "com_github_mwitkow_go_conntrack",
build_file_proto_mode = "disable",
importpath = "github.com/mwitkow/go-conntrack",
sum = "h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=",
version = "v0.0.0-20190716064945-2f068394615f",
)
go_repository(
name = "com_github_mxk_go_flowrate",
build_file_proto_mode = "disable",
importpath = "github.com/mxk/go-flowrate",
sum = "h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus=",
version = "v0.0.0-20140419014527-cca7078d478f",
)
go_repository(
name = "com_github_nakabonne_nestif",
build_file_proto_mode = "disable",
importpath = "github.com/nakabonne/nestif",
sum = "h1:+yOViDGhg8ygGrmII72nV9B/zGxY188TYpfolntsaPw=",
version = "v0.3.0",
)
go_repository(
name = "com_github_natefinch_lumberjack",
build_file_proto_mode = "disable",
importpath = "github.com/natefinch/lumberjack",
sum = "h1:4QJd3OLAMgj7ph+yZTuX13Ld4UpgHp07nNdFX7mqFfM=",
version = "v2.0.0+incompatible",
)
go_repository(
name = "com_github_nats_io_gnatsd",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/gnatsd",
sum = "h1:RconcfDeWpKCD6QIIwiVFcvForlXpWeJP7i5/lDLy44=",
version = "v1.4.1",
)
go_repository(
name = "com_github_nats_io_go_nats",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/go-nats",
sum = "h1:oQOfHcLr8hb43QG8yeVyY2jtarIaTjOv41CGdF3tTvQ=",
version = "v1.7.0",
)
go_repository(
name = "com_github_nats_io_jwt",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/jwt",
sum = "h1:+RB5hMpXUUA2dfxuhBTEkMOrYmM+gKIZYS1KjSostMI=",
version = "v0.3.2",
)
go_repository(
name = "com_github_nats_io_nats_go",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nats.go",
sum = "h1:ik3HbLhZ0YABLto7iX80pZLPw/6dx3T+++MZJwLnMrQ=",
version = "v1.9.1",
)
go_repository(
name = "com_github_nats_io_nats_server_v2",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nats-server/v2",
sum = "h1:i2Ly0B+1+rzNZHHWtD4ZwKi+OU5l+uQo1iDHZ2PmiIc=",
version = "v2.1.2",
)
go_repository(
name = "com_github_nats_io_nkeys",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nkeys",
sum = "h1:6JrEfig+HzTH85yxzhSVbjHRJv9cn0p6n3IngIcM5/k=",
version = "v0.1.3",
)
go_repository(
name = "com_github_nats_io_nuid",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nuid",
sum = "h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=",
version = "v1.0.1",
)
go_repository(
name = "com_github_nbio_st",
build_file_proto_mode = "disable",
importpath = "github.com/nbio/st",
sum = "h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4=",
version = "v0.0.0-20140626010706-e9e8d9816f32",
)
go_repository(
name = "com_github_nbutton23_zxcvbn_go",
build_file_proto_mode = "disable",
importpath = "github.com/nbutton23/zxcvbn-go",
sum = "h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E=",
version = "v0.0.0-20180912185939-ae427f1e4c1d",
)
go_repository(
name = "com_github_ncw_swift",
build_file_proto_mode = "disable",
importpath = "github.com/ncw/swift",
sum = "h1:4DQRPj35Y41WogBxyhOXlrI37nzGlyEcsforeudyYPQ=",
version = "v1.0.47",
)
go_repository(
name = "com_github_niemeyer_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/niemeyer/pretty",
sum = "h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=",
version = "v0.0.0-20200227124842-a10e7caefd8e",
)
go_repository(
name = "com_github_nwaples_rardecode",
build_file_proto_mode = "disable",
importpath = "github.com/nwaples/rardecode",
sum = "h1:r7vGuS5akxOnR4JQSkko62RJ1ReCMXxQRPtxsiFMBOs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_nxadm_tail",
build_file_proto_mode = "disable",
importpath = "github.com/nxadm/tail",
sum = "h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=",
version = "v1.4.8",
)
go_repository(
name = "com_github_nytimes_gziphandler",
build_file_proto_mode = "disable",
importpath = "github.com/NYTimes/gziphandler",
sum = "h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=",
version = "v1.1.1",
)
go_repository(
name = "com_github_octago_sflags",
build_file_proto_mode = "disable",
importpath = "github.com/octago/sflags",
sum = "h1:XceYzkRXGAHa/lSFmKLcaxSrsh4MTuOMQdIGsUD0wlk=",
version = "v0.2.0",
)
go_repository(
name = "com_github_oklog_run",
build_file_proto_mode = "disable",
importpath = "github.com/oklog/run",
sum = "h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_oklog_ulid",
build_file_proto_mode = "disable",
importpath = "github.com/oklog/ulid",
sum = "h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=",
version = "v1.3.1",
)
go_repository(
name = "com_github_olekukonko_tablewriter",
build_file_proto_mode = "disable",
importpath = "github.com/olekukonko/tablewriter",
sum = "h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8=",
version = "v0.0.4",
)
go_repository(
name = "com_github_oneofone_xxhash",
build_file_proto_mode = "disable",
importpath = "github.com/OneOfOne/xxhash",
sum = "h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE=",
version = "v1.2.2",
)
go_repository(
name = "com_github_onsi_ginkgo",
build_file_proto_mode = "disable",
importpath = "github.com/onsi/ginkgo",
sum = "h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc=",
version = "v1.16.4",
)
go_repository(
name = "com_github_onsi_gomega",
build_file_proto_mode = "disable",
importpath = "github.com/onsi/gomega",
sum = "h1:7lLHu94wT9Ij0o6EWWclhu0aOh32VxhkwEJvzuWPeak=",
version = "v1.13.0",
)
go_repository(
name = "com_github_op_go_logging",
build_file_proto_mode = "disable",
importpath = "github.com/op/go-logging",
sum = "h1:lDH9UUVJtmYCjyT0CI4q8xvlXPxeZ0gYCVvWbmPlp88=",
version = "v0.0.0-20160315200505-970db520ece7",
)
go_repository(
name = "com_github_opencontainers_go_digest",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/go-digest",
sum = "h1:WzifXhOVOEOuFYOJAW6aQqW0TooG2iki3E3Ii+WN7gQ=",
version = "v1.0.0-rc1",
)
go_repository(
name = "com_github_opencontainers_image_spec",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/image-spec",
sum = "h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI=",
version = "v1.0.1",
)
go_repository(
name = "com_github_opencontainers_runc",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runc",
sum = "h1:GlxAyO6x8rfZYN9Tt0Kti5a/cP41iuiO2yYT0IJGY8Y=",
version = "v0.1.1",
)
go_repository(
name = "com_github_opencontainers_runtime_spec",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runtime-spec",
sum = "h1:eNUVfm/RFLIi1G7flU5/ZRTHvd4kcVuzfRnL6OFlzCI=",
version = "v0.1.2-0.20190507144316-5b71a03e2700",
)
go_repository(
name = "com_github_opencontainers_runtime_tools",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runtime-tools",
sum = "h1:H7DMc6FAjgwZZi8BRqjrAAHWoqEr5e5L6pS4V0ezet4=",
version = "v0.0.0-20181011054405-1d69bd0f9c39",
)
go_repository(
name = "com_github_openpeedeep_depguard",
build_file_proto_mode = "disable",
importpath = "github.com/OpenPeeDeeP/depguard",
sum = "h1:VlW4R6jmBIv3/u1JNlawEvJMM4J+dPORPaZasQee8Us=",
version = "v1.0.1",
)
go_repository(
name = "com_github_openzipkin_zipkin_go",
build_file_proto_mode = "disable",
importpath = "github.com/openzipkin/zipkin-go",
sum = "h1:nY8Hti+WKaP0cRsSeQ026wU03QsM762XBeCXBb9NAWI=",
version = "v0.2.2",
)
go_repository(
name = "com_github_otiai10_copy",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/copy",
sum = "h1:DDNipYy6RkIkjMwy+AWzgKiNTyj2RUI9yEMeETEpVyc=",
version = "v1.0.2",
)
go_repository(
name = "com_github_otiai10_curr",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/curr",
sum = "h1:+OLn68pqasWca0z5ryit9KGfp3sUsW4Lqg32iRMJyzs=",
version = "v0.0.0-20150429015615-9b4961190c95",
)
go_repository(
name = "com_github_otiai10_mint",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/mint",
sum = "h1:Ady6MKVezQwHBkGzLFbrsywyp09Ah7rkmfjV3Bcr5uc=",
version = "v1.3.0",
)
go_repository(
name = "com_github_pascaldekloe_goe",
build_file_proto_mode = "disable",
importpath = "github.com/pascaldekloe/goe",
sum = "h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=",
version = "v0.1.0",
)
go_repository(
name = "com_github_pavel_v_chernykh_keystore_go",
build_file_proto_mode = "disable",
importpath = "github.com/pavel-v-chernykh/keystore-go",
sum = "h1:Jd6xfriVlJ6hWPvYOE0Ni0QWcNTLRehfGPFxr3eSL80=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_pborman_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/pborman/uuid",
sum = "h1:J7Q5mO4ysT1dv8hyrUGHb9+ooztCXu1D8MY8DZYsu3g=",
version = "v1.2.0",
)
go_repository(
name = "com_github_pelletier_go_buffruneio",
build_file_proto_mode = "disable",
importpath = "github.com/pelletier/go-buffruneio",
sum = "h1:U4t4R6YkofJ5xHm3dJzuRpPZ0mr5MMCoAWooScCR7aA=",
version = "v0.2.0",
)
go_repository(
name = "com_github_pelletier_go_toml",
build_file_proto_mode = "disable",
importpath = "github.com/pelletier/go-toml",
sum = "h1:Keo9qb7iRJs2voHvunFtuuYFsbWeOBh8/P9v/kVMFtw=",
version = "v1.8.0",
)
go_repository(
name = "com_github_peterbourgon_diskv",
build_file_proto_mode = "disable",
importpath = "github.com/peterbourgon/diskv",
sum = "h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI=",
version = "v2.0.1+incompatible",
)
go_repository(
name = "com_github_phayes_checkstyle",
build_file_proto_mode = "disable",
importpath = "github.com/phayes/checkstyle",
sum = "h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA=",
version = "v0.0.0-20170904204023-bfd46e6a821d",
)
go_repository(
name = "com_github_phayes_freeport",
build_file_proto_mode = "disable",
importpath = "github.com/phayes/freeport",
sum = "h1:JhzVVoYvbOACxoUmOs6V/G4D5nPVUW73rKvXxP4XUJc=",
version = "v0.0.0-20180830031419-95f893ade6f2",
)
go_repository(
name = "com_github_pierrec_lz4",
build_file_proto_mode = "disable",
importpath = "github.com/pierrec/lz4",
sum = "h1:6aCX4/YZ9v8q69hTyiR7dNLnTA3fgtKHVVW5BCd5Znw=",
version = "v2.2.6+incompatible",
)
go_repository(
name = "com_github_pkg_errors",
build_file_proto_mode = "disable",
importpath = "github.com/pkg/errors",
sum = "h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=",
version = "v0.9.1",
)
go_repository(
name = "com_github_pkg_profile",
build_file_proto_mode = "disable",
importpath = "github.com/pkg/profile",
sum = "h1:F++O52m40owAmADcojzM+9gyjmMOY/T4oYJkgFDH8RE=",
version = "v1.2.1",
)
go_repository(
name = "com_github_pmezard_go_difflib",
build_file_proto_mode = "disable",
importpath = "github.com/pmezard/go-difflib",
sum = "h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=",
version = "v1.0.0",
)
go_repository(
name = "com_github_posener_complete",
build_file_proto_mode = "disable",
importpath = "github.com/posener/complete",
sum = "h1:ccV59UEOTzVDnDUEFdT95ZzHVZ+5+158q8+SJb2QV5w=",
version = "v1.1.1",
)
go_repository(
name = "com_github_pquerna_cachecontrol",
build_file_proto_mode = "disable",
importpath = "github.com/pquerna/cachecontrol",
sum = "h1:0XM1XL/OFFJjXsYXlG30spTkV/E9+gmd5GD1w2HE8xM=",
version = "v0.0.0-20171018203845-0dec1b30a021",
)
go_repository(
name = "com_github_prometheus_client_golang",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/client_golang",
sum = "h1:HNkLOAEQMIDv/K+04rukrLx6ch7msSRwf3/SASFAGtQ=",
version = "v1.11.0",
)
go_repository(
name = "com_github_prometheus_client_model",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/client_model",
sum = "h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=",
version = "v0.2.0",
)
go_repository(
name = "com_github_prometheus_common",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/common",
sum = "h1:iMAkS2TDoNWnKM+Kopnx/8tnEStIfpYA0ur0xQzzhMQ=",
version = "v0.26.0",
)
go_repository(
name = "com_github_prometheus_procfs",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/procfs",
sum = "h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4=",
version = "v0.6.0",
)
go_repository(
name = "com_github_prometheus_tsdb",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/tsdb",
sum = "h1:YZcsG11NqnK4czYLrWd9mpEuAJIHVQLwdrleYfszMAA=",
version = "v0.7.1",
)
go_repository(
name = "com_github_puerkitobio_purell",
build_file_proto_mode = "disable",
importpath = "github.com/PuerkitoBio/purell",
sum = "h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=",
version = "v1.1.1",
)
go_repository(
name = "com_github_puerkitobio_urlesc",
build_file_proto_mode = "disable",
importpath = "github.com/PuerkitoBio/urlesc",
sum = "h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=",
version = "v0.0.0-20170810143723-de5bf2ad4578",
)
go_repository(
name = "com_github_quasilyte_go_consistent",
build_file_proto_mode = "disable",
importpath = "github.com/quasilyte/go-consistent",
sum = "h1:JoUA0uz9U0FVFq5p4LjEq4C0VgQ0El320s3Ms0V4eww=",
version = "v0.0.0-20190521200055-c6f3937de18c",
)
go_repository(
name = "com_github_quasilyte_go_ruleguard",
build_file_proto_mode = "disable",
importpath = "github.com/quasilyte/go-ruleguard",
sum = "h1:DvnesvLtRPQOvaUbfXfh0tpMHg29by0H7F2U+QIkSu8=",
version = "v0.1.2-0.20200318202121-b00d7a75d3d8",
)
go_repository(
name = "com_github_rcrowley_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/rcrowley/go-metrics",
sum = "h1:eUm8ma4+yPknhXtkYlWh3tMkE6gBjXZToDned9s2gbQ=",
version = "v0.0.0-20190706150252-9beb055b7962",
)
go_repository(
name = "com_github_remyoudompheng_bigfft",
build_file_proto_mode = "disable",
importpath = "github.com/remyoudompheng/bigfft",
sum = "h1:/NRJ5vAYoqz+7sG51ubIDHXeWO8DlTSrToPu6q11ziA=",
version = "v0.0.0-20170806203942-52369c62f446",
)
go_repository(
name = "com_github_rogpeppe_fastuuid",
build_file_proto_mode = "disable",
importpath = "github.com/rogpeppe/fastuuid",
sum = "h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s=",
version = "v1.2.0",
)
go_repository(
name = "com_github_rogpeppe_go_internal",
build_file_proto_mode = "disable",
importpath = "github.com/rogpeppe/go-internal",
sum = "h1:qLvObTrvO/XRCqmkKxUlOBc48bI3efyDuAZe25QiF0w=",
version = "v1.5.2",
)
go_repository(
name = "com_github_rubiojr_go_vhd",
build_file_proto_mode = "disable",
importpath = "github.com/rubiojr/go-vhd",
sum = "h1:ht7N4d/B7Ezf58nvMNVF3OlvDlz9pp+WHVcRNS0nink=",
version = "v0.0.0-20160810183302-0bfd3b39853c",
)
go_repository(
name = "com_github_russross_blackfriday",
build_file_proto_mode = "disable",
importpath = "github.com/russross/blackfriday",
sum = "h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=",
version = "v1.5.2",
)
go_repository(
name = "com_github_russross_blackfriday_v2",
build_file_proto_mode = "disable",
importpath = "github.com/russross/blackfriday/v2",
sum = "h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=",
version = "v2.0.1",
)
go_repository(
name = "com_github_rwcarlsen_goexif",
build_file_proto_mode = "disable",
importpath = "github.com/rwcarlsen/goexif",
sum = "h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc=",
version = "v0.0.0-20190401172101-9e8deecbddbd",
)
go_repository(
name = "com_github_ryancurrah_gomodguard",
build_file_proto_mode = "disable",
importpath = "github.com/ryancurrah/gomodguard",
sum = "h1:DWbye9KyMgytn8uYpuHkwf0RHqAYO6Ay/D0TbCpPtVU=",
version = "v1.1.0",
)
go_repository(
name = "com_github_ryanuber_columnize",
build_file_proto_mode = "disable",
importpath = "github.com/ryanuber/columnize",
sum = "h1:j1Wcmh8OrK4Q7GXY+V7SVSY8nUWQxHW5TkBe7YUl+2s=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_ryanuber_go_glob",
build_file_proto_mode = "disable",
importpath = "github.com/ryanuber/go-glob",
sum = "h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk=",
version = "v1.0.0",
)
go_repository(
name = "com_github_sassoftware_go_rpmutils",
build_file_proto_mode = "disable",
importpath = "github.com/sassoftware/go-rpmutils",
sum = "h1:+gCnWOZV8Z/8jehJ2CdqB47Z3S+SREmQcuXkRFLNsiI=",
version = "v0.0.0-20190420191620-a8f1baeba37b",
)
go_repository(
name = "com_github_satori_go_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/satori/go.uuid",
sum = "h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=",
version = "v1.2.0",
)
go_repository(
name = "com_github_sclevine_spec",
build_file_proto_mode = "disable",
importpath = "github.com/sclevine/spec",
sum = "h1:1Jwdf9jSfDl9NVmt8ndHqbTZ7XCCPbh1jI3hkDBHVYA=",
version = "v1.2.0",
)
go_repository(
name = "com_github_sean_seed",
build_file_proto_mode = "disable",
importpath = "github.com/sean-/seed",
sum = "h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=",
version = "v0.0.0-20170313163322-e2103e2c3529",
)
go_repository(
name = "com_github_securego_gosec",
build_file_proto_mode = "disable",
importpath = "github.com/securego/gosec",
sum = "h1:rq2/kILQnPtq5oL4+IAjgVOjh5e2yj2aaCYi7squEvI=",
version = "v0.0.0-20200401082031-e946c8c39989",
)
go_repository(
name = "com_github_securego_gosec_v2",
build_file_proto_mode = "disable",
importpath = "github.com/securego/gosec/v2",
sum = "h1:y/9mCF2WPDbSDpL3QDWZD3HHGrSYw0QSHnCqTfs4JPE=",
version = "v2.3.0",
)
go_repository(
name = "com_github_sergi_go_diff",
build_file_proto_mode = "disable",
importpath = "github.com/sergi/go-diff",
sum = "h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=",
version = "v1.1.0",
)
go_repository(
name = "com_github_shirou_gopsutil",
build_file_proto_mode = "disable",
importpath = "github.com/shirou/gopsutil",
sum = "h1:WokF3GuxBeL+n4Lk4Fa8v9mbdjlrl7bHuneF4N1bk2I=",
version = "v0.0.0-20190901111213-e4ec7b275ada",
)
go_repository(
name = "com_github_shirou_w32",
build_file_proto_mode = "disable",
importpath = "github.com/shirou/w32",
sum = "h1:udFKJ0aHUL60LboW/A+DfgoHVedieIzIXE8uylPue0U=",
version = "v0.0.0-20160930032740-bb4de0191aa4",
)
go_repository(
name = "com_github_shopify_logrus_bugsnag",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/logrus-bugsnag",
sum = "h1:UrqY+r/OJnIp5u0s1SbQ8dVfLCZJsnvazdBP5hS4iRs=",
version = "v0.0.0-20171204204709-577dee27f20d",
)
go_repository(
name = "com_github_shopify_sarama",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/sarama",
sum = "h1:XxJBCZEoWJtoWjf/xRbmGUpAmTZGnuuF0ON0EvxxBrs=",
version = "v1.23.1",
)
go_repository(
name = "com_github_shopify_toxiproxy",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/toxiproxy",
sum = "h1:TKdv8HiTLgE5wdJuEML90aBgNWsokNbMijUGhmcoBJc=",
version = "v2.1.4+incompatible",
)
go_repository(
name = "com_github_shurcool_githubv4",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/githubv4",
sum = "h1:N5B+JgvM/DVYIxreItPJMM3yWrNO/GB2q4nESrtBisM=",
version = "v0.0.0-20210725200734-83ba7b4c9228",
)
go_repository(
name = "com_github_shurcool_go",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/go",
sum = "h1:MZM7FHLqUHYI0Y/mQAt3d2aYa0SiNms/hFqC9qJYolM=",
version = "v0.0.0-20180423040247-9e1955d9fb6e",
)
go_repository(
name = "com_github_shurcool_go_goon",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/go-goon",
sum = "h1:llrF3Fs4018ePo4+G/HV/uQUqEI1HMDjCeOf2V6puPc=",
version = "v0.0.0-20170922171312-37c2f522c041",
)
go_repository(
name = "com_github_shurcool_graphql",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/graphql",
sum = "h1:tygelZueB1EtXkPI6mQ4o9DQ0+FKW41hTbunoXZCTqk=",
version = "v0.0.0-20181231061246-d48a9a75455f",
)
go_repository(
name = "com_github_shurcool_sanitized_anchor_name",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/sanitized_anchor_name",
sum = "h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_sirupsen_logrus",
build_file_proto_mode = "disable",
importpath = "github.com/sirupsen/logrus",
sum = "h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=",
version = "v1.8.1",
)
go_repository(
name = "com_github_smartystreets_assertions",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/assertions",
sum = "h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs=",
version = "v1.2.0",
)
go_repository(
name = "com_github_smartystreets_go_aws_auth",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/go-aws-auth",
sum = "h1:hp2CYQUINdZMHdvTdXtPOY2ainKl4IoMcpAXEf2xj3Q=",
version = "v0.0.0-20180515143844-0c1422d1fdb9",
)
go_repository(
name = "com_github_smartystreets_goconvey",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/goconvey",
sum = "h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=",
version = "v1.6.4",
)
go_repository(
name = "com_github_smartystreets_gunit",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/gunit",
sum = "h1:RyPDUFcJbvtXlhJPk7v+wnxZRY2EUokhEYl2EJOPToI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_soheilhy_cmux",
build_file_proto_mode = "disable",
importpath = "github.com/soheilhy/cmux",
sum = "h1:0HKaf1o97UwFjHH9o5XsHUOF+tqmdA7KEzXLpiyaw0E=",
version = "v0.1.4",
)
go_repository(
name = "com_github_sourcegraph_go_diff",
build_file_proto_mode = "disable",
importpath = "github.com/sourcegraph/go-diff",
sum = "h1:lhIKJ2nXLZZ+AfbHpYxTn0pXpNTTui0DX7DO3xeb1Zs=",
version = "v0.5.3",
)
go_repository(
name = "com_github_spaolacci_murmur3",
build_file_proto_mode = "disable",
importpath = "github.com/spaolacci/murmur3",
sum = "h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ=",
version = "v0.0.0-20180118202830-f09979ecbc72",
)
go_repository(
name = "com_github_spf13_afero",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/afero",
sum = "h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc=",
version = "v1.2.2",
)
go_repository(
name = "com_github_spf13_cast",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/cast",
sum = "h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=",
version = "v1.3.1",
)
go_repository(
name = "com_github_spf13_cobra",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/cobra",
sum = "h1:xghbfqPkxzxP3C/f3n5DdpAbdKLj4ZE4BWQI362l53M=",
version = "v1.1.3",
)
go_repository(
name = "com_github_spf13_jwalterweatherman",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/jwalterweatherman",
sum = "h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=",
version = "v1.1.0",
)
go_repository(
name = "com_github_spf13_pflag",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/pflag",
sum = "h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=",
version = "v1.0.5",
)
go_repository(
name = "com_github_spf13_viper",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/viper",
sum = "h1:pM5oEahlgWv/WnHXpgbKz7iLIxRf65tye2Ci+XFK5sk=",
version = "v1.7.1",
)
go_repository(
name = "com_github_src_d_gcfg",
build_file_proto_mode = "disable",
importpath = "github.com/src-d/gcfg",
sum = "h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4=",
version = "v1.4.0",
)
go_repository(
name = "com_github_stackexchange_wmi",
build_file_proto_mode = "disable",
importpath = "github.com/StackExchange/wmi",
sum = "h1:fLjPD/aNc3UIOA6tDi6QXUemppXK3P9BI7mr2hd6gx8=",
version = "v0.0.0-20180116203802-5d049714c4a6",
)
go_repository(
name = "com_github_streadway_amqp",
build_file_proto_mode = "disable",
importpath = "github.com/streadway/amqp",
sum = "h1:0ngsPmuP6XIjiFRNFYlvKwSr5zff2v+uPHaffZ6/M4k=",
version = "v0.0.0-20190404075320-75d898a42a94",
)
go_repository(
name = "com_github_streadway_quantile",
build_file_proto_mode = "disable",
importpath = "github.com/streadway/quantile",
sum = "h1:7z3LSn867ex6VSaahyKadf4WtSsJIgne6A1WLOAGM8A=",
version = "v0.0.0-20150917103942-b0c588724d25",
)
go_repository(
name = "com_github_stretchr_objx",
build_file_proto_mode = "disable",
importpath = "github.com/stretchr/objx",
sum = "h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=",
version = "v0.2.0",
)
go_repository(
name = "com_github_stretchr_testify",
build_file_proto_mode = "disable",
importpath = "github.com/stretchr/testify",
sum = "h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=",
version = "v1.7.0",
)
go_repository(
name = "com_github_subosito_gotenv",
build_file_proto_mode = "disable",
importpath = "github.com/subosito/gotenv",
sum = "h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=",
version = "v1.2.0",
)
go_repository(
name = "com_github_syndtr_gocapability",
build_file_proto_mode = "disable",
importpath = "github.com/syndtr/gocapability",
sum = "h1:zLV6q4e8Jv9EHjNg/iHfzwDkCve6Ua5jCygptrtXHvI=",
version = "v0.0.0-20170704070218-db04d3cc01c8",
)
go_repository(
name = "com_github_tdakkota_asciicheck",
build_file_proto_mode = "disable",
importpath = "github.com/tdakkota/asciicheck",
sum = "h1:HxLVTlqcHhFAz3nWUcuvpH7WuOMv8LQoCWmruLfFH2U=",
version = "v0.0.0-20200416200610-e657995f937b",
)
go_repository(
name = "com_github_tektoncd_pipeline",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/pipeline",
sum = "h1:hWdWj5bDjkSGYLlJS+u+Kh9ZktBJgs2JNUv/kP0LVOA=",
version = "v0.13.1-0.20200625065359-44f22a067b75",
)
go_repository(
name = "com_github_tektoncd_plumbing",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/plumbing",
sum = "h1:crv70CBAJ2gZFSbf13aRVwdbjR2GYwTms/ZEok/SnFM=",
version = "v0.0.0-20200430135134-e53521e1d887",
)
go_repository(
name = "com_github_tektoncd_plumbing_pipelinerun_logs",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/plumbing/pipelinerun-logs",
sum = "h1:9qeyrQsoPZbHOyOPt0OeB1TCYXfYb5swrxlFWzTIYYk=",
version = "v0.0.0-20191206114338-712d544c2c21",
)
go_repository(
name = "com_github_tetafro_godot",
build_file_proto_mode = "disable",
importpath = "github.com/tetafro/godot",
sum = "h1:Dib7un+rYJFUi8vN0Bk6EHheKy6fv6ZzFURHw75g6m8=",
version = "v0.4.2",
)
go_repository(
name = "com_github_tidwall_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/tidwall/pretty",
sum = "h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_timakin_bodyclose",
build_file_proto_mode = "disable",
importpath = "github.com/timakin/bodyclose",
sum = "h1:ig99OeTyDwQWhPe2iw9lwfQVF1KB3Q4fpP3X7/2VBG8=",
version = "v0.0.0-20200424151742-cb6215831a94",
)
go_repository(
name = "com_github_tj_assert",
build_file_proto_mode = "disable",
importpath = "github.com/tj/assert",
sum = "h1:Rw8kxzWo1mr6FSaYXjQELRe88y2KdfynXdnK72rdjtA=",
version = "v0.0.0-20171129193455-018094318fb0",
)
go_repository(
name = "com_github_tj_go_elastic",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-elastic",
sum = "h1:eGaGNxrtoZf/mBURsnNQKDR7u50Klgcf2eFDQEnc8Bc=",
version = "v0.0.0-20171221160941-36157cbbebc2",
)
go_repository(
name = "com_github_tj_go_kinesis",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-kinesis",
sum = "h1:m74UWYy+HBs+jMFR9mdZU6shPewugMyH5+GV6LNgW8w=",
version = "v0.0.0-20171128231115-08b17f58cb1b",
)
go_repository(
name = "com_github_tj_go_spin",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-spin",
sum = "h1:lhdWZsvImxvZ3q1C5OIB7d72DuOwP4O2NdBg9PyzNds=",
version = "v1.1.0",
)
go_repository(
name = "com_github_tmc_grpc_websocket_proxy",
build_file_proto_mode = "disable",
importpath = "github.com/tmc/grpc-websocket-proxy",
sum = "h1:LnC5Kc/wtumK+WB441p7ynQJzVuNRJiqddSIE3IlSEQ=",
version = "v0.0.0-20190109142713-0ad062ec5ee5",
)
go_repository(
name = "com_github_tommy_muehle_go_mnd",
build_file_proto_mode = "disable",
importpath = "github.com/tommy-muehle/go-mnd",
sum = "h1:RC4maTWLKKwb7p1cnoygsbKIgNlJqSYBeAFON3Ar8As=",
version = "v1.3.1-0.20200224220436-e6f9a994e8fa",
)
go_repository(
name = "com_github_trivago_tgo",
build_file_proto_mode = "disable",
importpath = "github.com/trivago/tgo",
sum = "h1:bxatjJIXNIpV18bucU4Uk/LaoxvxuOlp/oowRHyncLQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_tsenart_vegeta",
build_file_proto_mode = "disable",
importpath = "github.com/tsenart/vegeta",
sum = "h1:ErZrHhRveAoznVW80gbrxz+qxJNydpA2fcQxTPHkZbU=",
version = "v12.7.1-0.20190725001342-b5f4fca92137+incompatible",
)
go_repository(
name = "com_github_ugorji_go",
build_file_proto_mode = "disable",
importpath = "github.com/ugorji/go",
sum = "h1:j4s+tAvLfL3bZyefP2SEWmhBzmuIlH/eqNuPdFPgngw=",
version = "v1.1.4",
)
go_repository(
name = "com_github_ugorji_go_codec",
build_file_proto_mode = "disable",
importpath = "github.com/ugorji/go/codec",
sum = "h1:3SVOIvH7Ae1KRYyQWRjXWJEA9sS/c/pjvH++55Gr648=",
version = "v0.0.0-20181204163529-d75b2dcb6bc8",
)
go_repository(
name = "com_github_ulikunitz_xz",
build_file_proto_mode = "disable",
importpath = "github.com/ulikunitz/xz",
sum = "h1:YvTNdFzX6+W5m9msiYg/zpkSURPPtOlzbqYjrFn7Yt4=",
version = "v0.5.7",
)
go_repository(
name = "com_github_ultraware_funlen",
build_file_proto_mode = "disable",
importpath = "github.com/ultraware/funlen",
sum = "h1:Av96YVBwwNSe4MLR7iI/BIa3VyI7/djnto/pK3Uxbdo=",
version = "v0.0.2",
)
go_repository(
name = "com_github_ultraware_whitespace",
build_file_proto_mode = "disable",
importpath = "github.com/ultraware/whitespace",
sum = "h1:If7Va4cM03mpgrNH9k49/VOicWpGoG70XPBFFODYDsg=",
version = "v0.0.4",
)
go_repository(
name = "com_github_urfave_cli",
build_file_proto_mode = "disable",
importpath = "github.com/urfave/cli",
sum = "h1:u7tSpNPPswAFymm8IehJhy4uJMlUuU/GmqSkvJ1InXA=",
version = "v1.22.4",
)
go_repository(
name = "com_github_urfave_cli_v2",
build_file_proto_mode = "disable",
importpath = "github.com/urfave/cli/v2",
sum = "h1:Qt8FeAtxE/vfdrLmR3rxR6JRE0RoVmbXu8+6kZtYU4k=",
version = "v2.1.1",
)
go_repository(
name = "com_github_uudashr_gocognit",
build_file_proto_mode = "disable",
importpath = "github.com/uudashr/gocognit",
sum = "h1:MoG2fZ0b/Eo7NXoIwCVFLG5JED3qgQz5/NEE+rOsjPs=",
version = "v1.0.1",
)
go_repository(
name = "com_github_valyala_bytebufferpool",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/bytebufferpool",
sum = "h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_valyala_fasthttp",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/fasthttp",
sum = "h1:dzZJf2IuMiclVjdw0kkT+f9u4YdrapbNyGAN47E/qnk=",
version = "v1.2.0",
)
go_repository(
name = "com_github_valyala_quicktemplate",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/quicktemplate",
sum = "h1:BaO1nHTkspYzmAjPXj0QiDJxai96tlcZyKcI9dyEGvM=",
version = "v1.2.0",
)
go_repository(
name = "com_github_valyala_tcplisten",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/tcplisten",
sum = "h1:0R4NLDRDZX6JcmhJgXi5E4b8Wg84ihbmUKp/GvSPEzc=",
version = "v0.0.0-20161114210144-ceec8f93295a",
)
go_repository(
name = "com_github_vdemeester_k8s_pkg_credentialprovider",
build_file_proto_mode = "disable",
importpath = "github.com/vdemeester/k8s-pkg-credentialprovider",
sum = "h1:czKEIG2Q3YRTgs6x/8xhjVMJD5byPo6cZuostkbTM74=",
version = "v1.17.4",
)
go_repository(
name = "com_github_vektah_gqlparser",
build_file_proto_mode = "disable",
importpath = "github.com/vektah/gqlparser",
sum = "h1:ZsyLGn7/7jDNI+y4SEhI4yAxRChlv15pUHMjijT+e68=",
version = "v1.1.2",
)
go_repository(
name = "com_github_venafi_vcert_v4",
build_file_proto_mode = "disable",
importpath = "github.com/Venafi/vcert/v4",
sum = "h1:37gfyjS9v5YvZcIABwNPo1fAC31lIZT7glVK1vfUxk4=",
version = "v4.11.0",
)
go_repository(
name = "com_github_vmware_govmomi",
build_file_proto_mode = "disable",
importpath = "github.com/vmware/govmomi",
sum = "h1:gpw/0Ku+6RgF3jsi7fnCLmlcikBHfKBCUcu1qgc16OU=",
version = "v0.20.3",
)
go_repository(
name = "com_github_xanzy_go_gitlab",
build_file_proto_mode = "disable",
importpath = "github.com/xanzy/go-gitlab",
sum = "h1:tBm+OXv1t+KBsqlXkSDFz+YUjRM0GFsjpOWYOod3Ebs=",
version = "v0.32.0",
)
go_repository(
name = "com_github_xanzy_ssh_agent",
build_file_proto_mode = "disable",
importpath = "github.com/xanzy/ssh-agent",
sum = "h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI=",
version = "v0.3.0",
)
go_repository(
name = "com_github_xdg_scram",
build_file_proto_mode = "disable",
importpath = "github.com/xdg/scram",
sum = "h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk=",
version = "v0.0.0-20180814205039-7eeb5667e42c",
)
go_repository(
name = "com_github_xdg_stringprep",
build_file_proto_mode = "disable",
importpath = "github.com/xdg/stringprep",
sum = "h1:d9X0esnoa3dFsV0FG35rAT0RIhYFlPq7MiP+DW89La0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_xeipuuv_gojsonpointer",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonpointer",
sum = "h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=",
version = "v0.0.0-20180127040702-4e3ac2762d5f",
)
go_repository(
name = "com_github_xeipuuv_gojsonreference",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonreference",
sum = "h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=",
version = "v0.0.0-20180127040603-bd5ef7bd5415",
)
go_repository(
name = "com_github_xeipuuv_gojsonschema",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonschema",
sum = "h1:ngVtJC9TY/lg0AA/1k48FYhBrhRoFlEmWzsehpNAaZg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_xi2_xz",
build_file_proto_mode = "disable",
importpath = "github.com/xi2/xz",
sum = "h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=",
version = "v0.0.0-20171230120015-48954b6210f8",
)
go_repository(
name = "com_github_xiang90_probing",
build_file_proto_mode = "disable",
importpath = "github.com/xiang90/probing",
sum = "h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8=",
version = "v0.0.0-20190116061207-43a291ad63a2",
)
go_repository(
name = "com_github_xlab_handysort",
build_file_proto_mode = "disable",
importpath = "github.com/xlab/handysort",
sum = "h1:j2hhcujLRHAg872RWAV5yaUrEjHEObwDv3aImCaNLek=",
version = "v0.0.0-20150421192137-fb3537ed64a1",
)
go_repository(
name = "com_github_xordataexchange_crypt",
build_file_proto_mode = "disable",
importpath = "github.com/xordataexchange/crypt",
sum = "h1:ESFSdwYZvkeru3RtdrYueztKhOBCSAAzS4Gf+k0tEow=",
version = "v0.0.3-0.20170626215501-b2862e3d0a77",
)
go_repository(
name = "com_github_yuin_goldmark",
build_file_proto_mode = "disable",
importpath = "github.com/yuin/goldmark",
sum = "h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM=",
version = "v1.2.1",
)
go_repository(
name = "com_github_yvasiyarov_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/go-metrics",
sum = "h1:+lm10QQTNSBd8DVTNGHx7o/IKu9HYDvLMffDhbyLccI=",
version = "v0.0.0-20140926110328-57bccd1ccd43",
)
go_repository(
name = "com_github_yvasiyarov_gorelic",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/gorelic",
sum = "h1:hlE8//ciYMztlGpl/VA+Zm1AcTPHYkHJPbHqE6WJUXE=",
version = "v0.0.0-20141212073537-a9bba5b9ab50",
)
go_repository(
name = "com_github_yvasiyarov_newrelic_platform_go",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/newrelic_platform_go",
sum = "h1:ERexzlUfuTvpE74urLSbIQW0Z/6hF9t8U4NsJLaioAY=",
version = "v0.0.0-20140908184405-b21fdbd4370f",
)
go_repository(
name = "com_google_cloud_go",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go",
sum = "h1:DZeAkuQGQqnm9Xv36SbMJEU8aFBz4wL04UpMWPWwjzg=",
version = "v0.66.0",
)
go_repository(
name = "com_google_cloud_go_bigquery",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/bigquery",
sum = "h1:PQcPefKFdaIzjQFbiyOgAqyx8q5djaE7x9Sqe712DPA=",
version = "v1.8.0",
)
go_repository(
name = "com_google_cloud_go_datastore",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/datastore",
sum = "h1:/May9ojXjRkPBNVrq+oWLqmWCkr4OU5uRY29bu0mRyQ=",
version = "v1.1.0",
)
go_repository(
name = "com_google_cloud_go_firestore",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/firestore",
sum = "h1:9x7Bx0A9R5/M9jibeJeZWqjeVEIxYW9fZYqB9a70/bY=",
version = "v1.1.0",
)
go_repository(
name = "com_google_cloud_go_logging",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/logging",
sum = "h1:kaunpnoEh9L4hu6JUsBa8Y20LBfKnCuDhKUgdZp7oK8=",
version = "v1.0.0",
)
go_repository(
name = "com_google_cloud_go_pubsub",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/pubsub",
sum = "h1:76oR7VBOkL7ivoIrFKyW0k7YDCRelrlxktIzQiIUGgg=",
version = "v1.4.0",
)
go_repository(
name = "com_google_cloud_go_storage",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/storage",
sum = "h1:4y3gHptW1EHVtcPAVE0eBBlFuGqEejTTG3KdIE0lUX4=",
version = "v1.12.0",
)
go_repository(
name = "com_shuralyov_dmitri_gpu_mtl",
build_file_proto_mode = "disable",
importpath = "dmitri.shuralyov.com/gpu/mtl",
sum = "h1:VpgP7xuJadIUuKccphEpTJnWhS2jkQyMt6Y7pJCD7fY=",
version = "v0.0.0-20190408044501-666a987793e9",
)
go_repository(
name = "com_sourcegraph_sqs_pbtypes",
build_file_proto_mode = "disable",
importpath = "sourcegraph.com/sqs/pbtypes",
sum = "h1:f7lAwqviDEGvON4kRv0o5V7FT/IQK+tbkF664XMbP3o=",
version = "v1.0.0",
)
go_repository(
name = "com_sslmate_software_src_go_pkcs12",
build_file_proto_mode = "disable",
importpath = "software.sslmate.com/src/go-pkcs12",
sum = "h1:AVd6O+azYjVQYW1l55IqkbL8/JxjrLtO6q4FCmV8N5c=",
version = "v0.0.0-20200830195227-52f69702a001",
)
go_repository(
name = "dev_gocloud",
build_file_proto_mode = "disable",
importpath = "gocloud.dev",
sum = "h1:EDRyaRAnMGSq/QBto486gWFxMLczAfIYUmusV7XLNBM=",
version = "v0.19.0",
)
go_repository(
name = "dev_knative_caching",
build_file_proto_mode = "disable",
importpath = "knative.dev/caching",
sum = "h1:mxrur6DsVK8uIjhIq7c1OMls4YjBcRlyvnh3Vx13a0M=",
version = "v0.0.0-20200116200605-67bca2c83dfa",
)
go_repository(
name = "dev_knative_eventing_contrib",
build_file_proto_mode = "disable",
importpath = "knative.dev/eventing-contrib",
sum = "h1:xncT+JrokPG+hPUFJwue8ubPpzmziV9GUIZqYt01JDo=",
version = "v0.11.2",
)
go_repository(
name = "dev_knative_pkg",
build_file_proto_mode = "disable",
importpath = "knative.dev/pkg",
sum = "h1:NDQS+236vhwCP9oiBBGvQ5WGzbD0Y8Pcv9dtE2stg+Q=",
version = "v0.0.0-20200711004937-22502028e31a",
)
go_repository(
name = "dev_knative_test_infra",
build_file_proto_mode = "disable",
importpath = "knative.dev/test-infra",
sum = "h1:wNlGK4f5Ykqh3KLC5RlyR9kvzvRgo/LwJQNsZWGVHnU=",
version = "v0.0.0-20200707183444-aed09e56ddc7",
)
go_repository(
name = "in_gopkg_airbrake_gobrake_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/airbrake/gobrake.v2",
sum = "h1:7z2uVWwn7oVeeugY1DtlPAy5H+KYgB1KeKTnqjNatLo=",
version = "v2.0.9",
)
go_repository(
name = "in_gopkg_alecthomas_kingpin_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/alecthomas/kingpin.v2",
sum = "h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=",
version = "v2.2.6",
)
go_repository(
name = "in_gopkg_asn1_ber_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/asn1-ber.v1",
sum = "h1:TxyelI5cVkbREznMhfzycHdkp5cLA7DpE+GKjSslYhM=",
version = "v1.0.0-20181015200546-f715ec2f112d",
)
go_repository(
name = "in_gopkg_check_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/check.v1",
sum = "h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=",
version = "v1.0.0-20201130134442-10cb98267c6c",
)
go_repository(
name = "in_gopkg_cheggaaa_pb_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/cheggaaa/pb.v1",
sum = "h1:Ev7yu1/f6+d+b3pi5vPdRPc6nNtP1umSfcWiEfRqv6I=",
version = "v1.0.25",
)
go_repository(
name = "in_gopkg_errgo_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/errgo.v2",
sum = "h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8=",
version = "v2.1.0",
)
go_repository(
name = "in_gopkg_fsnotify_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/fsnotify.v1",
sum = "h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=",
version = "v1.4.7",
)
go_repository(
name = "in_gopkg_gcfg_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/gcfg.v1",
sum = "h1:0HIbH907iBTAntm+88IJV2qmJALDAh8sPekI9Vc1fm0=",
version = "v1.2.0",
)
go_repository(
name = "in_gopkg_gemnasium_logrus_airbrake_hook_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/gemnasium/logrus-airbrake-hook.v2",
sum = "h1:OAj3g0cR6Dx/R07QgQe8wkA9RNjB2u4i700xBkIT4e0=",
version = "v2.1.2",
)
go_repository(
name = "in_gopkg_inf_v0",
build_file_proto_mode = "disable",
importpath = "gopkg.in/inf.v0",
sum = "h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=",
version = "v0.9.1",
)
go_repository(
name = "in_gopkg_ini_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/ini.v1",
sum = "h1:DPMeDvGTM54DXbPkVIZsp19fp/I2K7zwA/itHYHKo8Y=",
version = "v1.56.0",
)
go_repository(
name = "in_gopkg_jcmturner_aescts_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/aescts.v1",
sum = "h1:cVVZBK2b1zY26haWB4vbBiZrfFQnfbTVrE3xZq6hrEw=",
version = "v1.0.1",
)
go_repository(
name = "in_gopkg_jcmturner_dnsutils_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/dnsutils.v1",
sum = "h1:cIuC1OLRGZrld+16ZJvvZxVJeKPsvd5eUIvxfoN5hSM=",
version = "v1.0.1",
)
go_repository(
name = "in_gopkg_jcmturner_gokrb5_v7",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/gokrb5.v7",
sum = "h1:0709Jtq/6QXEuWRfAm260XqlpcwL1vxtO1tUE2qK8Z4=",
version = "v7.3.0",
)
go_repository(
name = "in_gopkg_jcmturner_rpc_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/rpc.v1",
sum = "h1:QHIUxTX1ISuAv9dD2wJ9HWQVuWDX/Zc0PfeC2tjc4rU=",
version = "v1.1.0",
)
go_repository(
name = "in_gopkg_natefinch_lumberjack_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/natefinch/lumberjack.v2",
sum = "h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8=",
version = "v2.0.0",
)
go_repository(
name = "in_gopkg_resty_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/resty.v1",
sum = "h1:CuXP0Pjfw9rOuY6EP+UvtNvt5DSqHpIxILZKT/quCZI=",
version = "v1.12.0",
)
go_repository(
name = "in_gopkg_robfig_cron_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/robfig/cron.v2",
sum = "h1:E846t8CnR+lv5nE+VuiKTDG/v1U2stad0QzddfJC7kY=",
version = "v2.0.0-20150107220207-be2e0b0deed5",
)
go_repository(
name = "in_gopkg_square_go_jose_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/square/go-jose.v2",
sum = "h1:SK5KegNXmKmqE342YYN2qPHEnUYeoMiXXl1poUlI+o4=",
version = "v2.3.1",
)
go_repository(
name = "in_gopkg_src_d_go_billy_v4",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-billy.v4",
sum = "h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg=",
version = "v4.3.2",
)
go_repository(
name = "in_gopkg_src_d_go_git_fixtures_v3",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-git-fixtures.v3",
sum = "h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg=",
version = "v3.5.0",
)
go_repository(
name = "in_gopkg_src_d_go_git_v4",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-git.v4",
sum = "h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE=",
version = "v4.13.1",
)
go_repository(
name = "in_gopkg_tomb_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/tomb.v1",
sum = "h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=",
version = "v1.0.0-20141024135613-dd632973f1e7",
)
go_repository(
name = "in_gopkg_warnings_v0",
build_file_proto_mode = "disable",
importpath = "gopkg.in/warnings.v0",
sum = "h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=",
version = "v0.1.2",
)
go_repository(
name = "in_gopkg_yaml_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v1",
sum = "h1:POO/ycCATvegFmVuPpQzZFJ+pGZeX22Ufu6fibxDVjU=",
version = "v1.0.0-20140924161607-9f9df34309c0",
)
go_repository(
name = "in_gopkg_yaml_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v2",
sum = "h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=",
version = "v2.4.0",
)
go_repository(
name = "in_gopkg_yaml_v3",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v3",
sum = "h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=",
version = "v3.0.0-20210107192922-496545a6307b",
)
go_repository(
name = "io_etcd_go_bbolt",
build_file_proto_mode = "disable",
importpath = "go.etcd.io/bbolt",
sum = "h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0=",
version = "v1.3.5",
)
go_repository(
name = "io_etcd_go_etcd",
build_file_proto_mode = "disable",
importpath = "go.etcd.io/etcd",
sum = "h1:1JFLBqwIgdyHN1ZtgjTBwO+blA6gVOmZurpiMEsETKo=",
version = "v0.5.0-alpha.5.0.20200910180754-dd1b699fc489",
)
go_repository(
name = "io_gitea_code_sdk_gitea",
build_file_proto_mode = "disable",
importpath = "code.gitea.io/sdk/gitea",
sum = "h1:hvDCz4wtFvo7rf5Ebj8tGd4aJ4wLPKX3BKFX9Dk1Pgs=",
version = "v0.12.0",
)
    go_repository(
        name = "io_k8s_api",
        # "disable_global" differs from the "disable" used by most entries in
        # this file; presumably it also suppresses proto handling for this
        # repo's transitive deps via gazelle's global mode -- TODO confirm
        # against the gazelle go_repository docs.
        build_file_proto_mode = "disable_global",
        importpath = "k8s.io/api",
        sum = "h1:94bbZ5NTjdINJEdzOkpS4vdPhkb1VFpTYC9zh43f75c=",
        version = "v0.21.1",
    )
go_repository(
name = "io_k8s_apiextensions_apiserver",
build_file_proto_mode = "disable",
importpath = "k8s.io/apiextensions-apiserver",
sum = "h1:AA+cnsb6w7SZ1vD32Z+zdgfXdXY8X9uGX5bN6EoPEIo=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_apimachinery",
build_file_proto_mode = "disable_global",
importpath = "k8s.io/apimachinery",
sum = "h1:Q6XuHGlj2xc+hlMCvqyYfbv3H7SRGn2c8NycxJquDVs=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_apiserver",
build_file_proto_mode = "disable",
importpath = "k8s.io/apiserver",
sum = "h1:wTRcid53IhxhbFt4KTrFSw8tAncfr01EP91lzfcygVg=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_cli_runtime",
build_file_proto_mode = "disable",
importpath = "k8s.io/cli-runtime",
sum = "h1:wLe+osHSqcItyS3MYQXVyGFa54fppORVA8Jn7DBGSWw=",
version = "v0.19.0",
)
    go_repository(
        name = "io_k8s_client_go",
        build_file_proto_mode = "disable_global",
        importpath = "k8s.io/client-go",
        # NOTE(review): self-replace ("k8s.io/client-go" -> itself) is the only
        # replace directive visible in this file; it looks like a pin to the
        # canonical module path at the version below, overriding any replace
        # inherited from go.mod -- confirm intent before touching.
        replace = "k8s.io/client-go",
        sum = "h1:bhblWYLZKUu+pm50plvQF8WpY6TXdRRtcS/K9WauOj4=",
        version = "v0.21.1",
    )
go_repository(
name = "io_k8s_cloud_provider",
build_file_proto_mode = "disable",
importpath = "k8s.io/cloud-provider",
sum = "h1:ELMIQwweSNu8gfVEnLDypxd9034S1sZJg6QcdWJOvMI=",
version = "v0.17.4",
)
go_repository(
name = "io_k8s_code_generator",
build_file_proto_mode = "disable",
importpath = "k8s.io/code-generator",
sum = "h1:jvcxHpVu5dm/LMXr3GOj/jroiP8+v2YnJE9i2OVRenk=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_component_base",
build_file_proto_mode = "disable",
importpath = "k8s.io/component-base",
sum = "h1:iLpj2btXbR326s/xNQWmPNGu0gaYSjzn7IN/5i28nQw=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_csi_translation_lib",
build_file_proto_mode = "disable",
importpath = "k8s.io/csi-translation-lib",
sum = "h1:bP9yGfCJDknP7tklCwizZtwgJNRePMVcEaFIfeA11ho=",
version = "v0.17.4",
)
go_repository(
name = "io_k8s_gengo",
build_file_proto_mode = "disable",
importpath = "k8s.io/gengo",
sum = "h1:Uusb3oh8XcdzDF/ndlI4ToKTYVlkCSJP39SRY2mfRAw=",
version = "v0.0.0-20201214224949-b6c5ce23f027",
)
go_repository(
name = "io_k8s_klog",
build_file_proto_mode = "disable",
importpath = "k8s.io/klog",
sum = "h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8=",
version = "v1.0.0",
)
# NOTE(review): the go_repository rules below are machine-generated Go module
# pins (presumably emitted by `gazelle update-repos` — confirm the
# regeneration workflow before editing by hand).  For each external repo:
#   - name: the Bazel external-repository name derived from the import path
#   - importpath: the Go module import path
#   - sum / version: the pinned go.sum-style checksum and module version
#   - build_file_proto_mode = "disable": tells gazelle not to generate
#     go_proto_library rules when it creates BUILD files for this repo.
go_repository(
name = "io_k8s_klog_v2",
build_file_proto_mode = "disable",
importpath = "k8s.io/klog/v2",
sum = "h1:Q3gmuM9hKEjefWFFYF0Mat+YyFJvsUyYuwyNNJ5C9Ts=",
version = "v2.8.0",
)
go_repository(
name = "io_k8s_kube_aggregator",
build_file_proto_mode = "disable",
importpath = "k8s.io/kube-aggregator",
sum = "h1:rL4fsftMaqkKjaibArYDaBeqN41CHaJzgRJjUB9IrIg=",
version = "v0.19.0",
)
go_repository(
name = "io_k8s_kube_openapi",
build_file_proto_mode = "disable",
importpath = "k8s.io/kube-openapi",
sum = "h1:vEx13qjvaZ4yfObSSXW7BrMc/KQBBT/Jyee8XtLf4x0=",
version = "v0.0.0-20210305001622-591a79e4bda7",
)
go_repository(
name = "io_k8s_kubectl",
build_file_proto_mode = "disable",
importpath = "k8s.io/kubectl",
sum = "h1:t9uxaZzGvqc2jY96mjnPSjFHtaKOxoUegeGZdaGT6aw=",
version = "v0.19.0",
)
go_repository(
name = "io_k8s_kubernetes",
build_file_proto_mode = "disable",
importpath = "k8s.io/kubernetes",
sum = "h1:wJx/r2HuPVaaBeCUk/P47GSK0eyrj3mI/kESRFBp6/A=",
version = "v1.14.7",
)
go_repository(
name = "io_k8s_legacy_cloud_providers",
build_file_proto_mode = "disable",
importpath = "k8s.io/legacy-cloud-providers",
sum = "h1:VvFqJGiYAr2gIdoNuqbeZLEdxIFeN4Yt6OLJS9l2oIE=",
version = "v0.17.4",
)
go_repository(
name = "io_k8s_metrics",
build_file_proto_mode = "disable",
importpath = "k8s.io/metrics",
sum = "h1:cKq0+Z7wg5qkK1n8dryNffKfU22DBX83JguGpR+TCk0=",
version = "v0.19.0",
)
go_repository(
name = "io_k8s_sigs_apiserver_network_proxy_konnectivity_client",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/apiserver-network-proxy/konnectivity-client",
sum = "h1:4uqm9Mv+w2MmBYD+F4qf/v6tDFUdPOk29C095RbU5mY=",
version = "v0.0.15",
)
go_repository(
name = "io_k8s_sigs_boskos",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/boskos",
sum = "h1:OCr84Jrq4HgrYxP9wrfSsGioR1VSpTZMh/RXMu5sm+8=",
version = "v0.0.0-20210730172138-093b54882439",
)
go_repository(
name = "io_k8s_sigs_controller_runtime",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/controller-runtime",
sum = "h1:ZIZ/dtpboPSbZYY7uUz2OzrkaBTOThx2yekLtpGB+zY=",
version = "v0.9.0",
)
go_repository(
name = "io_k8s_sigs_controller_tools",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/controller-tools",
sum = "h1:PXOHvyYAjWfO0UfQvaUo33HpXNCOilV3i/Vjc7iM1/A=",
version = "v0.2.9-0.20200414181213-645d44dca7c0",
)
go_repository(
name = "io_k8s_sigs_kustomize",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/kustomize",
sum = "h1:JUufWFNlI44MdtnjUqVnvh29rR37PQFzPbLXqhyOyX0=",
version = "v2.0.3+incompatible",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff",
sum = "h1:LOs1LZWMsz1xs77Phr/pkB4LFaavH7IVq/3+WTN9XTA=",
version = "v1.0.1",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff_v2",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff/v2",
sum = "h1:I0h4buiCqDtPztO3NOiyoNMtqSIfld49D4Wj3UBXYZA=",
version = "v2.0.1",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff_v3",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff/v3",
sum = "h1:dOmIZBMfhcHS09XZkMyUgkq5trg3/jRyJYFZUiaOp8E=",
version = "v3.0.0",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff_v4",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff/v4",
sum = "h1:C4r9BgJ98vrKnnVCjwCSXcWjWe0NKcUQkmzDXZXGwH8=",
version = "v4.1.0",
)
go_repository(
name = "io_k8s_sigs_testing_frameworks",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/testing_frameworks",
sum = "h1:vK0+tvjF0BZ/RYFeZ1E6BYBwHJJXhjuZ3TdsEKH+UQM=",
version = "v0.1.2",
)
go_repository(
name = "io_k8s_sigs_yaml",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/yaml",
sum = "h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q=",
version = "v1.2.0",
)
go_repository(
name = "io_k8s_test_infra",
build_file_proto_mode = "disable",
importpath = "k8s.io/test-infra",
sum = "h1:g5GLdRIOMs9vnEM/ZWG67f1Stn8cW1dER+MNK9P7Xn8=",
version = "v0.0.0-20210903101950-5c7809e9c5e9",
)
go_repository(
name = "io_k8s_utils",
build_file_proto_mode = "disable",
importpath = "k8s.io/utils",
sum = "h1:MSqsVQ3pZvPGTqCjptfimO2WjG7A9un2zcpiHkA6M/s=",
version = "v0.0.0-20210527160623-6fdb442a123b",
)
go_repository(
name = "io_opencensus_go",
build_file_proto_mode = "disable",
importpath = "go.opencensus.io",
sum = "h1:LYy1Hy3MJdrCdMwwzxA/dRok4ejH+RwNGbuoD9fCjto=",
version = "v0.22.4",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_aws",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/aws",
sum = "h1:YsbWYxDZkC7x2OxlsDEYvvEXZ3cBI3qBgUK5BqkZvRw=",
version = "v0.0.0-20181029163544-2befc13012d0",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_ocagent",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/ocagent",
sum = "h1:Z1n6UAyr0QwM284yUuh5Zd8JlvxUGAhFZcgMJkMPrGM=",
version = "v0.6.0",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_prometheus",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/prometheus",
sum = "h1:SByaIoWwNgMdPSgl5sMqM2KDE5H/ukPWBRo314xiDvg=",
version = "v0.1.0",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_stackdriver",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/stackdriver",
sum = "h1:RX9W6FelAqTVnBi/bRXJLXr9n18v4QkQwZYIdnNS51I=",
version = "v0.13.1",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_zipkin",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/zipkin",
sum = "h1:PR+1zWqY8ceXs1qDQQIlgXe+sdiwCf0n32bH4+Epk8g=",
version = "v0.1.1",
)
go_repository(
name = "io_opencensus_go_contrib_integrations_ocsql",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/integrations/ocsql",
sum = "h1:kfg5Yyy1nYUrqzyfW5XX+dzMASky8IJXhtHe0KTYNS4=",
version = "v0.1.4",
)
go_repository(
name = "io_opencensus_go_contrib_resource",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/resource",
sum = "h1:4r2CANuYhKGmYWP02+5E94rLRcS/YeD+KlxSrOsMxk0=",
version = "v0.1.1",
)
go_repository(
name = "io_rsc_binaryregexp",
build_file_proto_mode = "disable",
importpath = "rsc.io/binaryregexp",
sum = "h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE=",
version = "v0.2.0",
)
go_repository(
name = "io_rsc_letsencrypt",
build_file_proto_mode = "disable",
importpath = "rsc.io/letsencrypt",
sum = "h1:H7xDfhkaFFSYEJlKeq38RwX2jYcnTeHuDQyT+mMNMwM=",
version = "v0.0.3",
)
go_repository(
name = "io_rsc_quote_v3",
build_file_proto_mode = "disable",
importpath = "rsc.io/quote/v3",
sum = "h1:9JKUTTIUgS6kzR9mK1YuGKv6Nl+DijDNIc0ghT58FaY=",
version = "v3.1.0",
)
go_repository(
name = "io_rsc_sampler",
build_file_proto_mode = "disable",
importpath = "rsc.io/sampler",
sum = "h1:7uVkIFmeBqHfdjD+gZwtXXI+RODJ2Wc4O7MPEh/QiW4=",
version = "v1.3.0",
)
go_repository(
name = "ml_vbom_util",
build_file_proto_mode = "disable",
importpath = "vbom.ml/util",
sum = "h1:O69FD9pJA4WUZlEwYatBEEkRWKQ5cKodWpdKTrCS/iQ=",
version = "v0.0.0-20180919145318-efcd4e0f9787",
)
go_repository(
name = "org_apache_git_thrift_git",
build_file_proto_mode = "disable",
importpath = "git.apache.org/thrift.git",
sum = "h1:CMxsZlAmxKs+VAZMlDDL0wXciMblJcutQbEe3A9CYUM=",
version = "v0.12.0",
)
go_repository(
name = "org_bazil_fuse",
build_file_proto_mode = "disable",
importpath = "bazil.org/fuse",
sum = "h1:FNCRpXiquG1aoyqcIWVFmpTSKVcx2bQD38uZZeGtdlw=",
version = "v0.0.0-20180421153158-65cc252bf669",
)
go_repository(
name = "org_go4",
build_file_proto_mode = "disable",
importpath = "go4.org",
sum = "h1:iqAGo78tVOJXELHQFRjR6TMwItrvXH4hrGJ32I/NFF8=",
version = "v0.0.0-20201209231011-d4a079459e60",
)
go_repository(
name = "org_golang_google_api",
build_file_proto_mode = "disable",
importpath = "google.golang.org/api",
sum = "h1:Le77IccnTqEa8ryp9wIpX5W3zYm7Gf9LhOp9PHcwFts=",
version = "v0.32.0",
)
go_repository(
name = "org_golang_google_appengine",
build_file_proto_mode = "disable",
importpath = "google.golang.org/appengine",
sum = "h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=",
version = "v1.6.7",
)
go_repository(
name = "org_golang_google_cloud",
build_file_proto_mode = "disable",
importpath = "google.golang.org/cloud",
sum = "h1:Cpp2P6TPjujNoC5M2KHY6g7wfyLYfIWRZaSdIKfDasA=",
version = "v0.0.0-20151119220103-975617b05ea8",
)
go_repository(
name = "org_golang_google_genproto",
build_file_proto_mode = "disable",
importpath = "google.golang.org/genproto",
sum = "h1:pOwg4OoaRYScjmR4LlLgdtnyoHYTSAVhhqe5uPdpII8=",
version = "v0.0.0-20201110150050-8816d57aaa9a",
)
go_repository(
name = "org_golang_google_grpc",
build_file_proto_mode = "disable",
importpath = "google.golang.org/grpc",
sum = "h1:zWTV+LMdc3kaiJMSTOFz2UgSBgx8RNQoTGiZu3fR9S0=",
version = "v1.32.0",
)
go_repository(
name = "org_golang_google_protobuf",
build_file_proto_mode = "disable",
importpath = "google.golang.org/protobuf",
sum = "h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=",
version = "v1.26.0",
)
go_repository(
name = "org_golang_x_crypto",
build_file_proto_mode = "disable",
importpath = "golang.org/x/crypto",
sum = "h1:It14KIkyBFYkHkwZ7k45minvA9aorojkyjGk9KJ5B/w=",
version = "v0.0.0-20210322153248-0c34fe9e7dc2",
)
go_repository(
name = "org_golang_x_exp",
build_file_proto_mode = "disable",
importpath = "golang.org/x/exp",
sum = "h1:QE6XYQK6naiK1EPAe1g/ILLxN5RBoH5xkJk3CqlMI/Y=",
version = "v0.0.0-20200224162631-6cc2880d07d6",
)
go_repository(
name = "org_golang_x_image",
build_file_proto_mode = "disable",
importpath = "golang.org/x/image",
sum = "h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4=",
version = "v0.0.0-20190802002840-cff245a6509b",
)
# NOTE(review): `replace` here presumably mirrors a go.mod replace directive
# (self-replacement to pin an exact commit) — confirm against the go.mod.
go_repository(
name = "org_golang_x_lint",
build_file_proto_mode = "disable",
importpath = "golang.org/x/lint",
replace = "golang.org/x/lint",
sum = "h1:QzoH/1pFpZguR8NrRHLcO6jKqfv2zpuSqZLgdm7ZmjI=",
version = "v0.0.0-20190409202823-959b441ac422",
)
go_repository(
name = "org_golang_x_mobile",
build_file_proto_mode = "disable",
importpath = "golang.org/x/mobile",
sum = "h1:b373EGXtj0o+ssqkOkdVphTCZ/fVg2LwhctJn2QQbqA=",
version = "v0.0.0-20190806162312-597adff16ade",
)
go_repository(
name = "org_golang_x_mod",
build_file_proto_mode = "disable",
importpath = "golang.org/x/mod",
sum = "h1:8pl+sMODzuvGJkmj2W4kZihvVb5mKm8pB/X44PIQHv8=",
version = "v0.4.0",
)
go_repository(
name = "org_golang_x_net",
build_file_proto_mode = "disable",
importpath = "golang.org/x/net",
sum = "h1:DzZ89McO9/gWPsQXS/FVKAlG02ZjaQ6AlZRBimEYOd0=",
version = "v0.0.0-20210428140749-89ef3d95e781",
)
go_repository(
name = "org_golang_x_oauth2",
build_file_proto_mode = "disable",
importpath = "golang.org/x/oauth2",
sum = "h1:ld7aEMNHoBnnDAX15v1T6z31v8HwR2A9FYOuAhWqkwc=",
version = "v0.0.0-20200902213428-5d25da1a8d43",
)
go_repository(
name = "org_golang_x_sync",
build_file_proto_mode = "disable",
importpath = "golang.org/x/sync",
sum = "h1:DcqTD9SDLc+1P/r1EmRBwnVsrOwW+kk2vWf9n+1sGhs=",
version = "v0.0.0-20201207232520-09787c993a3a",
)
go_repository(
name = "org_golang_x_sys",
build_file_proto_mode = "disable",
importpath = "golang.org/x/sys",
sum = "h1:JWgyZ1qgdTaF3N3oxC+MdTV7qvEEgHo3otj+HB5CM7Q=",
version = "v0.0.0-20210603081109-ebe580a85c40",
)
go_repository(
name = "org_golang_x_term",
build_file_proto_mode = "disable",
importpath = "golang.org/x/term",
sum = "h1:SZxvLBoTP5yHO3Frd4z4vrF+DBX9vMVanchswa69toE=",
version = "v0.0.0-20210220032956-6a3ed077a48d",
)
go_repository(
name = "org_golang_x_text",
build_file_proto_mode = "disable",
importpath = "golang.org/x/text",
sum = "h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=",
version = "v0.3.6",
)
go_repository(
name = "org_golang_x_time",
build_file_proto_mode = "disable",
importpath = "golang.org/x/time",
sum = "h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE=",
version = "v0.0.0-20210220033141-f8bda1e9f3ba",
)
go_repository(
name = "org_golang_x_tools",
build_file_proto_mode = "disable",
importpath = "golang.org/x/tools",
sum = "h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY=",
version = "v0.1.0",
)
go_repository(
name = "org_golang_x_xerrors",
build_file_proto_mode = "disable",
importpath = "golang.org/x/xerrors",
sum = "h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=",
version = "v0.0.0-20200804184101-5ec99f83aff1",
)
go_repository(
name = "org_gonum_v1_gonum",
build_file_proto_mode = "disable",
importpath = "gonum.org/v1/gonum",
sum = "h1:OB/uP/Puiu5vS5QMRPrXCDWUPb+kt8f1KW8oQzFejQw=",
version = "v0.0.0-20190331200053-3d26580ed485",
)
go_repository(
name = "org_gonum_v1_netlib",
build_file_proto_mode = "disable",
importpath = "gonum.org/v1/netlib",
sum = "h1:jRyg0XfpwWlhEV8mDfdNGBeSJM2fuyh9Yjrnd8kF2Ts=",
version = "v0.0.0-20190331212654-76723241ea4e",
)
go_repository(
name = "org_modernc_cc",
build_file_proto_mode = "disable",
importpath = "modernc.org/cc",
sum = "h1:nPibNuDEx6tvYrUAtvDTTw98rx5juGsa5zuDnKwEEQQ=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_golex",
build_file_proto_mode = "disable",
importpath = "modernc.org/golex",
sum = "h1:wWpDlbK8ejRfSyi0frMyhilD3JBvtcx2AdGDnU+JtsE=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_mathutil",
build_file_proto_mode = "disable",
importpath = "modernc.org/mathutil",
sum = "h1:93vKjrJopTPrtTNpZ8XIovER7iCIH1QU7wNbOQXC60I=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_strutil",
build_file_proto_mode = "disable",
importpath = "modernc.org/strutil",
sum = "h1:XVFtQwFVwc02Wk+0L/Z/zDDXO81r5Lhe6iMKmGX3KhE=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_xc",
build_file_proto_mode = "disable",
importpath = "modernc.org/xc",
sum = "h1:7ccXrupWZIS3twbUGrtKmHS2DXY6xegFua+6O3xgAFU=",
version = "v1.0.0",
)
go_repository(
name = "org_mongodb_go_mongo_driver",
build_file_proto_mode = "disable",
importpath = "go.mongodb.org/mongo-driver",
sum = "h1:jxcFYjlkl8xaERsgLo+RNquI0epW6zuy/ZRQs6jnrFA=",
version = "v1.1.2",
)
go_repository(
name = "org_uber_go_atomic",
build_file_proto_mode = "disable",
importpath = "go.uber.org/atomic",
sum = "h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=",
version = "v1.7.0",
)
go_repository(
name = "org_uber_go_goleak",
build_file_proto_mode = "disable",
importpath = "go.uber.org/goleak",
sum = "h1:z+mqJhf6ss6BSfSM671tgKyZBFPTTJM+HLxnhPC3wu0=",
version = "v1.1.10",
)
go_repository(
name = "org_uber_go_multierr",
build_file_proto_mode = "disable",
importpath = "go.uber.org/multierr",
sum = "h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=",
version = "v1.6.0",
)
go_repository(
name = "org_uber_go_tools",
build_file_proto_mode = "disable",
importpath = "go.uber.org/tools",
sum = "h1:0mgffUl7nfd+FpvXMVz4IDEaUSmT1ysygQC7qYo7sG4=",
version = "v0.0.0-20190618225709-2cfd321de3ee",
)
go_repository(
name = "org_uber_go_zap",
build_file_proto_mode = "disable",
importpath = "go.uber.org/zap",
sum = "h1:MTjgFu6ZLKvY6Pvaqk97GlxNBuMpV4Hy/3P6tRGlI2U=",
version = "v1.17.0",
)
go_repository(
name = "sh_helm_helm_v3",
build_file_proto_mode = "disable",
importpath = "helm.sh/helm/v3",
sum = "h1:aykwPMVyQyncZ8iLNVMXgJ1l3c6W0+LSOPmqp8JdCjs=",
version = "v3.1.1",
)
go_repository(
name = "tools_gotest",
build_file_proto_mode = "disable",
importpath = "gotest.tools",
sum = "h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=",
version = "v2.2.0+incompatible",
)
go_repository(
name = "tools_gotest_v3",
build_file_proto_mode = "disable",
importpath = "gotest.tools/v3",
sum = "h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0=",
version = "v3.0.3",
)
go_repository(
name = "xyz_gomodules_jsonpatch_v2",
build_file_proto_mode = "disable",
importpath = "gomodules.xyz/jsonpatch/v2",
sum = "h1:4pT439QV83L+G9FkcCriY6EkpcK6r6bK+A5FBUMI7qY=",
version = "v2.2.0",
)
| 38.474667 | 81 | 0.641536 | load("@bazel_gazelle//:deps.bzl", "go_repository")
def go_deps():
go_repository(
name = "ag_pack_amqp",
build_file_proto_mode = "disable",
importpath = "pack.ag/amqp",
sum = "h1:cuNDWLUTbKRtEZwhB0WQBXf9pGbm87pUBXQhvcFxBWg=",
version = "v0.11.2",
)
go_repository(
name = "cc_mvdan_interfacer",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/interfacer",
sum = "h1:WX1yoOaKQfddO/mLzdV4wptyWgoH/6hwLs7QHTixo0I=",
version = "v0.0.0-20180901003855-c20040233aed",
)
go_repository(
name = "cc_mvdan_lint",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/lint",
sum = "h1:DxJ5nJdkhDlLok9K6qO+5290kphDJbHOQO1DFFFTeBo=",
version = "v0.0.0-20170908181259-adc824a0674b",
)
go_repository(
name = "cc_mvdan_unparam",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/unparam",
sum = "h1:kAREL6MPwpsk1/PQPFD3Eg7WAQR5mPTWZJaBiG5LDbY=",
version = "v0.0.0-20200501210554-b37ab49443f7",
)
go_repository(
name = "cc_mvdan_xurls_v2",
build_file_proto_mode = "disable",
importpath = "mvdan.cc/xurls/v2",
sum = "h1:r1zSOSNS/kqtpmATyMMMvaZ4/djsesbYz5kr0+qMRWc=",
version = "v2.0.0",
)
go_repository(
name = "co_honnef_go_tools",
build_file_proto_mode = "disable",
importpath = "honnef.co/go/tools",
sum = "h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8=",
version = "v0.0.1-2020.1.4",
)
go_repository(
name = "com_github_agnivade_levenshtein",
build_file_proto_mode = "disable",
importpath = "github.com/agnivade/levenshtein",
sum = "h1:3oJU7J3FGFmyhn8KHjmVaZCN5hxTr7GxgRue+sxIXdQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_alcortesm_tgz",
build_file_proto_mode = "disable",
importpath = "github.com/alcortesm/tgz",
sum = "h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=",
version = "v0.0.0-20161220082320-9c5fe88206d7",
)
go_repository(
name = "com_github_alecthomas_kingpin",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/kingpin",
sum = "h1:5svnBTFgJjZvGKyYBtMB0+m5wvrbUHiqye8wRJMlnYI=",
version = "v2.2.6+incompatible",
)
go_repository(
name = "com_github_alecthomas_template",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/template",
sum = "h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM=",
version = "v0.0.0-20190718012654-fb15b899a751",
)
go_repository(
name = "com_github_alecthomas_units",
build_file_proto_mode = "disable",
importpath = "github.com/alecthomas/units",
sum = "h1:UQZhZ2O0vMHr2cI+DC1Mbh0TJxzA3RcLoMsFw+aXw7E=",
version = "v0.0.0-20190924025748-f65c72e2690d",
)
go_repository(
name = "com_github_andreyvit_diff",
build_file_proto_mode = "disable",
importpath = "github.com/andreyvit/diff",
sum = "h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=",
version = "v0.0.0-20170406064948-c7f18ee00883",
)
go_repository(
name = "com_github_andybalholm_brotli",
build_file_proto_mode = "disable",
importpath = "github.com/andybalholm/brotli",
sum = "h1:bZ28Hqta7TFAK3Q08CMvv8y3/8ATaEqv2nGoc6yff6c=",
version = "v0.0.0-20190621154722-5f990b63d2d6",
)
go_repository(
name = "com_github_andygrunwald_go_gerrit",
build_file_proto_mode = "disable",
importpath = "github.com/andygrunwald/go-gerrit",
sum = "h1:7gokoTWteZhP1t2f0OzrFFXlyL8o0+b0r4ZaRV9PXOs=",
version = "v0.0.0-20210709065208-9d38b0be0268",
)
go_repository(
name = "com_github_andygrunwald_go_jira",
build_file_proto_mode = "disable",
importpath = "github.com/andygrunwald/go-jira",
sum = "h1:vvIImGgX32bHfoiyUwkNo+/YrPnRczNarvhLOncP6dE=",
version = "v1.13.0",
)
go_repository(
name = "com_github_anmitsu_go_shlex",
build_file_proto_mode = "disable",
importpath = "github.com/anmitsu/go-shlex",
sum = "h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=",
version = "v0.0.0-20161002113705-648efa622239",
)
go_repository(
name = "com_github_antihax_optional",
build_file_proto_mode = "disable",
importpath = "github.com/antihax/optional",
sum = "h1:uZuxRZCz65cG1o6K/xUqImNcYKtmk9ylqaH0itMSvzA=",
version = "v0.0.0-20180407024304-ca021399b1a6",
)
go_repository(
name = "com_github_apache_thrift",
build_file_proto_mode = "disable",
importpath = "github.com/apache/thrift",
sum = "h1:pODnxUFNcjP9UTLZGTdeh+j16A8lJbRvD3rOtrk/7bs=",
version = "v0.12.0",
)
go_repository(
name = "com_github_apex_log",
build_file_proto_mode = "disable",
importpath = "github.com/apex/log",
sum = "h1:1fyfbPvUwD10nMoh3hY6MXzvZShJQn9/ck7ATgAt5pA=",
version = "v1.3.0",
)
go_repository(
name = "com_github_apex_logs",
build_file_proto_mode = "disable",
importpath = "github.com/apex/logs",
sum = "h1:KmEBVwfDUOTFcBO8cfkJYwdQ5487UZSN+GteOGPmiro=",
version = "v0.0.4",
)
go_repository(
name = "com_github_aphistic_golf",
build_file_proto_mode = "disable",
importpath = "github.com/aphistic/golf",
sum = "h1:2KLQMJ8msqoPHIPDufkxVcoTtcmE5+1sL9950m4R9Pk=",
version = "v0.0.0-20180712155816-02c07f170c5a",
)
go_repository(
name = "com_github_aphistic_sweet",
build_file_proto_mode = "disable",
importpath = "github.com/aphistic/sweet",
sum = "h1:I4z+fAUqvKfvZV/CHi5dV0QuwbmIvYYFDjG0Ss5QpAs=",
version = "v0.2.0",
)
go_repository(
name = "com_github_armon_circbuf",
build_file_proto_mode = "disable",
importpath = "github.com/armon/circbuf",
sum = "h1:QEF07wC0T1rKkctt1RINW/+RMTVmiwxETico2l3gxJA=",
version = "v0.0.0-20150827004946-bbbad097214e",
)
go_repository(
name = "com_github_armon_consul_api",
build_file_proto_mode = "disable",
importpath = "github.com/armon/consul-api",
sum = "h1:G1bPvciwNyF7IUmKXNt9Ak3m6u9DE1rF+RmtIkBpVdA=",
version = "v0.0.0-20180202201655-eb2c6b5be1b6",
)
go_repository(
name = "com_github_armon_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-metrics",
sum = "h1:8GUt8eRujhVEGZFFEjBj46YV4rDjvGrNxb0KMWYkL2I=",
version = "v0.0.0-20180917152333-f0300d1749da",
)
go_repository(
name = "com_github_armon_go_radix",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-radix",
sum = "h1:BUAU3CGlLvorLI26FmByPp2eC2qla6E1Tw+scpcg/to=",
version = "v0.0.0-20180808171621-7fddfc383310",
)
go_repository(
name = "com_github_armon_go_socks5",
build_file_proto_mode = "disable",
importpath = "github.com/armon/go-socks5",
sum = "h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio=",
version = "v0.0.0-20160902184237-e75332964ef5",
)
go_repository(
name = "com_github_asaskevich_govalidator",
build_file_proto_mode = "disable",
importpath = "github.com/asaskevich/govalidator",
sum = "h1:zV3ejI06GQ59hwDQAvmK1qxOQGB3WuVTRoY0okPTAv0=",
version = "v0.0.0-20200108200545-475eaeb16496",
)
go_repository(
name = "com_github_aws_aws_k8s_tester",
build_file_proto_mode = "disable",
importpath = "github.com/aws/aws-k8s-tester",
sum = "h1:Zr5NWiRK5fhmRIlhrsTwrY8yB488FyN6iulci2D7VaI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_aws_aws_sdk_go",
build_file_proto_mode = "disable",
importpath = "github.com/aws/aws-sdk-go",
sum = "h1:cyZp8TvUbH9rrShdrwULtCj4pB5szddrw9aKHUsw1Ic=",
version = "v1.37.22",
)
go_repository(
name = "com_github_aybabtme_rgbterm",
build_file_proto_mode = "disable",
importpath = "github.com/aybabtme/rgbterm",
sum = "h1:WWB576BN5zNSZc/M9d/10pqEx5VHNhaQ/yOVAkmj5Yo=",
version = "v0.0.0-20170906152045-cc83f3b3ce59",
)
go_repository(
name = "com_github_azure_azure_amqp_common_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-amqp-common-go/v2",
sum = "h1:+QbFgmWCnPzdaRMfsI0Yb6GrRdBj5jVL8N3EXuEUcBQ=",
version = "v2.1.0",
)
go_repository(
name = "com_github_azure_azure_pipeline_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-pipeline-go",
sum = "h1:6oiIS9yaG6XCCzhgAgKFfIWyo4LLCiDhZot6ltoThhY=",
version = "v0.2.2",
)
go_repository(
name = "com_github_azure_azure_sdk_for_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-sdk-for-go",
sum = "h1:m4oQOm3HXtQh2Ipata+pLSS1kGUD/7ikkvNq81XM/7s=",
version = "v46.3.0+incompatible",
)
go_repository(
name = "com_github_azure_azure_service_bus_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-service-bus-go",
sum = "h1:G1qBLQvHCFDv9pcpgwgFkspzvnGknJRR0PYJ9ytY/JA=",
version = "v0.9.1",
)
go_repository(
name = "com_github_azure_azure_storage_blob_go",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/azure-storage-blob-go",
sum = "h1:53qhf0Oxa0nOjgbDeeYPUeyiNmafAFEY95rZLK0Tj6o=",
version = "v0.8.0",
)
go_repository(
name = "com_github_azure_go_ansiterm",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-ansiterm",
sum = "h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8=",
version = "v0.0.0-20170929234023-d6e3b3328b78",
)
go_repository(
name = "com_github_azure_go_autorest",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest",
replace = "github.com/Azure/go-autorest",
sum = "h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs=",
version = "v14.2.0+incompatible",
)
go_repository(
name = "com_github_azure_go_autorest_autorest",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest",
sum = "h1:gI8ytXbxMfI+IVbI9mP2JGCTXIuhHLgRlvQ9X4PsnHE=",
version = "v0.11.12",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_adal",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/adal",
sum = "h1:Y3bBUV4rTuxenJJs41HU3qmqsb+auo+a3Lz+PlJPpL0=",
version = "v0.9.5",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_azure_auth",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/azure/auth",
sum = "h1:iM6UAvjR97ZIeR93qTcwpKNMpV+/FTWjwEbuPD495Tk=",
version = "v0.4.2",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_azure_cli",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/azure/cli",
sum = "h1:LXl088ZQlP0SBppGFsRZonW6hSvwgL5gRByMbvUbx8U=",
version = "v0.3.1",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_date",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/date",
sum = "h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw=",
version = "v0.3.0",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_mocks",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/mocks",
sum = "h1:K0laFcLE6VLTOwNgSxaGbUcLPuGXlNkbVvq4cW4nIHk=",
version = "v0.4.1",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_to",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/to",
sum = "h1:oXVqrxakqqV1UZdSazDOPOLvOIz+XA683u8EctwboHk=",
version = "v0.4.0",
)
go_repository(
name = "com_github_azure_go_autorest_autorest_validation",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/autorest/validation",
sum = "h1:3I9AAI63HfcLtphd9g39ruUwRI+Ca+z/f36KHPFRUss=",
version = "v0.3.0",
)
go_repository(
name = "com_github_azure_go_autorest_logger",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/logger",
sum = "h1:e4RVHVZKC5p6UANLJHkM4OfR1UKZPj8Wt8Pcx+3oqrE=",
version = "v0.2.0",
)
go_repository(
name = "com_github_azure_go_autorest_tracing",
build_file_proto_mode = "disable",
importpath = "github.com/Azure/go-autorest/tracing",
sum = "h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo=",
version = "v0.6.0",
)
go_repository(
name = "com_github_beorn7_perks",
build_file_proto_mode = "disable",
importpath = "github.com/beorn7/perks",
sum = "h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=",
version = "v1.0.1",
)
go_repository(
name = "com_github_bgentry_speakeasy",
build_file_proto_mode = "disable",
importpath = "github.com/bgentry/speakeasy",
sum = "h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=",
version = "v0.1.0",
)
go_repository(
name = "com_github_bitly_go_simplejson",
build_file_proto_mode = "disable",
importpath = "github.com/bitly/go-simplejson",
sum = "h1:6IH+V8/tVMab511d5bn4M7EwGXZf9Hj6i2xSwkNEM+Y=",
version = "v0.5.0",
)
go_repository(
name = "com_github_bketelsen_crypt",
build_file_proto_mode = "disable",
importpath = "github.com/bketelsen/crypt",
sum = "h1:+0HFd5KSZ/mm3JmhmrDukiId5iR6w4+BdFtfSy4yWIc=",
version = "v0.0.3-0.20200106085610-5cbc8cc4026c",
)
go_repository(
name = "com_github_blakesmith_ar",
build_file_proto_mode = "disable",
importpath = "github.com/blakesmith/ar",
sum = "h1:m935MPodAbYS46DG4pJSv7WO+VECIWUQ7OJYSoTrMh4=",
version = "v0.0.0-20190502131153-809d4375e1fb",
)
go_repository(
name = "com_github_blang_semver",
build_file_proto_mode = "disable",
importpath = "github.com/blang/semver",
sum = "h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ=",
version = "v3.5.1+incompatible",
)
go_repository(
name = "com_github_bmizerany_assert",
build_file_proto_mode = "disable",
importpath = "github.com/bmizerany/assert",
sum = "h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY=",
version = "v0.0.0-20160611221934-b7ed37b82869",
)
go_repository(
name = "com_github_bmizerany_perks",
build_file_proto_mode = "disable",
importpath = "github.com/bmizerany/perks",
sum = "h1:AP/Y7sqYicnjGDfD5VcY4CIfh1hRXBUavxrvELjTiOE=",
version = "v0.0.0-20141205001514-d9a9656a3a4b",
)
go_repository(
name = "com_github_bombsimon_wsl_v2",
build_file_proto_mode = "disable",
importpath = "github.com/bombsimon/wsl/v2",
sum = "h1:/DdSteYCq4lPX+LqDg7mdoxm14UxzZPoDT0taYc3DTU=",
version = "v2.2.0",
)
go_repository(
name = "com_github_bombsimon_wsl_v3",
build_file_proto_mode = "disable",
importpath = "github.com/bombsimon/wsl/v3",
sum = "h1:E5SRssoBgtVFPcYWUOFJEcgaySgdtTNYzsSKDOY7ss8=",
version = "v3.1.0",
)
go_repository(
name = "com_github_bshuster_repo_logrus_logstash_hook",
build_file_proto_mode = "disable",
importpath = "github.com/bshuster-repo/logrus-logstash-hook",
sum = "h1:pgAtgj+A31JBVtEHu2uHuEx0n+2ukqUJnS2vVe5pQNA=",
version = "v0.4.1",
)
go_repository(
name = "com_github_bugsnag_bugsnag_go",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/bugsnag-go",
sum = "h1:rFt+Y/IK1aEZkEHchZRSq9OQbsSzIT/OrI8YFFmRIng=",
version = "v0.0.0-20141110184014-b1d153021fcd",
)
go_repository(
name = "com_github_bugsnag_osext",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/osext",
sum = "h1:otBG+dV+YK+Soembjv71DPz3uX/V/6MMlSyD9JBQ6kQ=",
version = "v0.0.0-20130617224835-0dd3f918b21b",
)
go_repository(
name = "com_github_bugsnag_panicwrap",
build_file_proto_mode = "disable",
importpath = "github.com/bugsnag/panicwrap",
sum = "h1:nvj0OLI3YqYXer/kZD8Ri1aaunCxIEsOst1BVJswV0o=",
version = "v0.0.0-20151223152923-e2c28503fcd0",
)
go_repository(
name = "com_github_burntsushi_toml",
build_file_proto_mode = "disable",
importpath = "github.com/BurntSushi/toml",
sum = "h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=",
version = "v0.3.1",
)
go_repository(
name = "com_github_burntsushi_xgb",
build_file_proto_mode = "disable",
importpath = "github.com/BurntSushi/xgb",
sum = "h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc=",
version = "v0.0.0-20160522181843-27f122750802",
)
go_repository(
name = "com_github_bwmarrin_snowflake",
build_file_proto_mode = "disable",
importpath = "github.com/bwmarrin/snowflake",
sum = "h1:dRbqXFjM10uA3wdrVZ8Kh19uhciRMOroUYJ7qAqDLhY=",
version = "v0.0.0",
)
go_repository(
name = "com_github_caarlos0_ctrlc",
build_file_proto_mode = "disable",
importpath = "github.com/caarlos0/ctrlc",
sum = "h1:2DtF8GSIcajgffDFJzyG15vO+1PuBWOMUdFut7NnXhw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_campoy_unique",
build_file_proto_mode = "disable",
importpath = "github.com/campoy/unique",
sum = "h1:V9a67dfYqPLAvzk5hMQOXYJlZ4SLIXgyKIE+ZiHzgGQ=",
version = "v0.0.0-20180121183637-88950e537e7e",
)
go_repository(
name = "com_github_cavaliercoder_go_cpio",
build_file_proto_mode = "disable",
importpath = "github.com/cavaliercoder/go-cpio",
sum = "h1:hHg27A0RSSp2Om9lubZpiMgVbvn39bsUmW9U5h0twqc=",
version = "v0.0.0-20180626203310-925f9528c45e",
)
go_repository(
name = "com_github_census_instrumentation_opencensus_proto",
build_file_proto_mode = "disable",
importpath = "github.com/census-instrumentation/opencensus-proto",
sum = "h1:glEXhBS5PSLLv4IXzLA5yPRVX4bilULVyxxbrfOtDAk=",
version = "v0.2.1",
)
go_repository(
name = "com_github_cespare_xxhash",
build_file_proto_mode = "disable",
importpath = "github.com/cespare/xxhash",
sum = "h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=",
version = "v1.1.0",
)
go_repository(
name = "com_github_cespare_xxhash_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cespare/xxhash/v2",
sum = "h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=",
version = "v2.1.1",
)
go_repository(
name = "com_github_chai2010_gettext_go",
build_file_proto_mode = "disable",
importpath = "github.com/chai2010/gettext-go",
sum = "h1:7aWHqerlJ41y6FOsEUvknqgXnGmJyJSbjhAWq5pO4F8=",
version = "v0.0.0-20160711120539-c6fed771bfd5",
)
go_repository(
name = "com_github_chzyer_logex",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/logex",
sum = "h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=",
version = "v1.1.10",
)
go_repository(
name = "com_github_chzyer_readline",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/readline",
sum = "h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8=",
version = "v0.0.0-20180603132655-2972be24d48e",
)
go_repository(
name = "com_github_chzyer_test",
build_file_proto_mode = "disable",
importpath = "github.com/chzyer/test",
sum = "h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8=",
version = "v0.0.0-20180213035817-a1ea475d72b1",
)
go_repository(
name = "com_github_cihub_seelog",
build_file_proto_mode = "disable",
importpath = "github.com/cihub/seelog",
sum = "h1:kHaBemcxl8o/pQ5VM1c8PVE1PubbNx3mjUr09OqWGCs=",
version = "v0.0.0-20170130134532-f561c5e57575",
)
go_repository(
name = "com_github_clarketm_json",
build_file_proto_mode = "disable",
importpath = "github.com/clarketm/json",
sum = "h1:0JketcMdLC16WGnRGJiNmTXuQznDEQaiknxSPRBxg+k=",
version = "v1.13.4",
)
go_repository(
name = "com_github_client9_misspell",
build_file_proto_mode = "disable",
importpath = "github.com/client9/misspell",
sum = "h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI=",
version = "v0.3.4",
)
go_repository(
name = "com_github_cloudevents_sdk_go",
build_file_proto_mode = "disable",
importpath = "github.com/cloudevents/sdk-go",
sum = "h1:gS5I0s2qPmdc4GBPlUmzZU7RH30BaiOdcRJ1RkXnPrc=",
version = "v1.0.0",
)
go_repository(
name = "com_github_cloudevents_sdk_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cloudevents/sdk-go/v2",
sum = "h1:AUdGJwaSUnA+VvepKqgjy6XDkPcf0hf/3L7icEs1ibs=",
version = "v2.0.0",
)
go_repository(
name = "com_github_cloudflare_cloudflare_go",
build_file_proto_mode = "disable",
importpath = "github.com/cloudflare/cloudflare-go",
sum = "h1:bhMGoNhAg21DuqJjU9jQepRRft6vYfo6pejT3NN4V6A=",
version = "v0.13.2",
)
go_repository(
name = "com_github_cncf_udpa_go",
build_file_proto_mode = "disable",
importpath = "github.com/cncf/udpa/go",
sum = "h1:WBZRG4aNOuI15bLRrCgN8fCq8E5Xuty6jGbmSNEvSsU=",
version = "v0.0.0-20191209042840-269d4d468f6f",
)
go_repository(
name = "com_github_cockroachdb_datadriven",
build_file_proto_mode = "disable",
importpath = "github.com/cockroachdb/datadriven",
sum = "h1:OaNxuTZr7kxeODyLWsRMC+OD03aFUH+mW6r2d+MWa5Y=",
version = "v0.0.0-20190809214429-80d97fb3cbaa",
)
go_repository(
name = "com_github_containerd_cgroups",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/cgroups",
sum = "h1:tSNMc+rJDfmYntojat8lljbt1mgKNpTxUZJsSzJ9Y1s=",
version = "v0.0.0-20190919134610-bf292b21730f",
)
go_repository(
name = "com_github_containerd_console",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/console",
sum = "h1:uict5mhHFTzKLUCufdSLym7z/J0CbBJT59lYbP9wtbg=",
version = "v0.0.0-20180822173158-c12b1e7919c1",
)
go_repository(
name = "com_github_containerd_containerd",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/containerd",
sum = "h1:LoIzb5y9x5l8VKAlyrbusNPXqBY0+kviRloxFUMFwKc=",
version = "v1.3.3",
)
go_repository(
name = "com_github_containerd_continuity",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/continuity",
sum = "h1:kIFnQBO7rQ0XkMe6xEwbybYHBEaWmh/f++laI6Emt7M=",
version = "v0.0.0-20200107194136-26c1120b8d41",
)
go_repository(
name = "com_github_containerd_fifo",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/fifo",
sum = "h1:PUD50EuOMkXVcpBIA/R95d56duJR9VxhwncsFbNnxW4=",
version = "v0.0.0-20190226154929-a9fb20d87448",
)
go_repository(
name = "com_github_containerd_go_runc",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/go-runc",
sum = "h1:esQOJREg8nw8aXj6uCN5dfW5cKUBiEJ/+nni1Q/D/sw=",
version = "v0.0.0-20180907222934-5a6d9f37cfa3",
)
go_repository(
name = "com_github_containerd_ttrpc",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/ttrpc",
sum = "h1:dlfGmNcE3jDAecLqwKPMNX6nk2qh1c1Vg1/YTzpOOF4=",
version = "v0.0.0-20190828154514-0e0f228740de",
)
go_repository(
name = "com_github_containerd_typeurl",
build_file_proto_mode = "disable",
importpath = "github.com/containerd/typeurl",
sum = "h1:JNn81o/xG+8NEo3bC/vx9pbi/g2WI8mtP2/nXzu297Y=",
version = "v0.0.0-20180627222232-a93fcdb778cd",
)
go_repository(
name = "com_github_coreos_bbolt",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/bbolt",
sum = "h1:n6AiVyVRKQFNb6mJlwESEvvLoDyiTzXX7ORAUlkeBdY=",
version = "v1.3.3",
)
go_repository(
name = "com_github_coreos_etcd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/etcd",
sum = "h1:f/Z3EoDSx1yjaIjLQGo1diYUlQYSBrrAQ5vP8NjwXwo=",
version = "v3.3.17+incompatible",
)
go_repository(
name = "com_github_coreos_go_etcd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-etcd",
sum = "h1:bXhRBIXoTm9BYHS3gE0TtQuyNZyeEMux2sDi4oo5YOo=",
version = "v2.0.0+incompatible",
)
go_repository(
name = "com_github_coreos_go_oidc",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-oidc",
sum = "h1:sdJrfw8akMnCuUlaZU3tE/uYXFgfqom8DBE9so9EBsM=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_coreos_go_semver",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-semver",
sum = "h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM=",
version = "v0.3.0",
)
go_repository(
name = "com_github_coreos_go_systemd",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/go-systemd",
sum = "h1:Wf6HqHfScWJN9/ZjdUKyjop4mf3Qdd+1TvvltAvM3m8=",
version = "v0.0.0-20190321100706-95778dfbb74e",
)
go_repository(
name = "com_github_coreos_pkg",
build_file_proto_mode = "disable",
importpath = "github.com/coreos/pkg",
sum = "h1:lBNOc5arjvs8E5mO2tbpBpLoyyu8B6e44T7hJy6potg=",
version = "v0.0.0-20180928190104-399ea9e2e55f",
)
go_repository(
name = "com_github_cpu_goacmedns",
build_file_proto_mode = "disable",
importpath = "github.com/cpu/goacmedns",
sum = "h1:QOeMpIEsIdm1LSASSswjaTf8CXmzcrgy5OeCfHjppA4=",
version = "v0.0.3",
)
go_repository(
name = "com_github_cpuguy83_go_md2man",
build_file_proto_mode = "disable",
importpath = "github.com/cpuguy83/go-md2man",
sum = "h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=",
version = "v1.0.10",
)
go_repository(
name = "com_github_cpuguy83_go_md2man_v2",
build_file_proto_mode = "disable",
importpath = "github.com/cpuguy83/go-md2man/v2",
sum = "h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM=",
version = "v2.0.0",
)
go_repository(
name = "com_github_creack_pty",
build_file_proto_mode = "disable",
importpath = "github.com/creack/pty",
sum = "h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw=",
version = "v1.1.11",
)
go_repository(
name = "com_github_cyphar_filepath_securejoin",
build_file_proto_mode = "disable",
importpath = "github.com/cyphar/filepath-securejoin",
sum = "h1:jCwT2GTP+PY5nBz3c/YL5PAIbusElVrPujOBSCj8xRg=",
version = "v0.2.2",
)
go_repository(
name = "com_github_datadog_zstd",
build_file_proto_mode = "disable",
importpath = "github.com/DataDog/zstd",
sum = "h1:3oxKN3wbHibqx897utPC2LTQU4J+IHWWJO+glkAkpFM=",
version = "v1.4.1",
)
go_repository(
name = "com_github_davecgh_go_spew",
build_file_proto_mode = "disable",
importpath = "github.com/davecgh/go-spew",
sum = "h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=",
version = "v1.1.1",
)
go_repository(
name = "com_github_daviddengcn_go_colortext",
build_file_proto_mode = "disable",
importpath = "github.com/daviddengcn/go-colortext",
sum = "h1:uVsMphB1eRx7xB1njzL3fuMdWRN8HtVzoUOItHMwv5c=",
version = "v0.0.0-20160507010035-511bcaf42ccd",
)
go_repository(
name = "com_github_deislabs_oras",
build_file_proto_mode = "disable",
importpath = "github.com/deislabs/oras",
sum = "h1:If674KraJVpujYR00rzdi0QAmW4BxzMJPVAZJKuhQ0c=",
version = "v0.8.1",
)
go_repository(
name = "com_github_denisenkom_go_mssqldb",
build_file_proto_mode = "disable",
importpath = "github.com/denisenkom/go-mssqldb",
sum = "h1:83Wprp6ROGeiHFAP8WJdI2RoxALQYgdllERc3N5N2DM=",
version = "v0.0.0-20191124224453-732737034ffd",
)
go_repository(
name = "com_github_denverdino_aliyungo",
build_file_proto_mode = "disable",
importpath = "github.com/denverdino/aliyungo",
sum = "h1:p6poVbjHDkKa+wtC8frBMwQtT3BmqGYBjzMwJ63tuR4=",
version = "v0.0.0-20190125010748-a747050bb1ba",
)
go_repository(
name = "com_github_devigned_tab",
build_file_proto_mode = "disable",
importpath = "github.com/devigned/tab",
sum = "h1:3mD6Kb1mUOYeLpJvTVSDwSg5ZsfSxfvxGRTxRsJsITA=",
version = "v0.1.1",
)
go_repository(
name = "com_github_dgrijalva_jwt_go",
build_file_proto_mode = "disable",
importpath = "github.com/dgrijalva/jwt-go",
sum = "h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=",
version = "v3.2.0+incompatible",
)
go_repository(
name = "com_github_dgrijalva_jwt_go_v4",
build_file_proto_mode = "disable",
importpath = "github.com/dgrijalva/jwt-go/v4",
sum = "h1:CaO/zOnF8VvUfEbhRatPcwKVWamvbYd8tQGRWacE9kU=",
version = "v4.0.0-preview1",
)
go_repository(
name = "com_github_dgryski_go_gk",
build_file_proto_mode = "disable",
importpath = "github.com/dgryski/go-gk",
sum = "h1:XOPLOMn/zT4jIgxfxSsoXPxkrzz0FaCHwp33x5POJ+Q=",
version = "v0.0.0-20200319235926-a69029f61654",
)
go_repository(
name = "com_github_dgryski_go_sip13",
build_file_proto_mode = "disable",
importpath = "github.com/dgryski/go-sip13",
sum = "h1:RMLoZVzv4GliuWafOuPuQDKSm1SJph7uCRnnS61JAn4=",
version = "v0.0.0-20181026042036-e10d5fee7954",
)
go_repository(
name = "com_github_digitalocean_godo",
build_file_proto_mode = "disable",
importpath = "github.com/digitalocean/godo",
sum = "h1:IMElzMUpO1dVR8qjSg53+5vDkOLzMbhJt4yTAq7NGCQ=",
version = "v1.44.0",
)
go_repository(
name = "com_github_dimchansky_utfbom",
build_file_proto_mode = "disable",
importpath = "github.com/dimchansky/utfbom",
sum = "h1:FcM3g+nofKgUteL8dm/UpdRXNC9KmADgTpLKsu0TRo4=",
version = "v1.1.0",
)
go_repository(
name = "com_github_djarvur_go_err113",
build_file_proto_mode = "disable",
importpath = "github.com/Djarvur/go-err113",
sum = "h1:uCRZZOdMQ0TZPHYTdYpoC0bLYJKPEHPUJ8MeAa51lNU=",
version = "v0.1.0",
)
go_repository(
name = "com_github_djherbis_atime",
build_file_proto_mode = "disable",
importpath = "github.com/djherbis/atime",
sum = "h1:ySLvBAM0EvOGaX7TI4dAM5lWj+RdJUCKtGSEHN8SGBg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_dnaeon_go_vcr",
build_file_proto_mode = "disable",
importpath = "github.com/dnaeon/go-vcr",
sum = "h1:r8L/HqC0Hje5AXMu1ooW8oyQyOFv4GxqpL0nRP7SLLY=",
version = "v1.0.1",
)
go_repository(
name = "com_github_docker_cli",
build_file_proto_mode = "disable",
importpath = "github.com/docker/cli",
sum = "h1:AbI1uj9w4yt6TvfKHfRu7G55KuQe7NCvWPQRKDoXggE=",
version = "v0.0.0-20200210162036-a4bedce16568",
)
go_repository(
name = "com_github_docker_distribution",
build_file_proto_mode = "disable",
importpath = "github.com/docker/distribution",
sum = "h1:a5mlkVzth6W5A4fOsS3D2EO5BUmsJpcB+cRlLU7cSug=",
version = "v2.7.1+incompatible",
)
go_repository(
name = "com_github_docker_docker",
build_file_proto_mode = "disable",
importpath = "github.com/docker/docker",
sum = "h1:IkZjBSIc8hBjLpqeAbeE5mca5mNgeatLHBy3GO78BWo=",
version = "v1.13.1",
)
go_repository(
name = "com_github_docker_docker_credential_helpers",
build_file_proto_mode = "disable",
importpath = "github.com/docker/docker-credential-helpers",
sum = "h1:zI2p9+1NQYdnG6sMU26EX4aVGlqbInSQxQXLvzJ4RPQ=",
version = "v0.6.3",
)
go_repository(
name = "com_github_docker_go_connections",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-connections",
sum = "h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=",
version = "v0.4.0",
)
go_repository(
name = "com_github_docker_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-metrics",
sum = "h1:yWHOI+vFjEsAakUTSrtqc/SAHrhSkmn48pqjidZX3QA=",
version = "v0.0.0-20180209012529-399ea8c73916",
)
go_repository(
name = "com_github_docker_go_units",
build_file_proto_mode = "disable",
importpath = "github.com/docker/go-units",
sum = "h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw=",
version = "v0.4.0",
)
go_repository(
name = "com_github_docker_libtrust",
build_file_proto_mode = "disable",
importpath = "github.com/docker/libtrust",
sum = "h1:ZClxb8laGDf5arXfYcAtECDFgAgHklGI8CxgjHnXKJ4=",
version = "v0.0.0-20150114040149-fa567046d9b1",
)
go_repository(
name = "com_github_docker_spdystream",
build_file_proto_mode = "disable",
importpath = "github.com/docker/spdystream",
sum = "h1:cenwrSVm+Z7QLSV/BsnenAOcDXdX4cMv4wP0B/5QbPg=",
version = "v0.0.0-20160310174837-449fdfce4d96",
)
go_repository(
name = "com_github_docopt_docopt_go",
build_file_proto_mode = "disable",
importpath = "github.com/docopt/docopt-go",
sum = "h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=",
version = "v0.0.0-20180111231733-ee0de3bc6815",
)
go_repository(
name = "com_github_dsnet_compress",
build_file_proto_mode = "disable",
importpath = "github.com/dsnet/compress",
sum = "h1:PlZu0n3Tuv04TzpfPbrnI0HW/YwodEXDS+oPKahKF0Q=",
version = "v0.0.1",
)
go_repository(
name = "com_github_dsnet_golib",
build_file_proto_mode = "disable",
importpath = "github.com/dsnet/golib",
sum = "h1:tFh1tRc4CA31yP6qDcu+Trax5wW5GuMxvkIba07qVLY=",
version = "v0.0.0-20171103203638-1ea166775780",
)
go_repository(
name = "com_github_dustin_go_humanize",
build_file_proto_mode = "disable",
importpath = "github.com/dustin/go-humanize",
sum = "h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_eapache_go_resiliency",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/go-resiliency",
sum = "h1:v7g92e/KSN71Rq7vSThKaWIq68fL4YHvWyiUKorFR1Q=",
version = "v1.2.0",
)
go_repository(
name = "com_github_eapache_go_xerial_snappy",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/go-xerial-snappy",
sum = "h1:YEetp8/yCZMuEPMUDHG0CW/brkkEp8mzqk2+ODEitlw=",
version = "v0.0.0-20180814174437-776d5712da21",
)
go_repository(
name = "com_github_eapache_queue",
build_file_proto_mode = "disable",
importpath = "github.com/eapache/queue",
sum = "h1:YOEu7KNc61ntiQlcEeUIoDTJ2o8mQznoNvUhiigpIqc=",
version = "v1.1.0",
)
go_repository(
name = "com_github_elazarl_goproxy",
build_file_proto_mode = "disable",
importpath = "github.com/elazarl/goproxy",
sum = "h1:yUdfgN0XgIJw7foRItutHYUIhlcKzcSf5vDpdhQAKTc=",
version = "v0.0.0-20180725130230-947c36da3153",
)
go_repository(
name = "com_github_emicklei_go_restful",
build_file_proto_mode = "disable",
importpath = "github.com/emicklei/go-restful",
sum = "h1:spTtZBk5DYEvbxMVutUuTyh1Ao2r4iyvLdACqsl/Ljk=",
version = "v2.9.5+incompatible",
)
go_repository(
name = "com_github_emirpasic_gods",
build_file_proto_mode = "disable",
importpath = "github.com/emirpasic/gods",
sum = "h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=",
version = "v1.12.0",
)
go_repository(
name = "com_github_envoyproxy_go_control_plane",
build_file_proto_mode = "disable",
importpath = "github.com/envoyproxy/go-control-plane",
sum = "h1:rEvIZUSZ3fx39WIi3JkQqQBitGwpELBIYWeBVh6wn+E=",
version = "v0.9.4",
)
go_repository(
name = "com_github_envoyproxy_protoc_gen_validate",
build_file_proto_mode = "disable",
importpath = "github.com/envoyproxy/protoc-gen-validate",
sum = "h1:EQciDnbrYxy13PgWoY8AqoxGiPrpgBZ1R8UNe3ddc+A=",
version = "v0.1.0",
)
go_repository(
name = "com_github_erikstmartin_go_testdb",
build_file_proto_mode = "disable",
importpath = "github.com/erikstmartin/go-testdb",
sum = "h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y=",
version = "v0.0.0-20160219214506-8d10e4a1bae5",
)
go_repository(
name = "com_github_evanphx_json_patch",
build_file_proto_mode = "disable",
importpath = "github.com/evanphx/json-patch",
sum = "h1:glyUF9yIYtMHzn8xaKw5rMhdWcwsYV8dZHIq5567/xs=",
version = "v4.11.0+incompatible",
)
go_repository(
name = "com_github_exponent_io_jsonpath",
build_file_proto_mode = "disable",
importpath = "github.com/exponent-io/jsonpath",
sum = "h1:105gxyaGwCFad8crR9dcMQWvV9Hvulu6hwUh4tWPJnM=",
version = "v0.0.0-20151013193312-d6023ce2651d",
)
go_repository(
name = "com_github_fatih_camelcase",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/camelcase",
sum = "h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_fatih_color",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/color",
sum = "h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=",
version = "v1.9.0",
)
go_repository(
name = "com_github_fatih_structs",
build_file_proto_mode = "disable",
importpath = "github.com/fatih/structs",
sum = "h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo=",
version = "v1.1.0",
)
go_repository(
name = "com_github_felixge_fgprof",
build_file_proto_mode = "disable",
importpath = "github.com/felixge/fgprof",
sum = "h1:E6FUJ2Mlv043ipLOCFqo8+cHo9MhQ203E2cdEK/isEs=",
version = "v0.9.1",
)
go_repository(
name = "com_github_flynn_go_shlex",
build_file_proto_mode = "disable",
importpath = "github.com/flynn/go-shlex",
sum = "h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=",
version = "v0.0.0-20150515145356-3f9db97f8568",
)
go_repository(
name = "com_github_form3tech_oss_jwt_go",
build_file_proto_mode = "disable",
importpath = "github.com/form3tech-oss/jwt-go",
sum = "h1:TcekIExNqud5crz4xD2pavyTgWiPvpYe4Xau31I0PRk=",
version = "v3.2.2+incompatible",
)
go_repository(
name = "com_github_fortytw2_leaktest",
build_file_proto_mode = "disable",
importpath = "github.com/fortytw2/leaktest",
sum = "h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=",
version = "v1.3.0",
)
go_repository(
name = "com_github_frankban_quicktest",
build_file_proto_mode = "disable",
importpath = "github.com/frankban/quicktest",
sum = "h1:PvpJR0Uq8SdX+zagCMsarBMlhz6ysGTf1+pRmCsRXqY=",
version = "v1.8.1",
)
go_repository(
name = "com_github_fsnotify_fsnotify",
build_file_proto_mode = "disable",
importpath = "github.com/fsnotify/fsnotify",
sum = "h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=",
version = "v1.4.9",
)
go_repository(
name = "com_github_fsouza_fake_gcs_server",
build_file_proto_mode = "disable",
importpath = "github.com/fsouza/fake-gcs-server",
sum = "h1:3bRRh/rQnB2XbrMolHAj9oX/PFiWVQFVVfPR5y2pxb8=",
version = "v1.19.4",
)
go_repository(
name = "com_github_fvbommel_sortorder",
build_file_proto_mode = "disable",
importpath = "github.com/fvbommel/sortorder",
sum = "h1:dSnXLt4mJYH25uDDGa3biZNQsozaUWDSWeKJ0qqFfzE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_garyburd_redigo",
build_file_proto_mode = "disable",
importpath = "github.com/garyburd/redigo",
sum = "h1:LofdAjjjqCSXMwLGgOgnE+rdPuvX9DxCqaHwKy7i/ko=",
version = "v0.0.0-20150301180006-535138d7bcd7",
)
go_repository(
name = "com_github_ghodss_yaml",
build_file_proto_mode = "disable",
importpath = "github.com/ghodss/yaml",
sum = "h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=",
version = "v1.0.0",
)
go_repository(
name = "com_github_gliderlabs_ssh",
build_file_proto_mode = "disable",
importpath = "github.com/gliderlabs/ssh",
sum = "h1:6zsha5zo/TWhRhwqCD3+EarCAgZ2yN28ipRnGPnwkI0=",
version = "v0.2.2",
)
go_repository(
name = "com_github_globalsign_mgo",
build_file_proto_mode = "disable",
importpath = "github.com/globalsign/mgo",
sum = "h1:DujepqpGd1hyOd7aW59XpK7Qymp8iy83xq74fLr21is=",
version = "v0.0.0-20181015135952-eeefdecb41b8",
)
go_repository(
name = "com_github_go_bindata_go_bindata_v3",
build_file_proto_mode = "disable",
importpath = "github.com/go-bindata/go-bindata/v3",
sum = "h1:F0nVttLC3ws0ojc7p60veTurcOm//D4QBODNM7EGrCI=",
version = "v3.1.3",
)
go_repository(
name = "com_github_go_critic_go_critic",
build_file_proto_mode = "disable",
importpath = "github.com/go-critic/go-critic",
sum = "h1:sGEEdiuvLV0OC7/yC6MnK3K6LCPBplspK45B0XVdFAc=",
version = "v0.4.3",
)
go_repository(
name = "com_github_go_git_gcfg",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/gcfg",
sum = "h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=",
version = "v1.5.0",
)
go_repository(
name = "com_github_go_git_go_billy_v5",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-billy/v5",
sum = "h1:4pl5BV4o7ZG/lterP4S6WzJ6xr49Ba5ET9ygheTYahk=",
version = "v5.1.0",
)
go_repository(
name = "com_github_go_git_go_git_fixtures_v4",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-git-fixtures/v4",
sum = "h1:PbKy9zOy4aAKrJ5pibIRpVO2BXnK1Tlcg+caKI7Ox5M=",
version = "v4.0.2-0.20200613231340-f56387b50c12",
)
go_repository(
name = "com_github_go_git_go_git_v5",
build_file_proto_mode = "disable",
importpath = "github.com/go-git/go-git/v5",
sum = "h1:8WKMtJR2j8RntEXR/uvTKagfEt4GYlwQ7mntE4+0GWc=",
version = "v5.3.0",
)
go_repository(
name = "com_github_go_gl_glfw",
build_file_proto_mode = "disable",
importpath = "github.com/go-gl/glfw",
sum = "h1:QbL/5oDUmRBzO9/Z7Seo6zf912W/a6Sr4Eu0G/3Jho0=",
version = "v0.0.0-20190409004039-e6da0acd62b1",
)
go_repository(
name = "com_github_go_gl_glfw_v3_3_glfw",
build_file_proto_mode = "disable",
importpath = "github.com/go-gl/glfw/v3.3/glfw",
sum = "h1:WtGNWLvXpe6ZudgnXrq0barxBImvnnJoMEhXAzcbM0I=",
version = "v0.0.0-20200222043503-6f7a984d4dc4",
)
go_repository(
name = "com_github_go_ini_ini",
build_file_proto_mode = "disable",
importpath = "github.com/go-ini/ini",
sum = "h1:0wVcG9udk2C3TGgmdIGKK9ScOZHZB5nbG+gwji9fhhc=",
version = "v1.55.0",
)
go_repository(
name = "com_github_go_kit_kit",
build_file_proto_mode = "disable",
importpath = "github.com/go-kit/kit",
sum = "h1:wDJmvq38kDhkVxi50ni9ykkdUr1PKgqKOoi01fa0Mdk=",
version = "v0.9.0",
)
go_repository(
name = "com_github_go_kit_log",
build_file_proto_mode = "disable",
importpath = "github.com/go-kit/log",
sum = "h1:DGJh0Sm43HbOeYDNnVZFl8BvcYVvjD5bqYJvp0REbwQ=",
version = "v0.1.0",
)
go_repository(
name = "com_github_go_ldap_ldap",
build_file_proto_mode = "disable",
importpath = "github.com/go-ldap/ldap",
sum = "h1:kD5HQcAzlQ7yrhfn+h+MSABeAy/jAJhvIJ/QDllP44g=",
version = "v3.0.2+incompatible",
)
go_repository(
name = "com_github_go_lintpack_lintpack",
build_file_proto_mode = "disable",
importpath = "github.com/go-lintpack/lintpack",
sum = "h1:DI5mA3+eKdWeJ40nU4d6Wc26qmdG8RCi/btYq0TuRN0=",
version = "v0.5.2",
)
go_repository(
name = "com_github_go_logfmt_logfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-logfmt/logfmt",
sum = "h1:TrB8swr/68K7m9CcGut2g3UOihhbcbiMAYiuTXdEih4=",
version = "v0.5.0",
)
go_repository(
name = "com_github_go_logr_logr",
build_file_proto_mode = "disable",
importpath = "github.com/go-logr/logr",
sum = "h1:K7/B1jt6fIBQVd4Owv2MqGQClcgf0R266+7C/QjRcLc=",
version = "v0.4.0",
)
go_repository(
name = "com_github_go_logr_zapr",
build_file_proto_mode = "disable",
importpath = "github.com/go-logr/zapr",
sum = "h1:uc1uML3hRYL9/ZZPdgHS/n8Nzo+eaYL/Efxkkamf7OM=",
version = "v0.4.0",
)
go_repository(
name = "com_github_go_ole_go_ole",
build_file_proto_mode = "disable",
importpath = "github.com/go-ole/go-ole",
sum = "h1:2lOsA72HgjxAuMlKpFiCbHTvu44PIVkZ5hqm3RSdI/E=",
version = "v1.2.1",
)
go_repository(
name = "com_github_go_openapi_analysis",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/analysis",
sum = "h1:8b2ZgKfKIUTVQpTb77MoRDIMEIwvDVw40o3aOXdfYzI=",
version = "v0.19.5",
)
go_repository(
name = "com_github_go_openapi_errors",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/errors",
sum = "h1:a2kIyV3w+OS3S97zxUndRVD46+FhGOUBDFY7nmu4CsY=",
version = "v0.19.2",
)
go_repository(
name = "com_github_go_openapi_jsonpointer",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/jsonpointer",
sum = "h1:gihV7YNZK1iK6Tgwwsxo2rJbD1GTbdm72325Bq8FI3w=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_jsonreference",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/jsonreference",
sum = "h1:5cxNfTy0UVC3X8JL5ymxzyoUZmo8iZb+jeTWn7tUa8o=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_loads",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/loads",
sum = "h1:5I4CCSqoWzT+82bBkNIvmLc0UOsoKKQ4Fz+3VxOB7SY=",
version = "v0.19.4",
)
go_repository(
name = "com_github_go_openapi_runtime",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/runtime",
sum = "h1:csnOgcgAiuGoM/Po7PEpKDoNulCcF3FGbSnbHfxgjMI=",
version = "v0.19.4",
)
go_repository(
name = "com_github_go_openapi_spec",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/spec",
sum = "h1:rMMMj8cV38KVXK7SFc+I2MWClbEfbK705+j+dyqun5g=",
version = "v0.19.6",
)
go_repository(
name = "com_github_go_openapi_strfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/strfmt",
sum = "h1:eRfyY5SkaNJCAwmmMcADjY31ow9+N7MCLW7oRkbsINA=",
version = "v0.19.3",
)
go_repository(
name = "com_github_go_openapi_swag",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/swag",
sum = "h1:VRuXN2EnMSsZdauzdss6JBC29YotDqG59BZ+tdlIL1s=",
version = "v0.19.7",
)
go_repository(
name = "com_github_go_openapi_validate",
build_file_proto_mode = "disable",
importpath = "github.com/go-openapi/validate",
sum = "h1:QhCBKRYqZR+SKo4gl1lPhPahope8/RLt6EVgY8X80w0=",
version = "v0.19.5",
)
go_repository(
name = "com_github_go_sql_driver_mysql",
build_file_proto_mode = "disable",
importpath = "github.com/go-sql-driver/mysql",
sum = "h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=",
version = "v1.5.0",
)
go_repository(
name = "com_github_go_stack_stack",
build_file_proto_mode = "disable",
importpath = "github.com/go-stack/stack",
sum = "h1:5SgMzNM5HxrEjV0ww2lTmX6E2Izsfxas4+YHWRs3Lsk=",
version = "v1.8.0",
)
go_repository(
name = "com_github_go_task_slim_sprig",
build_file_proto_mode = "disable",
importpath = "github.com/go-task/slim-sprig",
sum = "h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I=",
version = "v0.0.0-20210107165309-348f09dbbbc0",
)
go_repository(
name = "com_github_go_test_deep",
build_file_proto_mode = "disable",
importpath = "github.com/go-test/deep",
sum = "h1:/VSMRlnY/JSyqxQUzQLKVMAskpY/NZKFA5j2P+0pP2M=",
version = "v1.0.7",
)
go_repository(
name = "com_github_go_toolsmith_astcast",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astcast",
sum = "h1:JojxlmI6STnFVG9yOImLeGREv8W2ocNUM+iOhR6jE7g=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astcopy",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astcopy",
sum = "h1:OMgl1b1MEpjFQ1m5ztEO06rz5CUd3oBv9RF7+DyvdG8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astequal",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astequal",
sum = "h1:4zxD8j3JRFNyLN46lodQuqz3xdKSrur7U/sr0SDS/gQ=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astfmt",
sum = "h1:A0vDDXt+vsvLEdbMFJAUBI/uTbRw1ffOPnxsILnFL6k=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_astinfo",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astinfo",
sum = "h1:wP6mXeB2V/d1P1K7bZ5vDUO3YqEzcvOREOxZPEu3gVI=",
version = "v0.0.0-20180906194353-9809ff7efb21",
)
go_repository(
name = "com_github_go_toolsmith_astp",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/astp",
sum = "h1:alXE75TXgcmupDsMK1fRAy0YUzLzqPVvBKoyWV+KPXg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_pkgload",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/pkgload",
sum = "h1:4DFWWMXVfbcN5So1sBNW9+yeiMqLFGl1wFLTL5R0Tgg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_strparse",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/strparse",
sum = "h1:Vcw78DnpCAKlM20kSbAyO4mPfJn/lyYA4BJUDxe2Jb4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_go_toolsmith_typep",
build_file_proto_mode = "disable",
importpath = "github.com/go-toolsmith/typep",
sum = "h1:8xdsa1+FSIH/RhEkgnD1j2CJOy5mNllW1Q9tRiYwvlk=",
version = "v1.0.2",
)
go_repository(
name = "com_github_go_xmlfmt_xmlfmt",
build_file_proto_mode = "disable",
importpath = "github.com/go-xmlfmt/xmlfmt",
sum = "h1:khEcpUM4yFcxg4/FHQWkvVRmgijNXRfzkIDHh23ggEo=",
version = "v0.0.0-20191208150333-d5b6f63a941b",
)
go_repository(
name = "com_github_go_yaml_yaml",
build_file_proto_mode = "disable",
importpath = "github.com/go-yaml/yaml",
sum = "h1:RYi2hDdss1u4YE7GwixGzWwVo47T8UQwnTLB6vQiq+o=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_gobuffalo_envy",
build_file_proto_mode = "disable",
importpath = "github.com/gobuffalo/envy",
sum = "h1:OQl5ys5MBea7OGCdvPbBJWRgnhC/fGona6QKfvFeau8=",
version = "v1.7.1",
)
go_repository(
name = "com_github_gobuffalo_flect",
build_file_proto_mode = "disable",
importpath = "github.com/gobuffalo/flect",
sum = "h1:EWCvMGGxOjsgwlWaP+f4+Hh6yrrte7JeFL2S6b+0hdM=",
version = "v0.2.0",
)
go_repository(
name = "com_github_gobwas_glob",
build_file_proto_mode = "disable",
importpath = "github.com/gobwas/glob",
sum = "h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=",
version = "v0.2.3",
)
go_repository(
name = "com_github_godbus_dbus",
build_file_proto_mode = "disable",
importpath = "github.com/godbus/dbus",
sum = "h1:BWhy2j3IXJhjCbC68FptL43tDKIq8FladmaTs3Xs7Z8=",
version = "v0.0.0-20190422162347-ade71ed3457e",
)
go_repository(
name = "com_github_gofrs_flock",
build_file_proto_mode = "disable",
importpath = "github.com/gofrs/flock",
sum = "h1:DP+LD/t0njgoPBvT5MJLeliUIVQR03hiKR6vezdwHlc=",
version = "v0.7.1",
)
go_repository(
name = "com_github_gogo_googleapis",
build_file_proto_mode = "disable",
importpath = "github.com/gogo/googleapis",
sum = "h1:kFkMAZBNAn4j7K0GiZr8cRYzejq68VbheufiV3YuyFI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_gogo_protobuf",
build_file_proto_mode = "disable",
importpath = "github.com/gogo/protobuf",
sum = "h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=",
version = "v1.3.2",
)
go_repository(
name = "com_github_golang_gddo",
build_file_proto_mode = "disable",
importpath = "github.com/golang/gddo",
sum = "h1:KRMr9A3qfbVM7iV/WcLY/rL5LICqwMHLhwRXKu99fXw=",
version = "v0.0.0-20190419222130-af0f2af80721",
)
go_repository(
name = "com_github_golang_glog",
build_file_proto_mode = "disable",
importpath = "github.com/golang/glog",
sum = "h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=",
version = "v0.0.0-20160126235308-23def4e6c14b",
)
go_repository(
name = "com_github_golang_groupcache",
build_file_proto_mode = "disable",
importpath = "github.com/golang/groupcache",
sum = "h1:1r7pUrabqp18hOBcwBwiTsbnFeTZHV9eER/QT5JVZxY=",
version = "v0.0.0-20200121045136-8c9f03a8e57e",
)
go_repository(
name = "com_github_golang_lint",
build_file_proto_mode = "disable",
importpath = "github.com/golang/lint",
sum = "h1:2hRPrmiwPrp3fQX967rNJIhQPtiGXdlQWAxKbKw3VHA=",
version = "v0.0.0-20180702182130-06c8688daad7",
)
go_repository(
name = "com_github_golang_mock",
build_file_proto_mode = "disable",
importpath = "github.com/golang/mock",
sum = "h1:jlYHihg//f7RRwuPfptm04yp4s7O6Kw8EZiVYIGcH0g=",
version = "v1.5.0",
)
go_repository(
name = "com_github_golang_protobuf",
build_file_proto_mode = "disable",
importpath = "github.com/golang/protobuf",
sum = "h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=",
version = "v1.5.2",
)
go_repository(
name = "com_github_golang_snappy",
build_file_proto_mode = "disable",
importpath = "github.com/golang/snappy",
sum = "h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=",
version = "v0.0.1",
)
go_repository(
name = "com_github_golang_sql_civil",
build_file_proto_mode = "disable",
importpath = "github.com/golang-sql/civil",
sum = "h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY=",
version = "v0.0.0-20190719163853-cb61b32ac6fe",
)
go_repository(
name = "com_github_golangci_check",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/check",
sum = "h1:23T5iq8rbUYlhpt5DB4XJkc6BU31uODLD1o1gKvZmD0=",
version = "v0.0.0-20180506172741-cfe4005ccda2",
)
go_repository(
name = "com_github_golangci_dupl",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/dupl",
sum = "h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM=",
version = "v0.0.0-20180902072040-3e9179ac440a",
)
go_repository(
name = "com_github_golangci_errcheck",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/errcheck",
sum = "h1:YYWNAGTKWhKpcLLt7aSj/odlKrSrelQwlovBpDuf19w=",
version = "v0.0.0-20181223084120-ef45e06d44b6",
)
go_repository(
name = "com_github_golangci_go_misc",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/go-misc",
sum = "h1:9kfjN3AdxcbsZBf8NjltjWihK2QfBBBZuv91cMFfDHw=",
version = "v0.0.0-20180628070357-927a3d87b613",
)
go_repository(
name = "com_github_golangci_goconst",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/goconst",
sum = "h1:pe9JHs3cHHDQgOFXJJdYkK6fLz2PWyYtP4hthoCMvs8=",
version = "v0.0.0-20180610141641-041c5f2b40f3",
)
go_repository(
name = "com_github_golangci_gocyclo",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/gocyclo",
sum = "h1:pXTK/gkVNs7Zyy7WKgLXmpQ5bHTrq5GDsp8R9Qs67g0=",
version = "v0.0.0-20180528144436-0a533e8fa43d",
)
go_repository(
name = "com_github_golangci_gofmt",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/gofmt",
sum = "h1:iR3fYXUjHCR97qWS8ch1y9zPNsgXThGwjKPrYfqMPks=",
version = "v0.0.0-20190930125516-244bba706f1a",
)
go_repository(
name = "com_github_golangci_golangci_lint",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/golangci-lint",
sum = "h1:VYLx63qb+XJsHdZ27PMS2w5JZacN0XG8ffUwe7yQomo=",
version = "v1.27.0",
)
go_repository(
name = "com_github_golangci_ineffassign",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/ineffassign",
sum = "h1:gLLhTLMk2/SutryVJ6D4VZCU3CUqr8YloG7FPIBWFpI=",
version = "v0.0.0-20190609212857-42439a7714cc",
)
go_repository(
name = "com_github_golangci_lint_1",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/lint-1",
sum = "h1:MfyDlzVjl1hoaPzPD4Gpb/QgoRfSBR0jdhwGyAWwMSA=",
version = "v0.0.0-20191013205115-297bf364a8e0",
)
go_repository(
name = "com_github_golangci_maligned",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/maligned",
sum = "h1:kNY3/svz5T29MYHubXix4aDDuE3RWHkPvopM/EDv/MA=",
version = "v0.0.0-20180506175553-b1d89398deca",
)
go_repository(
name = "com_github_golangci_misspell",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/misspell",
sum = "h1:pLzmVdl3VxTOncgzHcvLOKirdvcx/TydsClUQXTehjo=",
version = "v0.3.5",
)
go_repository(
name = "com_github_golangci_prealloc",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/prealloc",
sum = "h1:leSNB7iYzLYSSx3J/s5sVf4Drkc68W2wm4Ixh/mr0us=",
version = "v0.0.0-20180630174525-215b22d4de21",
)
go_repository(
name = "com_github_golangci_revgrep",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/revgrep",
sum = "h1:XQKc8IYQOeRwVs36tDrEmTgDgP88d5iEURwpmtiAlOM=",
version = "v0.0.0-20180812185044-276a5c0a1039",
)
go_repository(
name = "com_github_golangci_unconvert",
build_file_proto_mode = "disable",
importpath = "github.com/golangci/unconvert",
sum = "h1:zwtduBRr5SSWhqsYNgcuWO2kFlpdOZbP0+yRjmvPGys=",
version = "v0.0.0-20180507085042-28b1c447d1f4",
)
go_repository(
name = "com_github_golangplus_bytes",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/bytes",
sum = "h1:7xqw01UYS+KCI25bMrPxwNYkSns2Db1ziQPpVq99FpE=",
version = "v0.0.0-20160111154220-45c989fe5450",
)
go_repository(
name = "com_github_golangplus_fmt",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/fmt",
sum = "h1:f5gsjBiF9tRRVomCvrkGMMWI8W1f2OBFar2c5oakAP0=",
version = "v0.0.0-20150411045040-2a5d6d7d2995",
)
go_repository(
name = "com_github_golangplus_testing",
build_file_proto_mode = "disable",
importpath = "github.com/golangplus/testing",
sum = "h1:KhcknUwkWHKZPbFy2P7jH5LKJ3La+0ZeknkkmrSgqb0=",
version = "v0.0.0-20180327235837-af21d9c3145e",
)
go_repository(
name = "com_github_gomodule_redigo",
build_file_proto_mode = "disable",
importpath = "github.com/gomodule/redigo",
sum = "h1:nRAxCa+SVsyjSBrtZmG/cqb6VbTmuRzpg/PoTFlpumc=",
version = "v1.8.5",
)
go_repository(
name = "com_github_google_btree",
build_file_proto_mode = "disable",
importpath = "github.com/google/btree",
sum = "h1:0udJVsspx3VBr5FwtLhQQtuAsVc79tTq0ocGIPAU6qo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_google_go_cmp",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-cmp",
sum = "h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=",
version = "v0.5.5",
)
go_repository(
name = "com_github_google_go_containerregistry",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-containerregistry",
sum = "h1:AG8FSAfXglim2l5qSrqp5VK2Xl03PiBf25NiTGGamws=",
version = "v0.1.1",
)
go_repository(
name = "com_github_google_go_github",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github",
sum = "h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY=",
version = "v17.0.0+incompatible",
)
go_repository(
name = "com_github_google_go_github_v27",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v27",
sum = "h1:oiOZuBmGHvrGM1X9uNUAUlLgp5r1UUO/M/KnbHnLRlQ=",
version = "v27.0.6",
)
go_repository(
name = "com_github_google_go_github_v28",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v28",
sum = "h1:kORf5ekX5qwXO2mGzXXOjMe/g6ap8ahVe0sBEulhSxo=",
version = "v28.1.1",
)
go_repository(
name = "com_github_google_go_github_v29",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v29",
sum = "h1:IktKCTwU//aFHnpA+2SLIi7Oo9uhAzgsdZNbcAqhgdc=",
version = "v29.0.3",
)
go_repository(
name = "com_github_google_go_github_v32",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-github/v32",
sum = "h1:q74KVb22spUq0U5HqZ9VCYqQz8YRuOtL/39ZnfwO+NM=",
version = "v32.0.0",
)
go_repository(
name = "com_github_google_go_licenses",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-licenses",
sum = "h1:eVR9gT5gBPTHXeyGAcA8OF/SKNUFFg+a0BJqfx4z5eE=",
version = "v0.0.0-20200227160636-0fa8c766a591",
)
go_repository(
name = "com_github_google_go_querystring",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-querystring",
sum = "h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8=",
version = "v1.1.0",
)
go_repository(
name = "com_github_google_go_replayers_grpcreplay",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-replayers/grpcreplay",
sum = "h1:eNb1y9rZFmY4ax45uEEECSa8fsxGRU+8Bil52ASAwic=",
version = "v0.1.0",
)
go_repository(
name = "com_github_google_go_replayers_httpreplay",
build_file_proto_mode = "disable",
importpath = "github.com/google/go-replayers/httpreplay",
sum = "h1:AX7FUb4BjrrzNvblr/OlgwrmFiep6soj5K2QSDW7BGk=",
version = "v0.1.0",
)
    go_repository(
        name = "com_github_google_gofuzz",
        # NOTE(review): "disable_global" (vs. the plain "disable" used by every
        # other entry here) also disables proto rule generation for this repo's
        # transitive dependencies — presumably intentional for gofuzz, but worth
        # confirming it wasn't a typo for "disable".
        build_file_proto_mode = "disable_global",
        importpath = "github.com/google/gofuzz",
        # Pseudo-version pinned past v1.2.0; sum is the go.sum hash guarding
        # reproducible fetches — do not edit by hand.
        sum = "h1:VcIYpAGBae3Z6BVncE0OnTE/ZjlDXqtYhOZky88neLM=",
        version = "v1.2.1-0.20210504230335-f78f29fc09ea",
    )
go_repository(
name = "com_github_google_licenseclassifier",
build_file_proto_mode = "disable",
importpath = "github.com/google/licenseclassifier",
sum = "h1:OggOMmdI0JLwg1FkOKH9S7fVHF0oEm8PX6S8kAdpOps=",
version = "v0.0.0-20200402202327-879cb1424de0",
)
go_repository(
name = "com_github_google_mako",
build_file_proto_mode = "disable",
importpath = "github.com/google/mako",
sum = "h1:/o5e44nTD/QEEiWPGSFT3bSqcq3Qg7q27N9bv4gKh5M=",
version = "v0.0.0-20190821191249-122f8dcef9e3",
)
go_repository(
name = "com_github_google_martian",
build_file_proto_mode = "disable",
importpath = "github.com/google/martian",
sum = "h1:xmapqc1AyLoB+ddYT6r04bD9lIjlOqGaREovi0SzFaE=",
version = "v2.1.1-0.20190517191504-25dcb96d9e51+incompatible",
)
go_repository(
name = "com_github_google_martian_v3",
build_file_proto_mode = "disable",
importpath = "github.com/google/martian/v3",
sum = "h1:pMen7vLs8nvgEYhywH3KDWJIJTeEr2ULsVWHWYHQyBs=",
version = "v3.0.0",
)
go_repository(
name = "com_github_google_pprof",
build_file_proto_mode = "disable",
importpath = "github.com/google/pprof",
sum = "h1:k+KkMRk8mGOu1xG38StS7dQ+Z6oW1i9n3dgrAVU9Q/E=",
version = "v0.0.0-20200905233945-acf8798be1f7",
)
go_repository(
name = "com_github_google_renameio",
build_file_proto_mode = "disable",
importpath = "github.com/google/renameio",
sum = "h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA=",
version = "v0.1.0",
)
go_repository(
name = "com_github_google_rpmpack",
build_file_proto_mode = "disable",
importpath = "github.com/google/rpmpack",
sum = "h1:BW6OvS3kpT5UEPbCZ+KyX/OB4Ks9/MNMhWjqPPkZxsE=",
version = "v0.0.0-20191226140753-aa36bfddb3a0",
)
go_repository(
name = "com_github_google_subcommands",
build_file_proto_mode = "disable",
importpath = "github.com/google/subcommands",
sum = "h1:/eqq+otEXm5vhfBrbREPCSVQbvofip6kIz+mX5TUH7k=",
version = "v1.0.1",
)
go_repository(
name = "com_github_google_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/google/uuid",
sum = "h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs=",
version = "v1.2.0",
)
go_repository(
name = "com_github_google_wire",
build_file_proto_mode = "disable",
importpath = "github.com/google/wire",
sum = "h1:kXcsA/rIGzJImVqPdhfnr6q0xsS9gU0515q1EPpJ9fE=",
version = "v0.4.0",
)
go_repository(
name = "com_github_googleapis_gax_go",
build_file_proto_mode = "disable",
importpath = "github.com/googleapis/gax-go",
sum = "h1:silFMLAnr330+NRuag/VjIGF7TLp/LBrV2CJKFLWEww=",
version = "v2.0.2+incompatible",
)
go_repository(
name = "com_github_googleapis_gax_go_v2",
build_file_proto_mode = "disable",
importpath = "github.com/googleapis/gax-go/v2",
sum = "h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+Tv3SM=",
version = "v2.0.5",
)
    go_repository(
        name = "com_github_googleapis_gnostic",
        # NOTE(review): this is the only entry in this section that forces
        # Gazelle build-file generation and self-replaces the module. This is a
        # known workaround for gnostic's upstream checked-in BUILD files
        # conflicting with generated ones — confirm it is still required when
        # bumping the pinned version.
        build_file_generation = "on",
        build_file_proto_mode = "disable",
        importpath = "github.com/googleapis/gnostic",
        # replace pins the module to its canonical path at the version below.
        replace = "github.com/googleapis/gnostic",
        sum = "h1:DLJCy1n/vrD4HPjOvYcT8aYQXpPIzoRZONaYwyycI+I=",
        version = "v0.4.1",
    )
go_repository(
name = "com_github_googlecloudplatform_cloud_builders_gcs_fetcher",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/cloud-builders/gcs-fetcher",
sum = "h1:Pjo3SOZigEnIGevhFqcbFndnqyCH8WimcREd3hRM9vU=",
version = "v0.0.0-20191203181535-308b93ad1f39",
)
go_repository(
name = "com_github_googlecloudplatform_cloudsql_proxy",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/cloudsql-proxy",
sum = "h1:sTOp2Ajiew5XIH92YSdwhYc+bgpUX5j5TKK/Ac8Saw8=",
version = "v0.0.0-20191009163259-e802c2cb94ae",
)
go_repository(
name = "com_github_googlecloudplatform_k8s_cloud_provider",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/k8s-cloud-provider",
sum = "h1:N7lSsF+R7wSulUADi36SInSQA3RvfO/XclHQfedr0qk=",
version = "v0.0.0-20190822182118-27a4ced34534",
)
go_repository(
name = "com_github_googlecloudplatform_testgrid",
build_file_proto_mode = "disable",
importpath = "github.com/GoogleCloudPlatform/testgrid",
sum = "h1:qs3/BQpz3j3qsgnfjV8aVBfPopkGxp/TnWjjiboUVf8=",
version = "v0.0.68",
)
go_repository(
name = "com_github_gookit_color",
build_file_proto_mode = "disable",
importpath = "github.com/gookit/color",
sum = "h1:xOYBan3Fwlrqj1M1UN2TlHOCRiek3bGzWf/vPnJ1roE=",
version = "v1.2.4",
)
go_repository(
name = "com_github_gophercloud_gophercloud",
build_file_proto_mode = "disable",
importpath = "github.com/gophercloud/gophercloud",
sum = "h1:P/nh25+rzXouhytV2pUHBb65fnds26Ghl8/391+sT5o=",
version = "v0.1.0",
)
go_repository(
name = "com_github_gopherjs_gopherjs",
build_file_proto_mode = "disable",
importpath = "github.com/gopherjs/gopherjs",
sum = "h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=",
version = "v0.0.0-20181017120253-0766667cb4d1",
)
go_repository(
name = "com_github_goreleaser_goreleaser",
build_file_proto_mode = "disable",
importpath = "github.com/goreleaser/goreleaser",
sum = "h1:Z+7XPrfGK11s/Sp+a06sx2FzGuCjTBdxN2ubpGvQbjY=",
version = "v0.136.0",
)
go_repository(
name = "com_github_goreleaser_nfpm",
build_file_proto_mode = "disable",
importpath = "github.com/goreleaser/nfpm",
sum = "h1:BPwIomC+e+yuDX9poJowzV7JFVcYA0+LwGSkbAPs2Hw=",
version = "v1.3.0",
)
go_repository(
name = "com_github_gorilla_context",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/context",
sum = "h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=",
version = "v1.1.1",
)
go_repository(
name = "com_github_gorilla_csrf",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/csrf",
sum = "h1:QqQ/OWwuFp4jMKgBFAzJVW3FMULdyUW7JoM4pEWuqKg=",
version = "v1.6.2",
)
go_repository(
name = "com_github_gorilla_handlers",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/handlers",
sum = "h1:0QniY0USkHQ1RGCLfKxeNHK9bkDHGRYGNDFBCS+YARg=",
version = "v1.4.2",
)
go_repository(
name = "com_github_gorilla_mux",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/mux",
sum = "h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=",
version = "v1.8.0",
)
go_repository(
name = "com_github_gorilla_securecookie",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/securecookie",
sum = "h1:miw7JPhV+b/lAHSXz4qd/nN9jRiAFV5FwjeKyCS8BvQ=",
version = "v1.1.1",
)
go_repository(
name = "com_github_gorilla_sessions",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/sessions",
sum = "h1:S7P+1Hm5V/AT9cjEcUD5uDaQSX0OE577aCXgoaKpYbQ=",
version = "v1.2.0",
)
go_repository(
name = "com_github_gorilla_websocket",
build_file_proto_mode = "disable",
importpath = "github.com/gorilla/websocket",
sum = "h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=",
version = "v1.4.2",
)
go_repository(
name = "com_github_gostaticanalysis_analysisutil",
build_file_proto_mode = "disable",
importpath = "github.com/gostaticanalysis/analysisutil",
sum = "h1:iwp+5/UAyzQSFgQ4uR2sni99sJ8Eo9DEacKWM5pekIg=",
version = "v0.0.3",
)
go_repository(
name = "com_github_gosuri_uitable",
build_file_proto_mode = "disable",
importpath = "github.com/gosuri/uitable",
sum = "h1:IG2xLKRvErL3uhY6e1BylFzG+aJiwQviDDTfOKeKTpY=",
version = "v0.0.4",
)
go_repository(
name = "com_github_gotestyourself_gotestyourself",
build_file_proto_mode = "disable",
importpath = "github.com/gotestyourself/gotestyourself",
sum = "h1:AQwinXlbQR2HvPjQZOmDhRqsv5mZf+Jb1RnSLxcqZcI=",
version = "v2.2.0+incompatible",
)
go_repository(
name = "com_github_gregjones_httpcache",
build_file_proto_mode = "disable",
importpath = "github.com/gregjones/httpcache",
sum = "h1:f8eY6cV/x1x+HLjOp4r72s/31/V2aTUtg5oKRRPf8/Q=",
version = "v0.0.0-20190212212710-3befbb6ad0cc",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_middleware",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/go-grpc-middleware",
sum = "h1:z53tR0945TRRQO/fLEVPI6SMv7ZflF0TEaTAoU7tOzg=",
version = "v1.0.1-0.20190118093823-f849b5445de4",
)
go_repository(
name = "com_github_grpc_ecosystem_go_grpc_prometheus",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/go-grpc-prometheus",
sum = "h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=",
version = "v1.2.0",
)
go_repository(
name = "com_github_grpc_ecosystem_grpc_gateway",
build_file_proto_mode = "disable",
importpath = "github.com/grpc-ecosystem/grpc-gateway",
sum = "h1:D0EVSTwQoQOyfY35QNSuPJA4jpZRtkoGYWQMB7XNg5o=",
version = "v1.12.2",
)
go_repository(
name = "com_github_h2non_gock",
build_file_proto_mode = "disable",
importpath = "github.com/h2non/gock",
sum = "h1:17gCehSo8ZOgEsFKpQgqHiR7VLyjxdAG3lkhVvO9QZU=",
version = "v1.0.9",
)
go_repository(
name = "com_github_hashicorp_consul_api",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/consul/api",
sum = "h1:BNQPM9ytxj6jbjjdRPioQ94T6YXriSopn0i8COv6SRA=",
version = "v1.1.0",
)
go_repository(
name = "com_github_hashicorp_consul_sdk",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/consul/sdk",
sum = "h1:LnuDWGNsoajlhGyHJvuWW6FVqRl8JOTPqS6CPTsYjhY=",
version = "v0.1.1",
)
go_repository(
name = "com_github_hashicorp_errwrap",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/errwrap",
sum = "h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_cleanhttp",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-cleanhttp",
sum = "h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM=",
version = "v0.5.1",
)
go_repository(
name = "com_github_hashicorp_go_hclog",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-hclog",
sum = "h1:CG6TE5H9/JXsFWJCfoIVpKFIkFe6ysEuHirp4DxCsHI=",
version = "v0.9.2",
)
go_repository(
name = "com_github_hashicorp_go_immutable_radix",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-immutable-radix",
sum = "h1:AKDB1HM5PWEA7i4nhcpwOrO2byshxBjXVn/J/3+z5/0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_msgpack",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-msgpack",
sum = "h1:zKjpN5BK/P5lMYrLmBHdBULWbJ0XpYR+7NGzqkZzoD4=",
version = "v0.5.3",
)
go_repository(
name = "com_github_hashicorp_go_multierror",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-multierror",
sum = "h1:B9UzwGQJehnUY1yNrnwREHc3fGbC2xefo8g4TbElacI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_hashicorp_go_net",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go.net",
sum = "h1:sNCoNyDEvN1xa+X0baata4RdcpKwcMS6DH+xwfqPgjw=",
version = "v0.0.1",
)
go_repository(
name = "com_github_hashicorp_go_plugin",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-plugin",
sum = "h1:4OtAfUGbnKC6yS48p0CtMX2oFYtzFZVv6rok3cRWgnE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_retryablehttp",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-retryablehttp",
sum = "h1:HJunrbHTDDbBb/ay4kxa1n+dLmttUlnP3V9oNE4hmsM=",
version = "v0.6.6",
)
go_repository(
name = "com_github_hashicorp_go_rootcerts",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-rootcerts",
sum = "h1:DMo4fmknnz0E0evoNYnV48RjWndOsmd6OW+09R3cEP8=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_sockaddr",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-sockaddr",
sum = "h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc=",
version = "v1.0.2",
)
go_repository(
name = "com_github_hashicorp_go_syslog",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-syslog",
sum = "h1:KaodqZuhUoZereWVIYmpUgZysurB1kBLX2j0MwMrUAE=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_go_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-uuid",
sum = "h1:fv1ep09latC32wFoVwnqcnKJGnMSdBanPczbHAYm1BE=",
version = "v1.0.1",
)
go_repository(
name = "com_github_hashicorp_go_version",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/go-version",
sum = "h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+dIzX/E=",
version = "v1.2.0",
)
go_repository(
name = "com_github_hashicorp_golang_lru",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/golang-lru",
sum = "h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=",
version = "v0.5.4",
)
go_repository(
name = "com_github_hashicorp_hcl",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/hcl",
sum = "h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_logutils",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/logutils",
sum = "h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_mdns",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/mdns",
sum = "h1:WhIgCr5a7AaVH6jPUwjtRuuE7/RDufnUvzIr48smyxs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_hashicorp_memberlist",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/memberlist",
sum = "h1:EmmoJme1matNzb+hMpDuR/0sbJSUisxyqBGG676r31M=",
version = "v0.1.3",
)
go_repository(
name = "com_github_hashicorp_serf",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/serf",
sum = "h1:YZ7UKsJv+hKjqGVUUbtE3HNj79Eln2oQ75tniF6iPt0=",
version = "v0.8.2",
)
go_repository(
name = "com_github_hashicorp_vault_api",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/vault/api",
sum = "h1:j08Or/wryXT4AcHj1oCbMd7IijXcKzYUGw59LGu9onU=",
version = "v1.0.4",
)
go_repository(
name = "com_github_hashicorp_vault_sdk",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/vault/sdk",
sum = "h1:mOEPeOhT7jl0J4AMl1E705+BcmeRs1VmKNb9F0sMLy8=",
version = "v0.1.13",
)
go_repository(
name = "com_github_hashicorp_yamux",
build_file_proto_mode = "disable",
importpath = "github.com/hashicorp/yamux",
sum = "h1:kJCB4vdITiW1eC1vq2e6IsrXKrZit1bv/TDYFGMp4BQ=",
version = "v0.0.0-20181012175058-2f1d1f20f75d",
)
go_repository(
name = "com_github_howeyc_gopass",
build_file_proto_mode = "disable",
importpath = "github.com/howeyc/gopass",
sum = "h1:kQWxfPIHVLbgLzphqk3QUflDy9QdksZR4ygR807bpy0=",
version = "v0.0.0-20170109162249-bf9dde6d0d2c",
)
go_repository(
name = "com_github_hpcloud_tail",
build_file_proto_mode = "disable",
importpath = "github.com/hpcloud/tail",
sum = "h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_huandu_xstrings",
build_file_proto_mode = "disable",
importpath = "github.com/huandu/xstrings",
sum = "h1:yPeWdRnmynF7p+lLYz0H2tthW9lqhMJrQV/U7yy4wX0=",
version = "v1.2.0",
)
go_repository(
name = "com_github_ianlancetaylor_demangle",
build_file_proto_mode = "disable",
importpath = "github.com/ianlancetaylor/demangle",
sum = "h1:UDMh68UUwekSh5iP2OMhRRZJiiBccgV7axzUG8vi56c=",
version = "v0.0.0-20181102032728-5e5cf60278f6",
)
go_repository(
name = "com_github_imdario_mergo",
build_file_proto_mode = "disable",
importpath = "github.com/imdario/mergo",
sum = "h1:b6R2BslTbIEToALKP7LxUvijTsNI9TAe80pLWN2g/HU=",
version = "v0.3.12",
)
go_repository(
name = "com_github_inconshreveable_mousetrap",
build_file_proto_mode = "disable",
importpath = "github.com/inconshreveable/mousetrap",
sum = "h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=",
version = "v1.0.0",
)
go_repository(
name = "com_github_influxdata_influxdb",
build_file_proto_mode = "disable",
importpath = "github.com/influxdata/influxdb",
sum = "h1:AciJ2ei/llFRundm7CtqwF6B2aOds1A7QG3sMW8QiaQ=",
version = "v0.0.0-20161215172503-049f9b42e9a5",
)
go_repository(
name = "com_github_influxdata_tdigest",
build_file_proto_mode = "disable",
importpath = "github.com/influxdata/tdigest",
sum = "h1:MHTrDWmQpHq/hkq+7cw9oYAt2PqUw52TZazRA0N7PGE=",
version = "v0.0.0-20181121200506-bf2b5ad3c0a9",
)
go_repository(
name = "com_github_jarcoal_httpmock",
build_file_proto_mode = "disable",
importpath = "github.com/jarcoal/httpmock",
sum = "h1:cHtVEcTxRSX4J0je7mWPfc9BpDpqzXSJ5HbymZmyHck=",
version = "v1.0.5",
)
go_repository(
name = "com_github_jbenet_go_context",
build_file_proto_mode = "disable",
importpath = "github.com/jbenet/go-context",
sum = "h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=",
version = "v0.0.0-20150711004518-d14ea06fba99",
)
go_repository(
name = "com_github_jcmturner_gofork",
build_file_proto_mode = "disable",
importpath = "github.com/jcmturner/gofork",
sum = "h1:J7uCkflzTEhUZ64xqKnkDxq3kzc96ajM1Gli5ktUem8=",
version = "v1.0.0",
)
go_repository(
name = "com_github_jenkins_x_go_scm",
build_file_proto_mode = "disable",
importpath = "github.com/jenkins-x/go-scm",
sum = "h1:D7d1sDWUU+xocCNLQVoYKpMjVKnQvsPva+hPzruchbM=",
version = "v1.5.117",
)
go_repository(
name = "com_github_jessevdk_go_flags",
build_file_proto_mode = "disable",
importpath = "github.com/jessevdk/go-flags",
sum = "h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc=",
version = "v1.5.0",
)
go_repository(
name = "com_github_jetstack_cert_manager",
build_file_proto_mode = "disable",
importpath = "github.com/jetstack/cert-manager",
sum = "h1:gEhBV9I83m+kpQShDhNO4+J8O2qfNDjvAEL27pThGmg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_jingyugao_rowserrcheck",
build_file_proto_mode = "disable",
importpath = "github.com/jingyugao/rowserrcheck",
sum = "h1:GmsqmapfzSJkm28dhRoHz2tLRbJmqhU86IPgBtN3mmk=",
version = "v0.0.0-20191204022205-72ab7603b68a",
)
go_repository(
name = "com_github_jinzhu_gorm",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/gorm",
sum = "h1:Drgk1clyWT9t9ERbzHza6Mj/8FY/CqMyVzOiHviMo6Q=",
version = "v1.9.12",
)
go_repository(
name = "com_github_jinzhu_inflection",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/inflection",
sum = "h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=",
version = "v1.0.0",
)
go_repository(
name = "com_github_jinzhu_now",
build_file_proto_mode = "disable",
importpath = "github.com/jinzhu/now",
sum = "h1:g39TucaRWyV3dwDO++eEc6qf8TVIQ/Da48WmqjZ3i7E=",
version = "v1.1.1",
)
go_repository(
name = "com_github_jirfag_go_printf_func_name",
build_file_proto_mode = "disable",
importpath = "github.com/jirfag/go-printf-func-name",
sum = "h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48=",
version = "v0.0.0-20200119135958-7558a9eaa5af",
)
go_repository(
name = "com_github_jmespath_go_jmespath",
build_file_proto_mode = "disable",
importpath = "github.com/jmespath/go-jmespath",
sum = "h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=",
version = "v0.4.0",
)
go_repository(
name = "com_github_jmespath_go_jmespath_internal_testify",
build_file_proto_mode = "disable",
importpath = "github.com/jmespath/go-jmespath/internal/testify",
sum = "h1:shLQSRRSCCPj3f2gpwzGwWFoC7ycTf1rcQZHOlsJ6N8=",
version = "v1.5.1",
)
go_repository(
name = "com_github_jmoiron_sqlx",
build_file_proto_mode = "disable",
importpath = "github.com/jmoiron/sqlx",
sum = "h1:lrdPtrORjGv1HbbEvKWDUAy97mPpFm4B8hp77tcCUJY=",
version = "v1.2.1-0.20190826204134-d7d95172beb5",
)
go_repository(
name = "com_github_joefitzgerald_rainbow_reporter",
build_file_proto_mode = "disable",
importpath = "github.com/joefitzgerald/rainbow-reporter",
sum = "h1:AuMG652zjdzI0YCCnXAqATtRBpGXMcAnrajcaTrSeuo=",
version = "v0.1.0",
)
go_repository(
name = "com_github_joho_godotenv",
build_file_proto_mode = "disable",
importpath = "github.com/joho/godotenv",
sum = "h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=",
version = "v1.3.0",
)
go_repository(
name = "com_github_jonboulle_clockwork",
build_file_proto_mode = "disable",
importpath = "github.com/jonboulle/clockwork",
sum = "h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo=",
version = "v0.1.0",
)
go_repository(
name = "com_github_joshdk_go_junit",
build_file_proto_mode = "disable",
importpath = "github.com/joshdk/go-junit",
sum = "h1:Bp5LAZasx/ev9wUmIIC74+MsXgwD99VjV1JmDVbpJm8=",
version = "v0.0.0-20190428045703-ad7e11aa49ff",
)
go_repository(
name = "com_github_jpillora_backoff",
build_file_proto_mode = "disable",
importpath = "github.com/jpillora/backoff",
sum = "h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA=",
version = "v1.0.0",
)
go_repository(
name = "com_github_json_iterator_go",
build_file_proto_mode = "disable",
importpath = "github.com/json-iterator/go",
sum = "h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ=",
version = "v1.1.11",
)
go_repository(
name = "com_github_jstemmer_go_junit_report",
build_file_proto_mode = "disable",
importpath = "github.com/jstemmer/go-junit-report",
sum = "h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=",
version = "v0.9.1",
)
go_repository(
name = "com_github_jtolds_gls",
build_file_proto_mode = "disable",
importpath = "github.com/jtolds/gls",
sum = "h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=",
version = "v4.20.0+incompatible",
)
go_repository(
name = "com_github_julienschmidt_httprouter",
build_file_proto_mode = "disable",
importpath = "github.com/julienschmidt/httprouter",
sum = "h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U=",
version = "v1.3.0",
)
go_repository(
name = "com_github_kballard_go_shellquote",
build_file_proto_mode = "disable",
importpath = "github.com/kballard/go-shellquote",
sum = "h1:Z9n2FFNUXsshfwJMBgNA0RU6/i7WVaAegv3PtuIHPMs=",
version = "v0.0.0-20180428030007-95032a82bc51",
)
go_repository(
name = "com_github_kelseyhightower_envconfig",
build_file_proto_mode = "disable",
importpath = "github.com/kelseyhightower/envconfig",
sum = "h1:Im6hONhd3pLkfDFsbRgu68RDNkGF1r3dvMUtDTo2cv8=",
version = "v1.4.0",
)
go_repository(
name = "com_github_kevinburke_ssh_config",
build_file_proto_mode = "disable",
importpath = "github.com/kevinburke/ssh_config",
sum = "h1:DowS9hvgyYSX4TO5NpyC606/Z4SxnNYbT+WX27or6Ck=",
version = "v0.0.0-20201106050909-4977a11b4351",
)
go_repository(
name = "com_github_kisielk_errcheck",
build_file_proto_mode = "disable",
importpath = "github.com/kisielk/errcheck",
sum = "h1:e8esj/e4R+SAOwFwN+n3zr0nYeCyeweozKfO23MvHzY=",
version = "v1.5.0",
)
go_repository(
name = "com_github_kisielk_gotool",
build_file_proto_mode = "disable",
importpath = "github.com/kisielk/gotool",
sum = "h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg=",
version = "v1.0.0",
)
go_repository(
name = "com_github_klauspost_compress",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/compress",
sum = "h1:Znfn6hXZAHaLPNnlqUYRrBSReFHYybslgv4PTiyz6P0=",
version = "v1.10.2",
)
go_repository(
name = "com_github_klauspost_cpuid",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/cpuid",
sum = "h1:1xAgYebNnsb9LKCdLOvFWtAxGU/33mjJtyOVbmUa0Us=",
version = "v1.2.2",
)
go_repository(
name = "com_github_klauspost_pgzip",
build_file_proto_mode = "disable",
importpath = "github.com/klauspost/pgzip",
sum = "h1:oIPZROsWuPHpOdMVWLuJZXwgjhrW8r1yEX8UqMyeNHM=",
version = "v1.2.1",
)
go_repository(
name = "com_github_knative_build",
build_file_proto_mode = "disable",
importpath = "github.com/knative/build",
sum = "h1:o/VYWA3HKyZlNqdU2hDE5LHpanBe8gazgPKL97XJ6bo=",
version = "v0.1.2",
)
go_repository(
name = "com_github_konsorten_go_windows_terminal_sequences",
build_file_proto_mode = "disable",
importpath = "github.com/konsorten/go-windows-terminal-sequences",
sum = "h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=",
version = "v1.0.3",
)
go_repository(
name = "com_github_kr_logfmt",
build_file_proto_mode = "disable",
importpath = "github.com/kr/logfmt",
sum = "h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY=",
version = "v0.0.0-20140226030751-b84e30acd515",
)
go_repository(
name = "com_github_kr_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/kr/pretty",
sum = "h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=",
version = "v0.2.1",
)
go_repository(
name = "com_github_kr_pty",
build_file_proto_mode = "disable",
importpath = "github.com/kr/pty",
sum = "h1:AkaSdXYQOWeaO3neb8EM634ahkXXe3jYbVh/F9lq+GI=",
version = "v1.1.8",
)
go_repository(
name = "com_github_kr_text",
build_file_proto_mode = "disable",
importpath = "github.com/kr/text",
sum = "h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=",
version = "v0.2.0",
)
go_repository(
name = "com_github_lib_pq",
build_file_proto_mode = "disable",
importpath = "github.com/lib/pq",
sum = "h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=",
version = "v1.2.0",
)
go_repository(
name = "com_github_liggitt_tabwriter",
build_file_proto_mode = "disable",
importpath = "github.com/liggitt/tabwriter",
sum = "h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0=",
version = "v0.0.0-20181228230101-89fcab3d43de",
)
go_repository(
name = "com_github_lightstep_tracecontext_go",
build_file_proto_mode = "disable",
importpath = "github.com/lightstep/tracecontext.go",
sum = "h1:+2b6iGRJe3hvV/yVXrd41yVEjxuFHxasJqDhkIjS4gk=",
version = "v0.0.0-20181129014701-1757c391b1ac",
)
go_repository(
name = "com_github_lithammer_dedent",
build_file_proto_mode = "disable",
importpath = "github.com/lithammer/dedent",
sum = "h1:VNzHMVCBNG1j0fh3OrsFRkVUwStdDArbgBWoPAffktY=",
version = "v1.1.0",
)
go_repository(
name = "com_github_logrusorgru_aurora",
build_file_proto_mode = "disable",
importpath = "github.com/logrusorgru/aurora",
sum = "h1:9MlwzLdW7QSDrhDjFlsEYmxpFyIoXmYRon3dt0io31k=",
version = "v0.0.0-20181002194514-a7b3b318ed4e",
)
go_repository(
name = "com_github_lyft_protoc_gen_validate",
build_file_proto_mode = "disable",
importpath = "github.com/lyft/protoc-gen-validate",
sum = "h1:KNt/RhmQTOLr7Aj8PsJ7mTronaFyx80mRTT9qF261dA=",
version = "v0.0.13",
)
go_repository(
name = "com_github_magiconair_properties",
build_file_proto_mode = "disable",
importpath = "github.com/magiconair/properties",
sum = "h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=",
version = "v1.8.1",
)
go_repository(
name = "com_github_mailru_easyjson",
build_file_proto_mode = "disable",
importpath = "github.com/mailru/easyjson",
sum = "h1:jcoUdG1TzY/M/eM5BLFLP8DJeMximx5NQYSlLL9YeWc=",
version = "v0.7.1-0.20191009090205-6c0755d89d1e",
)
go_repository(
name = "com_github_makenowjust_heredoc",
build_file_proto_mode = "disable",
importpath = "github.com/MakeNowJust/heredoc",
sum = "h1:sjQovDkwrZp8u+gxLtPgKGjk5hCxuy2hrRejBTA9xFU=",
version = "v0.0.0-20170808103936-bb23615498cd",
)
go_repository(
name = "com_github_maratori_testpackage",
build_file_proto_mode = "disable",
importpath = "github.com/maratori/testpackage",
sum = "h1:QtJ5ZjqapShm0w5DosRjg0PRlSdAdlx+W6cCKoALdbQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_markbates_inflect",
build_file_proto_mode = "disable",
importpath = "github.com/markbates/inflect",
sum = "h1:5fh1gzTFhfae06u3hzHYO9xe3l3v3nW5Pwt3naLTP5g=",
version = "v1.0.4",
)
go_repository(
name = "com_github_marstr_guid",
build_file_proto_mode = "disable",
importpath = "github.com/marstr/guid",
sum = "h1:/M4H/1G4avsieL6BbUwCOBzulmoeKVP5ux/3mQNnbyI=",
version = "v1.1.0",
)
go_repository(
name = "com_github_masterminds_goutils",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/goutils",
sum = "h1:zukEsf/1JZwCMgHiK3GZftabmxiCw4apj3a28RPBiVg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_masterminds_semver",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/semver",
sum = "h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=",
version = "v1.5.0",
)
go_repository(
name = "com_github_masterminds_semver_v3",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/semver/v3",
sum = "h1:Y2lUDsFKVRSYGojLJ1yLxSXdMmMYTYls0rCvoqmMUQk=",
version = "v3.1.0",
)
go_repository(
name = "com_github_masterminds_sprig_v3",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/sprig/v3",
sum = "h1:wz22D0CiSctrliXiI9ZO3HoNApweeRGftyDN+BQa3B8=",
version = "v3.0.2",
)
go_repository(
name = "com_github_masterminds_vcs",
build_file_proto_mode = "disable",
importpath = "github.com/Masterminds/vcs",
sum = "h1:NL3G1X7/7xduQtA2sJLpVpfHTNBALVNSjob6KEjPXNQ=",
version = "v1.13.1",
)
go_repository(
name = "com_github_matoous_godox",
build_file_proto_mode = "disable",
importpath = "github.com/matoous/godox",
sum = "h1:RHba4YImhrUVQDHUCe2BNSOz4tVy2yGyXhvYDvxGgeE=",
version = "v0.0.0-20190911065817-5d6d842e92eb",
)
go_repository(
name = "com_github_mattbaird_jsonpatch",
build_file_proto_mode = "disable",
importpath = "github.com/mattbaird/jsonpatch",
sum = "h1:+J2gw7Bw77w/fbK7wnNJJDKmw1IbWft2Ul5BzrG1Qm8=",
version = "v0.0.0-20171005235357-81af80346b1a",
)
go_repository(
name = "com_github_mattn_go_colorable",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-colorable",
sum = "h1:6Su7aK7lXmJ/U79bYtBjLNaha4Fs1Rg9plHpcH+vvnE=",
version = "v0.1.6",
)
go_repository(
name = "com_github_mattn_go_ieproxy",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-ieproxy",
sum = "h1:qiyop7gCflfhwCzGyeT0gro3sF9AIg9HU98JORTkqfI=",
version = "v0.0.1",
)
go_repository(
name = "com_github_mattn_go_isatty",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-isatty",
sum = "h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=",
version = "v0.0.12",
)
go_repository(
name = "com_github_mattn_go_runewidth",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-runewidth",
sum = "h1:3tS41NlGYSmhhe/8fhGRzc+z3AYCw1Fe1WAyLuujKs0=",
version = "v0.0.8",
)
go_repository(
name = "com_github_mattn_go_shellwords",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-shellwords",
sum = "h1:Y7Xqm8piKOO3v10Thp7Z36h4FYFjt5xB//6XvOrs2Gw=",
version = "v1.0.10",
)
go_repository(
name = "com_github_mattn_go_sqlite3",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-sqlite3",
sum = "h1:xQ15muvnzGBHpIpdrNi1DA5x0+TcBZzsIDwmw9uTHzw=",
version = "v2.0.1+incompatible",
)
go_repository(
name = "com_github_mattn_go_zglob",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/go-zglob",
sum = "h1:0qT24o2wsZ8cOXQAERwBX6s+rPMs/bJTKxLVVtgfDXc=",
version = "v0.0.2",
)
go_repository(
name = "com_github_mattn_goveralls",
build_file_proto_mode = "disable",
importpath = "github.com/mattn/goveralls",
sum = "h1:7eJB6EqsPhRVxvwEXGnqdO2sJI0PTsrWoTMXEk9/OQc=",
version = "v0.0.2",
)
go_repository(
name = "com_github_matttproud_golang_protobuf_extensions",
build_file_proto_mode = "disable",
importpath = "github.com/matttproud/golang_protobuf_extensions",
sum = "h1:I0XW9+e1XWDxdcEniV4rQAIOPUGDq67JSCiRCgGCZLI=",
version = "v1.0.2-0.20181231171920-c182affec369",
)
go_repository(
name = "com_github_maxbrunsfeld_counterfeiter_v6",
build_file_proto_mode = "disable",
importpath = "github.com/maxbrunsfeld/counterfeiter/v6",
sum = "h1:g+4J5sZg6osfvEfkRZxJ1em0VT95/UOZgi/l7zi1/oE=",
version = "v6.2.2",
)
go_repository(
name = "com_github_mgutz_ansi",
build_file_proto_mode = "disable",
importpath = "github.com/mgutz/ansi",
sum = "h1:j7+1HpAFS1zy5+Q4qx1fWh90gTKwiN4QCGoY9TWyyO4=",
version = "v0.0.0-20170206155736-9520e82c474b",
)
go_repository(
name = "com_github_mholt_archiver_v3",
build_file_proto_mode = "disable",
importpath = "github.com/mholt/archiver/v3",
sum = "h1:vWjhY8SQp5yzM9P6OJ/eZEkmi3UAbRrxCq48MxjAzig=",
version = "v3.3.0",
)
go_repository(
name = "com_github_microsoft_go_winio",
build_file_proto_mode = "disable",
importpath = "github.com/Microsoft/go-winio",
sum = "h1:FtSW/jqD+l4ba5iPBj9CODVtgfYAD8w2wS923g/cFDk=",
version = "v0.4.16",
)
go_repository(
name = "com_github_microsoft_hcsshim",
build_file_proto_mode = "disable",
importpath = "github.com/Microsoft/hcsshim",
sum = "h1:ptnOoufxGSzauVTsdE+wMYnCWA301PdoN4xg5oRdZpg=",
version = "v0.8.7",
)
go_repository(
name = "com_github_miekg_dns",
build_file_proto_mode = "disable",
importpath = "github.com/miekg/dns",
sum = "h1:sJFOl9BgwbYAWOGEwr61FU28pqsBNdpRBnhGXtO06Oo=",
version = "v1.1.31",
)
go_repository(
name = "com_github_mitchellh_cli",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/cli",
sum = "h1:iGBIsUe3+HZ/AD/Vd7DErOt5sU9fa8Uj7A2s1aggv1Y=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_copystructure",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/copystructure",
sum = "h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_go_homedir",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-homedir",
sum = "h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=",
version = "v1.1.0",
)
go_repository(
name = "com_github_mitchellh_go_ps",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-ps",
sum = "h1:9+ke9YJ9KGWw5ANXK6ozjoK47uI3uNbXv4YVINBnGm8=",
version = "v0.0.0-20190716172923-621e5597135b",
)
go_repository(
name = "com_github_mitchellh_go_testing_interface",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-testing-interface",
sum = "h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_go_wordwrap",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/go-wordwrap",
sum = "h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_gox",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/gox",
sum = "h1:lfGJxY7ToLJQjHHwi0EX6uYBdK78egf954SQl13PQJc=",
version = "v0.4.0",
)
go_repository(
name = "com_github_mitchellh_iochan",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/iochan",
sum = "h1:C+X3KsSTLFVBr/tK1eYN/vs4rJcvsiLU338UhYPJWeY=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mitchellh_ioprogress",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/ioprogress",
sum = "h1:Qa6dnn8DlasdXRnacluu8HzPts0S1I9zvvUPDbBnXFI=",
version = "v0.0.0-20180201004757-6a23b12fa88e",
)
go_repository(
name = "com_github_mitchellh_mapstructure",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/mapstructure",
sum = "h1:cCBH2gTD2K0OtLlv/Y5H01VQCqmlDxz30kS5Y5bqfLA=",
version = "v1.3.1",
)
go_repository(
name = "com_github_mitchellh_osext",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/osext",
sum = "h1:2+myh5ml7lgEU/51gbeLHfKGNfgEQQIWrlbdaOsidbQ=",
version = "v0.0.0-20151018003038-5e2d6d41470f",
)
go_repository(
name = "com_github_mitchellh_reflectwalk",
build_file_proto_mode = "disable",
importpath = "github.com/mitchellh/reflectwalk",
sum = "h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY=",
version = "v1.0.0",
)
go_repository(
name = "com_github_moby_spdystream",
build_file_proto_mode = "disable",
importpath = "github.com/moby/spdystream",
sum = "h1:cjW1zVyyoiM0T7b6UoySUFqzXMoqRckQtXwGPiBhOM8=",
version = "v0.2.0",
)
go_repository(
name = "com_github_moby_term",
build_file_proto_mode = "disable",
importpath = "github.com/moby/term",
sum = "h1:rzf0wL0CHVc8CEsgyygG0Mn9CNCCPZqOPaz8RiiHYQk=",
version = "v0.0.0-20201216013528-df9cb8a40635",
)
go_repository(
name = "com_github_modern_go_concurrent",
build_file_proto_mode = "disable",
importpath = "github.com/modern-go/concurrent",
sum = "h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg=",
version = "v0.0.0-20180306012644-bacd9c7ef1dd",
)
go_repository(
name = "com_github_modern_go_reflect2",
build_file_proto_mode = "disable",
importpath = "github.com/modern-go/reflect2",
sum = "h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=",
version = "v1.0.1",
)
go_repository(
name = "com_github_mohae_deepcopy",
build_file_proto_mode = "disable",
importpath = "github.com/mohae/deepcopy",
sum = "h1:RWengNIwukTxcDr9M+97sNutRR1RKhG96O6jWumTTnw=",
version = "v0.0.0-20170929034955-c48cc78d4826",
)
go_repository(
name = "com_github_morikuni_aec",
build_file_proto_mode = "disable",
importpath = "github.com/morikuni/aec",
sum = "h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=",
version = "v1.0.0",
)
go_repository(
name = "com_github_mozilla_tls_observatory",
build_file_proto_mode = "disable",
importpath = "github.com/mozilla/tls-observatory",
sum = "h1:1xJ+Xi9lYWLaaP4yB67ah0+548CD3110mCPWhVVjFkI=",
version = "v0.0.0-20200317151703-4fa42e1c2dee",
)
go_repository(
name = "com_github_munnerz_crd_schema_fuzz",
build_file_proto_mode = "disable",
importpath = "github.com/munnerz/crd-schema-fuzz",
sum = "h1:8erI9yzEnOGw9K5O+a8zZdoo8N/OwrFi7c7SjBtkHAs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_munnerz_goautoneg",
build_file_proto_mode = "disable",
importpath = "github.com/munnerz/goautoneg",
sum = "h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=",
version = "v0.0.0-20191010083416-a7dc8b61c822",
)
go_repository(
name = "com_github_mwitkow_go_conntrack",
build_file_proto_mode = "disable",
importpath = "github.com/mwitkow/go-conntrack",
sum = "h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=",
version = "v0.0.0-20190716064945-2f068394615f",
)
go_repository(
name = "com_github_mxk_go_flowrate",
build_file_proto_mode = "disable",
importpath = "github.com/mxk/go-flowrate",
sum = "h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus=",
version = "v0.0.0-20140419014527-cca7078d478f",
)
go_repository(
name = "com_github_nakabonne_nestif",
build_file_proto_mode = "disable",
importpath = "github.com/nakabonne/nestif",
sum = "h1:+yOViDGhg8ygGrmII72nV9B/zGxY188TYpfolntsaPw=",
version = "v0.3.0",
)
go_repository(
name = "com_github_natefinch_lumberjack",
build_file_proto_mode = "disable",
importpath = "github.com/natefinch/lumberjack",
sum = "h1:4QJd3OLAMgj7ph+yZTuX13Ld4UpgHp07nNdFX7mqFfM=",
version = "v2.0.0+incompatible",
)
go_repository(
name = "com_github_nats_io_gnatsd",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/gnatsd",
sum = "h1:RconcfDeWpKCD6QIIwiVFcvForlXpWeJP7i5/lDLy44=",
version = "v1.4.1",
)
go_repository(
name = "com_github_nats_io_go_nats",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/go-nats",
sum = "h1:oQOfHcLr8hb43QG8yeVyY2jtarIaTjOv41CGdF3tTvQ=",
version = "v1.7.0",
)
go_repository(
name = "com_github_nats_io_jwt",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/jwt",
sum = "h1:+RB5hMpXUUA2dfxuhBTEkMOrYmM+gKIZYS1KjSostMI=",
version = "v0.3.2",
)
go_repository(
name = "com_github_nats_io_nats_go",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nats.go",
sum = "h1:ik3HbLhZ0YABLto7iX80pZLPw/6dx3T+++MZJwLnMrQ=",
version = "v1.9.1",
)
go_repository(
name = "com_github_nats_io_nats_server_v2",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nats-server/v2",
sum = "h1:i2Ly0B+1+rzNZHHWtD4ZwKi+OU5l+uQo1iDHZ2PmiIc=",
version = "v2.1.2",
)
go_repository(
name = "com_github_nats_io_nkeys",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nkeys",
sum = "h1:6JrEfig+HzTH85yxzhSVbjHRJv9cn0p6n3IngIcM5/k=",
version = "v0.1.3",
)
go_repository(
name = "com_github_nats_io_nuid",
build_file_proto_mode = "disable",
importpath = "github.com/nats-io/nuid",
sum = "h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=",
version = "v1.0.1",
)
go_repository(
name = "com_github_nbio_st",
build_file_proto_mode = "disable",
importpath = "github.com/nbio/st",
sum = "h1:W6apQkHrMkS0Muv8G/TipAy/FJl/rCYT0+EuS8+Z0z4=",
version = "v0.0.0-20140626010706-e9e8d9816f32",
)
go_repository(
name = "com_github_nbutton23_zxcvbn_go",
build_file_proto_mode = "disable",
importpath = "github.com/nbutton23/zxcvbn-go",
sum = "h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E=",
version = "v0.0.0-20180912185939-ae427f1e4c1d",
)
go_repository(
name = "com_github_ncw_swift",
build_file_proto_mode = "disable",
importpath = "github.com/ncw/swift",
sum = "h1:4DQRPj35Y41WogBxyhOXlrI37nzGlyEcsforeudyYPQ=",
version = "v1.0.47",
)
go_repository(
name = "com_github_niemeyer_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/niemeyer/pretty",
sum = "h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=",
version = "v0.0.0-20200227124842-a10e7caefd8e",
)
go_repository(
name = "com_github_nwaples_rardecode",
build_file_proto_mode = "disable",
importpath = "github.com/nwaples/rardecode",
sum = "h1:r7vGuS5akxOnR4JQSkko62RJ1ReCMXxQRPtxsiFMBOs=",
version = "v1.0.0",
)
go_repository(
name = "com_github_nxadm_tail",
build_file_proto_mode = "disable",
importpath = "github.com/nxadm/tail",
sum = "h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=",
version = "v1.4.8",
)
go_repository(
name = "com_github_nytimes_gziphandler",
build_file_proto_mode = "disable",
importpath = "github.com/NYTimes/gziphandler",
sum = "h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=",
version = "v1.1.1",
)
go_repository(
name = "com_github_octago_sflags",
build_file_proto_mode = "disable",
importpath = "github.com/octago/sflags",
sum = "h1:XceYzkRXGAHa/lSFmKLcaxSrsh4MTuOMQdIGsUD0wlk=",
version = "v0.2.0",
)
go_repository(
name = "com_github_oklog_run",
build_file_proto_mode = "disable",
importpath = "github.com/oklog/run",
sum = "h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_oklog_ulid",
build_file_proto_mode = "disable",
importpath = "github.com/oklog/ulid",
sum = "h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4=",
version = "v1.3.1",
)
go_repository(
name = "com_github_olekukonko_tablewriter",
build_file_proto_mode = "disable",
importpath = "github.com/olekukonko/tablewriter",
sum = "h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8=",
version = "v0.0.4",
)
go_repository(
name = "com_github_oneofone_xxhash",
build_file_proto_mode = "disable",
importpath = "github.com/OneOfOne/xxhash",
sum = "h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE=",
version = "v1.2.2",
)
go_repository(
name = "com_github_onsi_ginkgo",
build_file_proto_mode = "disable",
importpath = "github.com/onsi/ginkgo",
sum = "h1:29JGrr5oVBm5ulCWet69zQkzWipVXIol6ygQUe/EzNc=",
version = "v1.16.4",
)
go_repository(
name = "com_github_onsi_gomega",
build_file_proto_mode = "disable",
importpath = "github.com/onsi/gomega",
sum = "h1:7lLHu94wT9Ij0o6EWWclhu0aOh32VxhkwEJvzuWPeak=",
version = "v1.13.0",
)
go_repository(
name = "com_github_op_go_logging",
build_file_proto_mode = "disable",
importpath = "github.com/op/go-logging",
sum = "h1:lDH9UUVJtmYCjyT0CI4q8xvlXPxeZ0gYCVvWbmPlp88=",
version = "v0.0.0-20160315200505-970db520ece7",
)
go_repository(
name = "com_github_opencontainers_go_digest",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/go-digest",
sum = "h1:WzifXhOVOEOuFYOJAW6aQqW0TooG2iki3E3Ii+WN7gQ=",
version = "v1.0.0-rc1",
)
go_repository(
name = "com_github_opencontainers_image_spec",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/image-spec",
sum = "h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI=",
version = "v1.0.1",
)
go_repository(
name = "com_github_opencontainers_runc",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runc",
sum = "h1:GlxAyO6x8rfZYN9Tt0Kti5a/cP41iuiO2yYT0IJGY8Y=",
version = "v0.1.1",
)
go_repository(
name = "com_github_opencontainers_runtime_spec",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runtime-spec",
sum = "h1:eNUVfm/RFLIi1G7flU5/ZRTHvd4kcVuzfRnL6OFlzCI=",
version = "v0.1.2-0.20190507144316-5b71a03e2700",
)
go_repository(
name = "com_github_opencontainers_runtime_tools",
build_file_proto_mode = "disable",
importpath = "github.com/opencontainers/runtime-tools",
sum = "h1:H7DMc6FAjgwZZi8BRqjrAAHWoqEr5e5L6pS4V0ezet4=",
version = "v0.0.0-20181011054405-1d69bd0f9c39",
)
go_repository(
name = "com_github_openpeedeep_depguard",
build_file_proto_mode = "disable",
importpath = "github.com/OpenPeeDeeP/depguard",
sum = "h1:VlW4R6jmBIv3/u1JNlawEvJMM4J+dPORPaZasQee8Us=",
version = "v1.0.1",
)
go_repository(
name = "com_github_openzipkin_zipkin_go",
build_file_proto_mode = "disable",
importpath = "github.com/openzipkin/zipkin-go",
sum = "h1:nY8Hti+WKaP0cRsSeQ026wU03QsM762XBeCXBb9NAWI=",
version = "v0.2.2",
)
go_repository(
name = "com_github_otiai10_copy",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/copy",
sum = "h1:DDNipYy6RkIkjMwy+AWzgKiNTyj2RUI9yEMeETEpVyc=",
version = "v1.0.2",
)
go_repository(
name = "com_github_otiai10_curr",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/curr",
sum = "h1:+OLn68pqasWca0z5ryit9KGfp3sUsW4Lqg32iRMJyzs=",
version = "v0.0.0-20150429015615-9b4961190c95",
)
go_repository(
name = "com_github_otiai10_mint",
build_file_proto_mode = "disable",
importpath = "github.com/otiai10/mint",
sum = "h1:Ady6MKVezQwHBkGzLFbrsywyp09Ah7rkmfjV3Bcr5uc=",
version = "v1.3.0",
)
go_repository(
name = "com_github_pascaldekloe_goe",
build_file_proto_mode = "disable",
importpath = "github.com/pascaldekloe/goe",
sum = "h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=",
version = "v0.1.0",
)
go_repository(
name = "com_github_pavel_v_chernykh_keystore_go",
build_file_proto_mode = "disable",
importpath = "github.com/pavel-v-chernykh/keystore-go",
sum = "h1:Jd6xfriVlJ6hWPvYOE0Ni0QWcNTLRehfGPFxr3eSL80=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_pborman_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/pborman/uuid",
sum = "h1:J7Q5mO4ysT1dv8hyrUGHb9+ooztCXu1D8MY8DZYsu3g=",
version = "v1.2.0",
)
go_repository(
name = "com_github_pelletier_go_buffruneio",
build_file_proto_mode = "disable",
importpath = "github.com/pelletier/go-buffruneio",
sum = "h1:U4t4R6YkofJ5xHm3dJzuRpPZ0mr5MMCoAWooScCR7aA=",
version = "v0.2.0",
)
go_repository(
name = "com_github_pelletier_go_toml",
build_file_proto_mode = "disable",
importpath = "github.com/pelletier/go-toml",
sum = "h1:Keo9qb7iRJs2voHvunFtuuYFsbWeOBh8/P9v/kVMFtw=",
version = "v1.8.0",
)
go_repository(
name = "com_github_peterbourgon_diskv",
build_file_proto_mode = "disable",
importpath = "github.com/peterbourgon/diskv",
sum = "h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI=",
version = "v2.0.1+incompatible",
)
go_repository(
name = "com_github_phayes_checkstyle",
build_file_proto_mode = "disable",
importpath = "github.com/phayes/checkstyle",
sum = "h1:CdDQnGF8Nq9ocOS/xlSptM1N3BbrA6/kmaep5ggwaIA=",
version = "v0.0.0-20170904204023-bfd46e6a821d",
)
go_repository(
name = "com_github_phayes_freeport",
build_file_proto_mode = "disable",
importpath = "github.com/phayes/freeport",
sum = "h1:JhzVVoYvbOACxoUmOs6V/G4D5nPVUW73rKvXxP4XUJc=",
version = "v0.0.0-20180830031419-95f893ade6f2",
)
go_repository(
name = "com_github_pierrec_lz4",
build_file_proto_mode = "disable",
importpath = "github.com/pierrec/lz4",
sum = "h1:6aCX4/YZ9v8q69hTyiR7dNLnTA3fgtKHVVW5BCd5Znw=",
version = "v2.2.6+incompatible",
)
go_repository(
name = "com_github_pkg_errors",
build_file_proto_mode = "disable",
importpath = "github.com/pkg/errors",
sum = "h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=",
version = "v0.9.1",
)
go_repository(
name = "com_github_pkg_profile",
build_file_proto_mode = "disable",
importpath = "github.com/pkg/profile",
sum = "h1:F++O52m40owAmADcojzM+9gyjmMOY/T4oYJkgFDH8RE=",
version = "v1.2.1",
)
go_repository(
name = "com_github_pmezard_go_difflib",
build_file_proto_mode = "disable",
importpath = "github.com/pmezard/go-difflib",
sum = "h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=",
version = "v1.0.0",
)
go_repository(
name = "com_github_posener_complete",
build_file_proto_mode = "disable",
importpath = "github.com/posener/complete",
sum = "h1:ccV59UEOTzVDnDUEFdT95ZzHVZ+5+158q8+SJb2QV5w=",
version = "v1.1.1",
)
go_repository(
name = "com_github_pquerna_cachecontrol",
build_file_proto_mode = "disable",
importpath = "github.com/pquerna/cachecontrol",
sum = "h1:0XM1XL/OFFJjXsYXlG30spTkV/E9+gmd5GD1w2HE8xM=",
version = "v0.0.0-20171018203845-0dec1b30a021",
)
go_repository(
name = "com_github_prometheus_client_golang",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/client_golang",
sum = "h1:HNkLOAEQMIDv/K+04rukrLx6ch7msSRwf3/SASFAGtQ=",
version = "v1.11.0",
)
go_repository(
name = "com_github_prometheus_client_model",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/client_model",
sum = "h1:uq5h0d+GuxiXLJLNABMgp2qUWDPiLvgCzz2dUR+/W/M=",
version = "v0.2.0",
)
go_repository(
name = "com_github_prometheus_common",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/common",
sum = "h1:iMAkS2TDoNWnKM+Kopnx/8tnEStIfpYA0ur0xQzzhMQ=",
version = "v0.26.0",
)
go_repository(
name = "com_github_prometheus_procfs",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/procfs",
sum = "h1:mxy4L2jP6qMonqmq+aTtOx1ifVWUgG/TAmntgbh3xv4=",
version = "v0.6.0",
)
go_repository(
name = "com_github_prometheus_tsdb",
build_file_proto_mode = "disable",
importpath = "github.com/prometheus/tsdb",
sum = "h1:YZcsG11NqnK4czYLrWd9mpEuAJIHVQLwdrleYfszMAA=",
version = "v0.7.1",
)
go_repository(
name = "com_github_puerkitobio_purell",
build_file_proto_mode = "disable",
importpath = "github.com/PuerkitoBio/purell",
sum = "h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=",
version = "v1.1.1",
)
go_repository(
name = "com_github_puerkitobio_urlesc",
build_file_proto_mode = "disable",
importpath = "github.com/PuerkitoBio/urlesc",
sum = "h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=",
version = "v0.0.0-20170810143723-de5bf2ad4578",
)
go_repository(
name = "com_github_quasilyte_go_consistent",
build_file_proto_mode = "disable",
importpath = "github.com/quasilyte/go-consistent",
sum = "h1:JoUA0uz9U0FVFq5p4LjEq4C0VgQ0El320s3Ms0V4eww=",
version = "v0.0.0-20190521200055-c6f3937de18c",
)
go_repository(
name = "com_github_quasilyte_go_ruleguard",
build_file_proto_mode = "disable",
importpath = "github.com/quasilyte/go-ruleguard",
sum = "h1:DvnesvLtRPQOvaUbfXfh0tpMHg29by0H7F2U+QIkSu8=",
version = "v0.1.2-0.20200318202121-b00d7a75d3d8",
)
go_repository(
name = "com_github_rcrowley_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/rcrowley/go-metrics",
sum = "h1:eUm8ma4+yPknhXtkYlWh3tMkE6gBjXZToDned9s2gbQ=",
version = "v0.0.0-20190706150252-9beb055b7962",
)
go_repository(
name = "com_github_remyoudompheng_bigfft",
build_file_proto_mode = "disable",
importpath = "github.com/remyoudompheng/bigfft",
sum = "h1:/NRJ5vAYoqz+7sG51ubIDHXeWO8DlTSrToPu6q11ziA=",
version = "v0.0.0-20170806203942-52369c62f446",
)
go_repository(
name = "com_github_rogpeppe_fastuuid",
build_file_proto_mode = "disable",
importpath = "github.com/rogpeppe/fastuuid",
sum = "h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s=",
version = "v1.2.0",
)
go_repository(
name = "com_github_rogpeppe_go_internal",
build_file_proto_mode = "disable",
importpath = "github.com/rogpeppe/go-internal",
sum = "h1:qLvObTrvO/XRCqmkKxUlOBc48bI3efyDuAZe25QiF0w=",
version = "v1.5.2",
)
go_repository(
name = "com_github_rubiojr_go_vhd",
build_file_proto_mode = "disable",
importpath = "github.com/rubiojr/go-vhd",
sum = "h1:ht7N4d/B7Ezf58nvMNVF3OlvDlz9pp+WHVcRNS0nink=",
version = "v0.0.0-20160810183302-0bfd3b39853c",
)
go_repository(
name = "com_github_russross_blackfriday",
build_file_proto_mode = "disable",
importpath = "github.com/russross/blackfriday",
sum = "h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=",
version = "v1.5.2",
)
go_repository(
name = "com_github_russross_blackfriday_v2",
build_file_proto_mode = "disable",
importpath = "github.com/russross/blackfriday/v2",
sum = "h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=",
version = "v2.0.1",
)
go_repository(
name = "com_github_rwcarlsen_goexif",
build_file_proto_mode = "disable",
importpath = "github.com/rwcarlsen/goexif",
sum = "h1:CmH9+J6ZSsIjUK3dcGsnCnO41eRBOnY12zwkn5qVwgc=",
version = "v0.0.0-20190401172101-9e8deecbddbd",
)
go_repository(
name = "com_github_ryancurrah_gomodguard",
build_file_proto_mode = "disable",
importpath = "github.com/ryancurrah/gomodguard",
sum = "h1:DWbye9KyMgytn8uYpuHkwf0RHqAYO6Ay/D0TbCpPtVU=",
version = "v1.1.0",
)
go_repository(
name = "com_github_ryanuber_columnize",
build_file_proto_mode = "disable",
importpath = "github.com/ryanuber/columnize",
sum = "h1:j1Wcmh8OrK4Q7GXY+V7SVSY8nUWQxHW5TkBe7YUl+2s=",
version = "v2.1.0+incompatible",
)
go_repository(
name = "com_github_ryanuber_go_glob",
build_file_proto_mode = "disable",
importpath = "github.com/ryanuber/go-glob",
sum = "h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk=",
version = "v1.0.0",
)
go_repository(
name = "com_github_sassoftware_go_rpmutils",
build_file_proto_mode = "disable",
importpath = "github.com/sassoftware/go-rpmutils",
sum = "h1:+gCnWOZV8Z/8jehJ2CdqB47Z3S+SREmQcuXkRFLNsiI=",
version = "v0.0.0-20190420191620-a8f1baeba37b",
)
go_repository(
name = "com_github_satori_go_uuid",
build_file_proto_mode = "disable",
importpath = "github.com/satori/go.uuid",
sum = "h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=",
version = "v1.2.0",
)
go_repository(
name = "com_github_sclevine_spec",
build_file_proto_mode = "disable",
importpath = "github.com/sclevine/spec",
sum = "h1:1Jwdf9jSfDl9NVmt8ndHqbTZ7XCCPbh1jI3hkDBHVYA=",
version = "v1.2.0",
)
go_repository(
name = "com_github_sean_seed",
build_file_proto_mode = "disable",
importpath = "github.com/sean-/seed",
sum = "h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=",
version = "v0.0.0-20170313163322-e2103e2c3529",
)
go_repository(
name = "com_github_securego_gosec",
build_file_proto_mode = "disable",
importpath = "github.com/securego/gosec",
sum = "h1:rq2/kILQnPtq5oL4+IAjgVOjh5e2yj2aaCYi7squEvI=",
version = "v0.0.0-20200401082031-e946c8c39989",
)
go_repository(
name = "com_github_securego_gosec_v2",
build_file_proto_mode = "disable",
importpath = "github.com/securego/gosec/v2",
sum = "h1:y/9mCF2WPDbSDpL3QDWZD3HHGrSYw0QSHnCqTfs4JPE=",
version = "v2.3.0",
)
go_repository(
name = "com_github_sergi_go_diff",
build_file_proto_mode = "disable",
importpath = "github.com/sergi/go-diff",
sum = "h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=",
version = "v1.1.0",
)
go_repository(
name = "com_github_shirou_gopsutil",
build_file_proto_mode = "disable",
importpath = "github.com/shirou/gopsutil",
sum = "h1:WokF3GuxBeL+n4Lk4Fa8v9mbdjlrl7bHuneF4N1bk2I=",
version = "v0.0.0-20190901111213-e4ec7b275ada",
)
go_repository(
name = "com_github_shirou_w32",
build_file_proto_mode = "disable",
importpath = "github.com/shirou/w32",
sum = "h1:udFKJ0aHUL60LboW/A+DfgoHVedieIzIXE8uylPue0U=",
version = "v0.0.0-20160930032740-bb4de0191aa4",
)
go_repository(
name = "com_github_shopify_logrus_bugsnag",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/logrus-bugsnag",
sum = "h1:UrqY+r/OJnIp5u0s1SbQ8dVfLCZJsnvazdBP5hS4iRs=",
version = "v0.0.0-20171204204709-577dee27f20d",
)
go_repository(
name = "com_github_shopify_sarama",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/sarama",
sum = "h1:XxJBCZEoWJtoWjf/xRbmGUpAmTZGnuuF0ON0EvxxBrs=",
version = "v1.23.1",
)
go_repository(
name = "com_github_shopify_toxiproxy",
build_file_proto_mode = "disable",
importpath = "github.com/Shopify/toxiproxy",
sum = "h1:TKdv8HiTLgE5wdJuEML90aBgNWsokNbMijUGhmcoBJc=",
version = "v2.1.4+incompatible",
)
go_repository(
name = "com_github_shurcool_githubv4",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/githubv4",
sum = "h1:N5B+JgvM/DVYIxreItPJMM3yWrNO/GB2q4nESrtBisM=",
version = "v0.0.0-20210725200734-83ba7b4c9228",
)
go_repository(
name = "com_github_shurcool_go",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/go",
sum = "h1:MZM7FHLqUHYI0Y/mQAt3d2aYa0SiNms/hFqC9qJYolM=",
version = "v0.0.0-20180423040247-9e1955d9fb6e",
)
go_repository(
name = "com_github_shurcool_go_goon",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/go-goon",
sum = "h1:llrF3Fs4018ePo4+G/HV/uQUqEI1HMDjCeOf2V6puPc=",
version = "v0.0.0-20170922171312-37c2f522c041",
)
go_repository(
name = "com_github_shurcool_graphql",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/graphql",
sum = "h1:tygelZueB1EtXkPI6mQ4o9DQ0+FKW41hTbunoXZCTqk=",
version = "v0.0.0-20181231061246-d48a9a75455f",
)
go_repository(
name = "com_github_shurcool_sanitized_anchor_name",
build_file_proto_mode = "disable",
importpath = "github.com/shurcooL/sanitized_anchor_name",
sum = "h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=",
version = "v1.0.0",
)
go_repository(
name = "com_github_sirupsen_logrus",
build_file_proto_mode = "disable",
importpath = "github.com/sirupsen/logrus",
sum = "h1:dJKuHgqk1NNQlqoA6BTlM1Wf9DOH3NBjQyu0h9+AZZE=",
version = "v1.8.1",
)
go_repository(
name = "com_github_smartystreets_assertions",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/assertions",
sum = "h1:42S6lae5dvLc7BrLu/0ugRtcFVjoJNMC/N3yZFZkDFs=",
version = "v1.2.0",
)
go_repository(
name = "com_github_smartystreets_go_aws_auth",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/go-aws-auth",
sum = "h1:hp2CYQUINdZMHdvTdXtPOY2ainKl4IoMcpAXEf2xj3Q=",
version = "v0.0.0-20180515143844-0c1422d1fdb9",
)
go_repository(
name = "com_github_smartystreets_goconvey",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/goconvey",
sum = "h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=",
version = "v1.6.4",
)
go_repository(
name = "com_github_smartystreets_gunit",
build_file_proto_mode = "disable",
importpath = "github.com/smartystreets/gunit",
sum = "h1:RyPDUFcJbvtXlhJPk7v+wnxZRY2EUokhEYl2EJOPToI=",
version = "v1.0.0",
)
go_repository(
name = "com_github_soheilhy_cmux",
build_file_proto_mode = "disable",
importpath = "github.com/soheilhy/cmux",
sum = "h1:0HKaf1o97UwFjHH9o5XsHUOF+tqmdA7KEzXLpiyaw0E=",
version = "v0.1.4",
)
go_repository(
name = "com_github_sourcegraph_go_diff",
build_file_proto_mode = "disable",
importpath = "github.com/sourcegraph/go-diff",
sum = "h1:lhIKJ2nXLZZ+AfbHpYxTn0pXpNTTui0DX7DO3xeb1Zs=",
version = "v0.5.3",
)
go_repository(
name = "com_github_spaolacci_murmur3",
build_file_proto_mode = "disable",
importpath = "github.com/spaolacci/murmur3",
sum = "h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ=",
version = "v0.0.0-20180118202830-f09979ecbc72",
)
go_repository(
name = "com_github_spf13_afero",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/afero",
sum = "h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc=",
version = "v1.2.2",
)
go_repository(
name = "com_github_spf13_cast",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/cast",
sum = "h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=",
version = "v1.3.1",
)
go_repository(
name = "com_github_spf13_cobra",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/cobra",
sum = "h1:xghbfqPkxzxP3C/f3n5DdpAbdKLj4ZE4BWQI362l53M=",
version = "v1.1.3",
)
go_repository(
name = "com_github_spf13_jwalterweatherman",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/jwalterweatherman",
sum = "h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=",
version = "v1.1.0",
)
go_repository(
name = "com_github_spf13_pflag",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/pflag",
sum = "h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=",
version = "v1.0.5",
)
go_repository(
name = "com_github_spf13_viper",
build_file_proto_mode = "disable",
importpath = "github.com/spf13/viper",
sum = "h1:pM5oEahlgWv/WnHXpgbKz7iLIxRf65tye2Ci+XFK5sk=",
version = "v1.7.1",
)
go_repository(
name = "com_github_src_d_gcfg",
build_file_proto_mode = "disable",
importpath = "github.com/src-d/gcfg",
sum = "h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4=",
version = "v1.4.0",
)
go_repository(
name = "com_github_stackexchange_wmi",
build_file_proto_mode = "disable",
importpath = "github.com/StackExchange/wmi",
sum = "h1:fLjPD/aNc3UIOA6tDi6QXUemppXK3P9BI7mr2hd6gx8=",
version = "v0.0.0-20180116203802-5d049714c4a6",
)
go_repository(
name = "com_github_streadway_amqp",
build_file_proto_mode = "disable",
importpath = "github.com/streadway/amqp",
sum = "h1:0ngsPmuP6XIjiFRNFYlvKwSr5zff2v+uPHaffZ6/M4k=",
version = "v0.0.0-20190404075320-75d898a42a94",
)
go_repository(
name = "com_github_streadway_quantile",
build_file_proto_mode = "disable",
importpath = "github.com/streadway/quantile",
sum = "h1:7z3LSn867ex6VSaahyKadf4WtSsJIgne6A1WLOAGM8A=",
version = "v0.0.0-20150917103942-b0c588724d25",
)
go_repository(
name = "com_github_stretchr_objx",
build_file_proto_mode = "disable",
importpath = "github.com/stretchr/objx",
sum = "h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=",
version = "v0.2.0",
)
go_repository(
name = "com_github_stretchr_testify",
build_file_proto_mode = "disable",
importpath = "github.com/stretchr/testify",
sum = "h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=",
version = "v1.7.0",
)
go_repository(
name = "com_github_subosito_gotenv",
build_file_proto_mode = "disable",
importpath = "github.com/subosito/gotenv",
sum = "h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=",
version = "v1.2.0",
)
go_repository(
name = "com_github_syndtr_gocapability",
build_file_proto_mode = "disable",
importpath = "github.com/syndtr/gocapability",
sum = "h1:zLV6q4e8Jv9EHjNg/iHfzwDkCve6Ua5jCygptrtXHvI=",
version = "v0.0.0-20170704070218-db04d3cc01c8",
)
go_repository(
name = "com_github_tdakkota_asciicheck",
build_file_proto_mode = "disable",
importpath = "github.com/tdakkota/asciicheck",
sum = "h1:HxLVTlqcHhFAz3nWUcuvpH7WuOMv8LQoCWmruLfFH2U=",
version = "v0.0.0-20200416200610-e657995f937b",
)
go_repository(
name = "com_github_tektoncd_pipeline",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/pipeline",
sum = "h1:hWdWj5bDjkSGYLlJS+u+Kh9ZktBJgs2JNUv/kP0LVOA=",
version = "v0.13.1-0.20200625065359-44f22a067b75",
)
go_repository(
name = "com_github_tektoncd_plumbing",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/plumbing",
sum = "h1:crv70CBAJ2gZFSbf13aRVwdbjR2GYwTms/ZEok/SnFM=",
version = "v0.0.0-20200430135134-e53521e1d887",
)
go_repository(
name = "com_github_tektoncd_plumbing_pipelinerun_logs",
build_file_proto_mode = "disable",
importpath = "github.com/tektoncd/plumbing/pipelinerun-logs",
sum = "h1:9qeyrQsoPZbHOyOPt0OeB1TCYXfYb5swrxlFWzTIYYk=",
version = "v0.0.0-20191206114338-712d544c2c21",
)
go_repository(
name = "com_github_tetafro_godot",
build_file_proto_mode = "disable",
importpath = "github.com/tetafro/godot",
sum = "h1:Dib7un+rYJFUi8vN0Bk6EHheKy6fv6ZzFURHw75g6m8=",
version = "v0.4.2",
)
go_repository(
name = "com_github_tidwall_pretty",
build_file_proto_mode = "disable",
importpath = "github.com/tidwall/pretty",
sum = "h1:HsD+QiTn7sK6flMKIvNmpqz1qrpP3Ps6jOKIKMooyg4=",
version = "v1.0.0",
)
go_repository(
name = "com_github_timakin_bodyclose",
build_file_proto_mode = "disable",
importpath = "github.com/timakin/bodyclose",
sum = "h1:ig99OeTyDwQWhPe2iw9lwfQVF1KB3Q4fpP3X7/2VBG8=",
version = "v0.0.0-20200424151742-cb6215831a94",
)
go_repository(
name = "com_github_tj_assert",
build_file_proto_mode = "disable",
importpath = "github.com/tj/assert",
sum = "h1:Rw8kxzWo1mr6FSaYXjQELRe88y2KdfynXdnK72rdjtA=",
version = "v0.0.0-20171129193455-018094318fb0",
)
go_repository(
name = "com_github_tj_go_elastic",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-elastic",
sum = "h1:eGaGNxrtoZf/mBURsnNQKDR7u50Klgcf2eFDQEnc8Bc=",
version = "v0.0.0-20171221160941-36157cbbebc2",
)
go_repository(
name = "com_github_tj_go_kinesis",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-kinesis",
sum = "h1:m74UWYy+HBs+jMFR9mdZU6shPewugMyH5+GV6LNgW8w=",
version = "v0.0.0-20171128231115-08b17f58cb1b",
)
go_repository(
name = "com_github_tj_go_spin",
build_file_proto_mode = "disable",
importpath = "github.com/tj/go-spin",
sum = "h1:lhdWZsvImxvZ3q1C5OIB7d72DuOwP4O2NdBg9PyzNds=",
version = "v1.1.0",
)
go_repository(
name = "com_github_tmc_grpc_websocket_proxy",
build_file_proto_mode = "disable",
importpath = "github.com/tmc/grpc-websocket-proxy",
sum = "h1:LnC5Kc/wtumK+WB441p7ynQJzVuNRJiqddSIE3IlSEQ=",
version = "v0.0.0-20190109142713-0ad062ec5ee5",
)
go_repository(
name = "com_github_tommy_muehle_go_mnd",
build_file_proto_mode = "disable",
importpath = "github.com/tommy-muehle/go-mnd",
sum = "h1:RC4maTWLKKwb7p1cnoygsbKIgNlJqSYBeAFON3Ar8As=",
version = "v1.3.1-0.20200224220436-e6f9a994e8fa",
)
go_repository(
name = "com_github_trivago_tgo",
build_file_proto_mode = "disable",
importpath = "github.com/trivago/tgo",
sum = "h1:bxatjJIXNIpV18bucU4Uk/LaoxvxuOlp/oowRHyncLQ=",
version = "v1.0.1",
)
go_repository(
name = "com_github_tsenart_vegeta",
build_file_proto_mode = "disable",
importpath = "github.com/tsenart/vegeta",
sum = "h1:ErZrHhRveAoznVW80gbrxz+qxJNydpA2fcQxTPHkZbU=",
version = "v12.7.1-0.20190725001342-b5f4fca92137+incompatible",
)
go_repository(
name = "com_github_ugorji_go",
build_file_proto_mode = "disable",
importpath = "github.com/ugorji/go",
sum = "h1:j4s+tAvLfL3bZyefP2SEWmhBzmuIlH/eqNuPdFPgngw=",
version = "v1.1.4",
)
go_repository(
name = "com_github_ugorji_go_codec",
build_file_proto_mode = "disable",
importpath = "github.com/ugorji/go/codec",
sum = "h1:3SVOIvH7Ae1KRYyQWRjXWJEA9sS/c/pjvH++55Gr648=",
version = "v0.0.0-20181204163529-d75b2dcb6bc8",
)
go_repository(
name = "com_github_ulikunitz_xz",
build_file_proto_mode = "disable",
importpath = "github.com/ulikunitz/xz",
sum = "h1:YvTNdFzX6+W5m9msiYg/zpkSURPPtOlzbqYjrFn7Yt4=",
version = "v0.5.7",
)
go_repository(
name = "com_github_ultraware_funlen",
build_file_proto_mode = "disable",
importpath = "github.com/ultraware/funlen",
sum = "h1:Av96YVBwwNSe4MLR7iI/BIa3VyI7/djnto/pK3Uxbdo=",
version = "v0.0.2",
)
go_repository(
name = "com_github_ultraware_whitespace",
build_file_proto_mode = "disable",
importpath = "github.com/ultraware/whitespace",
sum = "h1:If7Va4cM03mpgrNH9k49/VOicWpGoG70XPBFFODYDsg=",
version = "v0.0.4",
)
go_repository(
name = "com_github_urfave_cli",
build_file_proto_mode = "disable",
importpath = "github.com/urfave/cli",
sum = "h1:u7tSpNPPswAFymm8IehJhy4uJMlUuU/GmqSkvJ1InXA=",
version = "v1.22.4",
)
go_repository(
name = "com_github_urfave_cli_v2",
build_file_proto_mode = "disable",
importpath = "github.com/urfave/cli/v2",
sum = "h1:Qt8FeAtxE/vfdrLmR3rxR6JRE0RoVmbXu8+6kZtYU4k=",
version = "v2.1.1",
)
go_repository(
name = "com_github_uudashr_gocognit",
build_file_proto_mode = "disable",
importpath = "github.com/uudashr/gocognit",
sum = "h1:MoG2fZ0b/Eo7NXoIwCVFLG5JED3qgQz5/NEE+rOsjPs=",
version = "v1.0.1",
)
go_repository(
name = "com_github_valyala_bytebufferpool",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/bytebufferpool",
sum = "h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=",
version = "v1.0.0",
)
go_repository(
name = "com_github_valyala_fasthttp",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/fasthttp",
sum = "h1:dzZJf2IuMiclVjdw0kkT+f9u4YdrapbNyGAN47E/qnk=",
version = "v1.2.0",
)
go_repository(
name = "com_github_valyala_quicktemplate",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/quicktemplate",
sum = "h1:BaO1nHTkspYzmAjPXj0QiDJxai96tlcZyKcI9dyEGvM=",
version = "v1.2.0",
)
go_repository(
name = "com_github_valyala_tcplisten",
build_file_proto_mode = "disable",
importpath = "github.com/valyala/tcplisten",
sum = "h1:0R4NLDRDZX6JcmhJgXi5E4b8Wg84ihbmUKp/GvSPEzc=",
version = "v0.0.0-20161114210144-ceec8f93295a",
)
go_repository(
name = "com_github_vdemeester_k8s_pkg_credentialprovider",
build_file_proto_mode = "disable",
importpath = "github.com/vdemeester/k8s-pkg-credentialprovider",
sum = "h1:czKEIG2Q3YRTgs6x/8xhjVMJD5byPo6cZuostkbTM74=",
version = "v1.17.4",
)
go_repository(
name = "com_github_vektah_gqlparser",
build_file_proto_mode = "disable",
importpath = "github.com/vektah/gqlparser",
sum = "h1:ZsyLGn7/7jDNI+y4SEhI4yAxRChlv15pUHMjijT+e68=",
version = "v1.1.2",
)
go_repository(
name = "com_github_venafi_vcert_v4",
build_file_proto_mode = "disable",
importpath = "github.com/Venafi/vcert/v4",
sum = "h1:37gfyjS9v5YvZcIABwNPo1fAC31lIZT7glVK1vfUxk4=",
version = "v4.11.0",
)
go_repository(
name = "com_github_vmware_govmomi",
build_file_proto_mode = "disable",
importpath = "github.com/vmware/govmomi",
sum = "h1:gpw/0Ku+6RgF3jsi7fnCLmlcikBHfKBCUcu1qgc16OU=",
version = "v0.20.3",
)
go_repository(
name = "com_github_xanzy_go_gitlab",
build_file_proto_mode = "disable",
importpath = "github.com/xanzy/go-gitlab",
sum = "h1:tBm+OXv1t+KBsqlXkSDFz+YUjRM0GFsjpOWYOod3Ebs=",
version = "v0.32.0",
)
go_repository(
name = "com_github_xanzy_ssh_agent",
build_file_proto_mode = "disable",
importpath = "github.com/xanzy/ssh-agent",
sum = "h1:wUMzuKtKilRgBAD1sUb8gOwwRr2FGoBVumcjoOACClI=",
version = "v0.3.0",
)
go_repository(
name = "com_github_xdg_scram",
build_file_proto_mode = "disable",
importpath = "github.com/xdg/scram",
sum = "h1:u40Z8hqBAAQyv+vATcGgV0YCnDjqSL7/q/JyPhhJSPk=",
version = "v0.0.0-20180814205039-7eeb5667e42c",
)
go_repository(
name = "com_github_xdg_stringprep",
build_file_proto_mode = "disable",
importpath = "github.com/xdg/stringprep",
sum = "h1:d9X0esnoa3dFsV0FG35rAT0RIhYFlPq7MiP+DW89La0=",
version = "v1.0.0",
)
go_repository(
name = "com_github_xeipuuv_gojsonpointer",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonpointer",
sum = "h1:J9EGpcZtP0E/raorCMxlFGSTBrsSlaDGf3jU/qvAE2c=",
version = "v0.0.0-20180127040702-4e3ac2762d5f",
)
go_repository(
name = "com_github_xeipuuv_gojsonreference",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonreference",
sum = "h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0=",
version = "v0.0.0-20180127040603-bd5ef7bd5415",
)
go_repository(
name = "com_github_xeipuuv_gojsonschema",
build_file_proto_mode = "disable",
importpath = "github.com/xeipuuv/gojsonschema",
sum = "h1:ngVtJC9TY/lg0AA/1k48FYhBrhRoFlEmWzsehpNAaZg=",
version = "v1.1.0",
)
go_repository(
name = "com_github_xi2_xz",
build_file_proto_mode = "disable",
importpath = "github.com/xi2/xz",
sum = "h1:nIPpBwaJSVYIxUFsDv3M8ofmx9yWTog9BfvIu0q41lo=",
version = "v0.0.0-20171230120015-48954b6210f8",
)
go_repository(
name = "com_github_xiang90_probing",
build_file_proto_mode = "disable",
importpath = "github.com/xiang90/probing",
sum = "h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8=",
version = "v0.0.0-20190116061207-43a291ad63a2",
)
go_repository(
name = "com_github_xlab_handysort",
build_file_proto_mode = "disable",
importpath = "github.com/xlab/handysort",
sum = "h1:j2hhcujLRHAg872RWAV5yaUrEjHEObwDv3aImCaNLek=",
version = "v0.0.0-20150421192137-fb3537ed64a1",
)
go_repository(
name = "com_github_xordataexchange_crypt",
build_file_proto_mode = "disable",
importpath = "github.com/xordataexchange/crypt",
sum = "h1:ESFSdwYZvkeru3RtdrYueztKhOBCSAAzS4Gf+k0tEow=",
version = "v0.0.3-0.20170626215501-b2862e3d0a77",
)
go_repository(
name = "com_github_yuin_goldmark",
build_file_proto_mode = "disable",
importpath = "github.com/yuin/goldmark",
sum = "h1:ruQGxdhGHe7FWOJPT0mKs5+pD2Xs1Bm/kdGlHO04FmM=",
version = "v1.2.1",
)
go_repository(
name = "com_github_yvasiyarov_go_metrics",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/go-metrics",
sum = "h1:+lm10QQTNSBd8DVTNGHx7o/IKu9HYDvLMffDhbyLccI=",
version = "v0.0.0-20140926110328-57bccd1ccd43",
)
go_repository(
name = "com_github_yvasiyarov_gorelic",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/gorelic",
sum = "h1:hlE8//ciYMztlGpl/VA+Zm1AcTPHYkHJPbHqE6WJUXE=",
version = "v0.0.0-20141212073537-a9bba5b9ab50",
)
go_repository(
name = "com_github_yvasiyarov_newrelic_platform_go",
build_file_proto_mode = "disable",
importpath = "github.com/yvasiyarov/newrelic_platform_go",
sum = "h1:ERexzlUfuTvpE74urLSbIQW0Z/6hF9t8U4NsJLaioAY=",
version = "v0.0.0-20140908184405-b21fdbd4370f",
)
go_repository(
name = "com_google_cloud_go",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go",
sum = "h1:DZeAkuQGQqnm9Xv36SbMJEU8aFBz4wL04UpMWPWwjzg=",
version = "v0.66.0",
)
go_repository(
name = "com_google_cloud_go_bigquery",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/bigquery",
sum = "h1:PQcPefKFdaIzjQFbiyOgAqyx8q5djaE7x9Sqe712DPA=",
version = "v1.8.0",
)
go_repository(
name = "com_google_cloud_go_datastore",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/datastore",
sum = "h1:/May9ojXjRkPBNVrq+oWLqmWCkr4OU5uRY29bu0mRyQ=",
version = "v1.1.0",
)
go_repository(
name = "com_google_cloud_go_firestore",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/firestore",
sum = "h1:9x7Bx0A9R5/M9jibeJeZWqjeVEIxYW9fZYqB9a70/bY=",
version = "v1.1.0",
)
go_repository(
name = "com_google_cloud_go_logging",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/logging",
sum = "h1:kaunpnoEh9L4hu6JUsBa8Y20LBfKnCuDhKUgdZp7oK8=",
version = "v1.0.0",
)
go_repository(
name = "com_google_cloud_go_pubsub",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/pubsub",
sum = "h1:76oR7VBOkL7ivoIrFKyW0k7YDCRelrlxktIzQiIUGgg=",
version = "v1.4.0",
)
go_repository(
name = "com_google_cloud_go_storage",
build_file_proto_mode = "disable",
importpath = "cloud.google.com/go/storage",
sum = "h1:4y3gHptW1EHVtcPAVE0eBBlFuGqEejTTG3KdIE0lUX4=",
version = "v1.12.0",
)
go_repository(
name = "com_shuralyov_dmitri_gpu_mtl",
build_file_proto_mode = "disable",
importpath = "dmitri.shuralyov.com/gpu/mtl",
sum = "h1:VpgP7xuJadIUuKccphEpTJnWhS2jkQyMt6Y7pJCD7fY=",
version = "v0.0.0-20190408044501-666a987793e9",
)
go_repository(
name = "com_sourcegraph_sqs_pbtypes",
build_file_proto_mode = "disable",
importpath = "sourcegraph.com/sqs/pbtypes",
sum = "h1:f7lAwqviDEGvON4kRv0o5V7FT/IQK+tbkF664XMbP3o=",
version = "v1.0.0",
)
go_repository(
name = "com_sslmate_software_src_go_pkcs12",
build_file_proto_mode = "disable",
importpath = "software.sslmate.com/src/go-pkcs12",
sum = "h1:AVd6O+azYjVQYW1l55IqkbL8/JxjrLtO6q4FCmV8N5c=",
version = "v0.0.0-20200830195227-52f69702a001",
)
go_repository(
name = "dev_gocloud",
build_file_proto_mode = "disable",
importpath = "gocloud.dev",
sum = "h1:EDRyaRAnMGSq/QBto486gWFxMLczAfIYUmusV7XLNBM=",
version = "v0.19.0",
)
go_repository(
name = "dev_knative_caching",
build_file_proto_mode = "disable",
importpath = "knative.dev/caching",
sum = "h1:mxrur6DsVK8uIjhIq7c1OMls4YjBcRlyvnh3Vx13a0M=",
version = "v0.0.0-20200116200605-67bca2c83dfa",
)
go_repository(
name = "dev_knative_eventing_contrib",
build_file_proto_mode = "disable",
importpath = "knative.dev/eventing-contrib",
sum = "h1:xncT+JrokPG+hPUFJwue8ubPpzmziV9GUIZqYt01JDo=",
version = "v0.11.2",
)
go_repository(
name = "dev_knative_pkg",
build_file_proto_mode = "disable",
importpath = "knative.dev/pkg",
sum = "h1:NDQS+236vhwCP9oiBBGvQ5WGzbD0Y8Pcv9dtE2stg+Q=",
version = "v0.0.0-20200711004937-22502028e31a",
)
go_repository(
name = "dev_knative_test_infra",
build_file_proto_mode = "disable",
importpath = "knative.dev/test-infra",
sum = "h1:wNlGK4f5Ykqh3KLC5RlyR9kvzvRgo/LwJQNsZWGVHnU=",
version = "v0.0.0-20200707183444-aed09e56ddc7",
)
go_repository(
name = "in_gopkg_airbrake_gobrake_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/airbrake/gobrake.v2",
sum = "h1:7z2uVWwn7oVeeugY1DtlPAy5H+KYgB1KeKTnqjNatLo=",
version = "v2.0.9",
)
go_repository(
name = "in_gopkg_alecthomas_kingpin_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/alecthomas/kingpin.v2",
sum = "h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=",
version = "v2.2.6",
)
go_repository(
name = "in_gopkg_asn1_ber_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/asn1-ber.v1",
sum = "h1:TxyelI5cVkbREznMhfzycHdkp5cLA7DpE+GKjSslYhM=",
version = "v1.0.0-20181015200546-f715ec2f112d",
)
go_repository(
name = "in_gopkg_check_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/check.v1",
sum = "h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=",
version = "v1.0.0-20201130134442-10cb98267c6c",
)
go_repository(
name = "in_gopkg_cheggaaa_pb_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/cheggaaa/pb.v1",
sum = "h1:Ev7yu1/f6+d+b3pi5vPdRPc6nNtP1umSfcWiEfRqv6I=",
version = "v1.0.25",
)
go_repository(
name = "in_gopkg_errgo_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/errgo.v2",
sum = "h1:0vLT13EuvQ0hNvakwLuFZ/jYrLp5F3kcWHXdRggjCE8=",
version = "v2.1.0",
)
go_repository(
name = "in_gopkg_fsnotify_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/fsnotify.v1",
sum = "h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=",
version = "v1.4.7",
)
go_repository(
name = "in_gopkg_gcfg_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/gcfg.v1",
sum = "h1:0HIbH907iBTAntm+88IJV2qmJALDAh8sPekI9Vc1fm0=",
version = "v1.2.0",
)
go_repository(
name = "in_gopkg_gemnasium_logrus_airbrake_hook_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/gemnasium/logrus-airbrake-hook.v2",
sum = "h1:OAj3g0cR6Dx/R07QgQe8wkA9RNjB2u4i700xBkIT4e0=",
version = "v2.1.2",
)
go_repository(
name = "in_gopkg_inf_v0",
build_file_proto_mode = "disable",
importpath = "gopkg.in/inf.v0",
sum = "h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc=",
version = "v0.9.1",
)
go_repository(
name = "in_gopkg_ini_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/ini.v1",
sum = "h1:DPMeDvGTM54DXbPkVIZsp19fp/I2K7zwA/itHYHKo8Y=",
version = "v1.56.0",
)
go_repository(
name = "in_gopkg_jcmturner_aescts_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/aescts.v1",
sum = "h1:cVVZBK2b1zY26haWB4vbBiZrfFQnfbTVrE3xZq6hrEw=",
version = "v1.0.1",
)
go_repository(
name = "in_gopkg_jcmturner_dnsutils_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/dnsutils.v1",
sum = "h1:cIuC1OLRGZrld+16ZJvvZxVJeKPsvd5eUIvxfoN5hSM=",
version = "v1.0.1",
)
go_repository(
name = "in_gopkg_jcmturner_gokrb5_v7",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/gokrb5.v7",
sum = "h1:0709Jtq/6QXEuWRfAm260XqlpcwL1vxtO1tUE2qK8Z4=",
version = "v7.3.0",
)
go_repository(
name = "in_gopkg_jcmturner_rpc_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/jcmturner/rpc.v1",
sum = "h1:QHIUxTX1ISuAv9dD2wJ9HWQVuWDX/Zc0PfeC2tjc4rU=",
version = "v1.1.0",
)
go_repository(
name = "in_gopkg_natefinch_lumberjack_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/natefinch/lumberjack.v2",
sum = "h1:1Lc07Kr7qY4U2YPouBjpCLxpiyxIVoxqXgkXLknAOE8=",
version = "v2.0.0",
)
go_repository(
name = "in_gopkg_resty_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/resty.v1",
sum = "h1:CuXP0Pjfw9rOuY6EP+UvtNvt5DSqHpIxILZKT/quCZI=",
version = "v1.12.0",
)
go_repository(
name = "in_gopkg_robfig_cron_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/robfig/cron.v2",
sum = "h1:E846t8CnR+lv5nE+VuiKTDG/v1U2stad0QzddfJC7kY=",
version = "v2.0.0-20150107220207-be2e0b0deed5",
)
go_repository(
name = "in_gopkg_square_go_jose_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/square/go-jose.v2",
sum = "h1:SK5KegNXmKmqE342YYN2qPHEnUYeoMiXXl1poUlI+o4=",
version = "v2.3.1",
)
go_repository(
name = "in_gopkg_src_d_go_billy_v4",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-billy.v4",
sum = "h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg=",
version = "v4.3.2",
)
go_repository(
name = "in_gopkg_src_d_go_git_fixtures_v3",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-git-fixtures.v3",
sum = "h1:ivZFOIltbce2Mo8IjzUHAFoq/IylO9WHhNOAJK+LsJg=",
version = "v3.5.0",
)
go_repository(
name = "in_gopkg_src_d_go_git_v4",
build_file_proto_mode = "disable",
importpath = "gopkg.in/src-d/go-git.v4",
sum = "h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE=",
version = "v4.13.1",
)
go_repository(
name = "in_gopkg_tomb_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/tomb.v1",
sum = "h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=",
version = "v1.0.0-20141024135613-dd632973f1e7",
)
go_repository(
name = "in_gopkg_warnings_v0",
build_file_proto_mode = "disable",
importpath = "gopkg.in/warnings.v0",
sum = "h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=",
version = "v0.1.2",
)
go_repository(
name = "in_gopkg_yaml_v1",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v1",
sum = "h1:POO/ycCATvegFmVuPpQzZFJ+pGZeX22Ufu6fibxDVjU=",
version = "v1.0.0-20140924161607-9f9df34309c0",
)
go_repository(
name = "in_gopkg_yaml_v2",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v2",
sum = "h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=",
version = "v2.4.0",
)
go_repository(
name = "in_gopkg_yaml_v3",
build_file_proto_mode = "disable",
importpath = "gopkg.in/yaml.v3",
sum = "h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=",
version = "v3.0.0-20210107192922-496545a6307b",
)
go_repository(
name = "io_etcd_go_bbolt",
build_file_proto_mode = "disable",
importpath = "go.etcd.io/bbolt",
sum = "h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0=",
version = "v1.3.5",
)
go_repository(
name = "io_etcd_go_etcd",
build_file_proto_mode = "disable",
importpath = "go.etcd.io/etcd",
sum = "h1:1JFLBqwIgdyHN1ZtgjTBwO+blA6gVOmZurpiMEsETKo=",
version = "v0.5.0-alpha.5.0.20200910180754-dd1b699fc489",
)
go_repository(
name = "io_gitea_code_sdk_gitea",
build_file_proto_mode = "disable",
importpath = "code.gitea.io/sdk/gitea",
sum = "h1:hvDCz4wtFvo7rf5Ebj8tGd4aJ4wLPKX3BKFX9Dk1Pgs=",
version = "v0.12.0",
)
    # NOTE(review): uses build_file_proto_mode = "disable_global" where almost
    # every other stanza in this file uses "disable" — presumably intentional
    # for the core k8s.io repos so proto handling is disabled for consumers as
    # well, but confirm against the gazelle directives that generated this file.
    go_repository(
        name = "io_k8s_api",
        build_file_proto_mode = "disable_global",
        importpath = "k8s.io/api",
        sum = "h1:94bbZ5NTjdINJEdzOkpS4vdPhkb1VFpTYC9zh43f75c=",
        version = "v0.21.1",
    )
go_repository(
name = "io_k8s_apiextensions_apiserver",
build_file_proto_mode = "disable",
importpath = "k8s.io/apiextensions-apiserver",
sum = "h1:AA+cnsb6w7SZ1vD32Z+zdgfXdXY8X9uGX5bN6EoPEIo=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_apimachinery",
build_file_proto_mode = "disable_global",
importpath = "k8s.io/apimachinery",
sum = "h1:Q6XuHGlj2xc+hlMCvqyYfbv3H7SRGn2c8NycxJquDVs=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_apiserver",
build_file_proto_mode = "disable",
importpath = "k8s.io/apiserver",
sum = "h1:wTRcid53IhxhbFt4KTrFSw8tAncfr01EP91lzfcygVg=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_cli_runtime",
build_file_proto_mode = "disable",
importpath = "k8s.io/cli-runtime",
sum = "h1:wLe+osHSqcItyS3MYQXVyGFa54fppORVA8Jn7DBGSWw=",
version = "v0.19.0",
)
    go_repository(
        name = "io_k8s_client_go",
        build_file_proto_mode = "disable_global",
        importpath = "k8s.io/client-go",
        # NOTE(review): the replace target is identical to the importpath, so it
        # redirects nowhere — presumably this mirrors a replace directive in the
        # project's go.mod (pinning the canonical path/version) rather than an
        # actual module substitution; confirm against go.mod before removing.
        replace = "k8s.io/client-go",
        sum = "h1:bhblWYLZKUu+pm50plvQF8WpY6TXdRRtcS/K9WauOj4=",
        version = "v0.21.1",
    )
go_repository(
name = "io_k8s_cloud_provider",
build_file_proto_mode = "disable",
importpath = "k8s.io/cloud-provider",
sum = "h1:ELMIQwweSNu8gfVEnLDypxd9034S1sZJg6QcdWJOvMI=",
version = "v0.17.4",
)
go_repository(
name = "io_k8s_code_generator",
build_file_proto_mode = "disable",
importpath = "k8s.io/code-generator",
sum = "h1:jvcxHpVu5dm/LMXr3GOj/jroiP8+v2YnJE9i2OVRenk=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_component_base",
build_file_proto_mode = "disable",
importpath = "k8s.io/component-base",
sum = "h1:iLpj2btXbR326s/xNQWmPNGu0gaYSjzn7IN/5i28nQw=",
version = "v0.21.1",
)
go_repository(
name = "io_k8s_csi_translation_lib",
build_file_proto_mode = "disable",
importpath = "k8s.io/csi-translation-lib",
sum = "h1:bP9yGfCJDknP7tklCwizZtwgJNRePMVcEaFIfeA11ho=",
version = "v0.17.4",
)
go_repository(
name = "io_k8s_gengo",
build_file_proto_mode = "disable",
importpath = "k8s.io/gengo",
sum = "h1:Uusb3oh8XcdzDF/ndlI4ToKTYVlkCSJP39SRY2mfRAw=",
version = "v0.0.0-20201214224949-b6c5ce23f027",
)
go_repository(
name = "io_k8s_klog",
build_file_proto_mode = "disable",
importpath = "k8s.io/klog",
sum = "h1:Pt+yjF5aB1xDSVbau4VsWe+dQNzA0qv1LlXdC2dF6Q8=",
version = "v1.0.0",
)
go_repository(
name = "io_k8s_klog_v2",
build_file_proto_mode = "disable",
importpath = "k8s.io/klog/v2",
sum = "h1:Q3gmuM9hKEjefWFFYF0Mat+YyFJvsUyYuwyNNJ5C9Ts=",
version = "v2.8.0",
)
go_repository(
name = "io_k8s_kube_aggregator",
build_file_proto_mode = "disable",
importpath = "k8s.io/kube-aggregator",
sum = "h1:rL4fsftMaqkKjaibArYDaBeqN41CHaJzgRJjUB9IrIg=",
version = "v0.19.0",
)
go_repository(
name = "io_k8s_kube_openapi",
build_file_proto_mode = "disable",
importpath = "k8s.io/kube-openapi",
sum = "h1:vEx13qjvaZ4yfObSSXW7BrMc/KQBBT/Jyee8XtLf4x0=",
version = "v0.0.0-20210305001622-591a79e4bda7",
)
go_repository(
name = "io_k8s_kubectl",
build_file_proto_mode = "disable",
importpath = "k8s.io/kubectl",
sum = "h1:t9uxaZzGvqc2jY96mjnPSjFHtaKOxoUegeGZdaGT6aw=",
version = "v0.19.0",
)
go_repository(
name = "io_k8s_kubernetes",
build_file_proto_mode = "disable",
importpath = "k8s.io/kubernetes",
sum = "h1:wJx/r2HuPVaaBeCUk/P47GSK0eyrj3mI/kESRFBp6/A=",
version = "v1.14.7",
)
go_repository(
name = "io_k8s_legacy_cloud_providers",
build_file_proto_mode = "disable",
importpath = "k8s.io/legacy-cloud-providers",
sum = "h1:VvFqJGiYAr2gIdoNuqbeZLEdxIFeN4Yt6OLJS9l2oIE=",
version = "v0.17.4",
)
go_repository(
name = "io_k8s_metrics",
build_file_proto_mode = "disable",
importpath = "k8s.io/metrics",
sum = "h1:cKq0+Z7wg5qkK1n8dryNffKfU22DBX83JguGpR+TCk0=",
version = "v0.19.0",
)
go_repository(
name = "io_k8s_sigs_apiserver_network_proxy_konnectivity_client",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/apiserver-network-proxy/konnectivity-client",
sum = "h1:4uqm9Mv+w2MmBYD+F4qf/v6tDFUdPOk29C095RbU5mY=",
version = "v0.0.15",
)
go_repository(
name = "io_k8s_sigs_boskos",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/boskos",
sum = "h1:OCr84Jrq4HgrYxP9wrfSsGioR1VSpTZMh/RXMu5sm+8=",
version = "v0.0.0-20210730172138-093b54882439",
)
go_repository(
name = "io_k8s_sigs_controller_runtime",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/controller-runtime",
sum = "h1:ZIZ/dtpboPSbZYY7uUz2OzrkaBTOThx2yekLtpGB+zY=",
version = "v0.9.0",
)
go_repository(
name = "io_k8s_sigs_controller_tools",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/controller-tools",
sum = "h1:PXOHvyYAjWfO0UfQvaUo33HpXNCOilV3i/Vjc7iM1/A=",
version = "v0.2.9-0.20200414181213-645d44dca7c0",
)
go_repository(
name = "io_k8s_sigs_kustomize",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/kustomize",
sum = "h1:JUufWFNlI44MdtnjUqVnvh29rR37PQFzPbLXqhyOyX0=",
version = "v2.0.3+incompatible",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff",
sum = "h1:LOs1LZWMsz1xs77Phr/pkB4LFaavH7IVq/3+WTN9XTA=",
version = "v1.0.1",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff_v2",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff/v2",
sum = "h1:I0h4buiCqDtPztO3NOiyoNMtqSIfld49D4Wj3UBXYZA=",
version = "v2.0.1",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff_v3",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff/v3",
sum = "h1:dOmIZBMfhcHS09XZkMyUgkq5trg3/jRyJYFZUiaOp8E=",
version = "v3.0.0",
)
go_repository(
name = "io_k8s_sigs_structured_merge_diff_v4",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/structured-merge-diff/v4",
sum = "h1:C4r9BgJ98vrKnnVCjwCSXcWjWe0NKcUQkmzDXZXGwH8=",
version = "v4.1.0",
)
go_repository(
name = "io_k8s_sigs_testing_frameworks",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/testing_frameworks",
sum = "h1:vK0+tvjF0BZ/RYFeZ1E6BYBwHJJXhjuZ3TdsEKH+UQM=",
version = "v0.1.2",
)
go_repository(
name = "io_k8s_sigs_yaml",
build_file_proto_mode = "disable",
importpath = "sigs.k8s.io/yaml",
sum = "h1:kr/MCeFWJWTwyaHoR9c8EjH9OumOmoF9YGiZd7lFm/Q=",
version = "v1.2.0",
)
go_repository(
name = "io_k8s_test_infra",
build_file_proto_mode = "disable",
importpath = "k8s.io/test-infra",
sum = "h1:g5GLdRIOMs9vnEM/ZWG67f1Stn8cW1dER+MNK9P7Xn8=",
version = "v0.0.0-20210903101950-5c7809e9c5e9",
)
go_repository(
name = "io_k8s_utils",
build_file_proto_mode = "disable",
importpath = "k8s.io/utils",
sum = "h1:MSqsVQ3pZvPGTqCjptfimO2WjG7A9un2zcpiHkA6M/s=",
version = "v0.0.0-20210527160623-6fdb442a123b",
)
go_repository(
name = "io_opencensus_go",
build_file_proto_mode = "disable",
importpath = "go.opencensus.io",
sum = "h1:LYy1Hy3MJdrCdMwwzxA/dRok4ejH+RwNGbuoD9fCjto=",
version = "v0.22.4",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_aws",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/aws",
sum = "h1:YsbWYxDZkC7x2OxlsDEYvvEXZ3cBI3qBgUK5BqkZvRw=",
version = "v0.0.0-20181029163544-2befc13012d0",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_ocagent",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/ocagent",
sum = "h1:Z1n6UAyr0QwM284yUuh5Zd8JlvxUGAhFZcgMJkMPrGM=",
version = "v0.6.0",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_prometheus",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/prometheus",
sum = "h1:SByaIoWwNgMdPSgl5sMqM2KDE5H/ukPWBRo314xiDvg=",
version = "v0.1.0",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_stackdriver",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/stackdriver",
sum = "h1:RX9W6FelAqTVnBi/bRXJLXr9n18v4QkQwZYIdnNS51I=",
version = "v0.13.1",
)
go_repository(
name = "io_opencensus_go_contrib_exporter_zipkin",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/exporter/zipkin",
sum = "h1:PR+1zWqY8ceXs1qDQQIlgXe+sdiwCf0n32bH4+Epk8g=",
version = "v0.1.1",
)
go_repository(
name = "io_opencensus_go_contrib_integrations_ocsql",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/integrations/ocsql",
sum = "h1:kfg5Yyy1nYUrqzyfW5XX+dzMASky8IJXhtHe0KTYNS4=",
version = "v0.1.4",
)
go_repository(
name = "io_opencensus_go_contrib_resource",
build_file_proto_mode = "disable",
importpath = "contrib.go.opencensus.io/resource",
sum = "h1:4r2CANuYhKGmYWP02+5E94rLRcS/YeD+KlxSrOsMxk0=",
version = "v0.1.1",
)
go_repository(
name = "io_rsc_binaryregexp",
build_file_proto_mode = "disable",
importpath = "rsc.io/binaryregexp",
sum = "h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE=",
version = "v0.2.0",
)
go_repository(
name = "io_rsc_letsencrypt",
build_file_proto_mode = "disable",
importpath = "rsc.io/letsencrypt",
sum = "h1:H7xDfhkaFFSYEJlKeq38RwX2jYcnTeHuDQyT+mMNMwM=",
version = "v0.0.3",
)
go_repository(
name = "io_rsc_quote_v3",
build_file_proto_mode = "disable",
importpath = "rsc.io/quote/v3",
sum = "h1:9JKUTTIUgS6kzR9mK1YuGKv6Nl+DijDNIc0ghT58FaY=",
version = "v3.1.0",
)
go_repository(
name = "io_rsc_sampler",
build_file_proto_mode = "disable",
importpath = "rsc.io/sampler",
sum = "h1:7uVkIFmeBqHfdjD+gZwtXXI+RODJ2Wc4O7MPEh/QiW4=",
version = "v1.3.0",
)
go_repository(
name = "ml_vbom_util",
build_file_proto_mode = "disable",
importpath = "vbom.ml/util",
sum = "h1:O69FD9pJA4WUZlEwYatBEEkRWKQ5cKodWpdKTrCS/iQ=",
version = "v0.0.0-20180919145318-efcd4e0f9787",
)
go_repository(
name = "org_apache_git_thrift_git",
build_file_proto_mode = "disable",
importpath = "git.apache.org/thrift.git",
sum = "h1:CMxsZlAmxKs+VAZMlDDL0wXciMblJcutQbEe3A9CYUM=",
version = "v0.12.0",
)
go_repository(
name = "org_bazil_fuse",
build_file_proto_mode = "disable",
importpath = "bazil.org/fuse",
sum = "h1:FNCRpXiquG1aoyqcIWVFmpTSKVcx2bQD38uZZeGtdlw=",
version = "v0.0.0-20180421153158-65cc252bf669",
)
go_repository(
name = "org_go4",
build_file_proto_mode = "disable",
importpath = "go4.org",
sum = "h1:iqAGo78tVOJXELHQFRjR6TMwItrvXH4hrGJ32I/NFF8=",
version = "v0.0.0-20201209231011-d4a079459e60",
)
go_repository(
name = "org_golang_google_api",
build_file_proto_mode = "disable",
importpath = "google.golang.org/api",
sum = "h1:Le77IccnTqEa8ryp9wIpX5W3zYm7Gf9LhOp9PHcwFts=",
version = "v0.32.0",
)
go_repository(
name = "org_golang_google_appengine",
build_file_proto_mode = "disable",
importpath = "google.golang.org/appengine",
sum = "h1:FZR1q0exgwxzPzp/aF+VccGrSfxfPpkBqjIIEq3ru6c=",
version = "v1.6.7",
)
go_repository(
name = "org_golang_google_cloud",
build_file_proto_mode = "disable",
importpath = "google.golang.org/cloud",
sum = "h1:Cpp2P6TPjujNoC5M2KHY6g7wfyLYfIWRZaSdIKfDasA=",
version = "v0.0.0-20151119220103-975617b05ea8",
)
go_repository(
name = "org_golang_google_genproto",
build_file_proto_mode = "disable",
importpath = "google.golang.org/genproto",
sum = "h1:pOwg4OoaRYScjmR4LlLgdtnyoHYTSAVhhqe5uPdpII8=",
version = "v0.0.0-20201110150050-8816d57aaa9a",
)
go_repository(
name = "org_golang_google_grpc",
build_file_proto_mode = "disable",
importpath = "google.golang.org/grpc",
sum = "h1:zWTV+LMdc3kaiJMSTOFz2UgSBgx8RNQoTGiZu3fR9S0=",
version = "v1.32.0",
)
go_repository(
name = "org_golang_google_protobuf",
build_file_proto_mode = "disable",
importpath = "google.golang.org/protobuf",
sum = "h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk=",
version = "v1.26.0",
)
go_repository(
name = "org_golang_x_crypto",
build_file_proto_mode = "disable",
importpath = "golang.org/x/crypto",
sum = "h1:It14KIkyBFYkHkwZ7k45minvA9aorojkyjGk9KJ5B/w=",
version = "v0.0.0-20210322153248-0c34fe9e7dc2",
)
go_repository(
name = "org_golang_x_exp",
build_file_proto_mode = "disable",
importpath = "golang.org/x/exp",
sum = "h1:QE6XYQK6naiK1EPAe1g/ILLxN5RBoH5xkJk3CqlMI/Y=",
version = "v0.0.0-20200224162631-6cc2880d07d6",
)
go_repository(
name = "org_golang_x_image",
build_file_proto_mode = "disable",
importpath = "golang.org/x/image",
sum = "h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4=",
version = "v0.0.0-20190802002840-cff245a6509b",
)
go_repository(
name = "org_golang_x_lint",
build_file_proto_mode = "disable",
importpath = "golang.org/x/lint",
replace = "golang.org/x/lint",
sum = "h1:QzoH/1pFpZguR8NrRHLcO6jKqfv2zpuSqZLgdm7ZmjI=",
version = "v0.0.0-20190409202823-959b441ac422",
)
go_repository(
name = "org_golang_x_mobile",
build_file_proto_mode = "disable",
importpath = "golang.org/x/mobile",
sum = "h1:b373EGXtj0o+ssqkOkdVphTCZ/fVg2LwhctJn2QQbqA=",
version = "v0.0.0-20190806162312-597adff16ade",
)
go_repository(
name = "org_golang_x_mod",
build_file_proto_mode = "disable",
importpath = "golang.org/x/mod",
sum = "h1:8pl+sMODzuvGJkmj2W4kZihvVb5mKm8pB/X44PIQHv8=",
version = "v0.4.0",
)
go_repository(
name = "org_golang_x_net",
build_file_proto_mode = "disable",
importpath = "golang.org/x/net",
sum = "h1:DzZ89McO9/gWPsQXS/FVKAlG02ZjaQ6AlZRBimEYOd0=",
version = "v0.0.0-20210428140749-89ef3d95e781",
)
go_repository(
name = "org_golang_x_oauth2",
build_file_proto_mode = "disable",
importpath = "golang.org/x/oauth2",
sum = "h1:ld7aEMNHoBnnDAX15v1T6z31v8HwR2A9FYOuAhWqkwc=",
version = "v0.0.0-20200902213428-5d25da1a8d43",
)
go_repository(
name = "org_golang_x_sync",
build_file_proto_mode = "disable",
importpath = "golang.org/x/sync",
sum = "h1:DcqTD9SDLc+1P/r1EmRBwnVsrOwW+kk2vWf9n+1sGhs=",
version = "v0.0.0-20201207232520-09787c993a3a",
)
go_repository(
name = "org_golang_x_sys",
build_file_proto_mode = "disable",
importpath = "golang.org/x/sys",
sum = "h1:JWgyZ1qgdTaF3N3oxC+MdTV7qvEEgHo3otj+HB5CM7Q=",
version = "v0.0.0-20210603081109-ebe580a85c40",
)
go_repository(
name = "org_golang_x_term",
build_file_proto_mode = "disable",
importpath = "golang.org/x/term",
sum = "h1:SZxvLBoTP5yHO3Frd4z4vrF+DBX9vMVanchswa69toE=",
version = "v0.0.0-20210220032956-6a3ed077a48d",
)
go_repository(
name = "org_golang_x_text",
build_file_proto_mode = "disable",
importpath = "golang.org/x/text",
sum = "h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=",
version = "v0.3.6",
)
go_repository(
name = "org_golang_x_time",
build_file_proto_mode = "disable",
importpath = "golang.org/x/time",
sum = "h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE=",
version = "v0.0.0-20210220033141-f8bda1e9f3ba",
)
go_repository(
name = "org_golang_x_tools",
build_file_proto_mode = "disable",
importpath = "golang.org/x/tools",
sum = "h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY=",
version = "v0.1.0",
)
go_repository(
name = "org_golang_x_xerrors",
build_file_proto_mode = "disable",
importpath = "golang.org/x/xerrors",
sum = "h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=",
version = "v0.0.0-20200804184101-5ec99f83aff1",
)
go_repository(
name = "org_gonum_v1_gonum",
build_file_proto_mode = "disable",
importpath = "gonum.org/v1/gonum",
sum = "h1:OB/uP/Puiu5vS5QMRPrXCDWUPb+kt8f1KW8oQzFejQw=",
version = "v0.0.0-20190331200053-3d26580ed485",
)
go_repository(
name = "org_gonum_v1_netlib",
build_file_proto_mode = "disable",
importpath = "gonum.org/v1/netlib",
sum = "h1:jRyg0XfpwWlhEV8mDfdNGBeSJM2fuyh9Yjrnd8kF2Ts=",
version = "v0.0.0-20190331212654-76723241ea4e",
)
go_repository(
name = "org_modernc_cc",
build_file_proto_mode = "disable",
importpath = "modernc.org/cc",
sum = "h1:nPibNuDEx6tvYrUAtvDTTw98rx5juGsa5zuDnKwEEQQ=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_golex",
build_file_proto_mode = "disable",
importpath = "modernc.org/golex",
sum = "h1:wWpDlbK8ejRfSyi0frMyhilD3JBvtcx2AdGDnU+JtsE=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_mathutil",
build_file_proto_mode = "disable",
importpath = "modernc.org/mathutil",
sum = "h1:93vKjrJopTPrtTNpZ8XIovER7iCIH1QU7wNbOQXC60I=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_strutil",
build_file_proto_mode = "disable",
importpath = "modernc.org/strutil",
sum = "h1:XVFtQwFVwc02Wk+0L/Z/zDDXO81r5Lhe6iMKmGX3KhE=",
version = "v1.0.0",
)
go_repository(
name = "org_modernc_xc",
build_file_proto_mode = "disable",
importpath = "modernc.org/xc",
sum = "h1:7ccXrupWZIS3twbUGrtKmHS2DXY6xegFua+6O3xgAFU=",
version = "v1.0.0",
)
go_repository(
name = "org_mongodb_go_mongo_driver",
build_file_proto_mode = "disable",
importpath = "go.mongodb.org/mongo-driver",
sum = "h1:jxcFYjlkl8xaERsgLo+RNquI0epW6zuy/ZRQs6jnrFA=",
version = "v1.1.2",
)
go_repository(
name = "org_uber_go_atomic",
build_file_proto_mode = "disable",
importpath = "go.uber.org/atomic",
sum = "h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=",
version = "v1.7.0",
)
go_repository(
name = "org_uber_go_goleak",
build_file_proto_mode = "disable",
importpath = "go.uber.org/goleak",
sum = "h1:z+mqJhf6ss6BSfSM671tgKyZBFPTTJM+HLxnhPC3wu0=",
version = "v1.1.10",
)
go_repository(
name = "org_uber_go_multierr",
build_file_proto_mode = "disable",
importpath = "go.uber.org/multierr",
sum = "h1:y6IPFStTAIT5Ytl7/XYmHvzXQ7S3g/IeZW9hyZ5thw4=",
version = "v1.6.0",
)
go_repository(
name = "org_uber_go_tools",
build_file_proto_mode = "disable",
importpath = "go.uber.org/tools",
sum = "h1:0mgffUl7nfd+FpvXMVz4IDEaUSmT1ysygQC7qYo7sG4=",
version = "v0.0.0-20190618225709-2cfd321de3ee",
)
go_repository(
name = "org_uber_go_zap",
build_file_proto_mode = "disable",
importpath = "go.uber.org/zap",
sum = "h1:MTjgFu6ZLKvY6Pvaqk97GlxNBuMpV4Hy/3P6tRGlI2U=",
version = "v1.17.0",
)
go_repository(
name = "sh_helm_helm_v3",
build_file_proto_mode = "disable",
importpath = "helm.sh/helm/v3",
sum = "h1:aykwPMVyQyncZ8iLNVMXgJ1l3c6W0+LSOPmqp8JdCjs=",
version = "v3.1.1",
)
go_repository(
name = "tools_gotest",
build_file_proto_mode = "disable",
importpath = "gotest.tools",
sum = "h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo=",
version = "v2.2.0+incompatible",
)
go_repository(
name = "tools_gotest_v3",
build_file_proto_mode = "disable",
importpath = "gotest.tools/v3",
sum = "h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0=",
version = "v3.0.3",
)
go_repository(
name = "xyz_gomodules_jsonpatch_v2",
build_file_proto_mode = "disable",
importpath = "gomodules.xyz/jsonpatch/v2",
sum = "h1:4pT439QV83L+G9FkcCriY6EkpcK6r6bK+A5FBUMI7qY=",
version = "v2.2.0",
)
| true | true |
f731a62bb6c4d06bd43c8c0f54f539302a734f73 | 539 | py | Python | regexlib/python_re2_test_file/regexlib_7018.py | yetingli/ReDoS-Benchmarks | f5b5094d835649e957bf3fec6b8bd4f6efdb35fc | [
"MIT"
] | 1 | 2022-01-24T14:43:23.000Z | 2022-01-24T14:43:23.000Z | regexlib/python_re2_test_file/regexlib_7018.py | yetingli/ReDoS-Benchmarks | f5b5094d835649e957bf3fec6b8bd4f6efdb35fc | [
"MIT"
] | null | null | null | regexlib/python_re2_test_file/regexlib_7018.py | yetingli/ReDoS-Benchmarks | f5b5094d835649e957bf3fec6b8bd4f6efdb35fc | [
"MIT"
] | null | null | null | # 7018
# ^([a-zA-Z](?:(?:(?:\w[\.\_]?)*)\w)+)([a-zA-Z0-9])$
# EXPONENT
# nums:5
# EXPONENT AttackString:"a"+"_"*32+"!1 __EOA(iii)"
import re2 as re
from time import perf_counter
regex = """^([a-zA-Z](?:(?:(?:\w[\.\_]?)*)\w)+)([a-zA-Z0-9])$"""
REGEX = re.compile(regex)
for i in range(0, 150000):
ATTACK = "a" + "_" * i * 1 + "!1 __EOA(iii)"
LEN = len(ATTACK)
BEGIN = perf_counter()
m = REGEX.search(ATTACK)
# m = REGEX.match(ATTACK)
DURATION = perf_counter() - BEGIN
print(f"{i *1}: took {DURATION} seconds!") | 28.368421 | 64 | 0.541744 |
import re2 as re
from time import perf_counter
regex = """^([a-zA-Z](?:(?:(?:\w[\.\_]?)*)\w)+)([a-zA-Z0-9])$"""
REGEX = re.compile(regex)
for i in range(0, 150000):
ATTACK = "a" + "_" * i * 1 + "!1 __EOA(iii)"
LEN = len(ATTACK)
BEGIN = perf_counter()
m = REGEX.search(ATTACK)
DURATION = perf_counter() - BEGIN
print(f"{i *1}: took {DURATION} seconds!") | true | true |
f731a7aa0c3b474639722781f113b0d34999a1c2 | 1,011 | py | Python | nova/api/openstack/compute/legacy_v2/contrib/used_limits_for_admin.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | 7 | 2015-09-22T11:27:16.000Z | 2015-11-02T12:33:46.000Z | nova/api/openstack/compute/legacy_v2/contrib/used_limits_for_admin.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | 9 | 2015-05-20T11:20:17.000Z | 2017-07-27T08:21:33.000Z | nova/api/openstack/compute/legacy_v2/contrib/used_limits_for_admin.py | ebalduf/nova-backports | 6bf97ec73467de522d34ab7a17ca0e0874baa7f9 | [
"Apache-2.0"
] | 13 | 2015-05-05T09:34:04.000Z | 2017-11-08T02:03:46.000Z | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
class Used_limits_for_admin(extensions.ExtensionDescriptor):
"""Provide data to admin on limited resources used by other tenants."""
name = "UsedLimitsForAdmin"
alias = "os-used-limits-for-admin"
namespace = ("http://docs.openstack.org/compute/ext/used_limits_for_admin"
"/api/v1.1")
updated = "2013-05-02T00:00:00Z"
| 38.884615 | 78 | 0.721068 |
from nova.api.openstack import extensions
class Used_limits_for_admin(extensions.ExtensionDescriptor):
name = "UsedLimitsForAdmin"
alias = "os-used-limits-for-admin"
namespace = ("http://docs.openstack.org/compute/ext/used_limits_for_admin"
"/api/v1.1")
updated = "2013-05-02T00:00:00Z"
| true | true |
f731a834130281feea54b8acb4fe90404145a770 | 1,064 | py | Python | checkov/terraform/checks/resource/aws/PasswordPolicyExpiration.py | gustavotabares/checkov | 79250dba0fc5227ef3027b3c1300739f063b0d05 | [
"Apache-2.0"
] | null | null | null | checkov/terraform/checks/resource/aws/PasswordPolicyExpiration.py | gustavotabares/checkov | 79250dba0fc5227ef3027b3c1300739f063b0d05 | [
"Apache-2.0"
] | null | null | null | checkov/terraform/checks/resource/aws/PasswordPolicyExpiration.py | gustavotabares/checkov | 79250dba0fc5227ef3027b3c1300739f063b0d05 | [
"Apache-2.0"
] | null | null | null | from checkov.common.models.enums import CheckResult, CheckCategories
from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
class PasswordPolicyExpiration(BaseResourceCheck):
def __init__(self):
name = "Ensure IAM password policy expires passwords within 90 days or less"
id = "CKV_AWS_9"
supported_resources = ['aws_iam_account_password_policy']
categories = [CheckCategories.IAM]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def scan_resource_conf(self, conf):
"""
validates iam password policy
https://www.terraform.io/docs/providers/aws/r/iam_account_password_policy.html
:param conf: aws_iam_account_password_policy configuration
:return: <CheckResult>
"""
key = 'max_password_age'
if key in conf.keys():
if conf[key][0] >= 90:
return CheckResult.PASSED
return CheckResult.FAILED
check = PasswordPolicyExpiration()
| 38 | 106 | 0.698308 | from checkov.common.models.enums import CheckResult, CheckCategories
from checkov.terraform.checks.resource.base_resource_check import BaseResourceCheck
class PasswordPolicyExpiration(BaseResourceCheck):
def __init__(self):
name = "Ensure IAM password policy expires passwords within 90 days or less"
id = "CKV_AWS_9"
supported_resources = ['aws_iam_account_password_policy']
categories = [CheckCategories.IAM]
super().__init__(name=name, id=id, categories=categories, supported_resources=supported_resources)
def scan_resource_conf(self, conf):
key = 'max_password_age'
if key in conf.keys():
if conf[key][0] >= 90:
return CheckResult.PASSED
return CheckResult.FAILED
check = PasswordPolicyExpiration()
| true | true |
f731aa3598934a92decae817cbccea150452d087 | 1,898 | py | Python | python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_execution_plan.py | JPeer264/dagster-fork | 32cc87a36134be7c442fa85d6867eb1d3301aea0 | [
"Apache-2.0"
] | 1 | 2020-09-19T16:35:59.000Z | 2020-09-19T16:35:59.000Z | python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_execution_plan.py | JPeer264/dagster-fork | 32cc87a36134be7c442fa85d6867eb1d3301aea0 | [
"Apache-2.0"
] | null | null | null | python_modules/dagster/dagster_tests/api_tests/test_api_snapshot_execution_plan.py | JPeer264/dagster-fork | 32cc87a36134be7c442fa85d6867eb1d3301aea0 | [
"Apache-2.0"
] | null | null | null | from dagster.api.snapshot_execution_plan import sync_get_external_execution_plan
from dagster.core.snap.execution_plan_snapshot import ExecutionPlanSnapshot
from .utils import get_foo_pipeline_handle
def test_execution_plan_snapshot_api():
pipeline_handle = get_foo_pipeline_handle()
execution_plan_snapshot = sync_get_external_execution_plan(
pipeline_handle.get_origin(), environment_dict={}, mode="default", snapshot_id="12345",
)
assert isinstance(execution_plan_snapshot, ExecutionPlanSnapshot)
assert execution_plan_snapshot.step_keys_to_execute == [
'do_something.compute',
'do_input.compute',
]
assert len(execution_plan_snapshot.steps) == 2
def test_execution_plan_with_step_keys_to_execute_snapshot_api():
pipeline_handle = get_foo_pipeline_handle()
execution_plan_snapshot = sync_get_external_execution_plan(
pipeline_handle.get_origin(),
environment_dict={},
mode="default",
snapshot_id="12345",
step_keys_to_execute=['do_something.compute'],
)
assert isinstance(execution_plan_snapshot, ExecutionPlanSnapshot)
assert execution_plan_snapshot.step_keys_to_execute == [
'do_something.compute',
]
assert len(execution_plan_snapshot.steps) == 2
def test_execution_plan_with_subset_snapshot_api():
pipeline_handle = get_foo_pipeline_handle()
execution_plan_snapshot = sync_get_external_execution_plan(
pipeline_handle.get_origin(),
environment_dict={'solids': {'do_input': {'inputs': {'x': {'value': "test"}}}}},
mode="default",
snapshot_id="12345",
solid_selection=["do_input"],
)
assert isinstance(execution_plan_snapshot, ExecutionPlanSnapshot)
assert execution_plan_snapshot.step_keys_to_execute == [
'do_input.compute',
]
assert len(execution_plan_snapshot.steps) == 1
| 33.892857 | 95 | 0.74236 | from dagster.api.snapshot_execution_plan import sync_get_external_execution_plan
from dagster.core.snap.execution_plan_snapshot import ExecutionPlanSnapshot
from .utils import get_foo_pipeline_handle
def test_execution_plan_snapshot_api():
pipeline_handle = get_foo_pipeline_handle()
execution_plan_snapshot = sync_get_external_execution_plan(
pipeline_handle.get_origin(), environment_dict={}, mode="default", snapshot_id="12345",
)
assert isinstance(execution_plan_snapshot, ExecutionPlanSnapshot)
assert execution_plan_snapshot.step_keys_to_execute == [
'do_something.compute',
'do_input.compute',
]
assert len(execution_plan_snapshot.steps) == 2
def test_execution_plan_with_step_keys_to_execute_snapshot_api():
pipeline_handle = get_foo_pipeline_handle()
execution_plan_snapshot = sync_get_external_execution_plan(
pipeline_handle.get_origin(),
environment_dict={},
mode="default",
snapshot_id="12345",
step_keys_to_execute=['do_something.compute'],
)
assert isinstance(execution_plan_snapshot, ExecutionPlanSnapshot)
assert execution_plan_snapshot.step_keys_to_execute == [
'do_something.compute',
]
assert len(execution_plan_snapshot.steps) == 2
def test_execution_plan_with_subset_snapshot_api():
pipeline_handle = get_foo_pipeline_handle()
execution_plan_snapshot = sync_get_external_execution_plan(
pipeline_handle.get_origin(),
environment_dict={'solids': {'do_input': {'inputs': {'x': {'value': "test"}}}}},
mode="default",
snapshot_id="12345",
solid_selection=["do_input"],
)
assert isinstance(execution_plan_snapshot, ExecutionPlanSnapshot)
assert execution_plan_snapshot.step_keys_to_execute == [
'do_input.compute',
]
assert len(execution_plan_snapshot.steps) == 1
| true | true |
f731aa9055ffb5d097e1f239a5baee16564f7fec | 1,564 | py | Python | improved-ver1/6.Real_Estate_Own_Data_Prediction_improved1.py | ARAN1218/RealEstateRentPrediction_AI | da537f3204fa1bc80a499a03b2fd015926ccc755 | [
"MIT"
] | 1 | 2021-06-26T04:44:14.000Z | 2021-06-26T04:44:14.000Z | improved-ver1/6.Real_Estate_Own_Data_Prediction_improved1.py | ARAN1218/RealEstateRentPrediction_AI | da537f3204fa1bc80a499a03b2fd015926ccc755 | [
"MIT"
] | 1 | 2021-08-06T15:58:27.000Z | 2021-11-27T14:59:00.000Z | improved-ver1/6.Real_Estate_Own_Data_Prediction_improved1.py | ARAN1218/RealEstateRentPrediction_AI | da537f3204fa1bc80a499a03b2fd015926ccc755 | [
"MIT"
] | null | null | null | #必要なライブラリをインポート
import numpy as np
import pandas as pd
import xgboost as xgb
from sklearn.preprocessing import LabelEncoder
#numpyのリスト表示制限を解除しておく
np.set_printoptions(threshold=np.inf)
#既に学習データとREDP関数を用いてAIが作成されているものとする
#model = RERL(df_l)
#住所と路線と間取りはラベルエンコーディングの都合により学習データ/テストデータにあったものしか使えない為、予め確保しておいた使える要素を表示させる
#上記の3つの要素はこの中から選んでもらう
#このプログラムは別のセルで起動させると見やすい
print(adressC,'\n','\n',stationC,'\n','\n',layoutC)
#(学習した範囲内の)任意のデータを入力して賃料を予測できる関数
#Real_Estate_Own_Data_Prediction
def REODP(address,station,access,mc_fees,k_fees,s_fees,area,layout,age):
#入力したデータを辞書d_tryに格納する
d_try = {
'住所':address,
'路線':station,
'交通':access,
'管理共益費':mc_fees,
'礼金':k_fees,
'敷金':s_fees,
'専有面積':area,
'間取り':layout,
'築年数':age
}
#辞書d_tryをデータフレームdf_tryに変換する
df_try = pd.DataFrame(d_try,index=['own'])
#入力情報の確認用
display(df_try)
#ラベルエンコーディングを行い、文字列を数値化する
df_try.住所 = LE1.transform(df_try.住所)
df_try.路線 = LE2.transform(df_try.路線)
df_try.間取り = LE3.transform(df_try.間取り)
#データ型をfloat64で統一する
df_try = df_try.astype('float64')
#予測結果(少数第二位まで)を表示する
df_try = xgb.DMatrix(df_try)
return print('予想賃料:',round(float(model.predict(df_try)),2),'万円')
#REODP(住所, 路線, 交通, 管理共益費, 礼金, 敷金, 専有面積, 間取り, 築年数)
#データ型に気をつける
#住所と路線と間取りはラベルエンコーディングの都合により学習データ/テストデータにあったものしか使えない為、上で表示させた要素から選ぶこと
REODP(address=''
,station=''
,access=0
,mc_fees=0
,k_fees=0
,s_fees=0
,area=0
,layout=''
,age=0
)
| 22.342857 | 74 | 0.671995 |
import numpy as np
import pandas as pd
import xgboost as xgb
from sklearn.preprocessing import LabelEncoder
np.set_printoptions(threshold=np.inf)
print(adressC,'\n','\n',stationC,'\n','\n',layoutC)
def REODP(address,station,access,mc_fees,k_fees,s_fees,area,layout,age):
d_try = {
'住所':address,
'路線':station,
'交通':access,
'管理共益費':mc_fees,
'礼金':k_fees,
'敷金':s_fees,
'専有面積':area,
'間取り':layout,
'築年数':age
}
df_try = pd.DataFrame(d_try,index=['own'])
display(df_try)
df_try.住所 = LE1.transform(df_try.住所)
df_try.路線 = LE2.transform(df_try.路線)
df_try.間取り = LE3.transform(df_try.間取り)
df_try = df_try.astype('float64')
df_try = xgb.DMatrix(df_try)
return print('予想賃料:',round(float(model.predict(df_try)),2),'万円')
REODP(address=''
,station=''
,access=0
,mc_fees=0
,k_fees=0
,s_fees=0
,area=0
,layout=''
,age=0
)
| true | true |
f731aaf8a13f76df61485cd65cdaf166bf910a5e | 7,617 | py | Python | modules/filter_bed.py | hillerlab/TOGA | c4bce48c760d51739a8a63132316f49de77e0c71 | [
"MIT"
] | 32 | 2020-07-20T03:18:28.000Z | 2022-03-28T09:04:48.000Z | modules/filter_bed.py | hillerlab/TOGA | c4bce48c760d51739a8a63132316f49de77e0c71 | [
"MIT"
] | 28 | 2020-07-20T14:03:55.000Z | 2022-02-28T08:08:39.000Z | modules/filter_bed.py | hillerlab/TOGA | c4bce48c760d51739a8a63132316f49de77e0c71 | [
"MIT"
] | 5 | 2020-07-23T13:03:32.000Z | 2021-09-16T15:39:23.000Z | #!/usr/bin/env python3
"""Filter bed-12 file.
Remove:
- incomplete annotations
- genes without CDS
"""
import argparse
import sys
import re
from collections import Counter
try:
from modules.common import die
from modules.common import eprint
except ImportError:
from common import die
from commom import eprint
__author__ = "Bogdan Kirilenko, 2020."
__version__ = "1.0"
__email__ = "bogdan.kirilenko@senckenberg.de"
__credits__ = ["Michael Hiller", "Virag Sharma", "David Jebb"]
ALLOWED_CHARSET = "a-zA-Z0-9._-"
ALLOWED_CHARSET_RE = rf"[^{ALLOWED_CHARSET}]"
def parse_args():
"""Read args, check."""
app = argparse.ArgumentParser()
app.add_argument("input", help="Bed-12 formatted annotation track.")
app.add_argument(
"output", default="stdout", help="Output destination, stdout as default"
)
app.add_argument(
"--out_of_frame",
action="store_true",
dest="out_of_frame",
help="Do not skip out-of-frame genes.",
)
# print help if there are no args
if len(sys.argv) < 2:
app.print_help()
sys.exit(0)
args = app.parse_args()
return args
def prepare_bed_file(bed_file, output, ouf=False, save_rejected=None, only_chrom=None):
"""Filter the bed file given and save the updated version."""
new_lines = [] # keep updated lines
rejected = [] # keep IDs of skipped transcripts + the reason why
names = Counter() # we need to make sure that all names are unique
allowed_re = re.compile(ALLOWED_CHARSET_RE).search
broken_names = []
f = open(bed_file, "r")
for num, line in enumerate(f, 1):
# parse bed file according to specification
line_data = line.rstrip().split("\t")
if len(line_data) != 12:
f.close() # this is for sure an error
# it is possible only if something except a bed12 was provided
die(
"Error! Bed 12 file is required! Got a file with {len(line_data)} fields instead"
)
chrom = line_data[0]
if only_chrom and chrom != only_chrom:
# TOGA allows to perform the analysis on a specific chromosome only
# is so, we can skip all transcripts that located on other chromosomes
continue
chromStart = int(line_data[1])
chromEnd = int(line_data[2])
name = line_data[3] # gene_name usually
corr_name = not bool(allowed_re(name))
if corr_name is False:
broken_names.append(name)
# TODO: check weird characters in the transcript name
# bed_score = int(line_data[4]) # never used
# strand = line_data[5] # otherwise:
# strand = True if line_data[5] == '+' else False
thickStart = int(line_data[6])
thickEnd = int(line_data[7])
# itemRgb = line_data[8] # never used
blockCount = int(line_data[9])
blockSizes = [int(x) for x in line_data[10].split(",") if x != ""]
blockStarts = [int(x) for x in line_data[11].split(",") if x != ""]
blockEnds = [blockStarts[i] + blockSizes[i] for i in range(blockCount)]
blockAbsStarts = [blockStarts[i] + chromStart for i in range(blockCount)]
blockAbsEnds = [blockEnds[i] + chromStart for i in range(blockCount)]
blockNewStarts, blockNewEnds = [], []
names[name] += 1
if thickStart > thickEnd:
f.close() # according to bed12 specification this should never happen
sys.stderr.write(f"Problem occurred at line {num}, gene {name}\n")
die("Error! Bed file is corrupted, thickEnd MUST be >= thickStart")
elif thickStart == thickEnd:
# this means that this is a non-coding transcript
# TOGA cannot process them: we can skip it
rejected.append((name, "No CDS"))
continue
if thickStart < chromStart or thickEnd > chromEnd:
# a very strange (but still possible) case
f.close() # for sure an error with input data
sys.stderr.write(f"Problem occurred at line {num}, gene {name}\n")
die("Error! Bed file is corrupted, thickRange is outside chromRange!")
# now select CDS only
# we keep UTRs in the filtered file
# however, we need CDS to check whether it's correct (% 3 == 0)
for block_num in range(blockCount):
blockStart = blockAbsStarts[block_num]
blockEnd = blockAbsEnds[block_num]
# skip the block if it is entirely UTR
if blockEnd <= thickStart:
continue
elif blockStart >= thickEnd:
continue
# if we are here: this is not an entirely UTR exon
# it might intersect the CDS border or to be in the CDS entirely
# remove UTRs: block start must be >= CDS_start (thickStart)
# block end must be <= CDS_end (thickEnd)
blockNewStart = blockStart if blockStart >= thickStart else thickStart
blockNewEnd = blockEnd if blockEnd <= thickEnd else thickEnd
blockNewStarts.append(blockNewStart - thickStart)
blockNewEnds.append(blockNewEnd - thickStart)
if len(blockNewStarts) == 0:
# even it thickStart != thickEnd this transcript still can be non-coding
# but if there are no blocks in the CDS -> we can catch this
rejected.append((name, "No CDS"))
continue
block_new_count = len(blockNewStarts)
blockNewSizes = [
blockNewEnds[i] - blockNewStarts[i] for i in range(block_new_count)
]
if sum(blockNewSizes) % 3 != 0 and not ouf:
# this is an out-of-frame (or incomplete transcript)
# ideally CDS length should be divisible by 3
# not ouf means that we like to keep such transcripts for some reason
rejected.append((name, "Out-of-frame gene"))
continue
# we keep this transcript: add in to the list
new_line = "\t".join([str(x) for x in line_data])
new_lines.append(new_line)
f.close()
# if not allowed characters in transcript names: list them
if len(broken_names) > 0:
eprint("Error! Some transcript names contain not allowed characters")
for t in broken_names:
eprint(t)
die(f"Allowed characters are: {ALLOWED_CHARSET}")
# if there are non-unique transcript IDs: die
# I kill it there, not earlier to show them altogether
if any(v > 1 for v in names.values()):
eprint("Error! There are non-uniq transcript IDs:")
duplicates = [k for k, v in names.items() if v > 1]
for d in duplicates:
eprint(d)
die("Abort")
if len(new_lines) == 0:
# no transcripts pass the filter: probably an input data mistake
sys.exit(
f"Error! No reference annotation tracks left after filtering procedure! Abort"
)
# write transcripts that passed the filter to the output file
f = open(output, "w") if output != "stdout" else sys.stdout
f.write("\n".join(new_lines) + "\n")
f.close() if output != "stdout" else None
if save_rejected:
# save transcripts that didn't pass the filter + reason why
f = open(save_rejected, "w")
for elem in rejected:
f.write(f"{elem[0]}\t{elem[1]}\n")
f.close()
def main():
"""Entry point."""
args = parse_args()
prepare_bed_file(args.input, args.output, args.out_of_frame)
sys.exit(0)
if __name__ == "__main__":
main()
| 37.895522 | 97 | 0.616516 |
import argparse
import sys
import re
from collections import Counter
try:
from modules.common import die
from modules.common import eprint
except ImportError:
from common import die
from commom import eprint
__author__ = "Bogdan Kirilenko, 2020."
__version__ = "1.0"
__email__ = "bogdan.kirilenko@senckenberg.de"
__credits__ = ["Michael Hiller", "Virag Sharma", "David Jebb"]
ALLOWED_CHARSET = "a-zA-Z0-9._-"
ALLOWED_CHARSET_RE = rf"[^{ALLOWED_CHARSET}]"
def parse_args():
app = argparse.ArgumentParser()
app.add_argument("input", help="Bed-12 formatted annotation track.")
app.add_argument(
"output", default="stdout", help="Output destination, stdout as default"
)
app.add_argument(
"--out_of_frame",
action="store_true",
dest="out_of_frame",
help="Do not skip out-of-frame genes.",
)
if len(sys.argv) < 2:
app.print_help()
sys.exit(0)
args = app.parse_args()
return args
def prepare_bed_file(bed_file, output, ouf=False, save_rejected=None, only_chrom=None):
new_lines = []
rejected = []
names = Counter()
allowed_re = re.compile(ALLOWED_CHARSET_RE).search
broken_names = []
f = open(bed_file, "r")
for num, line in enumerate(f, 1):
line_data = line.rstrip().split("\t")
if len(line_data) != 12:
f.close()
die(
"Error! Bed 12 file is required! Got a file with {len(line_data)} fields instead"
)
chrom = line_data[0]
if only_chrom and chrom != only_chrom:
continue
chromStart = int(line_data[1])
chromEnd = int(line_data[2])
name = line_data[3]
corr_name = not bool(allowed_re(name))
if corr_name is False:
broken_names.append(name)
thickStart = int(line_data[6])
thickEnd = int(line_data[7])
ckCount = int(line_data[9])
blockSizes = [int(x) for x in line_data[10].split(",") if x != ""]
blockStarts = [int(x) for x in line_data[11].split(",") if x != ""]
blockEnds = [blockStarts[i] + blockSizes[i] for i in range(blockCount)]
blockAbsStarts = [blockStarts[i] + chromStart for i in range(blockCount)]
blockAbsEnds = [blockEnds[i] + chromStart for i in range(blockCount)]
blockNewStarts, blockNewEnds = [], []
names[name] += 1
if thickStart > thickEnd:
f.close()
sys.stderr.write(f"Problem occurred at line {num}, gene {name}\n")
die("Error! Bed file is corrupted, thickEnd MUST be >= thickStart")
elif thickStart == thickEnd:
rejected.append((name, "No CDS"))
continue
if thickStart < chromStart or thickEnd > chromEnd:
f.close()
sys.stderr.write(f"Problem occurred at line {num}, gene {name}\n")
die("Error! Bed file is corrupted, thickRange is outside chromRange!")
for block_num in range(blockCount):
blockStart = blockAbsStarts[block_num]
blockEnd = blockAbsEnds[block_num]
# skip the block if it is entirely UTR
if blockEnd <= thickStart:
continue
elif blockStart >= thickEnd:
continue
# if we are here: this is not an entirely UTR exon
# it might intersect the CDS border or to be in the CDS entirely
# remove UTRs: block start must be >= CDS_start (thickStart)
# block end must be <= CDS_end (thickEnd)
blockNewStart = blockStart if blockStart >= thickStart else thickStart
blockNewEnd = blockEnd if blockEnd <= thickEnd else thickEnd
blockNewStarts.append(blockNewStart - thickStart)
blockNewEnds.append(blockNewEnd - thickStart)
if len(blockNewStarts) == 0:
# even it thickStart != thickEnd this transcript still can be non-coding
# but if there are no blocks in the CDS -> we can catch this
rejected.append((name, "No CDS"))
continue
block_new_count = len(blockNewStarts)
blockNewSizes = [
blockNewEnds[i] - blockNewStarts[i] for i in range(block_new_count)
]
if sum(blockNewSizes) % 3 != 0 and not ouf:
# this is an out-of-frame (or incomplete transcript)
# ideally CDS length should be divisible by 3
# not ouf means that we like to keep such transcripts for some reason
rejected.append((name, "Out-of-frame gene"))
continue
# we keep this transcript: add in to the list
new_line = "\t".join([str(x) for x in line_data])
new_lines.append(new_line)
f.close()
# if not allowed characters in transcript names: list them
if len(broken_names) > 0:
eprint("Error! Some transcript names contain not allowed characters")
for t in broken_names:
eprint(t)
die(f"Allowed characters are: {ALLOWED_CHARSET}")
# if there are non-unique transcript IDs: die
# I kill it there, not earlier to show them altogether
if any(v > 1 for v in names.values()):
eprint("Error! There are non-uniq transcript IDs:")
duplicates = [k for k, v in names.items() if v > 1]
for d in duplicates:
eprint(d)
die("Abort")
if len(new_lines) == 0:
# no transcripts pass the filter: probably an input data mistake
sys.exit(
f"Error! No reference annotation tracks left after filtering procedure! Abort"
)
# write transcripts that passed the filter to the output file
f = open(output, "w") if output != "stdout" else sys.stdout
f.write("\n".join(new_lines) + "\n")
f.close() if output != "stdout" else None
if save_rejected:
# save transcripts that didn't pass the filter + reason why
f = open(save_rejected, "w")
for elem in rejected:
f.write(f"{elem[0]}\t{elem[1]}\n")
f.close()
def main():
    """Script entry point: filter the reference bed annotation file."""
    cli_args = parse_args()
    prepare_bed_file(cli_args.input, cli_args.output, cli_args.out_of_frame)
    sys.exit(0)


if __name__ == "__main__":
    main()
| true | true |
f731abd403e3a3f072d4bde30a655735bb7f1420 | 8,680 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_09_01/operations/_default_security_rules_operations.py | LianwMS/azure-sdk-for-python | 612d7bca9de86ee1bd1fa59291d7bf897ba9213f | [
"MIT"
] | 2 | 2019-05-17T21:24:53.000Z | 2020-02-12T11:13:42.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_09_01/operations/_default_security_rules_operations.py | LianwMS/azure-sdk-for-python | 612d7bca9de86ee1bd1fa59291d7bf897ba9213f | [
"MIT"
] | 15 | 2019-07-12T18:18:04.000Z | 2019-07-25T20:55:51.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_09_01/operations/_default_security_rules_operations.py | LianwMS/azure-sdk-for-python | 612d7bca9de86ee1bd1fa59291d7bf897ba9213f | [
"MIT"
] | 2 | 2020-05-21T22:51:22.000Z | 2020-05-26T20:53:01.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DefaultSecurityRulesOperations(object):
    """DefaultSecurityRulesOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2017_09_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        resource_group_name,  # type: str
        network_security_group_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["models.SecurityRuleListResult"]
        """Gets all default security rules in a network security group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SecurityRuleListResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2017_09_01.models.SecurityRuleListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SecurityRuleListResult"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-09-01"

        def prepare_request(next_link=None):
            # Build either the initial page request or a follow-up request
            # for the server-provided continuation link.
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            else:
                # next_link already embeds all query parameters.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = 'application/json'

            # Construct the request only; it is sent later by get_next via
            # the client pipeline.
            request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('SecurityRuleListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            # Return (continuation link, items) as expected by ItemPaged.
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules'}  # type: ignore

    def get(
        self,
        resource_group_name,  # type: str
        network_security_group_name,  # type: str
        default_security_rule_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "models.SecurityRule"
        """Get the specified default network security rule.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :param default_security_rule_name: The name of the default security rule.
        :type default_security_rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SecurityRule, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2017_09_01.models.SecurityRule
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["models.SecurityRule"]
        error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-09-01"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'defaultSecurityRuleName': self._serialize.url("default_security_rule_name", default_security_rule_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = 'application/json'

        # Construct and send request through the client pipeline.
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('SecurityRule', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules/{defaultSecurityRuleName}'}  # type: ignore
| 48.222222 | 236 | 0.673272 |
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models
if TYPE_CHECKING:
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class DefaultSecurityRulesOperations(object):
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name,
network_security_group_name,
**kwargs
):
cls = kwargs.pop('cls', None)
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-09-01"
def prepare_request(next_link=None):
if not next_link:
url = self.list.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
else:
url = next_link
query_parameters = {}
header_parameters = {}
header_parameters['Accept'] = 'application/json'
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('SecurityRuleListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules'}
def get(
self,
resource_group_name,
network_security_group_name,
default_security_rule_name,
**kwargs
):
cls = kwargs.pop('cls', None)
error_map = {404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-09-01"
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
'defaultSecurityRuleName': self._serialize.url("default_security_rule_name", default_security_rule_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
header_parameters = {}
header_parameters['Accept'] = 'application/json'
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('SecurityRule', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}/defaultSecurityRules/{defaultSecurityRuleName}'}
| true | true |
f731ac74f48315280995265c0525ff9726f39e0d | 1,156 | py | Python | 02_ukb/src/01_data_preparation/02_qdec_table/01_creat_qdec_table.py | Lifebrain/p025_education_brain | 507cca3514b8ddbf65df7a047dba5bae1295badf | [
"MIT"
] | null | null | null | 02_ukb/src/01_data_preparation/02_qdec_table/01_creat_qdec_table.py | Lifebrain/p025_education_brain | 507cca3514b8ddbf65df7a047dba5bae1295badf | [
"MIT"
] | null | null | null | 02_ukb/src/01_data_preparation/02_qdec_table/01_creat_qdec_table.py | Lifebrain/p025_education_brain | 507cca3514b8ddbf65df7a047dba5bae1295badf | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Purpose: Create qdec table
import pandas as pd
import numpy as np
import glob
import os.path as op
import os
# Input/output locations come from environment variables so the script can run
# unchanged in different environments (e.g. inside a cluster job).
data_csv = op.join(os.environ['TABULAR_DATA_DIR'],'data.csv')
output_file = op.join(os.environ['QDEC_DATA_DIR'],'qdec.table.dat')
# Root of the longitudinal FreeSurfer 7.1.0 recon output for the UKB dataset.
fs_dir = "/cluster/projects/p23/data/open_datasets/ukb_long/bids/derivatives/freesurfer.7.1.0/recon"
# Tabular covariates; presumably one row per (subject, MR timepoint) — the
# columns eid / mr_timepoint / education / sex / int / bl_age are used below.
df = pd.read_csv(data_csv)
def extract_fs_long_dir(id, timepoint):
    """Return the basename of the longitudinal FreeSurfer recon directory
    matching a subject ID and MR timepoint, or np.nan when none exists.

    Directories are matched by glob pattern "*<id>*<timepoint+1>*.long.*";
    the session index in the directory name is assumed 1-based while
    mr_timepoint appears 0-based — TODO confirm against the recon layout.
    NaN return values are later removed with df.dropna().

    Args:
        id:        subject identifier (UKB eid)
        timepoint: 0-based MR timepoint index

    Returns:
        str basename of the first matching directory, or np.nan
    """
    pattern = op.join(fs_dir, "*" + str(id) + "*" + str(timepoint + 1) + "*" + ".long.*")
    matches = glob.glob(pattern)
    # Was a bare `except:` around the indexing, which also swallowed
    # KeyboardInterrupt/SystemExit; test for an empty match list explicitly.
    if not matches:
        return np.nan
    return op.basename(matches[0])
# Resolve the longitudinal FreeSurfer recon directory for every row.
df['fs_long_dir'] = df[['eid','mr_timepoint']].apply(lambda x: extract_fs_long_dir(x.eid,x.mr_timepoint), axis=1)
# Drop rows with missing values — in particular rows for which no matching
# FreeSurfer directory was found (extract_fs_long_dir returned NaN).
df = df.dropna()
# A longitudinal dir is named "<fsid>.long.<fsid_base>"; split it into the
# two ID columns required by the FreeSurfer qdec table.
df['fsid'] = df['fs_long_dir'].apply(lambda x: x.split(".long.")[0])
df['fsid_base'] = df['fs_long_dir'].apply(lambda x: x.split(".long.")[1])
# Binary education coding: 1 stays 1, every other value becomes 0.
df['edu_coded'] = df['education'].apply(lambda x: 1 if x==1 else 0)
df['sex'] = df['sex'].apply(lambda x: int(x))
# Write the qdec table in the whitespace-separated format FreeSurfer expects.
df[['fsid','fsid_base','int','bl_age','sex','edu_coded']].to_csv(output_file, sep=" ", index=False)
import pandas as pd
import numpy as np
import glob
import os.path as op
import os
data_csv = op.join(os.environ['TABULAR_DATA_DIR'],'data.csv')
output_file = op.join(os.environ['QDEC_DATA_DIR'],'qdec.table.dat')
fs_dir = "/cluster/projects/p23/data/open_datasets/ukb_long/bids/derivatives/freesurfer.7.1.0/recon"
df = pd.read_csv(data_csv)
def extract_fs_long_dir(id,timepoint):
search = glob.glob(op.join(fs_dir,"*"+str(id)+"*"+str(timepoint+1)+"*"+".long.*"))
try:
return op.basename(search[0])
except:
return np.nan
df['fs_long_dir'] = df[['eid','mr_timepoint']].apply(lambda x: extract_fs_long_dir(x.eid,x.mr_timepoint), axis=1)
df = df.dropna()
df['fsid'] = df['fs_long_dir'].apply(lambda x: x.split(".long.")[0])
df['fsid_base'] = df['fs_long_dir'].apply(lambda x: x.split(".long.")[1])
df['edu_coded'] = df['education'].apply(lambda x: 1 if x==1 else 0)
df['sex'] = df['sex'].apply(lambda x: int(x))
df[['fsid','fsid_base','int','bl_age','sex','edu_coded']].to_csv(output_file, sep=" ", index=False) | true | true |
f731ad39fbfd6798ad047b1f2307e945dfddf5eb | 14,114 | py | Python | utime/callbacks/callbacks.py | amiyapatanaik/U-Time | a9ed4892da77d165a71dbfef1d069d782c909757 | [
"MIT"
] | 1 | 2022-03-15T12:31:30.000Z | 2022-03-15T12:31:30.000Z | utime/callbacks/callbacks.py | amiyapatanaik/U-Time | a9ed4892da77d165a71dbfef1d069d782c909757 | [
"MIT"
] | null | null | null | utime/callbacks/callbacks.py | amiyapatanaik/U-Time | a9ed4892da77d165a71dbfef1d069d782c909757 | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
from carbontracker.tracker import CarbonTracker
from tensorflow.keras.callbacks import Callback
from utime.utils import get_memory_usage
from mpunet.utils import highlighted
from mpunet.logging import ScreenLogger
from collections import defaultdict
from datetime import timedelta
class Validation(Callback):
    """
    Validation computation callback.

    Samples a number of validation batches from a deepsleep
    ValidationMultiSequence object
    and computes for all tasks:
        - Batch-wise validation loss
        - Batch-wise metrics as specified in model.metrics_tensors
        - Epoch-wise pr-class and average precision
        - Epoch-wise pr-class and average recall
        - Epoch-wise pr-class and average dice coefficients
    ... and adds all results to the log dict

    Note: The purpose of this callback over the default tf.keras evaluation
    mechanism is to calculate certain metrics over the entire epoch of data as
    opposed to averaged batch-wise computations.
    """
    def __init__(self,
                 val_sequence,
                 max_val_studies_per_dataset=20,
                 logger=None, verbose=True):
        """
        Args:
            val_sequence: A deepsleep ValidationMultiSequence object
            max_val_studies_per_dataset: int, max number of sleep studies to
                                         evaluate per dataset each epoch
            logger:       An instance of a MultiPlanar Logger that prints to
                          screen and/or file
            verbose:      Print progress to screen - OBS does not use Logger
        """
        super().__init__()
        self.logger = logger or ScreenLogger()
        self.sequences = val_sequence.sequences
        self.verbose = verbose
        self.max_studies = max_val_studies_per_dataset
        self.n_classes = val_sequence.n_classes
        self.IDs = val_sequence.IDs
        self.print_round = 3   # decimals for on-screen tables
        self.log_round = 4     # decimals for values stored in the logs dict

    def _compute_counts(self, pred, true, ignore_class=None):
        """Compute per-class confusion-matrix counts for one study.

        Args:
            pred:         array of per-class scores; argmax'ed over last axis
            true:         integer label array (raveled)
            ignore_class: int or None - label value excluded from all counts

        Returns:
            (tps, rel, sel): uint64 arrays of length self.n_classes with
            true-positive, relevant (true) and selected (predicted) counts
        """
        # Argmax and CM elements
        pred = pred.argmax(-1).ravel()
        true = true.ravel()
        # BUGFIX: was 'if ignore_class:', which is falsy for class 0 and thus
        # made it impossible to ignore label 0; test identity against None.
        if ignore_class is not None:
            mask = np.where(true != ignore_class)
            true = true[mask]
            pred = pred[mask]

        # Compute relevant CM elements
        # We select the number following the largest class integer when
        # y != pred, then bincount and remove the added dummy class
        tps = np.bincount(np.where(true == pred, true, self.n_classes),
                          minlength=self.n_classes+1)[:-1].astype(np.uint64)
        rel = np.bincount(true, minlength=self.n_classes).astype(np.uint64)
        sel = np.bincount(pred, minlength=self.n_classes).astype(np.uint64)
        return tps, rel, sel

    def predict(self):
        """Predict on up to self.max_studies validation studies per dataset.

        Returns:
            true_pos, relevant, selected: dicts mapping dataset ID to uint64
                count arrays of length n_classes (summed over studies)
            metrics_results: dict mapping dataset ID to {metric name: mean}
        """
        # Get tensors to run and their names
        metrics = self.model.loss_functions + self.model.metrics
        metrics_names = self.model.metrics_names
        self.model.reset_metrics()
        assert len(metrics_names) == len(metrics)

        # Prepare arrays for CM summary stats
        true_pos, relevant, selected, metrics_results = {}, {}, {}, {}
        for id_, sequence in zip(self.IDs, self.sequences):
            # Add count arrays to the result dictionaries
            true_pos[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)
            relevant[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)
            selected[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)

            # Get validation sleep study loader
            n_val = min(len(sequence.dataset_queue), self.max_studies)
            study_iterator = sequence.dataset_queue.get_study_iterator(n_val)

            # Predict and evaluate on all studies
            per_study_metrics = defaultdict(list)
            for i, sleep_study_context in enumerate(study_iterator):
                if self.verbose:
                    s = "   {}Validation subject: {}/{}".format(f"[{id_}] "
                                                                if id_ else "",
                                                                i+1,
                                                                n_val)
                    print(s, end="\r", flush=True)

                with sleep_study_context as ss:
                    x, y = sequence.get_single_study_full_seq(ss.identifier,
                                                              reshape=True)
                    pred = self.model.predict_on_batch(x)

                    # Compute counts
                    # NOTE(review): label 5 is hard-coded as the ignored
                    # class - presumably the UNKNOWN/masked sleep stage;
                    # confirm against the dataset label mapping.
                    tps, rel, sel = self._compute_counts(pred=pred.numpy(),
                                                         true=y,
                                                         ignore_class=5)
                    true_pos[id_] += tps
                    relevant[id_] += rel
                    selected[id_] += sel

                    # Run all metrics
                    for metric, name in zip(metrics, metrics_names):
                        per_study_metrics[name].append(metric(y, pred).numpy())

            # Compute mean metrics for the dataset
            metrics_results[id_] = {}
            for metric, name in zip(metrics, metrics_names):
                metrics_results[id_][name] = np.mean(per_study_metrics[name])
            self.model.reset_metrics()
        self.logger("")
        self.logger("")
        return true_pos, relevant, selected, metrics_results

    @staticmethod
    def _compute_dice(tp, rel, sel):
        """Compute per-class precision, recall and dice from count arrays.

        Classes with zero selected/relevant counts get 0 for the affected
        metric (avoids division-by-zero warnings).
        """
        # Get data masks (to avoid div. by zero warnings)
        # We set precision, recall, dice to 0 in for those particular cls.
        sel_mask = sel > 0
        rel_mask = rel > 0

        # prepare arrays
        precisions = np.zeros(shape=tp.shape, dtype=np.float32)
        recalls = np.zeros_like(precisions)
        dices = np.zeros_like(precisions)

        # Compute precisions, recalls
        precisions[sel_mask] = tp[sel_mask] / sel[sel_mask]
        recalls[rel_mask] = tp[rel_mask] / rel[rel_mask]

        # Compute dice (harmonic mean of precision and recall)
        intrs = (2 * precisions * recalls)
        union = (precisions + recalls)
        dice_mask = union > 0
        dices[dice_mask] = intrs[dice_mask] / union[dice_mask]
        return precisions, recalls, dices

    def _print_val_results(self, precisions, recalls, dices, metrics, epoch,
                           name, classes):
        """Pretty-print per-class validation results for one dataset."""
        # Log the results
        # We add them to a pd dataframe just for the pretty print output
        index = ["cls %i" % i for i in classes]
        metric_keys, metric_vals = map(list, list(zip(*metrics.items())))
        col_order = metric_keys + ["precision", "recall", "dice"]
        nan_arr = np.empty(shape=len(precisions))
        nan_arr[:] = np.nan
        value_dict = {"precision": precisions,
                      "recall": recalls,
                      "dice": dices}
        value_dict.update({key: nan_arr for key in metrics})
        val_results = pd.DataFrame(value_dict,
                                   index=index).loc[:, col_order]  # ensure order
        # Transpose the results to have metrics in rows
        val_results = val_results.T
        # Add mean and set in first row
        means = metric_vals + [precisions.mean(), recalls.mean(), dices.mean()]
        val_results["mean"] = means
        cols = list(val_results.columns)
        cols.insert(0, cols.pop(cols.index('mean')))
        val_results = val_results.loc[:, cols]

        # Print the df to screen
        self.logger(highlighted(("[%s] Validation Results for "
                                 "Epoch %i" % (name, epoch)).lstrip(" ")))
        print_string = val_results.round(self.print_round).to_string()
        self.logger(print_string.replace("NaN", "---") + "\n")

    def on_epoch_end(self, epoch, logs={}):
        """Run the epoch-wise validation and store results in 'logs'."""
        self.logger("\n")
        # Predict and get CM
        TPs, relevant, selected, metrics = self.predict()
        for id_ in self.IDs:
            tp, rel, sel = TPs[id_], relevant[id_], selected[id_]
            precisions, recalls, dices = self._compute_dice(tp=tp, sel=sel, rel=rel)
            classes = np.arange(len(dices))

            # Add to log (dataset-prefixed keys when validating >1 dataset)
            n = (id_ + "_") if len(self.IDs) > 1 else ""
            logs[f"{n}val_dice"] = dices.mean().round(self.log_round)
            logs[f"{n}val_precision"] = precisions.mean().round(self.log_round)
            logs[f"{n}val_recall"] = recalls.mean().round(self.log_round)
            for m_name, value in metrics[id_].items():
                logs[f"{n}val_{m_name}"] = value.round(self.log_round)

            if self.verbose:
                self._print_val_results(precisions=precisions,
                                        recalls=recalls,
                                        dices=dices,
                                        metrics=metrics[id_],
                                        epoch=epoch,
                                        name=id_,
                                        classes=classes)

        if len(self.IDs) > 1:
            # Print cross-dataset mean values
            if self.verbose:
                self.logger(highlighted(f"[ALL DATASETS] Means Across Classes"
                                        f" for Epoch {epoch}"))
            fetch = ("val_dice", "val_precision", "val_recall")
            m_fetch = tuple(["val_" + s for s in self.model.metrics_names])
            to_print = {}
            for f in fetch + m_fetch:
                scores = [logs["%s_%s" % (name, f)] for name in self.IDs]
                res = np.mean(scores)
                logs[f] = res.round(self.log_round)  # Add to log file
                to_print[f.split("_")[-1]] = list(scores) + [res]
            if self.verbose:
                df = pd.DataFrame(to_print)
                df.index = self.IDs + ["mean"]
                self.logger(df.round(self.print_round))
            self.logger("")
class MemoryConsumption(Callback):
    """Logs process memory usage after each epoch and optionally stops
    training when consumption exceeds a limit.

    Adds 'memory_usage_gib' to the tf.keras logs dict after each epoch.
    """
    def __init__(self, max_gib=None, round_=2, logger=None, set_limit=False):
        """
        Args:
            max_gib:   float or None; stop training when usage >= this (GiB)
            round_:    int, number of decimals for the logged usage value
            logger:    a Logger instance; defaults to ScreenLogger
            set_limit: bool, hard-limit the process address space to max_gib
        """
        # BUGFIX: super().__init__() was not called (unlike the sibling
        # Callback subclasses in this module).
        super().__init__()
        self.max_gib = max_gib
        # BUGFIX: was 'self.logger = logger', which crashed on the calls
        # below whenever no logger was supplied; default to ScreenLogger
        # for consistency with the other callbacks.
        self.logger = logger or ScreenLogger()
        self.round_ = round_
        if set_limit:
            import resource
            _, hard = resource.getrlimit(resource.RLIMIT_AS)
            resource.setrlimit(resource.RLIMIT_AS,
                               (self._gib_to_bytes(max_gib), hard))
            self.logger("Setting memory limit to {} GiB".format(max_gib))

    @staticmethod
    def _gib_to_bytes(gib):
        # 1 GiB = 1024**3 bytes
        return gib * (1024 ** 3)

    @staticmethod
    def _bytes_to_gib(n_bytes):
        # renamed parameter from 'bytes' to avoid shadowing the builtin
        return n_bytes / (1024 ** 3)

    def on_epoch_end(self, epoch, logs={}):
        """Record memory usage; stop training if above self.max_gib."""
        mem_bytes = get_memory_usage()
        mem_gib = round(self._bytes_to_gib(mem_bytes), self.round_)
        logs['memory_usage_gib'] = mem_gib
        if self.max_gib and mem_gib >= self.max_gib:
            self.logger.warn("Stopping training from callback 'MemoryConsumption'! "
                             "Total memory consumption of {} GiB exceeds limitation"
                             " (self.max_gib = {}) ".format(mem_gib, self.max_gib))
            self.model.stop_training = True
class MaxTrainingTime(Callback):
    """Stops training once the accumulated training time exceeds a limit.

    Reads the formatted total training time from the keras logs dict (key
    self.log_name, written by an earlier callback) and sets
    model.stop_training when it exceeds max_minutes.
    """
    def __init__(self, max_minutes, log_name='train_time_total', logger=None):
        """
        Args:
            max_minutes: int, maximum total training time in minutes
            log_name:    str, logs-dict key holding the formatted training
                         time string
            logger:      a Logger instance; defaults to ScreenLogger
        """
        super().__init__()
        self.max_minutes = int(max_minutes)
        self.log_name = log_name
        self.logger = logger or ScreenLogger()

    def on_epoch_end(self, epochs, logs={}):
        """Check the elapsed training time; stop training if exceeded.

        Args:
            epochs: int, current epoch index (unused)
            logs:   dict of keras logs; expected to contain self.log_name
        """
        train_time_str = logs.get(self.log_name, None)
        if not train_time_str:
            self.logger.warn("Did not find log entry '{}' (needed in callback "
                             "'MaxTrainingTime')".format(self.log_name))
            return
        # The entry is assumed formatted with days at chars 0-2, hours at
        # 4-6, minutes at 8-10 and seconds at 12-14 (e.g. "01d:02h:03m:04s")
        # - TODO confirm against the callback that writes self.log_name.
        train_time_m = timedelta(
            days=int(train_time_str[:2]),
            hours=int(train_time_str[4:6]),
            minutes=int(train_time_str[8:10]),
            seconds=int(train_time_str[12:14])
        ).total_seconds() / 60
        if train_time_m >= self.max_minutes:
            # Stop training
            # BUGFIX: was 'self.warn(...)', which raised AttributeError
            # (Callback has no 'warn') exactly when the limit was first hit.
            self.logger.warn("Stopping training from callback 'MaxTrainingTime'! "
                             "Total training length of {} minutes exceeded (now {}) "
                             "".format(self.max_minutes, train_time_m))
            self.model.stop_training = True
class CarbonUsageTracking(Callback):
    """
    tf.keras Callback for the Carbontracker package.
    See https://github.com/lfwa/carbontracker.
    """
    def __init__(self, epochs, add_to_logs=True, monitor_epochs=-1,
                 epochs_before_pred=-1, devices_by_pid=True, **additional_tracker_kwargs):
        """
        Accepts parameters as per CarbonTracker.__init__
        Sets other default values for key parameters.

        Args:
            add_to_logs: bool, Add total_energy_kwh and total_co2_g to the keras logs after each epoch
            For other arguments, please refer to CarbonTracker.__init__
        """
        super().__init__()
        self.tracker = None  # instantiated lazily in on_epoch_begin
        self.add_to_logs = bool(add_to_logs)
        self.parameters = {"epochs": epochs,
                           "monitor_epochs": monitor_epochs,
                           "epochs_before_pred": epochs_before_pred,
                           "devices_by_pid": devices_by_pid}
        self.parameters.update(additional_tracker_kwargs)

    def on_train_end(self, logs={}):
        """ Ensure actual consumption is reported """
        # ROBUSTNESS: training may end before the first epoch ever begins
        # (e.g. due to an error), in which case no tracker was created and
        # the unconditional stop() call raised AttributeError on None.
        if self.tracker is not None:
            self.tracker.stop()

    def on_epoch_begin(self, epoch, logs={}):
        """ Start tracking this epoch """
        if self.tracker is None:
            # At this point all CPUs should be discoverable
            self.tracker = CarbonTracker(**self.parameters)
        self.tracker.epoch_start()

    def on_epoch_end(self, epoch, logs={}):
        """ End tracking this epoch """
        self.tracker.epoch_end()
        if self.add_to_logs:
            # NOTE(review): relies on carbontracker internals (the nested
            # 'tracker' attribute and the private _co2eq method); may break
            # across carbontracker versions.
            energy_kwh = self.tracker.tracker.total_energy_per_epoch().sum()
            co2eq_g = self.tracker._co2eq(energy_kwh)
            logs["total_energy_kwh"] = round(energy_kwh, 6)
            logs["total_co2_g"] = round(co2eq_g, 6)
| 41.511765 | 102 | 0.568372 | import numpy as np
import pandas as pd
from carbontracker.tracker import CarbonTracker
from tensorflow.keras.callbacks import Callback
from utime.utils import get_memory_usage
from mpunet.utils import highlighted
from mpunet.logging import ScreenLogger
from collections import defaultdict
from datetime import timedelta
class Validation(Callback):
def __init__(self,
val_sequence,
max_val_studies_per_dataset=20,
logger=None, verbose=True):
super().__init__()
self.logger = logger or ScreenLogger()
self.sequences = val_sequence.sequences
self.verbose = verbose
self.max_studies = max_val_studies_per_dataset
self.n_classes = val_sequence.n_classes
self.IDs = val_sequence.IDs
self.print_round = 3
self.log_round = 4
def _compute_counts(self, pred, true, ignore_class=None):
pred = pred.argmax(-1).ravel()
true = true.ravel()
if ignore_class:
mask = np.where(true != ignore_class)
true = true[mask]
pred = pred[mask]
tps = np.bincount(np.where(true == pred, true, self.n_classes),
minlength=self.n_classes+1)[:-1].astype(np.uint64)
rel = np.bincount(true, minlength=self.n_classes).astype(np.uint64)
sel = np.bincount(pred, minlength=self.n_classes).astype(np.uint64)
return tps, rel, sel
def predict(self):
metrics = self.model.loss_functions + self.model.metrics
metrics_names = self.model.metrics_names
self.model.reset_metrics()
assert len(metrics_names) == len(metrics)
true_pos, relevant, selected, metrics_results = {}, {}, {}, {}
for id_, sequence in zip(self.IDs, self.sequences):
true_pos[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)
relevant[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)
selected[id_] = np.zeros(shape=(self.n_classes,), dtype=np.uint64)
n_val = min(len(sequence.dataset_queue), self.max_studies)
study_iterator = sequence.dataset_queue.get_study_iterator(n_val)
per_study_metrics = defaultdict(list)
for i, sleep_study_context in enumerate(study_iterator):
if self.verbose:
s = " {}Validation subject: {}/{}".format(f"[{id_}] "
if id_ else "",
i+1,
n_val)
print(s, end="\r", flush=True)
with sleep_study_context as ss:
x, y = sequence.get_single_study_full_seq(ss.identifier,
reshape=True)
pred = self.model.predict_on_batch(x)
tps, rel, sel = self._compute_counts(pred=pred.numpy(),
true=y,
ignore_class=5)
true_pos[id_] += tps
relevant[id_] += rel
selected[id_] += sel
for metric, name in zip(metrics, metrics_names):
per_study_metrics[name].append(metric(y, pred).numpy())
metrics_results[id_] = {}
for metric, name in zip(metrics, metrics_names):
metrics_results[id_][name] = np.mean(per_study_metrics[name])
self.model.reset_metrics()
self.logger("")
self.logger("")
return true_pos, relevant, selected, metrics_results
@staticmethod
def _compute_dice(tp, rel, sel):
sel_mask = sel > 0
rel_mask = rel > 0
precisions = np.zeros(shape=tp.shape, dtype=np.float32)
recalls = np.zeros_like(precisions)
dices = np.zeros_like(precisions)
precisions[sel_mask] = tp[sel_mask] / sel[sel_mask]
recalls[rel_mask] = tp[rel_mask] / rel[rel_mask]
intrs = (2 * precisions * recalls)
union = (precisions + recalls)
dice_mask = union > 0
dices[dice_mask] = intrs[dice_mask] / union[dice_mask]
return precisions, recalls, dices
def _print_val_results(self, precisions, recalls, dices, metrics, epoch,
name, classes):
index = ["cls %i" % i for i in classes]
metric_keys, metric_vals = map(list, list(zip(*metrics.items())))
col_order = metric_keys + ["precision", "recall", "dice"]
nan_arr = np.empty(shape=len(precisions))
nan_arr[:] = np.nan
value_dict = {"precision": precisions,
"recall": recalls,
"dice": dices}
value_dict.update({key: nan_arr for key in metrics})
val_results = pd.DataFrame(value_dict,
index=index).loc[:, col_order]
val_results = val_results.T
means = metric_vals + [precisions.mean(), recalls.mean(), dices.mean()]
val_results["mean"] = means
cols = list(val_results.columns)
cols.insert(0, cols.pop(cols.index('mean')))
val_results = val_results.loc[:, cols]
self.logger(highlighted(("[%s] Validation Results for "
"Epoch %i" % (name, epoch)).lstrip(" ")))
print_string = val_results.round(self.print_round).to_string()
self.logger(print_string.replace("NaN", "---") + "\n")
def on_epoch_end(self, epoch, logs={}):
self.logger("\n")
TPs, relevant, selected, metrics = self.predict()
for id_ in self.IDs:
tp, rel, sel = TPs[id_], relevant[id_], selected[id_]
precisions, recalls, dices = self._compute_dice(tp=tp, sel=sel, rel=rel)
classes = np.arange(len(dices))
n = (id_ + "_") if len(self.IDs) > 1 else ""
logs[f"{n}val_dice"] = dices.mean().round(self.log_round)
logs[f"{n}val_precision"] = precisions.mean().round(self.log_round)
logs[f"{n}val_recall"] = recalls.mean().round(self.log_round)
for m_name, value in metrics[id_].items():
logs[f"{n}val_{m_name}"] = value.round(self.log_round)
if self.verbose:
self._print_val_results(precisions=precisions,
recalls=recalls,
dices=dices,
metrics=metrics[id_],
epoch=epoch,
name=id_,
classes=classes)
if len(self.IDs) > 1:
if self.verbose:
self.logger(highlighted(f"[ALL DATASETS] Means Across Classes"
f" for Epoch {epoch}"))
fetch = ("val_dice", "val_precision", "val_recall")
m_fetch = tuple(["val_" + s for s in self.model.metrics_names])
to_print = {}
for f in fetch + m_fetch:
scores = [logs["%s_%s" % (name, f)] for name in self.IDs]
res = np.mean(scores)
logs[f] = res.round(self.log_round)
to_print[f.split("_")[-1]] = list(scores) + [res]
if self.verbose:
df = pd.DataFrame(to_print)
df.index = self.IDs + ["mean"]
self.logger(df.round(self.print_round))
self.logger("")
class MemoryConsumption(Callback):
    """Callback that logs process memory usage after every epoch and can stop
    training (or install a hard RLIMIT_AS limit) when a maximum is exceeded.

    Parameters:
        max_gib:   Stop training when usage reaches this many GiB (None disables).
        round_:    Decimals used when logging the usage value.
        logger:    ScreenLogger-style object; called directly for info lines
                   and via .warn() for warnings.  NOTE(review): passing
                   logger=None together with set_limit=True would crash on the
                   log call below — confirm callers always supply a logger.
        set_limit: If True, also set a hard RLIMIT_AS of max_gib at init time.
    """

    def __init__(self, max_gib=None, round_=2, logger=None, set_limit=False):
        self.max_gib = max_gib
        self.logger = logger
        self.round_ = round_
        if set_limit:
            import resource
            _, hard = resource.getrlimit(resource.RLIMIT_AS)
            resource.setrlimit(resource.RLIMIT_AS,
                               (self._gib_to_bytes(max_gib), hard))
            self.logger("Setting memory limit to {} GiB".format(max_gib))

    @staticmethod
    def _gib_to_bytes(gib):
        """Convert gibibytes to bytes."""
        return gib * (1024 ** 3)

    @staticmethod
    def _bytes_to_gib(num_bytes):
        """Convert bytes to gibibytes (param renamed: no longer shadows builtin)."""
        return num_bytes / (1024 ** 3)

    def on_epoch_end(self, epoch, logs=None):
        # Bug fix: avoid the mutable default argument (logs={}) that is
        # mutated below and would persist across calls without an explicit dict.
        if logs is None:
            logs = {}
        mem_bytes = get_memory_usage()
        mem_gib = round(self._bytes_to_gib(mem_bytes), self.round_)
        logs['memory_usage_gib'] = mem_gib
        if self.max_gib and mem_gib >= self.max_gib:
            self.logger.warn("Stopping training from callback 'MemoryConsumption'! "
                             "Total memory consumption of {} GiB exceeds limitation"
                             " (self.max_gib = {}) ".format(mem_gib, self.max_gib))
            self.model.stop_training = True
class MaxTrainingTime(Callback):
    """Callback that stops training once the accumulated training time
    (read from the epoch logs) exceeds ``max_minutes``."""

    def __init__(self, max_minutes, log_name='train_time_total', logger=None):
        """
        Args:
            max_minutes: Stop training when total time reaches this limit.
            log_name:    Key in the logs dict holding the elapsed-time string.
            logger:      ScreenLogger-like object (defaults to ScreenLogger()).
        """
        super().__init__()
        self.max_minutes = int(max_minutes)
        self.log_name = log_name
        self.logger = logger or ScreenLogger()

    def on_epoch_end(self, epochs, logs=None):
        # Safe default instead of a shared mutable default argument.
        if logs is None:
            logs = {}
        train_time_str = logs.get(self.log_name, None)
        if not train_time_str:
            self.logger.warn("Did not find log entry '{}' (needed in callback "
                             "'MaxTrainingTime')".format(self.log_name))
            return
        # Parses a fixed-width elapsed-time string with fields at offsets
        # 0-1 (days), 4-6 (hours), 8-10 (minutes), 12-14 (seconds) —
        # presumably "DDd HHh MMm SSs"; TODO confirm against the producer.
        train_time_m = timedelta(
            days=int(train_time_str[:2]),
            hours=int(train_time_str[4:6]),
            minutes=int(train_time_str[8:10]),
            seconds=int(train_time_str[12:14])
        ).total_seconds() / 60
        if train_time_m >= self.max_minutes:
            # Bug fix: this used to call self.warn(), which does not exist on
            # this class and raised AttributeError exactly when the limit hit.
            self.logger.warn("Stopping training from callback 'MaxTrainingTime'! "
                             "Total training length of {} minutes exceeded (now {}) "
                             "".format(self.max_minutes, train_time_m))
            self.model.stop_training = True
class CarbonUsageTracking(Callback):
    """Callback wrapping a CarbonTracker: tracks energy/CO2 usage per epoch
    and (optionally) adds the running totals to the epoch logs."""

    def __init__(self, epochs, add_to_logs=True, monitor_epochs=-1,
                 epochs_before_pred=-1, devices_by_pid=True, **additional_tracker_kwargs):
        """
        Args:
            epochs:      Total number of epochs (forwarded to CarbonTracker).
            add_to_logs: If True, add energy/CO2 totals to the logs dict.
            monitor_epochs, epochs_before_pred, devices_by_pid:
                         Forwarded to CarbonTracker unchanged.
            additional_tracker_kwargs: Extra CarbonTracker keyword arguments.
        """
        super().__init__()
        self.tracker = None  # created lazily on the first epoch begin
        self.add_to_logs = bool(add_to_logs)
        self.parameters = {"epochs": epochs,
                           "monitor_epochs": monitor_epochs,
                           "epochs_before_pred": epochs_before_pred,
                           "devices_by_pid": devices_by_pid}
        self.parameters.update(additional_tracker_kwargs)

    def on_train_end(self, logs=None):
        # Guard fix: if training ends before any epoch began (e.g. an early
        # failure), the tracker was never created and .stop() would raise
        # AttributeError on None.
        if self.tracker is not None:
            self.tracker.stop()

    def on_epoch_begin(self, epoch, logs=None):
        if self.tracker is None:
            # Instantiated lazily so tracking starts with the first epoch.
            self.tracker = CarbonTracker(**self.parameters)
        self.tracker.epoch_start()

    def on_epoch_end(self, epoch, logs=None):
        # Safe default instead of a shared mutable default argument
        # (logs is mutated below).
        if logs is None:
            logs = {}
        self.tracker.epoch_end()
        if self.add_to_logs:
            energy_kwh = self.tracker.tracker.total_energy_per_epoch().sum()
            co2eq_g = self.tracker._co2eq(energy_kwh)
            logs["total_energy_kwh"] = round(energy_kwh, 6)
            logs["total_co2_g"] = round(co2eq_g, 6)
| true | true |
f731ad77a89b21efe9c339549c926df276f7e654 | 3,152 | py | Python | stackalytics/processor/config.py | duongmn89/stackalytics | 7b2412cddf5f48f42f92bc3400754a6c21742f0c | [
"Apache-2.0"
] | null | null | null | stackalytics/processor/config.py | duongmn89/stackalytics | 7b2412cddf5f48f42f92bc3400754a6c21742f0c | [
"Apache-2.0"
] | null | null | null | stackalytics/processor/config.py | duongmn89/stackalytics | 7b2412cddf5f48f42f92bc3400754a6c21742f0c | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy

from oslo_config import cfg

# Options controlling how the processor connects to its runtime storage.
CONNECTION_OPTS = [
    cfg.StrOpt('runtime-storage-uri', default='memcached://127.0.0.1:11211',
               help='Storage URI'),
]

# Options controlling data sources and behaviour of the processing pipeline.
PROCESSOR_OPTS = [
    cfg.StrOpt('default-data-uri',
               default='https://raw.githubusercontent.com/stackalytics/'
                       'default_data/master/default_data.json',
               help='URI for default data. A local file can be used with the '
                    'prefix "file://". For example, '
                    'default_data_uri = file:///path/to/default_data.json'),
    cfg.StrOpt('sources-root', default='/var/local/stackalytics',
               help='The folder that holds all project sources to analyze'),
    cfg.IntOpt('days_to_update_members', default=30,
               help='Number of days to update members'),
    cfg.StrOpt('corrections-uri',
               default=('https://git.openstack.org/cgit/'
                        'openstack/stackalytics/plain/etc/corrections.json'),
               help='The address of file with corrections data'),
    cfg.StrOpt('review-uri', default='gerrit://review.openstack.org',
               help='URI of review system'),
    cfg.StrOpt('git-base-uri', default='git://git.openstack.org',
               help='git base location'),
    cfg.StrOpt('ssh-key-filename', default='/home/user/.ssh/id_rsa',
               help='SSH key for gerrit review system access'),
    cfg.StrOpt('ssh-username', default='user',
               help='SSH username for gerrit review system access'),
    cfg.StrOpt('github-login', default=None,
               help='Login for github access'),
    cfg.StrOpt('github-password', default=None,
               help='Password for github access'),
    cfg.StrOpt('translation-team-uri',
               default='https://git.openstack.org/cgit/openstack/i18n/'
                       'plain/tools/zanata/translation_team.yaml',
               help='URI of translation team data'),
    cfg.StrOpt("fetching-user-source", default='launchpad',
               choices=['launchpad', '<None>'],
               help="Source for fetching user profiles"),
    cfg.IntOpt('members-look-ahead', default=250,
               help='How many member profiles to look ahead after the last'),
    cfg.IntOpt('read-timeout', default=120,
               help='Number of seconds to wait for remote response'),
    cfg.IntOpt('gerrit-retry', default=10,
               help='How many times to retry after Gerrit errors'),
]
def list_opts():
    """Entry point for oslo.config option discovery: yield every option
    group — here a single anonymous group containing all options, deep-copied
    so callers cannot mutate the module-level lists."""
    combined = CONNECTION_OPTS + PROCESSOR_OPTS
    yield None, copy.deepcopy(combined)
| 44.394366 | 78 | 0.63769 |
# NOTE(review): auto-generated, comment-stripped duplicate of the
# stackalytics config module above (dataset-dump artifact); kept verbatim.
import copy
from oslo_config import cfg
CONNECTION_OPTS = [
    cfg.StrOpt('runtime-storage-uri', default='memcached://127.0.0.1:11211',
               help='Storage URI'),
]
PROCESSOR_OPTS = [
    cfg.StrOpt('default-data-uri',
               default='https://raw.githubusercontent.com/stackalytics/'
                       'default_data/master/default_data.json',
               help='URI for default data. A local file can be used with the '
                    'prefix "file://". For example, '
                    'default_data_uri = file:///path/to/default_data.json'),
    cfg.StrOpt('sources-root', default='/var/local/stackalytics',
               help='The folder that holds all project sources to analyze'),
    cfg.IntOpt('days_to_update_members', default=30,
               help='Number of days to update members'),
    cfg.StrOpt('corrections-uri',
               default=('https://git.openstack.org/cgit/'
                        'openstack/stackalytics/plain/etc/corrections.json'),
               help='The address of file with corrections data'),
    cfg.StrOpt('review-uri', default='gerrit://review.openstack.org',
               help='URI of review system'),
    cfg.StrOpt('git-base-uri', default='git://git.openstack.org',
               help='git base location'),
    cfg.StrOpt('ssh-key-filename', default='/home/user/.ssh/id_rsa',
               help='SSH key for gerrit review system access'),
    cfg.StrOpt('ssh-username', default='user',
               help='SSH username for gerrit review system access'),
    cfg.StrOpt('github-login', default=None,
               help='Login for github access'),
    cfg.StrOpt('github-password', default=None,
               help='Password for github access'),
    cfg.StrOpt('translation-team-uri',
               default='https://git.openstack.org/cgit/openstack/i18n/'
                       'plain/tools/zanata/translation_team.yaml',
               help='URI of translation team data'),
    cfg.StrOpt("fetching-user-source", default='launchpad',
               choices=['launchpad', '<None>'],
               help="Source for fetching user profiles"),
    cfg.IntOpt('members-look-ahead', default=250,
               help='How many member profiles to look ahead after the last'),
    cfg.IntOpt('read-timeout', default=120,
               help='Number of seconds to wait for remote response'),
    cfg.IntOpt('gerrit-retry', default=10,
               help='How many times to retry after Gerrit errors'),
]
def list_opts():
    yield (None, copy.deepcopy(CONNECTION_OPTS + PROCESSOR_OPTS))
| true | true |
f731ad9e1174078405c48bb42fee69eb69fa5af1 | 1,103 | py | Python | piqa/model/tokenizers.py | AndrzejGretkowski/masters-piqa | 5924cb5261bbcc2bb9970622c12c29f49bdea6e7 | [
"MIT"
] | null | null | null | piqa/model/tokenizers.py | AndrzejGretkowski/masters-piqa | 5924cb5261bbcc2bb9970622c12c29f49bdea6e7 | [
"MIT"
] | null | null | null | piqa/model/tokenizers.py | AndrzejGretkowski/masters-piqa | 5924cb5261bbcc2bb9970622c12c29f49bdea6e7 | [
"MIT"
] | null | null | null | from transformers import AlbertTokenizerFast, RobertaTokenizerFast, DistilBertTokenizerFast
from piqa.model.tokenizers_base import BaseTokenizerPIQA
class PIQATokenizer(object):
    """Registry mapping pretrained-model names to PIQA tokenizer classes."""

    tokenizer_mapping = dict()

    @classmethod
    def register(cls, *args):
        """Class decorator: register the decorated class under each name."""
        def decorator(tokenizer_cls):
            for name in args:
                cls.tokenizer_mapping[name] = tokenizer_cls
            return tokenizer_cls
        return decorator

    @classmethod
    def get(cls, tokenizer):
        """Look up a registered tokenizer class (None when unknown)."""
        return cls.tokenizer_mapping.get(tokenizer)
@PIQATokenizer.register('roberta-base', 'roberta-large')
class RobertaPIQATokenizer(BaseTokenizerPIQA):
    """PIQA tokenizer registered for the RoBERTa model family."""

    @property
    def get_tokenizer(self):
        # Returns the HuggingFace fast-tokenizer *class* (not an instance).
        return RobertaTokenizerFast
@PIQATokenizer.register('albert-base-v2', 'albert-large-v2', 'albert-xlarge-v2', 'albert-xxlarge-v2')
class AlbertPIQATokenizer(BaseTokenizerPIQA):
    """PIQA tokenizer registered for the ALBERT v2 model family."""

    @property
    def get_tokenizer(self):
        # Returns the HuggingFace fast-tokenizer *class* (not an instance).
        return AlbertTokenizerFast
@PIQATokenizer.register('distilbert-base-uncased')
class DistilPIQATokenizer(BaseTokenizerPIQA):
    """PIQA tokenizer registered for DistilBERT."""

    @property
    def get_tokenizer(self):
        # Returns the HuggingFace fast-tokenizer *class* (not an instance).
        return DistilBertTokenizerFast
| 27.575 | 101 | 0.724388 | from transformers import AlbertTokenizerFast, RobertaTokenizerFast, DistilBertTokenizerFast
# NOTE(review): auto-generated, comment-stripped duplicate of the tokenizer
# module above (dataset-dump artifact); its leading transformers import was
# fused into the preceding metadata row.  Kept verbatim.
from piqa.model.tokenizers_base import BaseTokenizerPIQA
class PIQATokenizer(object):
    tokenizer_mapping = dict()
    @classmethod
    def register(cls, *args):
        def decorator(fn):
            for arg in args:
                cls.tokenizer_mapping[arg] = fn
            return fn
        return decorator
    @classmethod
    def get(cls, tokenizer):
        return cls.tokenizer_mapping.get(tokenizer)
@PIQATokenizer.register('roberta-base', 'roberta-large')
class RobertaPIQATokenizer(BaseTokenizerPIQA):
    @property
    def get_tokenizer(self):
        return RobertaTokenizerFast
@PIQATokenizer.register('albert-base-v2', 'albert-large-v2', 'albert-xlarge-v2', 'albert-xxlarge-v2')
class AlbertPIQATokenizer(BaseTokenizerPIQA):
    @property
    def get_tokenizer(self):
        return AlbertTokenizerFast
@PIQATokenizer.register('distilbert-base-uncased')
class DistilPIQATokenizer(BaseTokenizerPIQA):
    @property
    def get_tokenizer(self):
        return DistilBertTokenizerFast
| true | true |
f731ae5909a825418b54595ca18abd70d6832fb9 | 1,895 | py | Python | home/urls.py | Paphra/recruitment-cms | 2894d5285a6da4ff47c114377968c8fda95c24b6 | [
"MIT"
] | 2 | 2021-02-07T12:08:47.000Z | 2021-02-22T07:12:53.000Z | home/urls.py | Paphra/recruitment-cms | 2894d5285a6da4ff47c114377968c8fda95c24b6 | [
"MIT"
] | null | null | null | home/urls.py | Paphra/recruitment-cms | 2894d5285a6da4ff47c114377968c8fda95c24b6 | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
from files import views as files
from finances import views as finances
from others import views as others
from operations import views as operations

# URL routing table for the recruitment-CMS "home" app; each route maps to a
# view in this app or one of the files/finances/others/operations apps.
urlpatterns = [
    path('clients/<int:client_id>/pay-reg-fee', views.pay_reg_fee, name='pay-reg-fee'),
    path('clients/<int:client_id>/register', views.register, name='register'),
    path('clients', views.clients, name='clients'),
    path('jobs', views.jobs, name='jobs'),
    path('branches', views.branches, name='branches'),

    # operations
    path('clearances', operations.clearances, name='clearances'),
    path('contracts', operations.contracts, name='contracts'),
    path('interpols', operations.interpols, name='interpols'),
    path('interviews', operations.interviews, name='interviews'),
    path('trainings', operations.trainings, name='trainings'),
    path('medicals', operations.medicals, name='medicals'),
    path('passports', operations.passports, name='passports'),
    path('tickets', operations.tickets, name='tickets'),
    path('vettings', operations.vettings, name='vettings'),
    path('vetting-album', operations.vetting_album, name='vetting-album'),
    path('visas', operations.visas, name='visas'),
    path('travel-plans', operations.travel_plans, name='travel-plans'),
    path('other-ops', operations.other_ops, name='other-ops'),

    # finances
    path('payments', finances.payments, name='payments'),
    path('expenses', finances.expenses, name='expenses'),
    path('fees', finances.fees, name='fees'),

    # files
    path('clearance-files', files.clearance_files, name='clearance-files'),
    path('client-files', files.client_files, name='client-files'),

    # others
    path('tasks', others.tasks, name='tasks'),
    path('recruitments', others.recruitments, name='recruitments'),

    # catch-all index page (must stay last so specific routes win)
    path('', views.index, name="index"),
]
| 39.479167 | 87 | 0.692876 | from django.urls import path
# NOTE(review): auto-generated, comment-stripped duplicate of the urls module
# above (dataset-dump artifact); its django import was fused into the
# preceding metadata row.  Kept verbatim.
from . import views
from files import views as files
from finances import views as finances
from others import views as others
from operations import views as operations
urlpatterns = [
    path('clients/<int:client_id>/pay-reg-fee', views.pay_reg_fee, name='pay-reg-fee'),
    path('clients/<int:client_id>/register', views.register, name='register'),
    path('clients', views.clients, name='clients'),
    path('jobs', views.jobs, name='jobs'),
    path('branches', views.branches, name='branches'),
    path('clearances', operations.clearances, name='clearances'),
    path('contracts', operations.contracts, name='contracts'),
    path('interpols', operations.interpols, name='interpols'),
    path('interviews', operations.interviews, name='interviews'),
    path('trainings', operations.trainings, name='trainings'),
    path('medicals', operations.medicals, name='medicals'),
    path('passports', operations.passports, name='passports'),
    path('tickets', operations.tickets, name='tickets'),
    path('vettings', operations.vettings, name='vettings'),
    path('vetting-album', operations.vetting_album, name='vetting-album'),
    path('visas', operations.visas, name='visas'),
    path('travel-plans', operations.travel_plans, name='travel-plans'),
    path('other-ops', operations.other_ops, name='other-ops'),
    path('payments', finances.payments, name='payments'),
    path('expenses', finances.expenses, name='expenses'),
    path('fees', finances.fees, name='fees'),
    path('clearance-files', files.clearance_files, name='clearance-files'),
    path('client-files', files.client_files, name='client-files'),
    path('tasks', others.tasks, name='tasks'),
    path('recruitments', others.recruitments, name='recruitments'),
    path('', views.index, name="index"),
]
| true | true |
f731afae0e47cb38488bcaa0764e4ae4aadbc6ac | 6,801 | py | Python | kerberos.py | bbhunter/kerberoast | e983ba1d5290e94c71e04fc15dc2cee482873f18 | [
"Apache-2.0"
] | 1,011 | 2015-03-17T21:52:07.000Z | 2022-03-31T09:25:41.000Z | kerberos.py | bbhunter/kerberoast | e983ba1d5290e94c71e04fc15dc2cee482873f18 | [
"Apache-2.0"
] | 16 | 2015-07-08T09:12:27.000Z | 2022-03-05T19:04:26.000Z | kerberos.py | bbhunter/kerberoast | e983ba1d5290e94c71e04fc15dc2cee482873f18 | [
"Apache-2.0"
] | 274 | 2015-04-28T21:24:23.000Z | 2022-03-29T18:37:27.000Z | #!/usr/bin/env python3 -tt
import hashlib
import hmac
from pyasn1.type import univ, char, useful, tag
from pyasn1.codec.ber import encoder, decoder
import datetime
import base64
import sys
#REF: http://tools.ietf.org/id/draft-brezak-win2k-krb-rc4-hmac-03.txt
#T = 1 for TS-ENC-TS in the AS-Request
#T = 8 for the AS-Reply
#T = 7 for the Authenticator in the TGS-Request
#T = 8 for the TGS-Reply
#T = 2 for the Server Ticket in the AP-Request
#T = 11 for the Authenticator in the AP-Request
#T = 12 for the Server returned AP-Reply
#T = 15 in the generation of checksum for the MIC token
#T = 0 in the generation of sequence number for the MIC token
#T = 13 in the generation of checksum for the WRAP token
#T = 0 in the generation of sequence number for the WRAP token
#T = 0 in the generation of encrypted data for the WRAPPED token
def ntlmhash(s):
    """Return the NTLM hash of *s*: MD4 over the UTF-16LE encoding, as bytes.

    NOTE: MD4 is a legacy algorithm; hashlib.new('md4') raises ValueError on
    OpenSSL builds that no longer ship it (e.g. default OpenSSL 3).
    """
    # Local renamed so it no longer shadows the builtin hash().
    digest = hashlib.new('md4', s.encode('utf-16le')).digest()
    return digest
    # return binascii.hexlify(digest)  # hex form, if ever needed
def rc4crypt(key, data):
    """Encrypt/decrypt *data* with RC4 under *key* (both bytes); RC4 is its
    own inverse, so the same call decrypts."""
    # --- key-scheduling algorithm (KSA) ---
    state = list(range(256))
    j = 0
    klen = len(key)
    for i in range(256):
        j = (j + state[i] + key[i % klen]) % 256
        state[i], state[j] = state[j], state[i]
    # --- pseudo-random generation, XORed with the data ---
    out = bytearray()
    i = j = 0
    for byte in data:
        i = (i + 1) % 256
        j = (j + state[i]) % 256
        state[i], state[j] = state[j], state[i]
        out.append(byte ^ state[(state[i] + state[j]) % 256])
    return bytes(out)
#print decoder.decode(enc)
#define KERB_ETYPE_RC4_HMAC 23
# Kerberos encryption-type identifier for RC4-HMAC (etype 23, RFC 4757).
KERB_ETYPE_RC4_HMAC = 23
#define KERB_ETYPE_RC4_HMAC_EXP 24
def decrypt(key, messagetype, edata):
    """RC4-HMAC decrypt (RFC 4757 style).

    key:         RC4-HMAC key (bytes, typically the NTLM hash).
    messagetype: Kerberos key-usage number T (see the table at file top).
    edata:       16-byte HMAC-MD5 checksum followed by the RC4 ciphertext of
                 (8-byte confounder || plaintext).

    Returns (plaintext, confounder) when the checksum verifies, otherwise
    (None, None).
    """
    #DECRYPT (K, fRC4_EXP, T, edata, edata_len, data, data_len)
    #{
    # if (fRC4_EXP){
    # *((DWORD *)(L40+10)) = T;
    # HMAC (K, L40, 14, K1);
    # }else{
    # HMAC (K, &T, 4, K1);
    # }
    #K1 = hmac.new(key, chr(messagetype) + "\x00\x00\x00", hashlib.md5).digest() # \x0b = 11
    # K1 = HMAC-MD5(key, T as 4 little-endian bytes)
    K1 = hmac.new(key, bytes([messagetype]) + b"\x00\x00\x00", hashlib.md5).digest() # \x0b = 11
    # memcpy (K2, K1, 16);
    K2 = K1
    # if (fRC4_EXP) memset (K1+7, 0xAB, 9);
    # HMAC (K1, edata, 16, K3); // checksum is at edata
    K3 = hmac.new(K1, edata[:16], hashlib.md5).digest()
    # RC4(K3, edata + 16, edata_len - 16, edata + 16);
    ddata = rc4crypt(K3, edata[16:])
    # data_len = edata_len - 16 - 8;
    # memcpy (data, edata + 16 + 8, data_len);
    #
    # // verify generated and received checksums
    # HMAC (K2, edata + 16, edata_len - 16, checksum);
    checksum = hmac.new(K2, ddata, hashlib.md5).digest()
    # if (memcmp(edata, checksum, 16) != 0)
    # printf("CHECKSUM ERROR !!!!!!\n");
    #}
    if checksum == edata[:16]:
        # First 8 decrypted bytes are the random confounder ("nonce"),
        # the remainder is the actual plaintext.
        return ddata[8:], ddata[:8]
    else:
        # Checksum mismatch: wrong key or corrupted data.
        return None, None
def encrypt(key, messagetype, data, nonce):
    """RC4-HMAC encrypt (RFC 4757 style); inverse of decrypt().

    key:         RC4-HMAC key (bytes).
    messagetype: Kerberos key-usage number T.
    data:        plaintext bytes.
    nonce:       8-byte confounder prepended to the plaintext.

    Returns checksum(16 bytes) || RC4(K3, nonce || data).
    """
    # Bug fix: the key-usage constant must be bytes, exactly as in decrypt().
    # The old code built a str with chr(), which raises TypeError under
    # Python 3 when HMAC'd with a bytes key.
    K1 = hmac.new(key, bytes([messagetype]) + b"\x00\x00\x00", hashlib.md5).digest()
    K2 = K1
    # Confounder is prepended before MAC'ing and encrypting.
    ddata = nonce + data
    checksum = hmac.new(K2, ddata, hashlib.md5).digest()
    # K3 = HMAC-MD5(K1, checksum) keys the RC4 stream.
    K3 = hmac.new(K1, checksum, hashlib.md5).digest()
    edata = rc4crypt(K3, ddata)
    # Layout: 16-byte checksum, then ciphertext of (8-byte nonce + data).
    return checksum + edata
def zerosigs(data):
    """Return a copy of *data* with the two 16-char signature fields near the
    end zeroed out, prior to checksum computation.

    NOTE(review): Python-2-style code — it maps ord() over the input, so it
    works on str (one char per byte) but raises TypeError on bytes under
    Python 3; confirm callers pass str.
    """
    d = list(map(ord, data))
    for i in range(5, 21): # zero out the 16 char sig, KDC
        d[len(d) - i] = 0
    for i in range(29, 45): # zero out the 16 char sig, Server
        d[len(d) - i] = 0
    retval = "".join(map(chr, d))
    #print retval.encode('hex')
    return retval
def chksum(K, T, data):
    """MIC checksum: HMAC-MD5(Ksign, MD5(T || data)) after zeroing both
    16-byte signature fields in *data*.

    K:    key bytes.
    T:    message-type constant encoded as 4 little-endian bytes.
    data: the token whose signatures are zeroed before hashing.
    """
    data = zerosigs(data)
    # zerosigs() operates on (and returns) str; MD5/HMAC need bytes under
    # Python 3, so re-encode one byte per char.
    if isinstance(data, str):
        data = data.encode('latin-1')
    # Bug fix: the derivation constant must be bytes — the old str value
    # raised TypeError when HMAC'd with a bytes key under Python 3.
    #Ksign = HMAC(K, "signaturekey") //includes zero octet at end
    SIGNATUREKEY = b'signaturekey\x00'
    Ksign = hmac.new(K, SIGNATUREKEY, hashlib.md5).digest()
    #tmp = MD5(concat(T, data))
    tmp = hashlib.md5(T + data).digest()
    #CHKSUM = HMAC(Ksign, tmp)
    return hmac.new(Ksign, tmp, hashlib.md5).digest()
def getservsig(encchunk):
    """Extract the 16-char server/KDC signature field (offset -44..-28 from
    the end of the encrypted chunk)."""
    signature = encchunk[-44:-28]
    return str(signature)
def getprivsig(encchunk):
    """Extract the 16-char private/second signature field (offset -20..-4
    from the end of the encrypted chunk)."""
    signature = encchunk[-20:-4]
    return str(signature)
def printdecode(kerbpayload, ktype=2):
    """BER-decode *kerbpayload* and print selected fields.

    Only ktype == 32 prints real ticket fields; the ktype == 2 branch is a
    placeholder stub that just prints "a".
    """
    d = decoder.decode(kerbpayload)
    if ktype == 32:
        #print "Protocol Version (pvno): " + str(d[0][0])
        print("Message Type: " + str(d[0][1]))
        print("Realm: " + str(d[0][2]))
        print("Principal: " + str(d[0][3][1][0]))
        print("Ticket Version (tkt-vno): " + str(d[0][4][0]))
        print("Ticket Realm: " + str(d[0][4][1]))
        #print "Name-Type (Service & Instance): " + str(d[0][4][2][0])
        print("Server, Name: " + str(d[0][4][2][1][0]))
        print("Server, Name: " + str(d[0][4][2][1][1]))
        #print "Data: " + str(d[0][4][3][2]).encode('hex')
        #print "Encryption Type: : " + str(d[0][5][0])
        #print "Data: " + str(d[0])
        #print "Server Realm: " + str(d[0][4][2][4])
    elif ktype == 2:
        # Placeholder branch — never fleshed out.
        print("a")
def extract_ticket_from_kirbi(filename):
    """Read a .kirbi (KRB-CRED) file from disk and return its encrypted
    ticket octets via extract_ticket()."""
    with open(filename, 'rb') as kirbi_file:
        raw = kirbi_file.read()
    return extract_ticket(raw)
def extract_ticket(data):
    """Return the encrypted-part octets of a Kerberos ticket structure.

    data: raw bytes — either a BER KRB-CRED dump (first byte 0x76, e.g. from
    a RAM dump / .kirbi file) or an ASCII-hex-encoded ticket starting '6d'.
    Returns None for unrecognised input (implicit).
    """
    if data[0] == 0x76:
        # ram dump / kirbi: enc-part lives at [0][2][0][3][2]
        return (decoder.decode(data)[0][2][0][3][2]).asOctets()
    elif data[:2] == b'6d':
        # Hex-encoded ticket (historically sourced from a pcap).
        # Bug fix: the old branch referenced an undefined name `ticket` and
        # used the Python-2-only str.decode('hex'); decode the hex payload
        # from `data` instead.
        raw = bytes.fromhex(data.decode('ascii'))
        return (decoder.decode(raw)[0][4][3][2]).asOctets()
| 34.522843 | 97 | 0.551242 |
# NOTE(review): auto-generated, comment-stripped duplicate of the kerberos
# module above (dataset-dump artifact); kept verbatim.  WARNING: stripping
# corrupted decrypt() below — its first body line lost the leading "K1 = h"
# of "K1 = hmac.new(...)", so this copy is not runnable as-is.
import hashlib
import hmac
from pyasn1.type import univ, char, useful, tag
from pyasn1.codec.ber import encoder, decoder
import datetime
import base64
import sys
def ntlmhash(s):
    hash = hashlib.new('md4', s.encode('utf-16le')).digest()
    return hash
def rc4crypt(key, data):
    x = 0
    box = list(range(256))
    for i in range(256):
        x = (x + box[i] + (key[i % len(key)])) % 256
        box[i], box[x] = box[x], box[i]
    x = 0
    y = 0
    out = b''
    for char in data:
        x = (x + 1) % 256
        y = (y + box[x]) % 256
        box[x], box[y] = box[y], box[x]
        out += bytes([char ^ box[(box[x] + box[y]) % 256]])
    return out
KERB_ETYPE_RC4_HMAC = 23
def decrypt(key, messagetype, edata):
    # corrupted line: should read "K1 = hmac.new(key, ...)"
    mac.new(key, bytes([messagetype]) + b"\x00\x00\x00", hashlib.md5).digest()
    K2 = K1
    K3 = hmac.new(K1, edata[:16], hashlib.md5).digest()
    ddata = rc4crypt(K3, edata[16:])
    checksum = hmac.new(K2, ddata, hashlib.md5).digest()
    if checksum == edata[:16]:
        return ddata[8:], ddata[:8]
    else:
        return None, None
def encrypt(key, messagetype, data, nonce):
    K1 = hmac.new(key, chr(messagetype) + "\x00\x00\x00", hashlib.md5).digest()
    K2 = K1
    ddata = nonce + data
    checksum = hmac.new(K2, ddata, hashlib.md5).digest()
    K3 = hmac.new(K1, checksum, hashlib.md5).digest()
    edata = rc4crypt(K3, ddata)
    return checksum + edata
def zerosigs(data):
    d = list(map(ord, data))
    for i in range(5, 21):
        d[len(d) - i] = 0
    for i in range(29, 45):
        d[len(d) - i] = 0
    retval = "".join(map(chr, d))
    return retval
def chksum(K, T, data):
    data = zerosigs(data)
    SIGNATUREKEY = 'signaturekey\x00'
    Ksign = hmac.new(K, SIGNATUREKEY, hashlib.md5).digest()
    tmp = hashlib.md5(T + data).digest()
    chksum = hmac.new(Ksign, tmp, hashlib.md5).digest()
    return chksum
def getservsig(encchunk):
    return str(encchunk[-44:-28])
def getprivsig(encchunk):
    return str(encchunk[-20:-4])
def printdecode(kerbpayload, ktype=2):
    d = decoder.decode(kerbpayload)
    if ktype == 32:
        print("Message Type: " + str(d[0][1]))
        print("Realm: " + str(d[0][2]))
        print("Principal: " + str(d[0][3][1][0]))
        print("Ticket Version (tkt-vno): " + str(d[0][4][0]))
        print("Ticket Realm: " + str(d[0][4][1]))
        print("Server, Name: " + str(d[0][4][2][1][0]))
        print("Server, Name: " + str(d[0][4][2][1][1]))
    elif ktype == 2:
        print("a")
def extract_ticket_from_kirbi(filename):
    with open(filename, 'rb') as fd:
        data = fd.read()
    return extract_ticket(data)
def extract_ticket(data):
    if data[0] == 0x76:
        return (decoder.decode(data)[0][2][0][3][2]).asOctets()
    elif data[:2] == b'6d':
        return (decoder.decode(ticket.decode('hex'))[0][4][3][2]).asOctets()
| true | true |
f731b06c3942398040ef6910dde20647365a1923 | 992 | py | Python | IPAddressPool/IPAddressPool/ip/IpAcquire.py | ruxuezhuimeng/spider | badf61a66e99cf1936b5c404ad85f1b00838943e | [
"MIT"
] | 1 | 2020-06-17T05:56:19.000Z | 2020-06-17T05:56:19.000Z | IPAddressPool/IPAddressPool/ip/IpAcquire.py | ruxuezhuimeng/spider | badf61a66e99cf1936b5c404ad85f1b00838943e | [
"MIT"
] | null | null | null | IPAddressPool/IPAddressPool/ip/IpAcquire.py | ruxuezhuimeng/spider | badf61a66e99cf1936b5c404ad85f1b00838943e | [
"MIT"
] | null | null | null | from config.SourceUrl import getUrl
from ip.Ip2Db import insert
import threading
import requests
from Log import log

# Default HTTP headers: a desktop-Chrome User-Agent so the proxy source is
# less likely to reject the request as a bot.
header = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
}
def acquireIp():
    """Fetch a batch of proxy IPs from the configured source URL and hand the
    response body to parseHtml(); failures are logged, never raised."""
    aUrl = getUrl()
    log.info('获取ip地址:{}'.format(aUrl))
    try:
        reponse = requests.get(aUrl, headers=header, timeout=5)
        if reponse.status_code == 200:
            parseHtml(reponse.text)
    except Exception:
        # Bug fix: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch Exception instead while keeping the same
        # best-effort logging behaviour.
        log.error('请求ip异常:{}'.format(aUrl))
def parseHtml(html):
    """Parse the proxy-source response body and spawn one insert thread per
    IP line; stops at the first empty line or a 'false' quota marker."""
    # Strip quotes, every 'b' character (collapsing b'...' wrappers — note
    # '<br/>' becomes '<r/>' after the 'b' pass, which the next replacement
    # removes) and carriage returns, in the original order.
    for junk in ("'", 'b', '<r/>', '\r'):
        html = html.replace(junk, '')
    for line in html.split("\n"):
        candidate = line.strip()
        if 'false' in candidate:
            # Provider signals the daily quota is exhausted.
            log.war('您的套餐今日已到达上限')
            return
        if candidate == '':
            return
        if '.' in candidate:
            threading.Thread(target=insert, args=(candidate,)).start()
| 26.810811 | 134 | 0.566532 | from config.SourceUrl import getUrl
# NOTE(review): auto-generated, comment-stripped duplicate of the IpAcquire
# module above (dataset-dump artifact); its first import was fused into the
# preceding metadata row.  Kept verbatim.
from ip.Ip2Db import insert
import threading
import requests
from Log import log
header = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
}
def acquireIp():
    aUrl = getUrl()
    log.info('获取ip地址:{}'.format(aUrl))
    try:
        reponse = requests.get(aUrl, headers=header, timeout=5)
        if reponse.status_code == 200:
            parseHtml(reponse.text)
    except:
        log.error('请求ip异常:{}'.format(aUrl))
def parseHtml(html):
    html = html.replace('\'', '').replace('b', '').replace('<r/>', '').replace('\r', '')
    ips = html.split("\n")
    for ip in ips:
        ip = ip.strip()
        if 'false' in ip:
            log.war('您的套餐今日已到达上限')
            return
        elif '' == ip:
            return
        else:
            if '.' in ip:
                threading.Thread(target=insert, args=(ip,)).start()
| true | true |
f731b08e6a086de9df0318dd73e3fdb7854f1ed6 | 1,413 | py | Python | models/diabetes/train.py | aditya9126/pipelines-azureml | 0c747f12e02ee3d3976746663bd1da0ab5935887 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | models/diabetes/train.py | aditya9126/pipelines-azureml | 0c747f12e02ee3d3976746663bd1da0ab5935887 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | models/diabetes/train.py | aditya9126/pipelines-azureml | 0c747f12e02ee3d3976746663bd1da0ab5935887 | [
"CC-BY-4.0",
"MIT"
] | null | null | null | # new update
import pickle
import os
import numpy as np
from sklearn.datasets import load_diabetes
from sklearn.linear_model import Ridge
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from azureml.core.run import Run
from utils import mylib

# Training script: fit Ridge regressions on the sklearn diabetes dataset for
# a range of alpha values, logging alpha/MSE to the AzureML run and pickling
# each fitted model into ./outputs.
os.makedirs('./outputs', exist_ok=True)
X, y = load_diabetes(return_X_y=True)
run = Run.get_context()  # AzureML run context used for metric logging
X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                    test_size=0.2,
                                                    random_state=0)
data = {"train": {"X": X_train, "y": y_train},
        "test": {"X": X_test, "y": y_test}}
# list of numbers from 0.0 to 1.0 with a 0.05 interval
alphas = mylib.get_alphas()
for alpha in alphas:
    # Use Ridge algorithm to create a regression model
    reg = Ridge(alpha=alpha)
    reg.fit(data["train"]["X"], data["train"]["y"])
    preds = reg.predict(data["test"]["X"])
    mse = mean_squared_error(preds, data["test"]["y"])
    run.log('alpha', alpha)
    run.log('mse', mse)
    # Save model in the outputs folder so it automatically get uploaded when running on AML Compute
    model_file_name = 'ridge_{0:.2f}.pkl'.format(alpha)
    with open(os.path.join('./outputs/', model_file_name), 'wb') as file:
        pickle.dump(reg, file)
    print('alpha is {0:.2f}, and mse is {1:0.2f}'.format(alpha, mse))
| 31.4 | 99 | 0.643312 |
# NOTE(review): auto-generated, comment-stripped duplicate of the diabetes
# training script above (dataset-dump artifact); kept verbatim.
import pickle
import os
import numpy as np
from sklearn.datasets import load_diabetes
from sklearn.linear_model import Ridge
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split
from azureml.core.run import Run
from utils import mylib
os.makedirs('./outputs', exist_ok=True)
X, y = load_diabetes(return_X_y=True)
run = Run.get_context()
X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                    test_size=0.2,
                                                    random_state=0)
data = {"train": {"X": X_train, "y": y_train},
        "test": {"X": X_test, "y": y_test}}
alphas = mylib.get_alphas()
for alpha in alphas:
    reg = Ridge(alpha=alpha)
    reg.fit(data["train"]["X"], data["train"]["y"])
    preds = reg.predict(data["test"]["X"])
    mse = mean_squared_error(preds, data["test"]["y"])
    run.log('alpha', alpha)
    run.log('mse', mse)
    model_file_name = 'ridge_{0:.2f}.pkl'.format(alpha)
    with open(os.path.join('./outputs/', model_file_name), 'wb') as file:
        pickle.dump(reg, file)
    print('alpha is {0:.2f}, and mse is {1:0.2f}'.format(alpha, mse))
| true | true |
f731b0a7f4cf43d68c7253dd502f524c7bafe510 | 1,522 | py | Python | src/ewaluacja2021/management/commands/raport_3n_to_xlsx.py | iplweb/bpp | f027415cc3faf1ca79082bf7bacd4be35b1a6fdf | [
"BSD-3-Clause"
] | null | null | null | src/ewaluacja2021/management/commands/raport_3n_to_xlsx.py | iplweb/bpp | f027415cc3faf1ca79082bf7bacd4be35b1a6fdf | [
"BSD-3-Clause"
] | 41 | 2019-11-07T00:07:02.000Z | 2022-02-27T22:09:39.000Z | src/ewaluacja2021/management/commands/raport_3n_to_xlsx.py | iplweb/bpp | f027415cc3faf1ca79082bf7bacd4be35b1a6fdf | [
"BSD-3-Clause"
] | null | null | null | # -*- encoding: utf-8 -*-
import os
from argparse import FileType
from django.core.management import BaseCommand
from ewaluacja2021.reports import load_data, rekordy
from ewaluacja2021.util import autor2fn
from ewaluacja2021.xlsy import AutorskiXLSX, CalosciowyXLSX
from bpp.models import Autor
from bpp.util import pbar
class Command(BaseCommand):
    """Django management command: render per-author and aggregate XLSX
    evaluation reports from a 3N-algorithm JSON output file."""

    def add_arguments(self, parser):
        parser.add_argument("wejscie", type=FileType("r"))
        parser.add_argument("--katalog-wyjsciowy", type=str, default=None)

    def handle(self, wejscie, katalog_wyjsciowy, liczba_n=None, *args, **options):
        dane = load_data(wejscie)

        if katalog_wyjsciowy is None:
            # Default output directory derived from the input file name.
            katalog_wyjsciowy = wejscie.name.replace(".json", "_output")
        # Fix: makedirs(..., exist_ok=True) replaces the racy
        # exists()-then-mkdir() sequence (and creates parents if needed).
        os.makedirs(katalog_wyjsciowy, exist_ok=True)

        rekordy_danych = rekordy(dane)

        # Aggregate workbook covering every record.
        CalosciowyXLSX(
            "AAA_rekordy",
            rekordy=rekordy_danych,
            dane=dane,
            katalog_wyjsciowy=katalog_wyjsciowy,
        ).zrob()

        # One workbook per author appearing in the data.
        for autor in pbar(
            Autor.objects.filter(pk__in=(x.autor_id for x in rekordy(dane))),
            label="Dane autorow...",
        ):
            rekordy_autora = rekordy_danych.filter(autor_id=autor.id)
            AutorskiXLSX(
                autor=autor,
                title=autor2fn(autor),
                rekordy=rekordy_autora,
                dane=dane,
                katalog_wyjsciowy=katalog_wyjsciowy,
            ).zrob()
| 30.44 | 82 | 0.635348 |
# NOTE(review): auto-generated, comment-stripped duplicate of the raport_3n
# management command above (dataset-dump artifact); kept verbatim.
import os
from argparse import FileType
from django.core.management import BaseCommand
from ewaluacja2021.reports import load_data, rekordy
from ewaluacja2021.util import autor2fn
from ewaluacja2021.xlsy import AutorskiXLSX, CalosciowyXLSX
from bpp.models import Autor
from bpp.util import pbar
class Command(BaseCommand):
    def add_arguments(self, parser):
        parser.add_argument("wejscie", type=FileType("r"))
        parser.add_argument("--katalog-wyjsciowy", type=str, default=None)
    def handle(self, wejscie, katalog_wyjsciowy, liczba_n=None, *args, **options):
        dane = load_data(wejscie)
        if katalog_wyjsciowy is None:
            katalog_wyjsciowy = wejscie.name.replace(".json", "_output")
        if not os.path.exists(katalog_wyjsciowy):
            os.mkdir(katalog_wyjsciowy)
        rekordy_danych = rekordy(dane)
        CalosciowyXLSX(
            "AAA_rekordy",
            rekordy=rekordy_danych,
            dane=dane,
            katalog_wyjsciowy=katalog_wyjsciowy,
        ).zrob()
        for autor in pbar(
            Autor.objects.filter(pk__in=(x.autor_id for x in rekordy(dane))),
            label="Dane autorow...",
        ):
            rekordy_autora = rekordy_danych.filter(autor_id=autor.id)
            AutorskiXLSX(
                autor=autor,
                title=autor2fn(autor),
                rekordy=rekordy_autora,
                dane=dane,
                katalog_wyjsciowy=katalog_wyjsciowy,
            ).zrob()
| true | true |
f731b0e7750a7bea8517170165c3c68c3dc22cf8 | 844 | py | Python | setup.py | jbarbadillo/pycoinmon | 035e64652c533a569c6b236f54e12aff35ad82b1 | [
"MIT"
] | null | null | null | setup.py | jbarbadillo/pycoinmon | 035e64652c533a569c6b236f54e12aff35ad82b1 | [
"MIT"
] | null | null | null | setup.py | jbarbadillo/pycoinmon | 035e64652c533a569c6b236f54e12aff35ad82b1 | [
"MIT"
] | null | null | null | from setuptools import setup
from pycoinmon.metadata import Metadata

metadata = Metadata()

# Packaging metadata for the pycoinmon CLI (cryptocurrency price tracker);
# version is read from the package's own Metadata helper.
setup(
    name = 'pycoinmon',
    packages = ['pycoinmon'],
    version = metadata.get_version(),
    license = 'MIT',
    description = 'Python Port Based on COINMON',
    url = 'https://github.com/RDCH106/pycoinmon',
    keywords = ['bitcoin', 'criptocurrency', 'crypto', 'ticker', 'python', 'cli', 'price-tracker', 'command-line'],
    classifiers = ['Programming Language :: Python',
                   'Programming Language :: Python :: 2.7',
                   'Programming Language :: Python :: 3.2',
                   'Programming Language :: Python :: 3.3',
                   'Programming Language :: Python :: 3.4',
                   'Programming Language :: Python :: 3.5',
                   'Programming Language :: Python :: 3.6'],
)
) | 40.190476 | 115 | 0.575829 | from setuptools import setup
from pycoinmon.metadata import Metadata
metadata = Metadata()
setup(
name = 'pycoinmon',
packages = ['pycoinmon'],
version = metadata.get_version(),
license = 'MIT',
description = 'Python Port Based on COINMON',
url = 'https://github.com/RDCH106/pycoinmon',
keywords = ['bitcoin', 'criptocurrency', 'crypto', 'ticker', 'python', 'cli', 'price-tracker', 'command-line'],
classifiers = ['Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6'],
) | true | true |
f731b187b5cb80cd697134e200a8839565bad37f | 453 | py | Python | card.py | yehudareisler/risky-game | ea919bd07a2acf75dfd184b5c59ad80d41f47428 | [
"MIT"
] | 3 | 2021-01-21T02:06:12.000Z | 2022-03-14T10:26:43.000Z | card.py | yehudareisler/risky-game | ea919bd07a2acf75dfd184b5c59ad80d41f47428 | [
"MIT"
] | null | null | null | card.py | yehudareisler/risky-game | ea919bd07a2acf75dfd184b5c59ad80d41f47428 | [
"MIT"
] | 1 | 2021-08-29T07:47:12.000Z | 2021-08-29T07:47:12.000Z | from enum import Enum
class Card:
    """A single territory card.

    Attributes:
        territory_name: Name of the territory printed on the card.
        card_type: The card's kind (``None`` until one is assigned).
    """

    # Class-level defaults, kept so Card.card_type / Card.territory_name
    # remain readable before instantiation (backward compatibility).
    card_type = None
    territory_name = ''

    def __init__(self, territory_name, card_type):
        """Bind this card to a territory and a card kind."""
        self.territory_name = territory_name
        self.card_type = card_type

    def __str__(self):
        """Human-readable summary, e.g. 'Card of Alaska with INFANTRY type'."""
        return 'Card of {} with {} type'.format(self.territory_name, self.card_type)
class CardType(Enum):
    """Kinds of cards that can appear in the deck."""

    WILDCARD = 0
    INFANTRY = 1
    CAVALRY = 2
    ARTILLERY = 3

    def __str__(self):
        # Render as the bare member name (e.g. 'INFANTRY'), not the default
        # 'CardType.INFANTRY' form.
        return self.name
| 18.875 | 74 | 0.646799 | from enum import Enum
class Card:
card_type = None
territory_name = ''
def __init__(self, territory_name, card_type):
self.territory_name = territory_name
self.card_type = card_type
def __str__(self):
return f'Card of {self.territory_name} with {self.card_type} type'
class CardType(Enum):
WILDCARD = 0
INFANTRY = 1
CAVALRY = 2
ARTILLERY = 3
def __str__(self):
return self.name
| true | true |
f731b1ea9b5a643b7fe01d0c7b531d8169710a1a | 401 | py | Python | mmtbx/command_line/ntc_validation.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 155 | 2016-11-23T12:52:16.000Z | 2022-03-31T15:35:44.000Z | mmtbx/command_line/ntc_validation.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 590 | 2016-12-10T11:31:18.000Z | 2022-03-30T23:10:09.000Z | mmtbx/command_line/ntc_validation.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 115 | 2016-11-15T08:17:28.000Z | 2022-02-09T15:30:14.000Z | # -*- coding: utf-8 -*-
"""Command-line entry point for the NtC validation program."""
from __future__ import absolute_import, division, print_function

# LIBTBX_SET_DISPATCHER_NAME cctbx.development.ntc_validation
from iotbx.cli_parser import run_program
from mmtbx.programs.ntc_validation import Program

# =============================================================================
if __name__ == '__main__':
  # Argument parsing and all real work are delegated to the Program class;
  # the unused 'results' binding was dropped and the redundant parentheses
  # around the __name__ test removed.
  run_program(program_class=Program)
| 33.416667 | 79 | 0.645885 |
from __future__ import absolute_import, division, print_function
from iotbx.cli_parser import run_program
from mmtbx.programs.ntc_validation import Program
if (__name__ == '__main__'):
results = run_program(program_class=Program)
| true | true |
f731b41b93060edbb8a60d852567a8c02dfe18c9 | 19,457 | py | Python | hive/indexer/cached_post.py | RailCoin/hivemind | cd78b952b55911b63a59c762d1d4530a25d7d8f5 | [
"MIT"
] | null | null | null | hive/indexer/cached_post.py | RailCoin/hivemind | cd78b952b55911b63a59c762d1d4530a25d7d8f5 | [
"MIT"
] | null | null | null | hive/indexer/cached_post.py | RailCoin/hivemind | cd78b952b55911b63a59c762d1d4530a25d7d8f5 | [
"MIT"
] | 1 | 2020-03-24T12:26:08.000Z | 2020-03-24T12:26:08.000Z | """Manages cached post data."""
import math
import collections
import logging
import ujson as json
from toolz import partition_all
from hive.db.adapter import Db
from hive.utils.post import post_basic, post_legacy, post_payout, post_stats
from hive.utils.timer import Timer
from hive.indexer.accounts import Accounts
log = logging.getLogger(__name__)
# Process-wide database adapter singleton used for all cache queries below.
DB = Db.instance()

# levels of post dirtiness, in order of decreasing priority
LEVELS = ['insert', 'payout', 'update', 'upvote', 'recount']
def _keyify(items):
return dict(map(lambda x: ("val_%d" % x[0], x[1]), enumerate(items)))
class CachedPost:
    """Maintain update queue and writing to `hive_posts_cache`.

    All state is class-level: this object acts as a process-wide singleton
    batching dirty posts between flushes.
    """

    # cursor signifying upper bound of cached post span
    _last_id = -1

    # cached id map; {url: post_id}
    _ids = {}

    # urls which are missing from id map
    _noids = set()

    # dirty posts; {key: dirty_level} -- OrderedDict preserves dirty order
    _queue = collections.OrderedDict()

    # new promoted values, pending write; {post_id: amount}
    _pending_promoted = {}

    @classmethod
    def update_promoted_amount(cls, post_id, amount):
        """Set a new pending amount for a post for its next update."""
        cls._pending_promoted[post_id] = amount

    @classmethod
    def _dirty(cls, level, author, permlink, pid=None):
        """Mark a post as dirty at the given priority level."""
        assert level in LEVELS, "invalid level {}".format(level)
        mode = LEVELS.index(level)
        url = author + '/' + permlink

        # add to appropriate queue.
        if url not in cls._queue:
            cls._queue[url] = mode
        # upgrade priority if needed
        elif cls._queue[url] > mode:
            cls._queue[url] = mode

        # add to id map, or register missing
        # NOTE(review): a falsy pid (0/None) is treated as unknown and
        # deferred to _load_noids -- confirm 0 is never a valid post id.
        if pid and url in cls._ids:
            assert pid == cls._ids[url], "pid map conflict #78"
        elif pid:
            cls._ids[url] = pid
        else:
            cls._noids.add(url)

    @classmethod
    def _get_id(cls, url):
        """Given a post url, get its id."""
        if url in cls._ids:
            return cls._ids[url]
        raise Exception("requested id for %s not in map" % url)

    @classmethod
    def recount(cls, author, permlink, pid=None):
        """Force a child re-count."""
        cls._dirty('recount', author, permlink, pid)

    @classmethod
    def vote(cls, author, permlink, pid=None):
        """Handle a post dirtied by a `vote` op."""
        cls._dirty('upvote', author, permlink, pid)
        Accounts.dirty(set([author])) # rep changed

    @classmethod
    def insert(cls, author, permlink, pid):
        """Handle a post created by a `comment` op."""
        cls._dirty('insert', author, permlink, pid)

    @classmethod
    def update(cls, author, permlink, pid):
        """Handle a post updated by a `comment` op."""
        cls._dirty('update', author, permlink, pid)

    @classmethod
    def delete(cls, post_id, author, permlink):
        """Handle a post deleted by a `delete_comment` op.

        With steemd, posts can be 'deleted' or unallocated in certain
        conditions. It requires foregoing convenient assumptions, e.g.:

         - author/permlink is unique and always references the same post
         - you can always get_content on any author/permlink you see in an op
        """
        DB.query("DELETE FROM hive_posts_cache WHERE post_id = :id", id=post_id)

        # if it was queued for a write, remove it
        url = author+'/'+permlink
        if url in cls._queue:
            del cls._queue[url]
        if url in cls._ids:
            del cls._ids[url]

    @classmethod
    def undelete(cls, post_id, author, permlink):
        """Handle a post 'undeleted' by a `comment` op.

        'Undeletion' occurs when hive detects that a previously deleted
        author/permlink combination has been reused on a new post. Hive
        does not delete hive_posts entries because they are currently
        irreplaceable in case of a fork. Instead, we reuse the slot.

        It's important to immediately insert a placeholder in the cache
        table since hive only scans forward. This row's properties push
        it to the front of update-immediately queue.

        Alternate ways of handling undeletes:

         - delete row from hive_posts so that it can be re-indexed (re-id'd)
           - comes at a risk of losing expensive entry on fork (and no undo)
         - create undo table for hive_posts, hive_follows, etc, & link to block
         - rely on steemd's post.id instead of database autoincrement
           - requires way to query steemd post objects by id to be useful
             - batch get_content_by_ids in steemd would be /huge/ speedup
         - create a consistent cache queue table or dirty flag col
        """
        # do not force-write unless cache spans this id.
        if post_id > cls.last_id():
            cls.insert(author, permlink, post_id)
            return

        # force-create dummy row to ensure cache is aware. only needed when
        # cache already spans this id, in case in-mem buffer is lost. default
        # value for payout_at ensures that it will get picked up for update.
        DB.query(cls._insert({
            'post_id': post_id,
            'author': author,
            'permlink': permlink}))
        cls.update(author, permlink, post_id)

    @classmethod
    def flush(cls, steem, trx=False, spread=1, full_total=None):
        """Process all posts which have been marked as dirty.

        Returns a {level: count} dict of how many posts were processed
        per dirtiness level.
        """
        cls._load_noids() # load missing ids
        assert spread == 1, "not fully tested, use with caution"

        counts = {}
        tuples = []
        for level in LEVELS:
            tups = cls._get_tuples_for_level(level, spread)
            counts[level] = len(tups)
            tuples.extend(tups)

        # only log a summary for explicit transactions or larger batches
        if trx or len(tuples) > 250:
            changed = filter(lambda t: t[1], counts.items())
            summary = list(map(lambda group: "%d %ss" % group[::-1], changed))
            summary = ', '.join(summary) if summary else 'none'
            log.info("[PREP] posts cache process: %s", summary)

        cls._update_batch(steem, tuples, trx, full_total=full_total)
        for url, _, _ in tuples:
            del cls._queue[url]
            if url in cls._ids:
                del cls._ids[url]
        return counts

    @classmethod
    def _get_tuples_for_level(cls, level, fraction=1):
        """Query tuples to be updated.

        Given a specific flush level (insert, payout, update, upvote),
        returns a list of tuples to be passed to _update_batch, in the
        form of: `[(url, id, level)*]`
        """
        mode = LEVELS.index(level)
        urls = [url for url, i in cls._queue.items() if i == mode]

        if fraction > 1 and level != 'insert': # inserts must be full flush
            urls = urls[0:math.ceil(len(urls) / fraction)]

        return [(url, cls._get_id(url), level) for url in urls]

    @classmethod
    def _load_noids(cls):
        """Load ids for posts we don't know the ids of.

        When posts are marked dirty, specifying the id is optional
        because a successive call might be able to provide it "for
        free". Before flushing changes this method should be called
        to fill in any gaps.
        """
        from hive.indexer.posts import Posts
        noids = cls._noids - set(cls._ids.keys())
        tuples = [(Posts.get_id(*url.split('/')), url) for url in noids]
        for pid, url in tuples:
            assert pid, "WARNING: missing id for %s" % url
            cls._ids[url] = pid
        cls._noids = set()
        return len(tuples)

    @classmethod
    def _select_paidout_tuples(cls, date):
        """Query hive_posts_cache for payout sweep.

        Select all posts which should have been paid out before `date`
        yet do not have the `is_paidout` flag set. We perform this
        sweep to ensure that we always have accurate final payout
        state. Since payout values vary even between votes, we'd have
        stale data if we didn't sweep, and only waited for incoming
        votes before an update.
        """
        from hive.indexer.posts import Posts

        sql = """SELECT post_id FROM hive_posts_cache
                  WHERE is_paidout = '0' AND payout_at <= :date"""
        ids = DB.query_col(sql, date=date)
        if not ids:
            return []

        sql = """SELECT id, author, permlink
                   FROM hive_posts WHERE id IN :ids"""
        results = DB.query_all(sql, ids=tuple(ids))
        return Posts.save_ids_from_tuples(results)

    @classmethod
    def dirty_paidouts(cls, date):
        """Mark dirty all paidout posts not yet updated in db."""
        paidout = cls._select_paidout_tuples(date)
        authors = set()
        for (pid, author, permlink) in paidout:
            authors.add(author)
            cls._dirty('payout', author, permlink, pid)
        Accounts.dirty(authors) # force-update accounts on payout

        if len(paidout) > 200:
            log.info("[PREP] Found %d payouts for %d authors since %s",
                     len(paidout), len(authors), date)
        return len(paidout)

    @classmethod
    def _select_missing_tuples(cls, last_cached_id, limit=1000000):
        """Fetch posts inserted into main posts table but not cache."""
        from hive.indexer.posts import Posts
        sql = """SELECT id, author, permlink, promoted FROM hive_posts
                  WHERE is_deleted = '0' AND id > :id
               ORDER BY id LIMIT :limit"""
        results = DB.query_all(sql, id=last_cached_id, limit=limit)
        return Posts.save_ids_from_tuples(results)

    @classmethod
    def dirty_missing(cls, limit=250000):
        """Mark dirty all hive_posts records not yet written to cache.

        Returns the total remaining gap (which may exceed `limit`).
        """
        from hive.indexer.posts import Posts

        # cached posts inserted sequentially, so compare MAX(id)'s
        last_cached_id = cls.last_id()
        last_post_id = Posts.last_id()
        gap = last_post_id - last_cached_id

        if gap:
            missing = cls._select_missing_tuples(last_cached_id, limit)
            for pid, author, permlink, promoted in missing:
                if promoted > 0: # ensure we don't miss promote amount
                    cls.update_promoted_amount(pid, promoted)
                cls._dirty('insert', author, permlink, pid)

        return gap

    @classmethod
    def recover_missing_posts(cls, steem):
        """Startup routine that cycles through missing posts.

        This is used for (1) initial sync, and (2) recovering missing
        cache records upon launch if hive fast-sync was interrupted.
        """
        gap = cls.dirty_missing()
        log.info("[INIT] %d missing post cache entries", gap)
        while cls.flush(steem, trx=True, full_total=gap)['insert']:
            gap = cls.dirty_missing()

    @classmethod
    def _update_batch(cls, steem, tuples, trx=True, full_total=None):
        """Fetch, process, and write a batch of posts.

        Given a set of posts, fetch from steemd and write them to the
        db. The `tuples` arg is the form of `[(url, id, level)*]`
        representing posts which are to be fetched from steemd and
        updated in cache.

        Regarding _bump_last_id: there's a rare edge case when the last
        hive_post entry has been deleted "in the future" (ie, we haven't
        seen the delete op yet). So even when the post is not found
        (i.e. `not post['author']`), it's important to advance _last_id,
        because this cursor is used to deduce any missing cache entries.
        """
        timer = Timer(total=len(tuples), entity='post',
                      laps=['rps', 'wps'], full_total=full_total)
        tuples = sorted(tuples, key=lambda x: x[1]) # enforce ASC id's

        for tups in partition_all(1000, tuples):
            timer.batch_start()

            buffer = []
            post_args = [tup[0].split('/') for tup in tups]
            posts = steem.get_content_batch(post_args)
            post_ids = [tup[1] for tup in tups]
            post_levels = [tup[2] for tup in tups]
            for pid, post, level in zip(post_ids, posts, post_levels):
                if post['author']:
                    buffer.extend(cls._sql(pid, post, level=level))
                else:
                    # When a post has been deleted (or otherwise DNE),
                    # steemd simply returns a blank post object w/ all
                    # fields blank. While it's best to not try to cache
                    # already-deleted posts, it can happen during missed
                    # post sweep and while using `trail_blocks` > 0.
                    pass
                cls._bump_last_id(pid)

            timer.batch_lap()
            DB.batch_queries(buffer, trx)

            timer.batch_finish(len(posts))
            if len(tuples) >= 1000:
                log.info(timer.batch_status())

    @classmethod
    def last_id(cls):
        """Retrieve the latest post_id that was cached."""
        if cls._last_id == -1:
            # after initial query, we maintain last_id w/ _bump_last_id()
            sql = "SELECT COALESCE(MAX(post_id), 0) FROM hive_posts_cache"
            cls._last_id = DB.query_one(sql)
        return cls._last_id

    @classmethod
    def _bump_last_id(cls, next_id):
        """Update our last_id based on a recent insert."""
        last_id = cls.last_id()
        if next_id <= last_id:
            return

        if next_id - last_id > 2:
            cls._ensure_safe_gap(last_id, next_id)
            if next_id - last_id > 4:
                # gap of 2 is common due to deletions. report on larger gaps.
                log.warning("skipping post ids %d -> %d", last_id, next_id)

        cls._last_id = next_id

    @classmethod
    def _ensure_safe_gap(cls, last_id, next_id):
        """Paranoid check of important operating assumption."""
        sql = """
            SELECT COUNT(*) FROM hive_posts
            WHERE id BETWEEN :x1 AND :x2 AND is_deleted = '0'
        """
        missing_posts = DB.query_one(sql, x1=(last_id + 1), x2=(next_id - 1))
        if not missing_posts:
            return
        raise Exception("found large cache gap: %d --> %d (%d)"
                        % (last_id, next_id, missing_posts))

    @classmethod
    def _sql(cls, pid, post, level=None):
        """Given a post and "update level", generate SQL edit statement.

        Valid levels are:
         - `insert`: post does not yet exist in cache
         - `update`: post was modified
         - `payout`: post was paidout
         - `upvote`: post payout/votes changed
        """
        #pylint: disable=bad-whitespace
        assert post['author'], "post {} is blank".format(pid)

        # last-minute sanity check to ensure `pid` is correct #78
        pid2 = cls._get_id(post['author']+'/'+post['permlink'])
        assert pid == pid2, "hpc id %d maps to %d" % (pid, pid2)

        # inserts always sequential. if pid > last_id, this operation
        # *must* be an insert; so `level` must not be any form of update.
        if pid > cls.last_id() and level != 'insert':
            raise Exception("WARNING: new pid, but level=%s. #%d vs %d, %s"
                            % (level, pid, cls.last_id(), repr(post)))

        # start building the queries
        tag_sqls = []
        values = [('post_id', pid)]

        # immutable; write only once (*edge case: undeleted posts)
        if level == 'insert':
            values.extend([
                ('author', post['author']),
                ('permlink', post['permlink']),
                ('category', post['category']),
                ('depth', post['depth'])])

        # always write, unless simple vote update
        if level in ['insert', 'payout', 'update']:
            basic = post_basic(post)
            values.extend([
                ('created_at', post['created']), # immutable*
                ('updated_at', post['last_update']),
                ('title', post['title']),
                ('payout_at', basic['payout_at']), # immutable*
                ('preview', basic['preview']),
                ('body', basic['body']),
                ('img_url', basic['image']),
                ('is_nsfw', basic['is_nsfw']),
                ('is_declined', basic['is_payout_declined']),
                ('is_full_power', basic['is_full_power']),
                ('is_paidout', basic['is_paidout']),
                ('json', json.dumps(basic['json_metadata'])),
                ('raw_json', json.dumps(post_legacy(post))),
            ])

        # update tags if action is insert/update and is root post
        if level in ['insert', 'update'] and not post['depth']:
            diff = level != 'insert' # do not attempt tag diff on insert
            tag_sqls.extend(cls._tag_sqls(pid, basic['tags'], diff=diff))

        # if there's a pending promoted value to write, pull it out
        if pid in cls._pending_promoted:
            bal = cls._pending_promoted.pop(pid)
            values.append(('promoted', bal))

        # update unconditionally
        payout = post_payout(post)
        stats = post_stats(post)
        values.extend([
            ('payout', "%f" % payout['payout']),
            ('rshares', "%d" % payout['rshares']),
            ('votes', "%s" % payout['csvotes']),
            ('sc_trend', "%f" % payout['sc_trend']),
            ('sc_hot', "%f" % payout['sc_hot']),
            ('flag_weight', "%f" % stats['flag_weight']),
            ('total_votes', "%d" % stats['total_votes']),
            ('up_votes', "%d" % stats['up_votes']),
            ('is_hidden', "%d" % stats['hide']),
            ('is_grayed', "%d" % stats['gray']),
            ('author_rep', "%f" % stats['author_rep']),
            ('children', "%d" % min(post['children'], 32767)),
        ])

        # if recounting, update the parent next pass.
        if level == 'recount' and post['depth']:
            cls.recount(post['parent_author'], post['parent_permlink'])

        # build the post insert/update SQL, add tag SQLs
        if level == 'insert':
            sql = cls._insert(values)
        else:
            sql = cls._update(values)
        return [sql] + tag_sqls

    @classmethod
    def _tag_sqls(cls, pid, tags, diff=True):
        """Generate SQL "deltas" for a post_id's associated tags."""
        next_tags = set(tags)
        curr_tags = set()
        if diff:
            sql = "SELECT tag FROM hive_post_tags WHERE post_id = :id"
            curr_tags = set(DB.query_col(sql, id=pid))

        to_rem = (curr_tags - next_tags)
        if to_rem:
            sql = "DELETE FROM hive_post_tags WHERE post_id = :id AND tag IN :tags"
            yield (sql, dict(id=pid, tags=tuple(to_rem)))

        to_add = (next_tags - curr_tags)
        if to_add:
            params = _keyify(to_add)
            vals = ["(:id, :%s)" % key for key in params.keys()]
            sql = "INSERT INTO hive_post_tags (post_id, tag) VALUES %s"
            sql += " ON CONFLICT DO NOTHING" # (conflicts due to collation)
            yield (sql % ','.join(vals), {'id': pid, **params})

    @classmethod
    def _insert(cls, values):
        """Build an INSERT statement for hive_posts_cache."""
        return DB.build_insert('hive_posts_cache', values, pk='post_id')

    @classmethod
    def _update(cls, values):
        """Build an UPDATE statement for hive_posts_cache."""
        return DB.build_update('hive_posts_cache', values, pk='post_id')
| 38.991984 | 83 | 0.582412 |
import math
import collections
import logging
import ujson as json
from toolz import partition_all
from hive.db.adapter import Db
from hive.utils.post import post_basic, post_legacy, post_payout, post_stats
from hive.utils.timer import Timer
from hive.indexer.accounts import Accounts
log = logging.getLogger(__name__)
DB = Db.instance()
LEVELS = ['insert', 'payout', 'update', 'upvote', 'recount']
def _keyify(items):
return dict(map(lambda x: ("val_%d" % x[0], x[1]), enumerate(items)))
class CachedPost:
_last_id = -1
_ids = {}
_noids = set()
_queue = collections.OrderedDict()
_pending_promoted = {}
@classmethod
def update_promoted_amount(cls, post_id, amount):
cls._pending_promoted[post_id] = amount
@classmethod
def _dirty(cls, level, author, permlink, pid=None):
assert level in LEVELS, "invalid level {}".format(level)
mode = LEVELS.index(level)
url = author + '/' + permlink
if url not in cls._queue:
cls._queue[url] = mode
elif cls._queue[url] > mode:
cls._queue[url] = mode
if pid and url in cls._ids:
assert pid == cls._ids[url], "pid map conflict #78"
elif pid:
cls._ids[url] = pid
else:
cls._noids.add(url)
@classmethod
def _get_id(cls, url):
if url in cls._ids:
return cls._ids[url]
raise Exception("requested id for %s not in map" % url)
@classmethod
def recount(cls, author, permlink, pid=None):
cls._dirty('recount', author, permlink, pid)
@classmethod
def vote(cls, author, permlink, pid=None):
cls._dirty('upvote', author, permlink, pid)
Accounts.dirty(set([author]))
@classmethod
def insert(cls, author, permlink, pid):
cls._dirty('insert', author, permlink, pid)
@classmethod
def update(cls, author, permlink, pid):
cls._dirty('update', author, permlink, pid)
@classmethod
def delete(cls, post_id, author, permlink):
DB.query("DELETE FROM hive_posts_cache WHERE post_id = :id", id=post_id)
url = author+'/'+permlink
if url in cls._queue:
del cls._queue[url]
if url in cls._ids:
del cls._ids[url]
@classmethod
def undelete(cls, post_id, author, permlink):
if post_id > cls.last_id():
cls.insert(author, permlink, post_id)
return
DB.query(cls._insert({
'post_id': post_id,
'author': author,
'permlink': permlink}))
cls.update(author, permlink, post_id)
@classmethod
def flush(cls, steem, trx=False, spread=1, full_total=None):
cls._load_noids()
assert spread == 1, "not fully tested, use with caution"
counts = {}
tuples = []
for level in LEVELS:
tups = cls._get_tuples_for_level(level, spread)
counts[level] = len(tups)
tuples.extend(tups)
if trx or len(tuples) > 250:
changed = filter(lambda t: t[1], counts.items())
summary = list(map(lambda group: "%d %ss" % group[::-1], changed))
summary = ', '.join(summary) if summary else 'none'
log.info("[PREP] posts cache process: %s", summary)
cls._update_batch(steem, tuples, trx, full_total=full_total)
for url, _, _ in tuples:
del cls._queue[url]
if url in cls._ids:
del cls._ids[url]
return counts
@classmethod
def _get_tuples_for_level(cls, level, fraction=1):
mode = LEVELS.index(level)
urls = [url for url, i in cls._queue.items() if i == mode]
if fraction > 1 and level != 'insert':
urls = urls[0:math.ceil(len(urls) / fraction)]
return [(url, cls._get_id(url), level) for url in urls]
@classmethod
def _load_noids(cls):
from hive.indexer.posts import Posts
noids = cls._noids - set(cls._ids.keys())
tuples = [(Posts.get_id(*url.split('/')), url) for url in noids]
for pid, url in tuples:
assert pid, "WARNING: missing id for %s" % url
cls._ids[url] = pid
cls._noids = set()
return len(tuples)
@classmethod
def _select_paidout_tuples(cls, date):
from hive.indexer.posts import Posts
sql = """SELECT post_id FROM hive_posts_cache
WHERE is_paidout = '0' AND payout_at <= :date"""
ids = DB.query_col(sql, date=date)
if not ids:
return []
sql = """SELECT id, author, permlink
FROM hive_posts WHERE id IN :ids"""
results = DB.query_all(sql, ids=tuple(ids))
return Posts.save_ids_from_tuples(results)
@classmethod
def dirty_paidouts(cls, date):
paidout = cls._select_paidout_tuples(date)
authors = set()
for (pid, author, permlink) in paidout:
authors.add(author)
cls._dirty('payout', author, permlink, pid)
Accounts.dirty(authors)
if len(paidout) > 200:
log.info("[PREP] Found %d payouts for %d authors since %s",
len(paidout), len(authors), date)
return len(paidout)
@classmethod
def _select_missing_tuples(cls, last_cached_id, limit=1000000):
from hive.indexer.posts import Posts
sql = """SELECT id, author, permlink, promoted FROM hive_posts
WHERE is_deleted = '0' AND id > :id
ORDER BY id LIMIT :limit"""
results = DB.query_all(sql, id=last_cached_id, limit=limit)
return Posts.save_ids_from_tuples(results)
@classmethod
def dirty_missing(cls, limit=250000):
from hive.indexer.posts import Posts
last_cached_id = cls.last_id()
last_post_id = Posts.last_id()
gap = last_post_id - last_cached_id
if gap:
missing = cls._select_missing_tuples(last_cached_id, limit)
for pid, author, permlink, promoted in missing:
if promoted > 0: # ensure we don't miss promote amount
cls.update_promoted_amount(pid, promoted)
cls._dirty('insert', author, permlink, pid)
return gap
@classmethod
def recover_missing_posts(cls, steem):
gap = cls.dirty_missing()
log.info("[INIT] %d missing post cache entries", gap)
while cls.flush(steem, trx=True, full_total=gap)['insert']:
gap = cls.dirty_missing()
@classmethod
def _update_batch(cls, steem, tuples, trx=True, full_total=None):
timer = Timer(total=len(tuples), entity='post',
laps=['rps', 'wps'], full_total=full_total)
tuples = sorted(tuples, key=lambda x: x[1])
for tups in partition_all(1000, tuples):
timer.batch_start()
buffer = []
post_args = [tup[0].split('/') for tup in tups]
posts = steem.get_content_batch(post_args)
post_ids = [tup[1] for tup in tups]
post_levels = [tup[2] for tup in tups]
for pid, post, level in zip(post_ids, posts, post_levels):
if post['author']:
buffer.extend(cls._sql(pid, post, level=level))
else:
# When a post has been deleted (or otherwise DNE),
# steemd simply returns a blank post object w/ all
# fields blank. While it's best to not try to cache
pass
cls._bump_last_id(pid)
timer.batch_lap()
DB.batch_queries(buffer, trx)
timer.batch_finish(len(posts))
if len(tuples) >= 1000:
log.info(timer.batch_status())
@classmethod
def last_id(cls):
if cls._last_id == -1:
sql = "SELECT COALESCE(MAX(post_id), 0) FROM hive_posts_cache"
cls._last_id = DB.query_one(sql)
return cls._last_id
@classmethod
def _bump_last_id(cls, next_id):
last_id = cls.last_id()
if next_id <= last_id:
return
if next_id - last_id > 2:
cls._ensure_safe_gap(last_id, next_id)
if next_id - last_id > 4:
log.warning("skipping post ids %d -> %d", last_id, next_id)
cls._last_id = next_id
@classmethod
def _ensure_safe_gap(cls, last_id, next_id):
sql = """
SELECT COUNT(*) FROM hive_posts
WHERE id BETWEEN :x1 AND :x2 AND is_deleted = '0'
"""
missing_posts = DB.query_one(sql, x1=(last_id + 1), x2=(next_id - 1))
if not missing_posts:
return
raise Exception("found large cache gap: %d --> %d (%d)"
% (last_id, next_id, missing_posts))
@classmethod
def _sql(cls, pid, post, level=None):
assert post['author'], "post {} is blank".format(pid)
pid2 = cls._get_id(post['author']+'/'+post['permlink'])
assert pid == pid2, "hpc id %d maps to %d" % (pid, pid2)
if pid > cls.last_id() and level != 'insert':
raise Exception("WARNING: new pid, but level=%s. #%d vs %d, %s"
% (level, pid, cls.last_id(), repr(post)))
tag_sqls = []
values = [('post_id', pid)]
if level == 'insert':
values.extend([
('author', post['author']),
('permlink', post['permlink']),
('category', post['category']),
('depth', post['depth'])])
if level in ['insert', 'payout', 'update']:
basic = post_basic(post)
values.extend([
('created_at', post['created']),
('updated_at', post['last_update']),
('title', post['title']),
('payout_at', basic['payout_at']),
('preview', basic['preview']),
('body', basic['body']),
('img_url', basic['image']),
('is_nsfw', basic['is_nsfw']),
('is_declined', basic['is_payout_declined']),
('is_full_power', basic['is_full_power']),
('is_paidout', basic['is_paidout']),
('json', json.dumps(basic['json_metadata'])),
('raw_json', json.dumps(post_legacy(post))),
])
if level in ['insert', 'update'] and not post['depth']:
diff = level != 'insert'
tag_sqls.extend(cls._tag_sqls(pid, basic['tags'], diff=diff))
if pid in cls._pending_promoted:
bal = cls._pending_promoted.pop(pid)
values.append(('promoted', bal))
# update unconditionally
payout = post_payout(post)
stats = post_stats(post)
values.extend([
('payout', "%f" % payout['payout']),
('rshares', "%d" % payout['rshares']),
('votes', "%s" % payout['csvotes']),
('sc_trend', "%f" % payout['sc_trend']),
('sc_hot', "%f" % payout['sc_hot']),
('flag_weight', "%f" % stats['flag_weight']),
('total_votes', "%d" % stats['total_votes']),
('up_votes', "%d" % stats['up_votes']),
('is_hidden', "%d" % stats['hide']),
('is_grayed', "%d" % stats['gray']),
('author_rep', "%f" % stats['author_rep']),
('children', "%d" % min(post['children'], 32767)),
])
# if recounting, update the parent next pass.
if level == 'recount' and post['depth']:
cls.recount(post['parent_author'], post['parent_permlink'])
# build the post insert/update SQL, add tag SQLs
if level == 'insert':
sql = cls._insert(values)
else:
sql = cls._update(values)
return [sql] + tag_sqls
@classmethod
def _tag_sqls(cls, pid, tags, diff=True):
next_tags = set(tags)
curr_tags = set()
if diff:
sql = "SELECT tag FROM hive_post_tags WHERE post_id = :id"
curr_tags = set(DB.query_col(sql, id=pid))
to_rem = (curr_tags - next_tags)
if to_rem:
sql = "DELETE FROM hive_post_tags WHERE post_id = :id AND tag IN :tags"
yield (sql, dict(id=pid, tags=tuple(to_rem)))
to_add = (next_tags - curr_tags)
if to_add:
params = _keyify(to_add)
vals = ["(:id, :%s)" % key for key in params.keys()]
sql = "INSERT INTO hive_post_tags (post_id, tag) VALUES %s"
sql += " ON CONFLICT DO NOTHING" # (conflicts due to collation)
yield (sql % ','.join(vals), {'id': pid, **params})
@classmethod
def _insert(cls, values):
return DB.build_insert('hive_posts_cache', values, pk='post_id')
@classmethod
def _update(cls, values):
return DB.build_update('hive_posts_cache', values, pk='post_id')
| true | true |
f731b44538e1b9c630d1af353fa35d3677a745d9 | 1,210 | py | Python | BotClean_Large.py | Aditya148/Hackerrank-Artificial-Intelligence | 5abbe561115b75cb28397661b7b0d7d53486ffee | [
"MIT"
] | null | null | null | BotClean_Large.py | Aditya148/Hackerrank-Artificial-Intelligence | 5abbe561115b75cb28397661b7b0d7d53486ffee | [
"MIT"
] | null | null | null | BotClean_Large.py | Aditya148/Hackerrank-Artificial-Intelligence | 5abbe561115b75cb28397661b7b0d7d53486ffee | [
"MIT"
] | null | null | null | import math
def update_position(posr, posc, dirties):
    """Return the dirty cells ordered from nearest to farthest.

    Distance is the straight-line (Euclidean) distance from the bot at
    (posr, posc); ties are broken by comparing the cells themselves,
    exactly as sorting (distance, cell) pairs would.
    """
    def _rank(cell):
        row_delta = cell[0] - posr
        col_delta = cell[1] - posc
        return (math.sqrt(row_delta ** 2 + col_delta ** 2), cell)

    return sorted(dirties, key=_rank)
# Decide and print the bot's single move for this turn.
def next_move(posr, posc, x, y, board):
    """Print one action: step toward the nearest dirty cell, or CLEAN.

    Column alignment is corrected before row alignment (LEFT/RIGHT take
    priority over UP/DOWN); CLEAN fires once the bot stands on the target.
    """
    dirty_cells = [[row, col]
                   for row in range(x)
                   for col in range(y)
                   if board[row][col] == 'd']
    target = update_position(posr, posc, dirty_cells)[0]
    if target[1] < posc:
        print('LEFT')
    elif target[1] > posc:
        print('RIGHT')
    elif target[0] < posr:
        print('UP')
    elif target[0] > posr:
        print('DOWN')
    else:
        print('CLEAN')
if __name__ == "__main__":
    # Input format: line 1 is the bot position "row col", line 2 the board
    # dimensions "rows cols", followed by one line per board row.
    pos = [int(token) for token in input().strip().split()]
    dim = [int(token) for token in input().strip().split()]
    board = [list(input().strip()) for _ in range(dim[0])]
    next_move(pos[0], pos[1], dim[0], dim[1], board)
'''
Sample Input
0 0
5 5
b---d
-d--d
--dd-
--d--
----d
Sample Output
RIGHT
'''
| 22.407407 | 89 | 0.539669 | import math
def update_position(posr, posc, dirties):
nearest_dirt = []
for i in range(len(dirties)):
result = math.sqrt(((dirties[i][0] - posr) ** 2) + ((dirties[i][1] - posc) ** 2))
nearest_dirt.append(result)
return [x for (y,x) in sorted(zip(nearest_dirt,dirties))]
def next_move(posr, posc, x, y, board):
dirties = []
for i in range(x):
for j in range(y):
if board[i][j] == 'd':
dirties.append([i, j])
next_dirt = update_position(posr, posc, dirties)
if next_dirt[0][1] < posc:
print('LEFT')
elif next_dirt[0][1] > posc:
print('RIGHT')
elif next_dirt[0][0] < posr:
print('UP')
elif next_dirt[0][0] > posr:
print('DOWN')
else:
print('CLEAN')
if __name__ == "__main__":
pos = [int(i) for i in input().strip().split()]
dim = [int(i) for i in input().strip().split()]
board = [[j for j in input().strip()] for i in range(dim[0])]
next_move(pos[0], pos[1], dim[0], dim[1], board)
| true | true |
f731b4a38a9bc8969c194157663764cf285ad1bc | 4,079 | py | Python | AC_Network.py | pasindubawantha/just-copy | 919b1723c87cadc5946f891da53f4abc7d50ff6e | [
"Apache-2.0"
] | 1 | 2020-07-30T19:29:06.000Z | 2020-07-30T19:29:06.000Z | AC_Network.py | pasindubawantha/just-copy | 919b1723c87cadc5946f891da53f4abc7d50ff6e | [
"Apache-2.0"
] | null | null | null | AC_Network.py | pasindubawantha/just-copy | 919b1723c87cadc5946f891da53f4abc7d50ff6e | [
"Apache-2.0"
] | null | null | null | import tensorflow as tf
import tensorflow.contrib.slim as slim
#import tensorflow.nn as slim
import numpy as np
from helpers import *
class AC_Network():
    """TF1 actor-critic network used by A3C workers.
    Conv + LSTM trunk feeding a softmax policy head and a scalar value head;
    worker copies (scope != 'global') additionally build the A3C loss and the
    op that applies locally-computed gradients to the 'global' variables.
    """
    def __init__(self,s_size,a_size,scope,trainer,s_shape):
        """Build the graph under tf variable scope *scope*.
        Args:
            s_size: flattened observation length (width of the input placeholder).
            a_size: number of discrete actions (width of the policy head).
            scope: variable-scope name; 'global' builds inference ops only.
            trainer: tf optimizer whose apply_gradients targets the global vars.
            s_shape: 3 values used to reshape observations for the conv stack
                (presumably (height, width, channels) -- TODO confirm with caller).
        """
        with tf.variable_scope(scope):
            #Input and visual encoding layers
            self.inputs = tf.placeholder(shape=[None,s_size],dtype=tf.float32)
            self.imageIn = tf.reshape(self.inputs,shape=[-1,s_shape[0],s_shape[1],s_shape[2]])
            self.conv1 = slim.conv2d(activation_fn=tf.nn.elu,
                inputs=self.imageIn,num_outputs=16,
                kernel_size=[8,8],stride=[4,4],padding='VALID')
            self.conv2 = slim.conv2d(activation_fn=tf.nn.elu,
                inputs=self.conv1,num_outputs=32,
                kernel_size=[4,4],stride=[2,2],padding='VALID')
            hidden = slim.fully_connected(slim.flatten(self.conv2),256,activation_fn=tf.nn.elu)
            #Recurrent network for temporal dependencies
            lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(256,state_is_tuple=True)
            # Zero numpy initial state plus feed placeholders so the caller can
            # carry LSTM state across rollouts.
            c_init = np.zeros((1, lstm_cell.state_size.c), np.float32)
            h_init = np.zeros((1, lstm_cell.state_size.h), np.float32)
            self.state_init = [c_init, h_init]
            c_in = tf.placeholder(tf.float32, [1, lstm_cell.state_size.c])
            h_in = tf.placeholder(tf.float32, [1, lstm_cell.state_size.h])
            self.state_in = (c_in, h_in)
            # Treat the whole batch of frames as one sequence (batch dim 1)
            # whose length equals the number of frames fed this run.
            rnn_in = tf.expand_dims(hidden, [0])
            step_size = tf.shape(self.imageIn)[:1]
            state_in = tf.nn.rnn_cell.LSTMStateTuple(c_in, h_in)
            lstm_outputs, lstm_state = tf.nn.dynamic_rnn(
                lstm_cell, rnn_in, initial_state=state_in, sequence_length=step_size,
                time_major=False)
            lstm_c, lstm_h = lstm_state
            self.state_out = (lstm_c[:1, :], lstm_h[:1, :])
            rnn_out = tf.reshape(lstm_outputs, [-1, 256])
            #Output layers for policy and value estimations
            self.policy = slim.fully_connected(rnn_out,a_size,
                activation_fn=tf.nn.softmax,
                weights_initializer=normalized_columns_initializer(0.01),
                biases_initializer=None)
            self.value = slim.fully_connected(rnn_out,1,
                activation_fn=None,
                weights_initializer=normalized_columns_initializer(1.0),
                biases_initializer=None)
            #Only the worker network need ops for loss functions and gradient updating.
            if scope != 'global':
                self.actions = tf.placeholder(shape=[None],dtype=tf.int32)
                self.actions_onehot = tf.one_hot(self.actions,a_size,dtype=tf.float32)
                self.target_v = tf.placeholder(shape=[None],dtype=tf.float32)
                self.advantages = tf.placeholder(shape=[None],dtype=tf.float32)
                # Probability the policy assigned to the action actually taken.
                self.responsible_outputs = tf.reduce_sum(self.policy * self.actions_onehot, [1])
                #Loss functions
                self.value_loss = 0.5 * tf.reduce_sum(tf.square(self.target_v - tf.reshape(self.value,[-1])))
                # NOTE(review): tf.log(policy) has no epsilon -- a probability of
                # exactly 0 would yield -inf/NaN losses; confirm this is acceptable.
                self.entropy = - tf.reduce_sum(self.policy * tf.log(self.policy))
                self.policy_loss = -tf.reduce_sum(tf.log(self.responsible_outputs)*self.advantages)
                self.loss = 0.5 * self.value_loss + self.policy_loss - self.entropy * 0.01
                #Get gradients from local network using local losses
                local_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope)
                self.gradients = tf.gradients(self.loss,local_vars)
                self.var_norms = tf.global_norm(local_vars)
                self.grads,self.grad_norms = tf.clip_by_global_norm(self.gradients,40.0)
                #Apply local gradients to global network
                self.global_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'global')
                self.apply_grads = trainer.apply_gradients(zip(self.grads,self.global_vars))
self.apply_grads = trainer.apply_gradients(zip(self.grads,self.global_vars)) | 53.671053 | 109 | 0.617308 | import tensorflow as tf
import tensorflow.contrib.slim as slim
import numpy as np
from helpers import *
class AC_Network():
def __init__(self,s_size,a_size,scope,trainer,s_shape):
with tf.variable_scope(scope):
self.inputs = tf.placeholder(shape=[None,s_size],dtype=tf.float32)
self.imageIn = tf.reshape(self.inputs,shape=[-1,s_shape[0],s_shape[1],s_shape[2]])
self.conv1 = slim.conv2d(activation_fn=tf.nn.elu,
inputs=self.imageIn,num_outputs=16,
kernel_size=[8,8],stride=[4,4],padding='VALID')
self.conv2 = slim.conv2d(activation_fn=tf.nn.elu,
inputs=self.conv1,num_outputs=32,
kernel_size=[4,4],stride=[2,2],padding='VALID')
hidden = slim.fully_connected(slim.flatten(self.conv2),256,activation_fn=tf.nn.elu)
lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(256,state_is_tuple=True)
c_init = np.zeros((1, lstm_cell.state_size.c), np.float32)
h_init = np.zeros((1, lstm_cell.state_size.h), np.float32)
self.state_init = [c_init, h_init]
c_in = tf.placeholder(tf.float32, [1, lstm_cell.state_size.c])
h_in = tf.placeholder(tf.float32, [1, lstm_cell.state_size.h])
self.state_in = (c_in, h_in)
rnn_in = tf.expand_dims(hidden, [0])
step_size = tf.shape(self.imageIn)[:1]
state_in = tf.nn.rnn_cell.LSTMStateTuple(c_in, h_in)
lstm_outputs, lstm_state = tf.nn.dynamic_rnn(
lstm_cell, rnn_in, initial_state=state_in, sequence_length=step_size,
time_major=False)
lstm_c, lstm_h = lstm_state
self.state_out = (lstm_c[:1, :], lstm_h[:1, :])
rnn_out = tf.reshape(lstm_outputs, [-1, 256])
self.policy = slim.fully_connected(rnn_out,a_size,
activation_fn=tf.nn.softmax,
weights_initializer=normalized_columns_initializer(0.01),
biases_initializer=None)
self.value = slim.fully_connected(rnn_out,1,
activation_fn=None,
weights_initializer=normalized_columns_initializer(1.0),
biases_initializer=None)
if scope != 'global':
self.actions = tf.placeholder(shape=[None],dtype=tf.int32)
self.actions_onehot = tf.one_hot(self.actions,a_size,dtype=tf.float32)
self.target_v = tf.placeholder(shape=[None],dtype=tf.float32)
self.advantages = tf.placeholder(shape=[None],dtype=tf.float32)
self.responsible_outputs = tf.reduce_sum(self.policy * self.actions_onehot, [1])
self.value_loss = 0.5 * tf.reduce_sum(tf.square(self.target_v - tf.reshape(self.value,[-1])))
self.entropy = - tf.reduce_sum(self.policy * tf.log(self.policy))
self.policy_loss = -tf.reduce_sum(tf.log(self.responsible_outputs)*self.advantages)
self.loss = 0.5 * self.value_loss + self.policy_loss - self.entropy * 0.01
local_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope)
self.gradients = tf.gradients(self.loss,local_vars)
self.var_norms = tf.global_norm(local_vars)
self.grads,self.grad_norms = tf.clip_by_global_norm(self.gradients,40.0)
self.global_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'global')
self.apply_grads = trainer.apply_gradients(zip(self.grads,self.global_vars)) | true | true |
f731b554fa6750e8f70b9ce4eeb1ca9577514c53 | 7,291 | py | Python | tests/pipeline/test_node_run.py | andmikey/kedro | 9b4e4135720609d44ffdf5248246fe805f0b5469 | [
"Apache-2.0"
] | 1 | 2021-11-19T05:36:47.000Z | 2021-11-19T05:36:47.000Z | tests/pipeline/test_node_run.py | andmikey/kedro | 9b4e4135720609d44ffdf5248246fe805f0b5469 | [
"Apache-2.0"
] | null | null | null | tests/pipeline/test_node_run.py | andmikey/kedro | 9b4e4135720609d44ffdf5248246fe805f0b5469 | [
"Apache-2.0"
] | 1 | 2021-11-19T05:36:49.000Z | 2021-11-19T05:36:49.000Z | # Copyright 2018-2019 QuantumBlack Visual Analytics Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND
# NONINFRINGEMENT. IN NO EVENT WILL THE LICENSOR OR OTHER CONTRIBUTORS
# BE LIABLE FOR ANY CLAIM, DAMAGES, OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF, OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# The QuantumBlack Visual Analytics Limited ("QuantumBlack") name and logo
# (either separately or in combination, "QuantumBlack Trademarks") are
# trademarks of QuantumBlack. The License does not grant you any right or
# license to the QuantumBlack Trademarks. You may not use the QuantumBlack
# Trademarks or any confusingly similar mark as a trademark for your product,
# or use the QuantumBlack Trademarks in any other manner that might cause
# confusion in the marketplace, including but not limited to in advertising,
# on websites, or on software.
#
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import pytest
from kedro.io import LambdaDataSet
from kedro.pipeline import node
@pytest.fixture
def mocked_dataset(mocker):
    """A ``LambdaDataSet`` whose load always returns 42 and whose save is a no-op mock."""
    return LambdaDataSet(mocker.Mock(return_value=42), mocker.Mock())
def one_in_one_out(arg):
    """Identity helper: pass the single input through unchanged."""
    return arg
def one_in_dict_out(arg):
    """Wrap the single input in a dict under the key ``ret``."""
    return {"ret": arg}
def two_in_first_out(arg1, arg2):
    """Return the first of the two inputs; the second is deliberately unused."""
    del arg2  # unused by design
    return arg1
@pytest.fixture
def valid_nodes_with_inputs():
    """Pairs of (node, input dict) that should each produce ``dsOut == 42``."""
    cases = [
        (node(one_in_one_out, "ds1", "dsOut"), {"ds1": 42}),
        (node(one_in_dict_out, {"arg": "ds1"}, {"ret": "dsOut"}), {"ds1": 42}),
        (node(two_in_first_out, ["ds1", "ds2"], "dsOut"), {"ds1": 42, "ds2": 58}),
    ]
    return cases
def test_valid_nodes(valid_nodes_with_inputs):
    """Every valid (node, inputs) pair must run and yield dsOut == 42."""
    for candidate, feed in valid_nodes_with_inputs:
        assert candidate.run(feed)["dsOut"] == 42
def test_run_got_dataframe(mocked_dataset):
    """Passing a non-dict object (here a data set) to run() must raise."""
    pattern = (
        r"Node.run\(\) expects a dictionary or None, "
        r"but got <class \'kedro.io.lambda_data_set.LambdaDataSet\'> instead"
    )
    with pytest.raises(ValueError, match=pattern):
        node(one_in_one_out, {"arg": "ds1"}, "A").run(mocked_dataset)
class TestNodeRunInvalidInput:
    """node.run() must reject inputs that do not match the node definition."""

    def test_unresolved(self):
        """No inputs supplied although the node declares one."""
        with pytest.raises(ValueError, match=r"expected one input"):
            node(one_in_one_out, "unresolved", "ds1").run(None)

    def test_no_inputs_node_error(self, mocked_dataset):
        """An input supplied although the node declares none."""
        with pytest.raises(ValueError, match=r"expected no inputs"):
            node(lambda: 1, None, "A").run({"unexpected": mocked_dataset})

    def test_one_input_error(self, mocked_dataset):
        """A single input supplied under the wrong name."""
        pattern = (
            r"expected one input named 'ds1', but got the "
            r"following 1 input\(s\) instead: \['arg'\]"
        )
        with pytest.raises(ValueError, match=pattern):
            node(one_in_dict_out, "ds1", {"ret": "B", "ans": "C"}).run(
                {"arg": mocked_dataset}
            )

    def test_run_diff_size_lists(self, mocked_dataset):
        """One input supplied where the node (list form) declares two."""
        pattern = (
            r"expected 2 input\(s\) \['ds1', 'ds2'\], but "
            r"got the following 1 input\(s\) instead."
        )
        with pytest.raises(ValueError, match=pattern):
            node(two_in_first_out, ["ds1", "ds2"], "A").run({"ds1": mocked_dataset})

    def test_run_diff_size_list_dict(self, mocked_dataset):
        """Two inputs supplied where the node (list form) declares one."""
        pattern = (
            r"expected 1 input\(s\) \['ds1'\], but got the "
            r"following 2 input\(s\) instead: \['ds1', 'ds2'\]\."
        )
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, ["ds1"], "A").run({"ds1": mocked_dataset, "ds2": 2})

    def test_run_list_dict_unavailable(self, mocked_dataset):
        """The one supplied input (list form) has the wrong name."""
        pattern = (
            r"expected 1 input\(s\) \['ds1'\], but got the "
            r"following 1 input\(s\) instead: \['ds2'\]\."
        )
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, ["ds1"], "A").run({"ds2": mocked_dataset})

    def test_run_dict_unavailable(self, mocked_dataset):
        """The one supplied input (dict form) has the wrong name."""
        pattern = (
            r"expected 1 input\(s\) \['ds1'\], but got the "
            r"following 1 input\(s\) instead: \['ds2'\]\."
        )
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, {"arg": "ds1"}, "A").run({"ds2": mocked_dataset})

    def test_run_dict_diff_size(self, mocked_dataset):
        """Two inputs supplied where the node (dict form) declares one."""
        pattern = (
            r"expected 1 input\(s\) \['ds1'\], but got the "
            r"following 2 input\(s\) instead: \['ds1', 'ds2'\]\."
        )
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, {"arg": "ds1"}, "A").run(
                {"ds1": mocked_dataset, "ds2": 2}
            )
class TestNodeRunInvalidOutput:
    """node.run() must reject function results that do not match the declared outputs."""

    def test_miss_matching_output_types(self, mocked_dataset):
        """Outputs declared as a dict but the function returns a non-dict."""
        pattern = (
            r"The node output is a dictionary, whereas the function "
            r"output is not\."
        )
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, "ds1", {"a": "ds"}).run({"ds1": mocked_dataset})

    def test_miss_matching_output_keys(self, mocked_dataset):
        """Declared output keys differ from the keys the function returns."""
        pattern = (
            r"The node's output keys {'ret'} do not match "
            r"with the returned output's keys"
        )
        with pytest.raises(ValueError, match=pattern):
            node(one_in_dict_out, "ds1", {"ret": "B", "ans": "C"}).run(
                {"ds1": mocked_dataset}
            )

    def test_node_not_list_output(self, mocked_dataset):
        """Outputs declared as a list but the function returns a single data set."""
        pattern = (
            r"The node definition contains a list of outputs "
            r"\['B', 'C'\], whereas the node function returned "
            r"a `LambdaDataSet`"
        )
        with pytest.raises(ValueError, match=pattern):
            node(one_in_one_out, "ds1", ["B", "C"]).run({"ds1": mocked_dataset})

    def test_node_wrong_num_of_outputs(self, mocker, mocked_dataset):
        """The function returns fewer outputs than the node declares."""

        def one_in_two_out(arg):
            load = mocker.Mock(return_value=42)
            save = mocker.Mock()
            return [LambdaDataSet(load, save), LambdaDataSet(load, save)]

        pattern = (
            r"The node function returned 2 output\(s\), whereas "
            r"the node definition contains 3 output\(s\)\."
        )
        with pytest.raises(ValueError, match=pattern):
            node(one_in_two_out, "ds1", ["A", "B", "C"]).run({"ds1": mocked_dataset})
| 42.888235 | 86 | 0.665478 |
import pytest
from kedro.io import LambdaDataSet
from kedro.pipeline import node
@pytest.fixture
def mocked_dataset(mocker):
load = mocker.Mock(return_value=42)
save = mocker.Mock()
return LambdaDataSet(load, save)
def one_in_one_out(arg):
return arg
def one_in_dict_out(arg):
return dict(ret=arg)
def two_in_first_out(arg1, arg2):
return arg1
@pytest.fixture
def valid_nodes_with_inputs():
return [
(node(one_in_one_out, "ds1", "dsOut"), dict(ds1=42)),
(node(one_in_dict_out, dict(arg="ds1"), dict(ret="dsOut")), dict(ds1=42)),
(node(two_in_first_out, ["ds1", "ds2"], "dsOut"), dict(ds1=42, ds2=58)),
]
def test_valid_nodes(valid_nodes_with_inputs):
for node_, input_ in valid_nodes_with_inputs:
output = node_.run(input_)
assert output["dsOut"] == 42
def test_run_got_dataframe(mocked_dataset):
pattern = r"Node.run\(\) expects a dictionary or None, "
pattern += r"but got <class \'kedro.io.lambda_data_set.LambdaDataSet\'> instead"
with pytest.raises(ValueError, match=pattern):
node(one_in_one_out, dict(arg="ds1"), "A").run(mocked_dataset)
class TestNodeRunInvalidInput:
def test_unresolved(self):
with pytest.raises(ValueError, match=r"expected one input"):
node(one_in_one_out, "unresolved", "ds1").run(None)
def test_no_inputs_node_error(self, mocked_dataset):
with pytest.raises(ValueError, match=r"expected no inputs"):
node(lambda: 1, None, "A").run(dict(unexpected=mocked_dataset))
def test_one_input_error(self, mocked_dataset):
pattern = r"expected one input named 'ds1', but got the "
pattern += r"following 1 input\(s\) instead: \['arg'\]"
with pytest.raises(ValueError, match=pattern):
node(one_in_dict_out, "ds1", dict(ret="B", ans="C")).run(
dict(arg=mocked_dataset)
)
def test_run_diff_size_lists(self, mocked_dataset):
pattern = r"expected 2 input\(s\) \['ds1', 'ds2'\], but "
pattern += r"got the following 1 input\(s\) instead."
with pytest.raises(ValueError, match=pattern):
node(two_in_first_out, ["ds1", "ds2"], "A").run(dict(ds1=mocked_dataset))
def test_run_diff_size_list_dict(self, mocked_dataset):
pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
pattern += r"following 2 input\(s\) instead: \['ds1', 'ds2'\]\."
with pytest.raises(ValueError, match=pattern):
node(one_in_one_out, ["ds1"], "A").run(dict(ds1=mocked_dataset, ds2=2))
def test_run_list_dict_unavailable(self, mocked_dataset):
pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
pattern += r"following 1 input\(s\) instead: \['ds2'\]\."
with pytest.raises(ValueError, match=pattern):
node(one_in_one_out, ["ds1"], "A").run(dict(ds2=mocked_dataset))
def test_run_dict_unavailable(self, mocked_dataset):
pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
pattern += r"following 1 input\(s\) instead: \['ds2'\]\."
with pytest.raises(ValueError, match=pattern):
node(one_in_one_out, dict(arg="ds1"), "A").run(dict(ds2=mocked_dataset))
def test_run_dict_diff_size(self, mocked_dataset):
pattern = r"expected 1 input\(s\) \['ds1'\], but got the "
pattern += r"following 2 input\(s\) instead: \['ds1', 'ds2'\]\."
with pytest.raises(ValueError, match=pattern):
node(one_in_one_out, dict(arg="ds1"), "A").run(
dict(ds1=mocked_dataset, ds2=2)
)
class TestNodeRunInvalidOutput:
def test_miss_matching_output_types(self, mocked_dataset):
pattern = r"The node output is a dictionary, whereas the function "
pattern += r"output is not\."
with pytest.raises(ValueError, match=pattern):
node(one_in_one_out, "ds1", dict(a="ds")).run(dict(ds1=mocked_dataset))
def test_miss_matching_output_keys(self, mocked_dataset):
pattern = r"The node's output keys {'ret'} do not match "
pattern += r"with the returned output's keys"
with pytest.raises(ValueError, match=pattern):
node(one_in_dict_out, "ds1", dict(ret="B", ans="C")).run(
dict(ds1=mocked_dataset)
)
def test_node_not_list_output(self, mocked_dataset):
pattern = r"The node definition contains a list of outputs "
pattern += r"\['B', 'C'\], whereas the node function returned "
pattern += r"a `LambdaDataSet`"
with pytest.raises(ValueError, match=pattern):
node(one_in_one_out, "ds1", ["B", "C"]).run(dict(ds1=mocked_dataset))
def test_node_wrong_num_of_outputs(self, mocker, mocked_dataset):
def one_in_two_out(arg):
load = mocker.Mock(return_value=42)
save = mocker.Mock()
return [LambdaDataSet(load, save), LambdaDataSet(load, save)]
pattern = r"The node function returned 2 output\(s\), whereas "
pattern += r"the node definition contains 3 output\(s\)\."
with pytest.raises(ValueError, match=pattern):
node(one_in_two_out, "ds1", ["A", "B", "C"]).run(dict(ds1=mocked_dataset))
| true | true |
f731b5fbc7cc5db0a0a57a78bcbce234f1e07f3c | 2,103 | py | Python | beta_rec/recommenders/userKNN.py | mengzaiqiao/TVBR | cdac86a753c41f8f3c55a025be8d88dd305325f5 | [
"MIT"
] | 126 | 2020-03-19T02:30:23.000Z | 2022-03-15T11:10:46.000Z | beta_rec/recommenders/userKNN.py | mengzaiqiao/TVBR | cdac86a753c41f8f3c55a025be8d88dd305325f5 | [
"MIT"
] | 174 | 2020-03-15T17:28:10.000Z | 2022-03-15T22:38:51.000Z | beta_rec/recommenders/userKNN.py | mengzaiqiao/TVBR | cdac86a753c41f8f3c55a025be8d88dd305325f5 | [
"MIT"
] | 38 | 2020-03-19T00:38:47.000Z | 2022-02-24T11:03:12.000Z | import os
import time
from munch import munchify
from ray import tune
from ..core.recommender import Recommender
from ..models.userKNN import UserKNNEngine
from ..utils.monitor import Monitor
def tune_train(config):
    """Train one UserKNN trial for the ray.tune hyper-parameter search.

    Args:
        config (dict): All the parameters for the model; must carry the
            dataset under the ``"data"`` key.
    """
    data = config["data"]
    engine = UserKNN(munchify(config))
    outcome = engine.train(data)
    # Block until every evaluation worker has drained before reporting.
    while engine.eval_engine.n_worker > 0:
        time.sleep(20)
    # NOTE(review): UserKNN.train below returns the string "data loaded",
    # which these subscripts would not accept -- confirm the intended contract.
    tune.report(
        valid_metric=outcome["valid_metric"],
        model_save_dir=outcome["model_save_dir"],
    )
class UserKNN(Recommender):
    """The User-based K Nearest Neighbour Model."""
    def __init__(self, config):
        """Initialize the config of this recommender.
        Args:
            config: full configuration object; handed to the Recommender base
                class, which stores it as ``self.config``.
        """
        super(UserKNN, self).__init__(config, name="UserKNN")
    def init_engine(self, data):
        """Initialize the required parameters for the model.
        Copies the dataset dimensions into the model config and builds the
        UserKNNEngine.
        Args:
            data: the Dataset object (must expose ``n_users`` and ``n_items``).
        """
        self.config["model"]["n_users"] = data.n_users
        self.config["model"]["n_items"] = data.n_items
        self.engine = UserKNNEngine(self.config)
    def train(self, data):
        """Prepare the KNN model from the dataset.
        Args:
            data: the Dataset object.
        Returns:
            str: the literal string "data loaded".
        NOTE(review): ``tune_train`` above subscripts this return value with
        ["valid_metric"], which is inconsistent with returning a string --
        confirm the intended contract.
        """
        self.gpu_id, self.config["device_str"] = self.get_device()  # Select the compute device.
        self.config["model"]["n_users"] = data.n_users
        self.config["model"]["n_items"] = data.n_items
        self.monitor = Monitor(
            log_dir=self.config["system"]["run_dir"], delay=1, gpu_id=self.gpu_id
        )
        self.init_engine(data)
        print(type(data.train))  # debug output left in place
        print(data.train.head())
        self.engine.model.prepare_model(data)
        self.model_save_dir = os.path.join(
            self.config["system"]["model_save_dir"], self.config["model"]["save_name"]
        )
        self.config["run_time"] = self.monitor.stop()
        return "data loaded"
| 27.311688 | 86 | 0.611032 | import os
import time
from munch import munchify
from ray import tune
from ..core.recommender import Recommender
from ..models.userKNN import UserKNNEngine
from ..utils.monitor import Monitor
def tune_train(config):
data = config["data"]
train_engine = UserKNN(munchify(config))
result = train_engine.train(data)
while train_engine.eval_engine.n_worker > 0:
time.sleep(20)
tune.report(
valid_metric=result["valid_metric"],
model_save_dir=result["model_save_dir"],
)
class UserKNN(Recommender):
def __init__(self, config):
super(UserKNN, self).__init__(config, name="UserKNN")
def init_engine(self, data):
self.config["model"]["n_users"] = data.n_users
self.config["model"]["n_items"] = data.n_items
self.engine = UserKNNEngine(self.config)
def train(self, data):
self.gpu_id, self.config["device_str"] = self.get_device()
self.config["model"]["n_users"] = data.n_users
self.config["model"]["n_items"] = data.n_items
self.monitor = Monitor(
log_dir=self.config["system"]["run_dir"], delay=1, gpu_id=self.gpu_id
)
self.init_engine(data)
print(type(data.train))
print(data.train.head())
self.engine.model.prepare_model(data)
self.model_save_dir = os.path.join(
self.config["system"]["model_save_dir"], self.config["model"]["save_name"]
)
self.config["run_time"] = self.monitor.stop()
return "data loaded"
| true | true |
f731b60bb39975a9d09169527228d3f67c163104 | 2,647 | py | Python | sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py | yrbahn/feast | b5648f0cb6109594fcc8881237ddd583960d244f | [
"Apache-2.0"
] | null | null | null | sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py | yrbahn/feast | b5648f0cb6109594fcc8881237ddd583960d244f | [
"Apache-2.0"
] | null | null | null | sdk/python/tests/integration/feature_repos/universal/data_sources/redshift.py | yrbahn/feast | b5648f0cb6109594fcc8881237ddd583960d244f | [
"Apache-2.0"
] | null | null | null | from typing import Dict
import pandas as pd
from feast import RedshiftSource
from feast.data_source import DataSource
from feast.infra.offline_stores.redshift import RedshiftOfflineStoreConfig
from feast.infra.utils import aws_utils
from feast.repo_config import FeastConfigBaseModel
from tests.integration.feature_repos.universal.data_source_creator import (
DataSourceCreator,
)
class RedshiftDataSourceCreator(DataSourceCreator):
    """Creates Redshift-backed data sources for integration tests.

    Uploads dataframes to Redshift (staged as parquet on S3) and remembers
    every table it creates so that ``teardown`` can drop them afterwards.
    """

    # Kept for backward compatibility with any code that reads the class
    # attribute; the working list is per-instance (see __init__).
    tables = []

    def __init__(self, project_name: str):
        super().__init__()
        self.project_name = project_name
        # Bug fix: ``tables`` used to be a single class-level list shared by
        # every instance, so one creator's teardown() dropped tables created
        # by other creators (and the list grew forever). Track per instance.
        self.tables = []
        self.client = aws_utils.get_redshift_data_client("us-west-2")
        self.s3 = aws_utils.get_s3_resource("us-west-2")
        self.offline_store_config = RedshiftOfflineStoreConfig(
            cluster_id="feast-integration-tests",
            region="us-west-2",
            user="admin",
            database="feast",
            s3_staging_location="s3://feast-integration-tests/redshift/tests/ingestion",
            iam_role="arn:aws:iam::402087665549:role/redshift_s3_access_role",
        )

    def create_data_sources(
        self,
        destination: str,
        df: pd.DataFrame,
        event_timestamp_column="ts",
        created_timestamp_column="created_ts",
        field_mapping: Dict[str, str] = None,
    ) -> DataSource:
        """Upload *df* as Redshift table *destination* and return a source for it.

        The dataframe is staged under the configured S3 location and copied
        into Redshift; the table name is recorded for ``teardown``.
        """
        aws_utils.upload_df_to_redshift(
            self.client,
            self.offline_store_config.cluster_id,
            self.offline_store_config.database,
            self.offline_store_config.user,
            self.s3,
            f"{self.offline_store_config.s3_staging_location}/copy/{destination}.parquet",
            self.offline_store_config.iam_role,
            destination,
            df,
        )
        self.tables.append(destination)
        return RedshiftSource(
            table=destination,
            event_timestamp_column=event_timestamp_column,
            created_timestamp_column=created_timestamp_column,
            date_partition_column="",
            field_mapping=field_mapping or {"ts_1": "ts"},
        )

    def create_offline_store_config(self) -> FeastConfigBaseModel:
        """Return the Redshift offline-store configuration used by this creator."""
        return self.offline_store_config

    def get_prefixed_table_name(self, name: str, suffix: str) -> str:
        """Build the destination table name as ``<name>_<suffix>``."""
        return f"{name}_{suffix}"

    def teardown(self):
        """Drop every table this creator instance has created."""
        for table in self.tables:
            aws_utils.execute_redshift_statement(
                self.client,
                self.offline_store_config.cluster_id,
                self.offline_store_config.database,
                self.offline_store_config.user,
                f"DROP TABLE IF EXISTS {table}",
            )
| 33.0875 | 90 | 0.656215 | from typing import Dict
import pandas as pd
from feast import RedshiftSource
from feast.data_source import DataSource
from feast.infra.offline_stores.redshift import RedshiftOfflineStoreConfig
from feast.infra.utils import aws_utils
from feast.repo_config import FeastConfigBaseModel
from tests.integration.feature_repos.universal.data_source_creator import (
DataSourceCreator,
)
class RedshiftDataSourceCreator(DataSourceCreator):
tables = []
def __init__(self, project_name: str):
super().__init__()
self.project_name = project_name
self.client = aws_utils.get_redshift_data_client("us-west-2")
self.s3 = aws_utils.get_s3_resource("us-west-2")
self.offline_store_config = RedshiftOfflineStoreConfig(
cluster_id="feast-integration-tests",
region="us-west-2",
user="admin",
database="feast",
s3_staging_location="s3://feast-integration-tests/redshift/tests/ingestion",
iam_role="arn:aws:iam::402087665549:role/redshift_s3_access_role",
)
def create_data_sources(
self,
destination: str,
df: pd.DataFrame,
event_timestamp_column="ts",
created_timestamp_column="created_ts",
field_mapping: Dict[str, str] = None,
) -> DataSource:
aws_utils.upload_df_to_redshift(
self.client,
self.offline_store_config.cluster_id,
self.offline_store_config.database,
self.offline_store_config.user,
self.s3,
f"{self.offline_store_config.s3_staging_location}/copy/{destination}.parquet",
self.offline_store_config.iam_role,
destination,
df,
)
self.tables.append(destination)
return RedshiftSource(
table=destination,
event_timestamp_column=event_timestamp_column,
created_timestamp_column=created_timestamp_column,
date_partition_column="",
field_mapping=field_mapping or {"ts_1": "ts"},
)
def create_offline_store_config(self) -> FeastConfigBaseModel:
return self.offline_store_config
def get_prefixed_table_name(self, name: str, suffix: str) -> str:
return f"{name}_{suffix}"
def teardown(self):
for table in self.tables:
aws_utils.execute_redshift_statement(
self.client,
self.offline_store_config.cluster_id,
self.offline_store_config.database,
self.offline_store_config.user,
f"DROP TABLE IF EXISTS {table}",
)
| true | true |
f731b61e890831d1cd4fbf37958278bb584384cb | 2,988 | py | Python | setup.py | NeilBotelho/pip | d01bfcfaa13a4f06fa0ce61fa18cf06012f2e78f | [
"MIT"
] | null | null | null | setup.py | NeilBotelho/pip | d01bfcfaa13a4f06fa0ce61fa18cf06012f2e78f | [
"MIT"
] | 1 | 2021-10-04T12:25:25.000Z | 2021-10-05T07:30:54.000Z | setup.py | NeilBotelho/pip | d01bfcfaa13a4f06fa0ce61fa18cf06012f2e78f | [
"MIT"
] | 1 | 2020-06-01T19:13:16.000Z | 2020-06-01T19:13:16.000Z | # The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
import codecs
import os
import sys
from setuptools import find_packages, setup
def read(rel_path):
    """Return the text of *rel_path*, resolved relative to this file's directory."""
    base_dir = os.path.abspath(os.path.dirname(__file__))
    # intentionally *not* adding an encoding option to open, See:
    # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
    target = os.path.join(base_dir, rel_path)
    with codecs.open(target, 'r') as fp:
        return fp.read()
def get_version(rel_path):
    """Extract the value of ``__version__`` from the file at *rel_path*.

    Raises:
        RuntimeError: if no line starting with ``__version__`` is found.
    """
    for line in read(rel_path).splitlines():
        if not line.startswith('__version__'):
            continue
        # e.g. __version__ = "0.9" -- the version sits between the quotes.
        delim = '"' if '"' in line else "'"
        return line.split(delim)[1]
    raise RuntimeError("Unable to find version string.")
# Long description shown on the PyPI project page, taken verbatim from the README.
long_description = read('README.rst')
setup(
    name="pip",
    # Single-source the version string from src/pip/__init__.py.
    version=get_version("src/pip/__init__.py"),
    description="The PyPA recommended tool for installing Python packages.",
    long_description=long_description,
    license='MIT',
    # Trove classifiers advertised on PyPI.
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Topic :: Software Development :: Build Tools",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
    ],
    url='https://pip.pypa.io/',
    keywords='distutils easy_install egg setuptools wheel virtualenv',
    project_urls={
        "Documentation": "https://pip.pypa.io",
        "Source": "https://github.com/pypa/pip",
        "Changelog": "https://pip.pypa.io/en/stable/news/",
    },
    author='The pip developers',
    author_email='distutils-sig@python.org',
    # The package lives under src/ (src-layout).
    package_dir={"": "src"},
    packages=find_packages(
        where="src",
        exclude=["contrib", "docs", "tests*", "tasks"],
    ),
    # Non-Python files that must ship inside the wheel/sdist.
    package_data={
        "pip._vendor": ["vendor.txt"],
        "pip._vendor.certifi": ["*.pem"],
        "pip._vendor.requests": ["*.pem"],
        "pip._vendor.distlib._backport": ["sysconfig.cfg"],
        "pip._vendor.distlib": ["t32.exe", "t64.exe", "w32.exe", "w64.exe"],
    },
    # Install `pip`, `pipX` and `pipX.Y` console scripts for the running Python.
    entry_points={
        "console_scripts": [
            "pip=pip._internal.cli.main:main",
            "pip{}=pip._internal.cli.main:main".format(sys.version_info[0]),
            "pip{}.{}=pip._internal.cli.main:main".format(
                *sys.version_info[:2]
            ),
        ],
    },
    zip_safe=False,
    python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',
)
| 33.2 | 76 | 0.598728 |
import codecs
import os
import sys
from setuptools import find_packages, setup
def read(rel_path):
here = os.path.abspath(os.path.dirname(__file__))
(os.path.join(here, rel_path), 'r') as fp:
return fp.read()
def get_version(rel_path):
for line in read(rel_path).splitlines():
if line.startswith('__version__'):
delim = '"' if '"' in line else "'"
return line.split(delim)[1]
else:
raise RuntimeError("Unable to find version string.")
long_description = read('README.rst')
setup(
name="pip",
version=get_version("src/pip/__init__.py"),
description="The PyPA recommended tool for installing Python packages.",
long_description=long_description,
license='MIT',
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Software Development :: Build Tools",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
url='https://pip.pypa.io/',
keywords='distutils easy_install egg setuptools wheel virtualenv',
project_urls={
"Documentation": "https://pip.pypa.io",
"Source": "https://github.com/pypa/pip",
"Changelog": "https://pip.pypa.io/en/stable/news/",
},
author='The pip developers',
author_email='distutils-sig@python.org',
package_dir={"": "src"},
packages=find_packages(
where="src",
exclude=["contrib", "docs", "tests*", "tasks"],
),
package_data={
"pip._vendor": ["vendor.txt"],
"pip._vendor.certifi": ["*.pem"],
"pip._vendor.requests": ["*.pem"],
"pip._vendor.distlib._backport": ["sysconfig.cfg"],
"pip._vendor.distlib": ["t32.exe", "t64.exe", "w32.exe", "w64.exe"],
},
entry_points={
"console_scripts": [
"pip=pip._internal.cli.main:main",
"pip{}=pip._internal.cli.main:main".format(sys.version_info[0]),
"pip{}.{}=pip._internal.cli.main:main".format(
*sys.version_info[:2]
),
],
},
zip_safe=False,
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',
)
| true | true |
f731b631d25327869866e36afed8e0d2ed3b20d4 | 184 | py | Python | domain/message.py | wazatoki/IotLoggerClientSample | e69f0f5f4caa3714041f4d5e8a16cde03bbbd912 | [
"MIT"
] | null | null | null | domain/message.py | wazatoki/IotLoggerClientSample | e69f0f5f4caa3714041f4d5e8a16cde03bbbd912 | [
"MIT"
] | 2 | 2021-04-06T18:18:36.000Z | 2021-06-02T02:26:19.000Z | domain/message.py | wazatoki/IotLoggerClientSample | e69f0f5f4caa3714041f4d5e8a16cde03bbbd912 | [
"MIT"
] | null | null | null | class message_data:
device_id = ""
message = ""
def get_Data(self):
return {
"deviceID": self.device_id,
"message": self.message
} | 18.4 | 39 | 0.505435 | class message_data:
device_id = ""
message = ""
def get_Data(self):
return {
"deviceID": self.device_id,
"message": self.message
} | true | true |
f731b70415e7f66bf51834ee97e333db6e5dc6c6 | 52,857 | py | Python | Lib/test/test_array.py | chexca/cpython | cfc6ce4d40f2f01314b7e283fb972a7bb3ed3faa | [
"CNRI-Python-GPL-Compatible"
] | 1,318 | 2019-07-11T10:34:39.000Z | 2022-03-29T15:05:19.000Z | Lib/test/test_array.py | chexca/cpython | cfc6ce4d40f2f01314b7e283fb972a7bb3ed3faa | [
"CNRI-Python-GPL-Compatible"
] | 387 | 2020-12-15T14:54:04.000Z | 2022-03-31T07:00:21.000Z | Lib/test/test_array.py | chexca/cpython | cfc6ce4d40f2f01314b7e283fb972a7bb3ed3faa | [
"CNRI-Python-GPL-Compatible"
] | 66 | 2019-11-11T15:33:12.000Z | 2022-03-01T07:55:55.000Z | """Test the arraymodule.
Roger E. Masse
"""
import unittest
from test import support
from test.support import _2G
import weakref
import pickle
import operator
import struct
import sys
import array
from array import _array_reconstructor as array_reconstructor
sizeof_wchar = array.array('u').itemsize
class ArraySubclass(array.array):
pass
class ArraySubclassWithKwargs(array.array):
def __init__(self, typecode, newarg=None):
array.array.__init__(self)
typecodes = 'ubBhHiIlLfdqQ'
class MiscTest(unittest.TestCase):
def test_bad_constructor(self):
self.assertRaises(TypeError, array.array)
self.assertRaises(TypeError, array.array, spam=42)
self.assertRaises(TypeError, array.array, 'xx')
self.assertRaises(ValueError, array.array, 'x')
def test_empty(self):
# Exercise code for handling zero-length arrays
a = array.array('B')
a[:] = a
self.assertEqual(len(a), 0)
self.assertEqual(len(a + a), 0)
self.assertEqual(len(a * 3), 0)
a += a
self.assertEqual(len(a), 0)
# Machine format codes.
#
# Search for "enum machine_format_code" in Modules/arraymodule.c to get the
# authoritative values.
UNKNOWN_FORMAT = -1
UNSIGNED_INT8 = 0
SIGNED_INT8 = 1
UNSIGNED_INT16_LE = 2
UNSIGNED_INT16_BE = 3
SIGNED_INT16_LE = 4
SIGNED_INT16_BE = 5
UNSIGNED_INT32_LE = 6
UNSIGNED_INT32_BE = 7
SIGNED_INT32_LE = 8
SIGNED_INT32_BE = 9
UNSIGNED_INT64_LE = 10
UNSIGNED_INT64_BE = 11
SIGNED_INT64_LE = 12
SIGNED_INT64_BE = 13
IEEE_754_FLOAT_LE = 14
IEEE_754_FLOAT_BE = 15
IEEE_754_DOUBLE_LE = 16
IEEE_754_DOUBLE_BE = 17
UTF16_LE = 18
UTF16_BE = 19
UTF32_LE = 20
UTF32_BE = 21
class ArrayReconstructorTest(unittest.TestCase):
def test_error(self):
self.assertRaises(TypeError, array_reconstructor,
"", "b", 0, b"")
self.assertRaises(TypeError, array_reconstructor,
str, "b", 0, b"")
self.assertRaises(TypeError, array_reconstructor,
array.array, "b", '', b"")
self.assertRaises(TypeError, array_reconstructor,
array.array, "b", 0, "")
self.assertRaises(ValueError, array_reconstructor,
array.array, "?", 0, b"")
self.assertRaises(ValueError, array_reconstructor,
array.array, "b", UNKNOWN_FORMAT, b"")
self.assertRaises(ValueError, array_reconstructor,
array.array, "b", 22, b"")
self.assertRaises(ValueError, array_reconstructor,
array.array, "d", 16, b"a")
def test_numbers(self):
testcases = (
(['B', 'H', 'I', 'L'], UNSIGNED_INT8, '=BBBB',
[0x80, 0x7f, 0, 0xff]),
(['b', 'h', 'i', 'l'], SIGNED_INT8, '=bbb',
[-0x80, 0x7f, 0]),
(['H', 'I', 'L'], UNSIGNED_INT16_LE, '<HHHH',
[0x8000, 0x7fff, 0, 0xffff]),
(['H', 'I', 'L'], UNSIGNED_INT16_BE, '>HHHH',
[0x8000, 0x7fff, 0, 0xffff]),
(['h', 'i', 'l'], SIGNED_INT16_LE, '<hhh',
[-0x8000, 0x7fff, 0]),
(['h', 'i', 'l'], SIGNED_INT16_BE, '>hhh',
[-0x8000, 0x7fff, 0]),
(['I', 'L'], UNSIGNED_INT32_LE, '<IIII',
[1<<31, (1<<31)-1, 0, (1<<32)-1]),
(['I', 'L'], UNSIGNED_INT32_BE, '>IIII',
[1<<31, (1<<31)-1, 0, (1<<32)-1]),
(['i', 'l'], SIGNED_INT32_LE, '<iii',
[-1<<31, (1<<31)-1, 0]),
(['i', 'l'], SIGNED_INT32_BE, '>iii',
[-1<<31, (1<<31)-1, 0]),
(['L'], UNSIGNED_INT64_LE, '<QQQQ',
[1<<31, (1<<31)-1, 0, (1<<32)-1]),
(['L'], UNSIGNED_INT64_BE, '>QQQQ',
[1<<31, (1<<31)-1, 0, (1<<32)-1]),
(['l'], SIGNED_INT64_LE, '<qqq',
[-1<<31, (1<<31)-1, 0]),
(['l'], SIGNED_INT64_BE, '>qqq',
[-1<<31, (1<<31)-1, 0]),
# The following tests for INT64 will raise an OverflowError
# when run on a 32-bit machine. The tests are simply skipped
# in that case.
(['L'], UNSIGNED_INT64_LE, '<QQQQ',
[1<<63, (1<<63)-1, 0, (1<<64)-1]),
(['L'], UNSIGNED_INT64_BE, '>QQQQ',
[1<<63, (1<<63)-1, 0, (1<<64)-1]),
(['l'], SIGNED_INT64_LE, '<qqq',
[-1<<63, (1<<63)-1, 0]),
(['l'], SIGNED_INT64_BE, '>qqq',
[-1<<63, (1<<63)-1, 0]),
(['f'], IEEE_754_FLOAT_LE, '<ffff',
[16711938.0, float('inf'), float('-inf'), -0.0]),
(['f'], IEEE_754_FLOAT_BE, '>ffff',
[16711938.0, float('inf'), float('-inf'), -0.0]),
(['d'], IEEE_754_DOUBLE_LE, '<dddd',
[9006104071832581.0, float('inf'), float('-inf'), -0.0]),
(['d'], IEEE_754_DOUBLE_BE, '>dddd',
[9006104071832581.0, float('inf'), float('-inf'), -0.0])
)
for testcase in testcases:
valid_typecodes, mformat_code, struct_fmt, values = testcase
arraystr = struct.pack(struct_fmt, *values)
for typecode in valid_typecodes:
try:
a = array.array(typecode, values)
except OverflowError:
continue # Skip this test case.
b = array_reconstructor(
array.array, typecode, mformat_code, arraystr)
self.assertEqual(a, b,
msg="{0!r} != {1!r}; testcase={2!r}".format(a, b, testcase))
def test_unicode(self):
teststr = "Bonne Journ\xe9e \U0002030a\U00020347"
testcases = (
(UTF16_LE, "UTF-16-LE"),
(UTF16_BE, "UTF-16-BE"),
(UTF32_LE, "UTF-32-LE"),
(UTF32_BE, "UTF-32-BE")
)
for testcase in testcases:
mformat_code, encoding = testcase
a = array.array('u', teststr)
b = array_reconstructor(
array.array, 'u', mformat_code, teststr.encode(encoding))
self.assertEqual(a, b,
msg="{0!r} != {1!r}; testcase={2!r}".format(a, b, testcase))
class BaseTest:
# Required class attributes (provided by subclasses
# typecode: the typecode to test
# example: an initializer usable in the constructor for this type
# smallerexample: the same length as example, but smaller
# biggerexample: the same length as example, but bigger
# outside: An entry that is not in example
# minitemsize: the minimum guaranteed itemsize
def assertEntryEqual(self, entry1, entry2):
self.assertEqual(entry1, entry2)
def badtypecode(self):
# Return a typecode that is different from our own
return typecodes[(typecodes.index(self.typecode)+1) % len(typecodes)]
def test_constructor(self):
a = array.array(self.typecode)
self.assertEqual(a.typecode, self.typecode)
self.assertGreaterEqual(a.itemsize, self.minitemsize)
self.assertRaises(TypeError, array.array, self.typecode, None)
def test_len(self):
a = array.array(self.typecode)
a.append(self.example[0])
self.assertEqual(len(a), 1)
a = array.array(self.typecode, self.example)
self.assertEqual(len(a), len(self.example))
def test_buffer_info(self):
a = array.array(self.typecode, self.example)
self.assertRaises(TypeError, a.buffer_info, 42)
bi = a.buffer_info()
self.assertIsInstance(bi, tuple)
self.assertEqual(len(bi), 2)
self.assertIsInstance(bi[0], int)
self.assertIsInstance(bi[1], int)
self.assertEqual(bi[1], len(a))
def test_byteswap(self):
if self.typecode == 'u':
example = '\U00100100'
else:
example = self.example
a = array.array(self.typecode, example)
self.assertRaises(TypeError, a.byteswap, 42)
if a.itemsize in (1, 2, 4, 8):
b = array.array(self.typecode, example)
b.byteswap()
if a.itemsize==1:
self.assertEqual(a, b)
else:
self.assertNotEqual(a, b)
b.byteswap()
self.assertEqual(a, b)
def test_copy(self):
import copy
a = array.array(self.typecode, self.example)
b = copy.copy(a)
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
def test_deepcopy(self):
import copy
a = array.array(self.typecode, self.example)
b = copy.deepcopy(a)
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
def test_reduce_ex(self):
a = array.array(self.typecode, self.example)
for protocol in range(3):
self.assertIs(a.__reduce_ex__(protocol)[0], array.array)
for protocol in range(3, pickle.HIGHEST_PROTOCOL + 1):
self.assertIs(a.__reduce_ex__(protocol)[0], array_reconstructor)
def test_pickle(self):
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
a = array.array(self.typecode, self.example)
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
a = ArraySubclass(self.typecode, self.example)
a.x = 10
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
self.assertEqual(a.x, b.x)
self.assertEqual(type(a), type(b))
def test_pickle_for_empty_array(self):
for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
a = array.array(self.typecode)
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
a = ArraySubclass(self.typecode)
a.x = 10
b = pickle.loads(pickle.dumps(a, protocol))
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
self.assertEqual(a.x, b.x)
self.assertEqual(type(a), type(b))
def test_iterator_pickle(self):
orig = array.array(self.typecode, self.example)
data = list(orig)
data2 = data[::-1]
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
# initial iterator
itorig = iter(orig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(type(it), type(itorig))
self.assertEqual(list(it), data + data2)
# running iterator
next(itorig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(type(it), type(itorig))
self.assertEqual(list(it), data[1:] + data2)
# empty iterator
for i in range(1, len(data)):
next(itorig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(type(it), type(itorig))
self.assertEqual(list(it), data2)
# exhausted iterator
self.assertRaises(StopIteration, next, itorig)
d = pickle.dumps((itorig, orig), proto)
it, a = pickle.loads(d)
a.fromlist(data2)
self.assertEqual(list(it), [])
def test_exhausted_iterator(self):
a = array.array(self.typecode, self.example)
self.assertEqual(list(a), list(self.example))
exhit = iter(a)
empit = iter(a)
for x in exhit: # exhaust the iterator
next(empit) # not exhausted
a.append(self.outside)
self.assertEqual(list(exhit), [])
self.assertEqual(list(empit), [self.outside])
self.assertEqual(list(a), list(self.example) + [self.outside])
def test_insert(self):
a = array.array(self.typecode, self.example)
a.insert(0, self.example[0])
self.assertEqual(len(a), 1+len(self.example))
self.assertEqual(a[0], a[1])
self.assertRaises(TypeError, a.insert)
self.assertRaises(TypeError, a.insert, None)
self.assertRaises(TypeError, a.insert, 0, None)
a = array.array(self.typecode, self.example)
a.insert(-1, self.example[0])
self.assertEqual(
a,
array.array(
self.typecode,
self.example[:-1] + self.example[:1] + self.example[-1:]
)
)
a = array.array(self.typecode, self.example)
a.insert(-1000, self.example[0])
self.assertEqual(
a,
array.array(self.typecode, self.example[:1] + self.example)
)
a = array.array(self.typecode, self.example)
a.insert(1000, self.example[0])
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example[:1])
)
def test_tofromfile(self):
a = array.array(self.typecode, 2*self.example)
self.assertRaises(TypeError, a.tofile)
support.unlink(support.TESTFN)
f = open(support.TESTFN, 'wb')
try:
a.tofile(f)
f.close()
b = array.array(self.typecode)
f = open(support.TESTFN, 'rb')
self.assertRaises(TypeError, b.fromfile)
b.fromfile(f, len(self.example))
self.assertEqual(b, array.array(self.typecode, self.example))
self.assertNotEqual(a, b)
self.assertRaises(EOFError, b.fromfile, f, len(self.example)+1)
self.assertEqual(a, b)
f.close()
finally:
if not f.closed:
f.close()
support.unlink(support.TESTFN)
def test_fromfile_ioerror(self):
# Issue #5395: Check if fromfile raises a proper OSError
# instead of EOFError.
a = array.array(self.typecode)
f = open(support.TESTFN, 'wb')
try:
self.assertRaises(OSError, a.fromfile, f, len(self.example))
finally:
f.close()
support.unlink(support.TESTFN)
def test_filewrite(self):
a = array.array(self.typecode, 2*self.example)
f = open(support.TESTFN, 'wb')
try:
f.write(a)
f.close()
b = array.array(self.typecode)
f = open(support.TESTFN, 'rb')
b.fromfile(f, len(self.example))
self.assertEqual(b, array.array(self.typecode, self.example))
self.assertNotEqual(a, b)
b.fromfile(f, len(self.example))
self.assertEqual(a, b)
f.close()
finally:
if not f.closed:
f.close()
support.unlink(support.TESTFN)
def test_tofromlist(self):
a = array.array(self.typecode, 2*self.example)
b = array.array(self.typecode)
self.assertRaises(TypeError, a.tolist, 42)
self.assertRaises(TypeError, b.fromlist)
self.assertRaises(TypeError, b.fromlist, 42)
self.assertRaises(TypeError, b.fromlist, [None])
b.fromlist(a.tolist())
self.assertEqual(a, b)
def test_tofrombytes(self):
a = array.array(self.typecode, 2*self.example)
b = array.array(self.typecode)
self.assertRaises(TypeError, a.tobytes, 42)
self.assertRaises(TypeError, b.frombytes)
self.assertRaises(TypeError, b.frombytes, 42)
b.frombytes(a.tobytes())
c = array.array(self.typecode, bytearray(a.tobytes()))
self.assertEqual(a, b)
self.assertEqual(a, c)
if a.itemsize>1:
self.assertRaises(ValueError, b.frombytes, b"x")
def test_fromarray(self):
a = array.array(self.typecode, self.example)
b = array.array(self.typecode, a)
self.assertEqual(a, b)
def test_repr(self):
a = array.array(self.typecode, 2*self.example)
self.assertEqual(a, eval(repr(a), {"array": array.array}))
a = array.array(self.typecode)
self.assertEqual(repr(a), "array('%s')" % self.typecode)
def test_str(self):
a = array.array(self.typecode, 2*self.example)
str(a)
def test_cmp(self):
a = array.array(self.typecode, self.example)
self.assertIs(a == 42, False)
self.assertIs(a != 42, True)
self.assertIs(a == a, True)
self.assertIs(a != a, False)
self.assertIs(a < a, False)
self.assertIs(a <= a, True)
self.assertIs(a > a, False)
self.assertIs(a >= a, True)
al = array.array(self.typecode, self.smallerexample)
ab = array.array(self.typecode, self.biggerexample)
self.assertIs(a == 2*a, False)
self.assertIs(a != 2*a, True)
self.assertIs(a < 2*a, True)
self.assertIs(a <= 2*a, True)
self.assertIs(a > 2*a, False)
self.assertIs(a >= 2*a, False)
self.assertIs(a == al, False)
self.assertIs(a != al, True)
self.assertIs(a < al, False)
self.assertIs(a <= al, False)
self.assertIs(a > al, True)
self.assertIs(a >= al, True)
self.assertIs(a == ab, False)
self.assertIs(a != ab, True)
self.assertIs(a < ab, True)
self.assertIs(a <= ab, True)
self.assertIs(a > ab, False)
self.assertIs(a >= ab, False)
def test_add(self):
a = array.array(self.typecode, self.example) \
+ array.array(self.typecode, self.example[::-1])
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example[::-1])
)
b = array.array(self.badtypecode())
self.assertRaises(TypeError, a.__add__, b)
self.assertRaises(TypeError, a.__add__, "bad")
def test_iadd(self):
a = array.array(self.typecode, self.example[::-1])
b = a
a += array.array(self.typecode, 2*self.example)
self.assertIs(a, b)
self.assertEqual(
a,
array.array(self.typecode, self.example[::-1]+2*self.example)
)
a = array.array(self.typecode, self.example)
a += a
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example)
)
b = array.array(self.badtypecode())
self.assertRaises(TypeError, a.__add__, b)
self.assertRaises(TypeError, a.__iadd__, "bad")
def test_mul(self):
a = 5*array.array(self.typecode, self.example)
self.assertEqual(
a,
array.array(self.typecode, 5*self.example)
)
a = array.array(self.typecode, self.example)*5
self.assertEqual(
a,
array.array(self.typecode, self.example*5)
)
a = 0*array.array(self.typecode, self.example)
self.assertEqual(
a,
array.array(self.typecode)
)
a = (-1)*array.array(self.typecode, self.example)
self.assertEqual(
a,
array.array(self.typecode)
)
a = 5 * array.array(self.typecode, self.example[:1])
self.assertEqual(
a,
array.array(self.typecode, [a[0]] * 5)
)
self.assertRaises(TypeError, a.__mul__, "bad")
def test_imul(self):
a = array.array(self.typecode, self.example)
b = a
a *= 5
self.assertIs(a, b)
self.assertEqual(
a,
array.array(self.typecode, 5*self.example)
)
a *= 0
self.assertIs(a, b)
self.assertEqual(a, array.array(self.typecode))
a *= 1000
self.assertIs(a, b)
self.assertEqual(a, array.array(self.typecode))
a *= -1
self.assertIs(a, b)
self.assertEqual(a, array.array(self.typecode))
a = array.array(self.typecode, self.example)
a *= -1
self.assertEqual(a, array.array(self.typecode))
self.assertRaises(TypeError, a.__imul__, "bad")
def test_getitem(self):
a = array.array(self.typecode, self.example)
self.assertEntryEqual(a[0], self.example[0])
self.assertEntryEqual(a[0], self.example[0])
self.assertEntryEqual(a[-1], self.example[-1])
self.assertEntryEqual(a[-1], self.example[-1])
self.assertEntryEqual(a[len(self.example)-1], self.example[-1])
self.assertEntryEqual(a[-len(self.example)], self.example[0])
self.assertRaises(TypeError, a.__getitem__)
self.assertRaises(IndexError, a.__getitem__, len(self.example))
self.assertRaises(IndexError, a.__getitem__, -len(self.example)-1)
def test_setitem(self):
a = array.array(self.typecode, self.example)
a[0] = a[-1]
self.assertEntryEqual(a[0], a[-1])
a = array.array(self.typecode, self.example)
a[0] = a[-1]
self.assertEntryEqual(a[0], a[-1])
a = array.array(self.typecode, self.example)
a[-1] = a[0]
self.assertEntryEqual(a[0], a[-1])
a = array.array(self.typecode, self.example)
a[-1] = a[0]
self.assertEntryEqual(a[0], a[-1])
a = array.array(self.typecode, self.example)
a[len(self.example)-1] = a[0]
self.assertEntryEqual(a[0], a[-1])
a = array.array(self.typecode, self.example)
a[-len(self.example)] = a[-1]
self.assertEntryEqual(a[0], a[-1])
self.assertRaises(TypeError, a.__setitem__)
self.assertRaises(TypeError, a.__setitem__, None)
self.assertRaises(TypeError, a.__setitem__, 0, None)
self.assertRaises(
IndexError,
a.__setitem__,
len(self.example), self.example[0]
)
self.assertRaises(
IndexError,
a.__setitem__,
-len(self.example)-1, self.example[0]
)
def test_delitem(self):
a = array.array(self.typecode, self.example)
del a[0]
self.assertEqual(
a,
array.array(self.typecode, self.example[1:])
)
a = array.array(self.typecode, self.example)
del a[-1]
self.assertEqual(
a,
array.array(self.typecode, self.example[:-1])
)
a = array.array(self.typecode, self.example)
del a[len(self.example)-1]
self.assertEqual(
a,
array.array(self.typecode, self.example[:-1])
)
a = array.array(self.typecode, self.example)
del a[-len(self.example)]
self.assertEqual(
a,
array.array(self.typecode, self.example[1:])
)
self.assertRaises(TypeError, a.__delitem__)
self.assertRaises(TypeError, a.__delitem__, None)
self.assertRaises(IndexError, a.__delitem__, len(self.example))
self.assertRaises(IndexError, a.__delitem__, -len(self.example)-1)
def test_getslice(self):
a = array.array(self.typecode, self.example)
self.assertEqual(a[:], a)
self.assertEqual(
a[1:],
array.array(self.typecode, self.example[1:])
)
self.assertEqual(
a[:1],
array.array(self.typecode, self.example[:1])
)
self.assertEqual(
a[:-1],
array.array(self.typecode, self.example[:-1])
)
self.assertEqual(
a[-1:],
array.array(self.typecode, self.example[-1:])
)
self.assertEqual(
a[-1:-1],
array.array(self.typecode)
)
self.assertEqual(
a[2:1],
array.array(self.typecode)
)
self.assertEqual(
a[1000:],
array.array(self.typecode)
)
self.assertEqual(a[-1000:], a)
self.assertEqual(a[:1000], a)
self.assertEqual(
a[:-1000],
array.array(self.typecode)
)
self.assertEqual(a[-1000:1000], a)
self.assertEqual(
a[2000:1000],
array.array(self.typecode)
)
def test_extended_getslice(self):
# Test extended slicing by comparing with list slicing
# (Assumes list conversion works correctly, too)
a = array.array(self.typecode, self.example)
indices = (0, None, 1, 3, 19, 100, sys.maxsize, -1, -2, -31, -100)
for start in indices:
for stop in indices:
# Everything except the initial 0 (invalid step)
for step in indices[1:]:
self.assertEqual(list(a[start:stop:step]),
list(a)[start:stop:step])
def test_setslice(self):
a = array.array(self.typecode, self.example)
a[:1] = a
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example[1:])
)
a = array.array(self.typecode, self.example)
a[:-1] = a
self.assertEqual(
a,
array.array(self.typecode, self.example + self.example[-1:])
)
a = array.array(self.typecode, self.example)
a[-1:] = a
self.assertEqual(
a,
array.array(self.typecode, self.example[:-1] + self.example)
)
a = array.array(self.typecode, self.example)
a[1:] = a
self.assertEqual(
a,
array.array(self.typecode, self.example[:1] + self.example)
)
a = array.array(self.typecode, self.example)
a[1:-1] = a
self.assertEqual(
a,
array.array(
self.typecode,
self.example[:1] + self.example + self.example[-1:]
)
)
a = array.array(self.typecode, self.example)
a[1000:] = a
self.assertEqual(
a,
array.array(self.typecode, 2*self.example)
)
a = array.array(self.typecode, self.example)
a[-1000:] = a
self.assertEqual(
a,
array.array(self.typecode, self.example)
)
a = array.array(self.typecode, self.example)
a[:1000] = a
self.assertEqual(
a,
array.array(self.typecode, self.example)
)
a = array.array(self.typecode, self.example)
a[:-1000] = a
self.assertEqual(
a,
array.array(self.typecode, 2*self.example)
)
a = array.array(self.typecode, self.example)
a[1:0] = a
self.assertEqual(
a,
array.array(self.typecode, self.example[:1] + self.example + self.example[1:])
)
a = array.array(self.typecode, self.example)
a[2000:1000] = a
self.assertEqual(
a,
array.array(self.typecode, 2*self.example)
)
a = array.array(self.typecode, self.example)
self.assertRaises(TypeError, a.__setitem__, slice(0, 0), None)
self.assertRaises(TypeError, a.__setitem__, slice(0, 1), None)
b = array.array(self.badtypecode())
self.assertRaises(TypeError, a.__setitem__, slice(0, 0), b)
self.assertRaises(TypeError, a.__setitem__, slice(0, 1), b)
def test_extended_set_del_slice(self):
indices = (0, None, 1, 3, 19, 100, sys.maxsize, -1, -2, -31, -100)
for start in indices:
for stop in indices:
# Everything except the initial 0 (invalid step)
for step in indices[1:]:
a = array.array(self.typecode, self.example)
L = list(a)
# Make sure we have a slice of exactly the right length,
# but with (hopefully) different data.
data = L[start:stop:step]
data.reverse()
L[start:stop:step] = data
a[start:stop:step] = array.array(self.typecode, data)
self.assertEqual(a, array.array(self.typecode, L))
del L[start:stop:step]
del a[start:stop:step]
self.assertEqual(a, array.array(self.typecode, L))
def test_index(self):
example = 2*self.example
a = array.array(self.typecode, example)
self.assertRaises(TypeError, a.index)
for x in example:
self.assertEqual(a.index(x), example.index(x))
self.assertRaises(ValueError, a.index, None)
self.assertRaises(ValueError, a.index, self.outside)
def test_count(self):
example = 2*self.example
a = array.array(self.typecode, example)
self.assertRaises(TypeError, a.count)
for x in example:
self.assertEqual(a.count(x), example.count(x))
self.assertEqual(a.count(self.outside), 0)
self.assertEqual(a.count(None), 0)
def test_remove(self):
for x in self.example:
example = 2*self.example
a = array.array(self.typecode, example)
pos = example.index(x)
example2 = example[:pos] + example[pos+1:]
a.remove(x)
self.assertEqual(a, array.array(self.typecode, example2))
a = array.array(self.typecode, self.example)
self.assertRaises(ValueError, a.remove, self.outside)
self.assertRaises(ValueError, a.remove, None)
def test_pop(self):
a = array.array(self.typecode)
self.assertRaises(IndexError, a.pop)
a = array.array(self.typecode, 2*self.example)
self.assertRaises(TypeError, a.pop, 42, 42)
self.assertRaises(TypeError, a.pop, None)
self.assertRaises(IndexError, a.pop, len(a))
self.assertRaises(IndexError, a.pop, -len(a)-1)
self.assertEntryEqual(a.pop(0), self.example[0])
self.assertEqual(
a,
array.array(self.typecode, self.example[1:]+self.example)
)
self.assertEntryEqual(a.pop(1), self.example[2])
self.assertEqual(
a,
array.array(self.typecode, self.example[1:2]+self.example[3:]+self.example)
)
self.assertEntryEqual(a.pop(0), self.example[1])
self.assertEntryEqual(a.pop(), self.example[-1])
self.assertEqual(
a,
array.array(self.typecode, self.example[3:]+self.example[:-1])
)
def test_reverse(self):
a = array.array(self.typecode, self.example)
self.assertRaises(TypeError, a.reverse, 42)
a.reverse()
self.assertEqual(
a,
array.array(self.typecode, self.example[::-1])
)
def test_extend(self):
a = array.array(self.typecode, self.example)
self.assertRaises(TypeError, a.extend)
a.extend(array.array(self.typecode, self.example[::-1]))
self.assertEqual(
a,
array.array(self.typecode, self.example+self.example[::-1])
)
a = array.array(self.typecode, self.example)
a.extend(a)
self.assertEqual(
a,
array.array(self.typecode, self.example+self.example)
)
b = array.array(self.badtypecode())
self.assertRaises(TypeError, a.extend, b)
a = array.array(self.typecode, self.example)
a.extend(self.example[::-1])
self.assertEqual(
a,
array.array(self.typecode, self.example+self.example[::-1])
)
def test_constructor_with_iterable_argument(self):
a = array.array(self.typecode, iter(self.example))
b = array.array(self.typecode, self.example)
self.assertEqual(a, b)
# non-iterable argument
self.assertRaises(TypeError, array.array, self.typecode, 10)
# pass through errors raised in __iter__
class A:
def __iter__(self):
raise UnicodeError
self.assertRaises(UnicodeError, array.array, self.typecode, A())
# pass through errors raised in next()
def B():
raise UnicodeError
yield None
self.assertRaises(UnicodeError, array.array, self.typecode, B())
def test_coveritertraverse(self):
try:
import gc
except ImportError:
self.skipTest('gc module not available')
a = array.array(self.typecode)
l = [iter(a)]
l.append(l)
gc.collect()
def test_buffer(self):
a = array.array(self.typecode, self.example)
m = memoryview(a)
expected = m.tobytes()
self.assertEqual(a.tobytes(), expected)
self.assertEqual(a.tobytes()[0], expected[0])
# Resizing is forbidden when there are buffer exports.
# For issue 4509, we also check after each error that
# the array was not modified.
self.assertRaises(BufferError, a.append, a[0])
self.assertEqual(m.tobytes(), expected)
self.assertRaises(BufferError, a.extend, a[0:1])
self.assertEqual(m.tobytes(), expected)
self.assertRaises(BufferError, a.remove, a[0])
self.assertEqual(m.tobytes(), expected)
self.assertRaises(BufferError, a.pop, 0)
self.assertEqual(m.tobytes(), expected)
self.assertRaises(BufferError, a.fromlist, a.tolist())
self.assertEqual(m.tobytes(), expected)
self.assertRaises(BufferError, a.frombytes, a.tobytes())
self.assertEqual(m.tobytes(), expected)
if self.typecode == 'u':
self.assertRaises(BufferError, a.fromunicode, a.tounicode())
self.assertEqual(m.tobytes(), expected)
self.assertRaises(BufferError, operator.imul, a, 2)
self.assertEqual(m.tobytes(), expected)
self.assertRaises(BufferError, operator.imul, a, 0)
self.assertEqual(m.tobytes(), expected)
self.assertRaises(BufferError, operator.setitem, a, slice(0, 0), a)
self.assertEqual(m.tobytes(), expected)
self.assertRaises(BufferError, operator.delitem, a, 0)
self.assertEqual(m.tobytes(), expected)
self.assertRaises(BufferError, operator.delitem, a, slice(0, 1))
self.assertEqual(m.tobytes(), expected)
def test_weakref(self):
s = array.array(self.typecode, self.example)
p = weakref.proxy(s)
self.assertEqual(p.tobytes(), s.tobytes())
s = None
self.assertRaises(ReferenceError, len, p)
@unittest.skipUnless(hasattr(sys, 'getrefcount'),
'test needs sys.getrefcount()')
def test_bug_782369(self):
for i in range(10):
b = array.array('B', range(64))
rc = sys.getrefcount(10)
for i in range(10):
b = array.array('B', range(64))
self.assertEqual(rc, sys.getrefcount(10))
def test_subclass_with_kwargs(self):
# SF bug #1486663 -- this used to erroneously raise a TypeError
ArraySubclassWithKwargs('b', newarg=1)
def test_create_from_bytes(self):
# XXX This test probably needs to be moved in a subclass or
# generalized to use self.typecode.
a = array.array('H', b"1234")
self.assertEqual(len(a) * a.itemsize, 4)
@support.cpython_only
def test_sizeof_with_buffer(self):
a = array.array(self.typecode, self.example)
basesize = support.calcvobjsize('Pn2Pi')
buffer_size = a.buffer_info()[1] * a.itemsize
support.check_sizeof(self, a, basesize + buffer_size)
@support.cpython_only
def test_sizeof_without_buffer(self):
a = array.array(self.typecode)
basesize = support.calcvobjsize('Pn2Pi')
support.check_sizeof(self, a, basesize)
def test_initialize_with_unicode(self):
if self.typecode != 'u':
with self.assertRaises(TypeError) as cm:
a = array.array(self.typecode, 'foo')
self.assertIn("cannot use a str", str(cm.exception))
with self.assertRaises(TypeError) as cm:
a = array.array(self.typecode, array.array('u', 'foo'))
self.assertIn("cannot use a unicode array", str(cm.exception))
else:
a = array.array(self.typecode, "foo")
a = array.array(self.typecode, array.array('u', 'foo'))
@support.cpython_only
def test_obsolete_write_lock(self):
from _testcapi import getbuffer_with_null_view
a = array.array('B', b"")
self.assertRaises(BufferError, getbuffer_with_null_view, a)
def test_free_after_iterating(self):
support.check_free_after_iterating(self, iter, array.array,
(self.typecode,))
support.check_free_after_iterating(self, reversed, array.array,
(self.typecode,))
class StringTest(BaseTest):
def test_setitem(self):
super().test_setitem()
a = array.array(self.typecode, self.example)
self.assertRaises(TypeError, a.__setitem__, 0, self.example[:2])
class UnicodeTest(StringTest, unittest.TestCase):
typecode = 'u'
example = '\x01\u263a\x00\ufeff'
smallerexample = '\x01\u263a\x00\ufefe'
biggerexample = '\x01\u263a\x01\ufeff'
outside = str('\x33')
minitemsize = 2
def test_unicode(self):
self.assertRaises(TypeError, array.array, 'b', 'foo')
a = array.array('u', '\xa0\xc2\u1234')
a.fromunicode(' ')
a.fromunicode('')
a.fromunicode('')
a.fromunicode('\x11abc\xff\u1234')
s = a.tounicode()
self.assertEqual(s, '\xa0\xc2\u1234 \x11abc\xff\u1234')
self.assertEqual(a.itemsize, sizeof_wchar)
s = '\x00="\'a\\b\x80\xff\u0000\u0001\u1234'
a = array.array('u', s)
self.assertEqual(
repr(a),
"array('u', '\\x00=\"\\'a\\\\b\\x80\xff\\x00\\x01\u1234')")
self.assertRaises(TypeError, a.fromunicode)
def test_issue17223(self):
# this used to crash
if sizeof_wchar == 4:
# U+FFFFFFFF is an invalid code point in Unicode 6.0
invalid_str = b'\xff\xff\xff\xff'
else:
# PyUnicode_FromUnicode() cannot fail with 16-bit wchar_t
self.skipTest("specific to 32-bit wchar_t")
a = array.array('u', invalid_str)
self.assertRaises(ValueError, a.tounicode)
self.assertRaises(ValueError, str, a)
class NumberTest(BaseTest):
    """Checks shared by all numeric typecodes: extended slicing, iteration,
    overflow helpers and subclassing.  Mixed into per-typecode classes."""

    def test_extslice(self):
        """Extended (stepped) read slices return the expected sub-arrays."""
        a = array.array(self.typecode, range(5))
        self.assertEqual(a[::], a)
        self.assertEqual(a[::2], array.array(self.typecode, [0,2,4]))
        self.assertEqual(a[1::2], array.array(self.typecode, [1,3]))
        self.assertEqual(a[::-1], array.array(self.typecode, [4,3,2,1,0]))
        self.assertEqual(a[::-2], array.array(self.typecode, [4,2,0]))
        self.assertEqual(a[3::-2], array.array(self.typecode, [3,1]))
        self.assertEqual(a[-100:100:], a)
        self.assertEqual(a[100:-100:-1], a[::-1])
        self.assertEqual(a[-100:100:2], array.array(self.typecode, [0,2,4]))
        self.assertEqual(a[1000:2000:2], array.array(self.typecode, []))
        self.assertEqual(a[-1000:-2000:-2], array.array(self.typecode, []))

    def test_delslice(self):
        """del over extended slices removes exactly the selected items."""
        a = array.array(self.typecode, range(5))
        del a[::2]
        self.assertEqual(a, array.array(self.typecode, [1,3]))
        a = array.array(self.typecode, range(5))
        del a[1::2]
        self.assertEqual(a, array.array(self.typecode, [0,2,4]))
        a = array.array(self.typecode, range(5))
        del a[1::-2]
        self.assertEqual(a, array.array(self.typecode, [0,2,3,4]))
        a = array.array(self.typecode, range(10))
        del a[::1000]
        self.assertEqual(a, array.array(self.typecode, [1,2,3,4,5,6,7,8,9]))
        # test issue7788
        a = array.array(self.typecode, range(10))
        del a[9::1<<333]

    def test_assignment(self):
        """Assignment through extended slices stores into the right slots."""
        a = array.array(self.typecode, range(10))
        a[::2] = array.array(self.typecode, [42]*5)
        self.assertEqual(a, array.array(self.typecode, [42, 1, 42, 3, 42, 5, 42, 7, 42, 9]))
        a = array.array(self.typecode, range(10))
        a[::-4] = array.array(self.typecode, [10]*3)
        self.assertEqual(a, array.array(self.typecode, [0, 10, 2, 3, 4, 10, 6, 7, 8 ,10]))
        a = array.array(self.typecode, range(4))
        a[::-1] = a
        self.assertEqual(a, array.array(self.typecode, [3, 2, 1, 0]))
        a = array.array(self.typecode, range(10))
        b = a[:]
        c = a[:]
        ins = array.array(self.typecode, range(2))
        # equivalent slice spellings must all be accepted
        a[2:3] = ins
        b[slice(2,3)] = ins
        c[2:3:] = ins

    def test_iterationcontains(self):
        """Iteration yields every item; ``in`` honours membership."""
        a = array.array(self.typecode, range(10))
        self.assertEqual(list(a), list(range(10)))
        b = array.array(self.typecode, [20])
        self.assertEqual(a[-1] in a, True)
        self.assertEqual(b[0] not in a, True)

    def check_overflow(self, lower, upper):
        """Assert values just outside [lower, upper] raise OverflowError."""
        # method to be used by subclasses
        # should not overflow assigning lower limit
        a = array.array(self.typecode, [lower])
        a[0] = lower
        # should overflow assigning less than lower limit
        self.assertRaises(OverflowError, array.array, self.typecode, [lower-1])
        self.assertRaises(OverflowError, a.__setitem__, 0, lower-1)
        # should not overflow assigning upper limit
        a = array.array(self.typecode, [upper])
        a[0] = upper
        # should overflow assigning more than upper limit
        self.assertRaises(OverflowError, array.array, self.typecode, [upper+1])
        self.assertRaises(OverflowError, a.__setitem__, 0, upper+1)

    def test_subclassing(self):
        """array.array subclasses with __slots__ and overridden dunders work."""
        typecode = self.typecode
        class ExaggeratingArray(array.array):
            __slots__ = ['offset']
            def __new__(cls, typecode, data, offset):
                return array.array.__new__(cls, typecode, data)
            def __init__(self, typecode, data, offset):
                self.offset = offset
            def __getitem__(self, i):
                return array.array.__getitem__(self, i) + self.offset
        a = ExaggeratingArray(self.typecode, [3, 6, 7, 11], 4)
        self.assertEntryEqual(a[0], 7)
        # __slots__ forbids setting attributes other than 'offset'
        self.assertRaises(AttributeError, setattr, a, "color", "blue")

    def test_frombytearray(self):
        """Constructing from a 'b' array copies its values unchanged."""
        a = array.array('b', range(10))
        b = array.array(self.typecode, a)
        self.assertEqual(a, b)
class IntegerNumberTest(NumberTest):
    """Behaviour shared by all integer typecodes: floats are rejected."""

    def test_type_error(self):
        """append() and item assignment must reject non-integer values."""
        a = array.array(self.typecode)
        a.append(42)
        with self.assertRaises(TypeError):
            a.append(42.0)
        with self.assertRaises(TypeError):
            a[0] = 42.0
class Intable:
    """Integer-like helper object.

    Supports conversion via __int__ and __index__ plus addition and
    subtraction, so the tests can feed non-int "index-able" objects
    into array range checks.
    """

    def __init__(self, num):
        self._value = num

    def __int__(self):
        return self._value

    def __index__(self):
        return self._value

    def __add__(self, other):
        return Intable(int(self) + int(other))

    def __sub__(self, other):
        return Intable(int(self) - int(other))
class SignedNumberTest(IntegerNumberTest):
    """Fixture data and range checks for the signed integer typecodes."""
    # Values that fit every signed typecode (smallest is 1 byte).
    example = [-1, 0, 1, 42, 0x7f]
    smallerexample = [-1, 0, 1, 42, 0x7e]
    biggerexample = [-1, 0, 1, 43, 0x7f]
    outside = 23

    def test_overflow(self):
        """Values outside the signed range of the typecode must overflow."""
        a = array.array(self.typecode)
        lower = -1 * int(pow(2, a.itemsize * 8 - 1))
        upper = int(pow(2, a.itemsize * 8 - 1)) - 1
        self.check_overflow(lower, upper)
        self.check_overflow(Intable(lower), Intable(upper))
class UnsignedNumberTest(IntegerNumberTest):
    """Fixture data and range checks for the unsigned integer typecodes."""
    example = [0, 1, 17, 23, 42, 0xff]
    smallerexample = [0, 1, 17, 23, 42, 0xfe]
    biggerexample = [0, 1, 17, 23, 43, 0xff]
    outside = 0xaa

    def test_overflow(self):
        """Values outside [0, 2**bits - 1] must overflow."""
        a = array.array(self.typecode)
        lower = 0
        upper = int(pow(2, a.itemsize * 8)) - 1
        self.check_overflow(lower, upper)
        self.check_overflow(Intable(lower), Intable(upper))

    def test_bytes_extend(self):
        """extend() accepts bytes and bytearray for unsigned arrays."""
        s = bytes(self.example)
        a = array.array(self.typecode, self.example)
        a.extend(s)
        self.assertEqual(
            a,
            array.array(self.typecode, self.example+self.example)
        )
        a = array.array(self.typecode, self.example)
        a.extend(bytearray(reversed(s)))
        self.assertEqual(
            a,
            array.array(self.typecode, self.example+self.example[::-1])
        )
class ByteTest(SignedNumberTest, unittest.TestCase):
    # 'b': signed char, at least 1 byte.
    typecode = 'b'
    minitemsize = 1
class UnsignedByteTest(UnsignedNumberTest, unittest.TestCase):
    # 'B': unsigned char, at least 1 byte.
    typecode = 'B'
    minitemsize = 1
class ShortTest(SignedNumberTest, unittest.TestCase):
    # 'h': signed short, at least 2 bytes.
    typecode = 'h'
    minitemsize = 2
class UnsignedShortTest(UnsignedNumberTest, unittest.TestCase):
    # 'H': unsigned short, at least 2 bytes.
    typecode = 'H'
    minitemsize = 2
class IntTest(SignedNumberTest, unittest.TestCase):
    # 'i': signed int; C guarantees only >= 2 bytes.
    typecode = 'i'
    minitemsize = 2
class UnsignedIntTest(UnsignedNumberTest, unittest.TestCase):
    # 'I': unsigned int; C guarantees only >= 2 bytes.
    typecode = 'I'
    minitemsize = 2
class LongTest(SignedNumberTest, unittest.TestCase):
    # 'l': signed long, at least 4 bytes.
    typecode = 'l'
    minitemsize = 4
class UnsignedLongTest(UnsignedNumberTest, unittest.TestCase):
    # 'L': unsigned long, at least 4 bytes.
    typecode = 'L'
    minitemsize = 4
class LongLongTest(SignedNumberTest, unittest.TestCase):
    # 'q': signed long long, at least 8 bytes.
    typecode = 'q'
    minitemsize = 8
class UnsignedLongLongTest(UnsignedNumberTest, unittest.TestCase):
    # 'Q': unsigned long long, at least 8 bytes.
    typecode = 'Q'
    minitemsize = 8
class FPTest(NumberTest):
    """Checks shared by the floating-point typecodes ('f' and 'd')."""
    example = [-42.0, 0, 42, 1e5, -1e10]
    smallerexample = [-42.0, 0, 42, 1e5, -2e10]
    biggerexample = [-42.0, 0, 42, 1e5, 1e10]
    outside = 23

    def assertEntryEqual(self, entry1, entry2):
        """Compare entries approximately to tolerate float rounding."""
        self.assertAlmostEqual(entry1, entry2)

    def test_nan(self):
        """Arrays containing NaN compare unequal and unordered."""
        a = array.array(self.typecode, [float('nan')])
        b = array.array(self.typecode, [float('nan')])
        self.assertIs(a != b, True)
        self.assertIs(a == b, False)
        self.assertIs(a > b, False)
        self.assertIs(a >= b, False)
        self.assertIs(a < b, False)
        self.assertIs(a <= b, False)

    def test_byteswap(self):
        """byteswap() round-trips; raw bytes (not floats) are compared."""
        a = array.array(self.typecode, self.example)
        self.assertRaises(TypeError, a.byteswap, 42)
        if a.itemsize in (1, 2, 4, 8):
            b = array.array(self.typecode, self.example)
            b.byteswap()
            if a.itemsize==1:
                self.assertEqual(a, b)
            else:
                # On alphas treating the byte swapped bit patters as
                # floats/doubles results in floating point exceptions
                # => compare the 8bit string values instead
                self.assertNotEqual(a.tobytes(), b.tobytes())
            b.byteswap()
            self.assertEqual(a, b)
class FloatTest(FPTest, unittest.TestCase):
    # 'f': C float, at least 4 bytes.
    typecode = 'f'
    minitemsize = 4
class DoubleTest(FPTest, unittest.TestCase):
    # 'd': C double, at least 8 bytes.
    typecode = 'd'
    minitemsize = 8

    def test_alloc_overflow(self):
        """Growing an array beyond sys.maxsize must raise MemoryError."""
        from sys import maxsize
        a = array.array('d', [-1]*65536)
        try:
            a *= maxsize//65536 + 1
        except MemoryError:
            pass
        else:
            self.fail("Array of size > maxsize created - MemoryError expected")
        b = array.array('d', [ 2.71828183, 3.14159265, -1])
        try:
            b * (maxsize//3 + 1)
        except MemoryError:
            pass
        else:
            self.fail("Array of size > maxsize created - MemoryError expected")
class LargeArrayTest(unittest.TestCase):
    """Stress tests on arrays larger than 2 GiB, gated by @support.bigmemtest."""
    typecode = 'b'

    def example(self, size):
        """Return a patterned array of ``size`` items plus trailer [8, 9, 10, 11]."""
        # We assess a base memuse of <=2.125 for constructing this array
        base = array.array(self.typecode, [0, 1, 2, 3, 4, 5, 6, 7]) * (size // 8)
        base += array.array(self.typecode, [99]*(size % 8) + [8, 9, 10, 11])
        return base

    @support.bigmemtest(_2G, memuse=2.125)
    def test_example_data(self, size):
        """The fixture has the expected total length."""
        example = self.example(size)
        self.assertEqual(len(example), size+4)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_access(self, size):
        """Indexing works at both ends, positive and negative."""
        example = self.example(size)
        self.assertEqual(example[0], 0)
        self.assertEqual(example[-(size+4)], 0)
        self.assertEqual(example[size], 8)
        self.assertEqual(example[-4], 8)
        self.assertEqual(example[size+3], 11)
        self.assertEqual(example[-1], 11)

    @support.bigmemtest(_2G, memuse=2.125+1)
    def test_slice(self, size):
        """Plain and stepped slicing work on huge arrays."""
        example = self.example(size)
        self.assertEqual(list(example[:4]), [0, 1, 2, 3])
        self.assertEqual(list(example[-4:]), [8, 9, 10, 11])
        part = example[1:-1]
        self.assertEqual(len(part), size+2)
        self.assertEqual(part[0], 1)
        self.assertEqual(part[-1], 10)
        del part
        part = example[::2]
        self.assertEqual(len(part), (size+5)//2)
        self.assertEqual(list(part[:4]), [0, 2, 4, 6])
        if size % 2:
            self.assertEqual(list(part[-2:]), [9, 11])
        else:
            self.assertEqual(list(part[-2:]), [8, 10])

    @support.bigmemtest(_2G, memuse=2.125)
    def test_count(self, size):
        """count() works past the 2 GiB boundary."""
        example = self.example(size)
        self.assertEqual(example.count(0), size//8)
        self.assertEqual(example.count(11), 1)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_append(self, size):
        """append() works on a huge array."""
        example = self.example(size)
        example.append(12)
        self.assertEqual(example[-1], 12)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_extend(self, size):
        """extend() from an iterator works on a huge array."""
        example = self.example(size)
        example.extend(iter([12, 13, 14, 15]))
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11, 12, 13, 14, 15])

    @support.bigmemtest(_2G, memuse=2.125)
    def test_frombytes(self, size):
        """frombytes() appends to a huge array."""
        example = self.example(size)
        example.frombytes(b'abcd')
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11] + list(b'abcd'))

    @support.bigmemtest(_2G, memuse=2.125)
    def test_fromlist(self, size):
        """fromlist() appends to a huge array."""
        example = self.example(size)
        example.fromlist([12, 13, 14, 15])
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11, 12, 13, 14, 15])

    @support.bigmemtest(_2G, memuse=2.125)
    def test_index(self, size):
        """index() finds items across the 2 GiB boundary."""
        example = self.example(size)
        self.assertEqual(example.index(0), 0)
        self.assertEqual(example.index(1), 1)
        self.assertEqual(example.index(7), 7)
        self.assertEqual(example.index(11), size+3)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_insert(self, size):
        """insert() works at the front, middle and past-2GiB positions."""
        example = self.example(size)
        example.insert(0, 12)
        example.insert(10, 13)
        example.insert(size+1, 14)
        self.assertEqual(len(example), size+7)
        self.assertEqual(example[0], 12)
        self.assertEqual(example[10], 13)
        self.assertEqual(example[size+1], 14)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_pop(self, size):
        """pop() from various positions keeps length and values consistent."""
        example = self.example(size)
        self.assertEqual(example.pop(0), 0)
        self.assertEqual(example[0], 1)
        self.assertEqual(example.pop(size+1), 10)
        self.assertEqual(example[size+1], 11)
        self.assertEqual(example.pop(1), 2)
        self.assertEqual(example[1], 3)
        self.assertEqual(len(example), size+1)
        self.assertEqual(example.pop(), 11)
        self.assertEqual(len(example), size)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_remove(self, size):
        """remove() deletes the first matching item in a huge array."""
        example = self.example(size)
        example.remove(0)
        self.assertEqual(len(example), size+3)
        self.assertEqual(example[0], 1)
        example.remove(10)
        self.assertEqual(len(example), size+2)
        self.assertEqual(example[size], 9)
        self.assertEqual(example[size+1], 11)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_reverse(self, size):
        """reverse() twice restores the original huge array."""
        example = self.example(size)
        example.reverse()
        self.assertEqual(len(example), size+4)
        self.assertEqual(example[0], 11)
        self.assertEqual(example[3], 8)
        self.assertEqual(example[-1], 0)
        example.reverse()
        self.assertEqual(len(example), size+4)
        self.assertEqual(list(example[:4]), [0, 1, 2, 3])
        self.assertEqual(list(example[-4:]), [8, 9, 10, 11])

    # list takes about 9 bytes per element
    @support.bigmemtest(_2G, memuse=2.125+9)
    def test_tolist(self, size):
        """tolist() preserves length and boundary values."""
        example = self.example(size)
        ls = example.tolist()
        self.assertEqual(len(ls), len(example))
        self.assertEqual(ls[:8], list(example[:8]))
        self.assertEqual(ls[-8:], list(example[-8:]))
# Run the full test suite when executed as a script.
if __name__ == "__main__":
    unittest.main()
# (removed stray dataset-table artifact: "| 34.751479 | 92 | 0.572999 |" — not Python source)
import unittest
from test import support
from test.support import _2G
import weakref
import pickle
import operator
import struct
import sys
import array
from array import _array_reconstructor as array_reconstructor
# Bytes per item of a 'u' array on this platform (wchar_t-sized by name).
sizeof_wchar = array.array('u').itemsize
class ArraySubclass(array.array):
    """Plain subclass used by the pickling tests; adds nothing."""
    pass
class ArraySubclassWithKwargs(array.array):
    """Subclass whose __init__ takes (and deliberately ignores) a kwarg."""
    def __init__(self, typecode, newarg=None):
        array.array.__init__(self)
# Every array typecode exercised by this test module.
typecodes = 'ubBhHiIlLfdqQ'
class MiscTest(unittest.TestCase):
    """Constructor argument validation and empty-array corner cases."""

    def test_bad_constructor(self):
        """array() requires exactly one one-character valid typecode."""
        self.assertRaises(TypeError, array.array)
        self.assertRaises(TypeError, array.array, spam=42)
        self.assertRaises(TypeError, array.array, 'xx')
        self.assertRaises(ValueError, array.array, 'x')

    def test_empty(self):
        """Slice-assign, concat and repeat on an empty array stay empty."""
        a = array.array('B')
        a[:] = a
        self.assertEqual(len(a), 0)
        self.assertEqual(len(a + a), 0)
        self.assertEqual(len(a * 3), 0)
        a += a
        self.assertEqual(len(a), 0)
# Machine-format codes passed as the ``mformat_code`` argument of
# array._array_reconstructor in ArrayReconstructorTest below.
UNKNOWN_FORMAT = -1
UNSIGNED_INT8 = 0
SIGNED_INT8 = 1
UNSIGNED_INT16_LE = 2
UNSIGNED_INT16_BE = 3
SIGNED_INT16_LE = 4
SIGNED_INT16_BE = 5
UNSIGNED_INT32_LE = 6
UNSIGNED_INT32_BE = 7
SIGNED_INT32_LE = 8
SIGNED_INT32_BE = 9
UNSIGNED_INT64_LE = 10
UNSIGNED_INT64_BE = 11
SIGNED_INT64_LE = 12
SIGNED_INT64_BE = 13
IEEE_754_FLOAT_LE = 14
IEEE_754_FLOAT_BE = 15
IEEE_754_DOUBLE_LE = 16
IEEE_754_DOUBLE_BE = 17
UTF16_LE = 18
UTF16_BE = 19
UTF32_LE = 20
UTF32_BE = 21
class ArrayReconstructorTest(unittest.TestCase):
    """Tests for array._array_reconstructor, the pickle helper."""

    def test_error(self):
        """Bad argument types, unknown codes and truncated data all raise."""
        self.assertRaises(TypeError, array_reconstructor,
                          "", "b", 0, b"")
        self.assertRaises(TypeError, array_reconstructor,
                          str, "b", 0, b"")
        self.assertRaises(TypeError, array_reconstructor,
                          array.array, "b", '', b"")
        self.assertRaises(TypeError, array_reconstructor,
                          array.array, "b", 0, "")
        self.assertRaises(ValueError, array_reconstructor,
                          array.array, "?", 0, b"")
        self.assertRaises(ValueError, array_reconstructor,
                          array.array, "b", UNKNOWN_FORMAT, b"")
        self.assertRaises(ValueError, array_reconstructor,
                          array.array, "b", 22, b"")
        self.assertRaises(ValueError, array_reconstructor,
                          array.array, "d", 16, b"a")

    def test_numbers(self):
        """Reconstructing from each numeric machine format matches the
        array built directly from the same values."""
        testcases = (
            (['B', 'H', 'I', 'L'], UNSIGNED_INT8, '=BBBB',
             [0x80, 0x7f, 0, 0xff]),
            (['b', 'h', 'i', 'l'], SIGNED_INT8, '=bbb',
             [-0x80, 0x7f, 0]),
            (['H', 'I', 'L'], UNSIGNED_INT16_LE, '<HHHH',
             [0x8000, 0x7fff, 0, 0xffff]),
            (['H', 'I', 'L'], UNSIGNED_INT16_BE, '>HHHH',
             [0x8000, 0x7fff, 0, 0xffff]),
            (['h', 'i', 'l'], SIGNED_INT16_LE, '<hhh',
             [-0x8000, 0x7fff, 0]),
            (['h', 'i', 'l'], SIGNED_INT16_BE, '>hhh',
             [-0x8000, 0x7fff, 0]),
            (['I', 'L'], UNSIGNED_INT32_LE, '<IIII',
             [1<<31, (1<<31)-1, 0, (1<<32)-1]),
            (['I', 'L'], UNSIGNED_INT32_BE, '>IIII',
             [1<<31, (1<<31)-1, 0, (1<<32)-1]),
            (['i', 'l'], SIGNED_INT32_LE, '<iii',
             [-1<<31, (1<<31)-1, 0]),
            (['i', 'l'], SIGNED_INT32_BE, '>iii',
             [-1<<31, (1<<31)-1, 0]),
            (['L'], UNSIGNED_INT64_LE, '<QQQQ',
             [1<<31, (1<<31)-1, 0, (1<<32)-1]),
            (['L'], UNSIGNED_INT64_BE, '>QQQQ',
             [1<<31, (1<<31)-1, 0, (1<<32)-1]),
            (['l'], SIGNED_INT64_LE, '<qqq',
             [-1<<31, (1<<31)-1, 0]),
            (['l'], SIGNED_INT64_BE, '>qqq',
             [-1<<31, (1<<31)-1, 0]),
            (['L'], UNSIGNED_INT64_LE, '<QQQQ',
             [1<<63, (1<<63)-1, 0, (1<<64)-1]),
            (['L'], UNSIGNED_INT64_BE, '>QQQQ',
             [1<<63, (1<<63)-1, 0, (1<<64)-1]),
            (['l'], SIGNED_INT64_LE, '<qqq',
             [-1<<63, (1<<63)-1, 0]),
            (['l'], SIGNED_INT64_BE, '>qqq',
             [-1<<63, (1<<63)-1, 0]),
            (['f'], IEEE_754_FLOAT_LE, '<ffff',
             [16711938.0, float('inf'), float('-inf'), -0.0]),
            (['f'], IEEE_754_FLOAT_BE, '>ffff',
             [16711938.0, float('inf'), float('-inf'), -0.0]),
            (['d'], IEEE_754_DOUBLE_LE, '<dddd',
             [9006104071832581.0, float('inf'), float('-inf'), -0.0]),
            (['d'], IEEE_754_DOUBLE_BE, '>dddd',
             [9006104071832581.0, float('inf'), float('-inf'), -0.0])
        )
        for testcase in testcases:
            valid_typecodes, mformat_code, struct_fmt, values = testcase
            arraystr = struct.pack(struct_fmt, *values)
            for typecode in valid_typecodes:
                try:
                    a = array.array(typecode, values)
                except OverflowError:
                    # values may not fit this typecode on this platform
                    continue
                b = array_reconstructor(
                    array.array, typecode, mformat_code, arraystr)
                self.assertEqual(a, b,
                    msg="{0!r} != {1!r}; testcase={2!r}".format(a, b, testcase))

    def test_unicode(self):
        """Reconstructing 'u' arrays from each UTF encoding round-trips."""
        teststr = "Bonne Journ\xe9e \U0002030a\U00020347"
        testcases = (
            (UTF16_LE, "UTF-16-LE"),
            (UTF16_BE, "UTF-16-BE"),
            (UTF32_LE, "UTF-32-LE"),
            (UTF32_BE, "UTF-32-BE")
        )
        for testcase in testcases:
            mformat_code, encoding = testcase
            a = array.array('u', teststr)
            b = array_reconstructor(
                array.array, 'u', mformat_code, teststr.encode(encoding))
            self.assertEqual(a, b,
                msg="{0!r} != {1!r}; testcase={2!r}".format(a, b, testcase))
class BaseTest:
    def assertEntryEqual(self, entry1, entry2):
        """Exact comparison of two items; FPTest overrides with almost-equal."""
        self.assertEqual(entry1, entry2)
def badtypecode(self):
return typecodes[(typecodes.index(self.typecode)+1) % len(typecodes)]
    def test_constructor(self):
        """Constructed array reports its typecode, itemsize; None rejected."""
        a = array.array(self.typecode)
        self.assertEqual(a.typecode, self.typecode)
        self.assertGreaterEqual(a.itemsize, self.minitemsize)
        self.assertRaises(TypeError, array.array, self.typecode, None)
def test_len(self):
a = array.array(self.typecode)
a.append(self.example[0])
self.assertEqual(len(a), 1)
a = array.array(self.typecode, self.example)
self.assertEqual(len(a), len(self.example))
    def test_buffer_info(self):
        """buffer_info() returns (address, item_count) and takes no args."""
        a = array.array(self.typecode, self.example)
        self.assertRaises(TypeError, a.buffer_info, 42)
        bi = a.buffer_info()
        self.assertIsInstance(bi, tuple)
        self.assertEqual(len(bi), 2)
        self.assertIsInstance(bi[0], int)
        self.assertIsInstance(bi[1], int)
        self.assertEqual(bi[1], len(a))
    def test_byteswap(self):
        """byteswap() changes multi-byte items and round-trips back."""
        if self.typecode == 'u':
            example = '\U00100100'
        else:
            example = self.example
        a = array.array(self.typecode, example)
        self.assertRaises(TypeError, a.byteswap, 42)
        if a.itemsize in (1, 2, 4, 8):
            b = array.array(self.typecode, example)
            b.byteswap()
            if a.itemsize==1:
                # single-byte items are unaffected by byte swapping
                self.assertEqual(a, b)
            else:
                self.assertNotEqual(a, b)
            b.byteswap()
            self.assertEqual(a, b)
def test_copy(self):
import copy
a = array.array(self.typecode, self.example)
b = copy.copy(a)
self.assertNotEqual(id(a), id(b))
self.assertEqual(a, b)
    def test_deepcopy(self):
        """copy.deepcopy() yields an equal but distinct array object."""
        import copy
        a = array.array(self.typecode, self.example)
        b = copy.deepcopy(a)
        self.assertNotEqual(id(a), id(b))
        self.assertEqual(a, b)
    def test_reduce_ex(self):
        """Pickle protocols 0-2 reduce via array.array, 3+ via the
        _array_reconstructor helper."""
        a = array.array(self.typecode, self.example)
        for protocol in range(3):
            self.assertIs(a.__reduce_ex__(protocol)[0], array.array)
        for protocol in range(3, pickle.HIGHEST_PROTOCOL + 1):
            self.assertIs(a.__reduce_ex__(protocol)[0], array_reconstructor)
    def test_pickle(self):
        """Pickling round-trips arrays and subclasses (incl. attributes)."""
        for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
            a = array.array(self.typecode, self.example)
            b = pickle.loads(pickle.dumps(a, protocol))
            self.assertNotEqual(id(a), id(b))
            self.assertEqual(a, b)
            # subclass instances keep their type and instance attributes
            a = ArraySubclass(self.typecode, self.example)
            a.x = 10
            b = pickle.loads(pickle.dumps(a, protocol))
            self.assertNotEqual(id(a), id(b))
            self.assertEqual(a, b)
            self.assertEqual(a.x, b.x)
            self.assertEqual(type(a), type(b))
    def test_pickle_for_empty_array(self):
        """Pickling round-trips empty arrays and empty subclasses."""
        for protocol in range(pickle.HIGHEST_PROTOCOL + 1):
            a = array.array(self.typecode)
            b = pickle.loads(pickle.dumps(a, protocol))
            self.assertNotEqual(id(a), id(b))
            self.assertEqual(a, b)
            a = ArraySubclass(self.typecode)
            a.x = 10
            b = pickle.loads(pickle.dumps(a, protocol))
            self.assertNotEqual(id(a), id(b))
            self.assertEqual(a, b)
            self.assertEqual(a.x, b.x)
            self.assertEqual(type(a), type(b))
    def test_iterator_pickle(self):
        """A pickled array iterator resumes at the same position, even
        after the underlying array is grown."""
        orig = array.array(self.typecode, self.example)
        data = list(orig)
        data2 = data[::-1]
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            # initial iterator
            itorig = iter(orig)
            d = pickle.dumps((itorig, orig), proto)
            it, a = pickle.loads(d)
            a.fromlist(data2)
            self.assertEqual(type(it), type(itorig))
            self.assertEqual(list(it), data + data2)
            # running iterator
            next(itorig)
            d = pickle.dumps((itorig, orig), proto)
            it, a = pickle.loads(d)
            a.fromlist(data2)
            self.assertEqual(type(it), type(itorig))
            self.assertEqual(list(it), data[1:] + data2)
            # empty iterator
            for i in range(1, len(data)):
                next(itorig)
            d = pickle.dumps((itorig, orig), proto)
            it, a = pickle.loads(d)
            a.fromlist(data2)
            self.assertEqual(type(it), type(itorig))
            self.assertEqual(list(it), data2)
            # exhausted iterator stays exhausted
            self.assertRaises(StopIteration, next, itorig)
            d = pickle.dumps((itorig, orig), proto)
            it, a = pickle.loads(d)
            a.fromlist(data2)
            self.assertEqual(list(it), [])
    def test_exhausted_iterator(self):
        """An exhausted iterator stays empty after append(); a partially
        exhausted one sees the appended item."""
        a = array.array(self.typecode, self.example)
        self.assertEqual(list(a), list(self.example))
        exhit = iter(a)
        empit = iter(a)
        for x in exhit:  # exhaust the iterator
            next(empit)  # not exhausted
        a.append(self.outside)
        self.assertEqual(list(exhit), [])
        self.assertEqual(list(empit), [self.outside])
        self.assertEqual(list(a), list(self.example) + [self.outside])
    def test_insert(self):
        """insert() handles negative indexes and out-of-range positions
        by clamping, like list.insert."""
        a = array.array(self.typecode, self.example)
        a.insert(0, self.example[0])
        self.assertEqual(len(a), 1+len(self.example))
        self.assertEqual(a[0], a[1])
        self.assertRaises(TypeError, a.insert)
        self.assertRaises(TypeError, a.insert, None)
        self.assertRaises(TypeError, a.insert, 0, None)
        a = array.array(self.typecode, self.example)
        a.insert(-1, self.example[0])
        self.assertEqual(
            a,
            array.array(
                self.typecode,
                self.example[:-1] + self.example[:1] + self.example[-1:]
            )
        )
        a = array.array(self.typecode, self.example)
        a.insert(-1000, self.example[0])
        self.assertEqual(
            a,
            array.array(self.typecode, self.example[:1] + self.example)
        )
        a = array.array(self.typecode, self.example)
        a.insert(1000, self.example[0])
        self.assertEqual(
            a,
            array.array(self.typecode, self.example + self.example[:1])
        )
    def test_tofromfile(self):
        """tofile()/fromfile() round-trip; reading past EOF raises EOFError
        after appending whatever items could still be read."""
        a = array.array(self.typecode, 2*self.example)
        self.assertRaises(TypeError, a.tofile)
        support.unlink(support.TESTFN)
        f = open(support.TESTFN, 'wb')
        try:
            a.tofile(f)
            f.close()
            b = array.array(self.typecode)
            f = open(support.TESTFN, 'rb')
            self.assertRaises(TypeError, b.fromfile)
            b.fromfile(f, len(self.example))
            self.assertEqual(b, array.array(self.typecode, self.example))
            self.assertNotEqual(a, b)
            self.assertRaises(EOFError, b.fromfile, f, len(self.example)+1)
            # fromfile appended the readable items before raising
            self.assertEqual(a, b)
            f.close()
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)
def test_fromfile_ioerror(self):
f = open(support.TESTFN, 'wb')
try:
self.assertRaises(OSError, a.fromfile, f, len(self.example))
finally:
f.close()
support.unlink(support.TESTFN)
    def test_filewrite(self):
        """file.write(array) writes the raw buffer; fromfile reads it back."""
        a = array.array(self.typecode, 2*self.example)
        f = open(support.TESTFN, 'wb')
        try:
            f.write(a)
            f.close()
            b = array.array(self.typecode)
            f = open(support.TESTFN, 'rb')
            b.fromfile(f, len(self.example))
            self.assertEqual(b, array.array(self.typecode, self.example))
            self.assertNotEqual(a, b)
            b.fromfile(f, len(self.example))
            self.assertEqual(a, b)
            f.close()
        finally:
            if not f.closed:
                f.close()
            support.unlink(support.TESTFN)
    def test_tofromlist(self):
        """tolist()/fromlist() round-trip; bad fromlist arguments raise."""
        a = array.array(self.typecode, 2*self.example)
        b = array.array(self.typecode)
        self.assertRaises(TypeError, a.tolist, 42)
        self.assertRaises(TypeError, b.fromlist)
        self.assertRaises(TypeError, b.fromlist, 42)
        self.assertRaises(TypeError, b.fromlist, [None])
        b.fromlist(a.tolist())
        self.assertEqual(a, b)
    def test_tofrombytes(self):
        """tobytes()/frombytes() round-trip; partial items are rejected."""
        a = array.array(self.typecode, 2*self.example)
        b = array.array(self.typecode)
        self.assertRaises(TypeError, a.tobytes, 42)
        self.assertRaises(TypeError, b.frombytes)
        self.assertRaises(TypeError, b.frombytes, 42)
        b.frombytes(a.tobytes())
        c = array.array(self.typecode, bytearray(a.tobytes()))
        self.assertEqual(a, b)
        self.assertEqual(a, c)
        if a.itemsize>1:
            # a single byte is not a whole item for multi-byte typecodes
            self.assertRaises(ValueError, b.frombytes, b"x")
def test_fromarray(self):
a = array.array(self.typecode, self.example)
b = array.array(self.typecode, a)
self.assertEqual(a, b)
    def test_repr(self):
        """repr() is eval()-able and compact for the empty array."""
        a = array.array(self.typecode, 2*self.example)
        self.assertEqual(a, eval(repr(a), {"array": array.array}))
        a = array.array(self.typecode)
        self.assertEqual(repr(a), "array('%s')" % self.typecode)
    def test_str(self):
        """str() of an array must not raise (smoke test only)."""
        a = array.array(self.typecode, 2*self.example)
        str(a)
    def test_cmp(self):
        """All six rich comparisons behave lexicographically, and arrays
        never equal unrelated objects."""
        a = array.array(self.typecode, self.example)
        self.assertIs(a == 42, False)
        self.assertIs(a != 42, True)
        self.assertIs(a == a, True)
        self.assertIs(a != a, False)
        self.assertIs(a < a, False)
        self.assertIs(a <= a, True)
        self.assertIs(a > a, False)
        self.assertIs(a >= a, True)
        al = array.array(self.typecode, self.smallerexample)
        ab = array.array(self.typecode, self.biggerexample)
        self.assertIs(a == 2*a, False)
        self.assertIs(a != 2*a, True)
        self.assertIs(a < 2*a, True)
        self.assertIs(a <= 2*a, True)
        self.assertIs(a > 2*a, False)
        self.assertIs(a >= 2*a, False)
        self.assertIs(a == al, False)
        self.assertIs(a != al, True)
        self.assertIs(a < al, False)
        self.assertIs(a <= al, False)
        self.assertIs(a > al, True)
        self.assertIs(a >= al, True)
        self.assertIs(a == ab, False)
        self.assertIs(a != ab, True)
        self.assertIs(a < ab, True)
        self.assertIs(a <= ab, True)
        self.assertIs(a > ab, False)
        self.assertIs(a >= ab, False)
    def test_add(self):
        """+ concatenates arrays; mismatched typecode or non-array raise."""
        a = array.array(self.typecode, self.example) \
            + array.array(self.typecode, self.example[::-1])
        self.assertEqual(
            a,
            array.array(self.typecode, self.example + self.example[::-1])
        )
        b = array.array(self.badtypecode())
        self.assertRaises(TypeError, a.__add__, b)
        self.assertRaises(TypeError, a.__add__, "bad")
    def test_iadd(self):
        """+= extends in place (same object), including self-extension."""
        a = array.array(self.typecode, self.example[::-1])
        b = a
        a += array.array(self.typecode, 2*self.example)
        self.assertIs(a, b)
        self.assertEqual(
            a,
            array.array(self.typecode, self.example[::-1]+2*self.example)
        )
        a = array.array(self.typecode, self.example)
        a += a
        self.assertEqual(
            a,
            array.array(self.typecode, self.example + self.example)
        )
        b = array.array(self.badtypecode())
        self.assertRaises(TypeError, a.__add__, b)
        self.assertRaises(TypeError, a.__iadd__, "bad")
    def test_mul(self):
        """* repeats; zero/negative factors give an empty array."""
        a = 5*array.array(self.typecode, self.example)
        self.assertEqual(
            a,
            array.array(self.typecode, 5*self.example)
        )
        a = array.array(self.typecode, self.example)*5
        self.assertEqual(
            a,
            array.array(self.typecode, self.example*5)
        )
        a = 0*array.array(self.typecode, self.example)
        self.assertEqual(
            a,
            array.array(self.typecode)
        )
        a = (-1)*array.array(self.typecode, self.example)
        self.assertEqual(
            a,
            array.array(self.typecode)
        )
        a = 5 * array.array(self.typecode, self.example[:1])
        self.assertEqual(
            a,
            array.array(self.typecode, [a[0]] * 5)
        )
        self.assertRaises(TypeError, a.__mul__, "bad")
    def test_imul(self):
        """*= repeats in place (same object); <=0 factors empty the array."""
        a = array.array(self.typecode, self.example)
        b = a
        a *= 5
        self.assertIs(a, b)
        self.assertEqual(
            a,
            array.array(self.typecode, 5*self.example)
        )
        a *= 0
        self.assertIs(a, b)
        self.assertEqual(a, array.array(self.typecode))
        a *= 1000
        self.assertIs(a, b)
        self.assertEqual(a, array.array(self.typecode))
        a *= -1
        self.assertIs(a, b)
        self.assertEqual(a, array.array(self.typecode))
        a = array.array(self.typecode, self.example)
        a *= -1
        self.assertEqual(a, array.array(self.typecode))
        self.assertRaises(TypeError, a.__imul__, "bad")
    def test_getitem(self):
        """Indexing honours negative indexes and raises on out-of-range."""
        a = array.array(self.typecode, self.example)
        self.assertEntryEqual(a[0], self.example[0])
        self.assertEntryEqual(a[0], self.example[0])
        self.assertEntryEqual(a[-1], self.example[-1])
        self.assertEntryEqual(a[-1], self.example[-1])
        self.assertEntryEqual(a[len(self.example)-1], self.example[-1])
        self.assertEntryEqual(a[-len(self.example)], self.example[0])
        self.assertRaises(TypeError, a.__getitem__)
        self.assertRaises(IndexError, a.__getitem__, len(self.example))
        self.assertRaises(IndexError, a.__getitem__, -len(self.example)-1)
    def test_setitem(self):
        """Item assignment honours negative indexes; bad index/value raise."""
        a = array.array(self.typecode, self.example)
        a[0] = a[-1]
        self.assertEntryEqual(a[0], a[-1])
        a = array.array(self.typecode, self.example)
        a[0] = a[-1]
        self.assertEntryEqual(a[0], a[-1])
        a = array.array(self.typecode, self.example)
        a[-1] = a[0]
        self.assertEntryEqual(a[0], a[-1])
        a = array.array(self.typecode, self.example)
        a[-1] = a[0]
        self.assertEntryEqual(a[0], a[-1])
        a = array.array(self.typecode, self.example)
        a[len(self.example)-1] = a[0]
        self.assertEntryEqual(a[0], a[-1])
        a = array.array(self.typecode, self.example)
        a[-len(self.example)] = a[-1]
        self.assertEntryEqual(a[0], a[-1])
        self.assertRaises(TypeError, a.__setitem__)
        self.assertRaises(TypeError, a.__setitem__, None)
        self.assertRaises(TypeError, a.__setitem__, 0, None)
        self.assertRaises(
            IndexError,
            a.__setitem__,
            len(self.example), self.example[0]
        )
        self.assertRaises(
            IndexError,
            a.__setitem__,
            -len(self.example)-1, self.example[0]
        )
    def test_delitem(self):
        """del a[i] removes the item; bad/out-of-range indexes raise."""
        a = array.array(self.typecode, self.example)
        del a[0]
        self.assertEqual(
            a,
            array.array(self.typecode, self.example[1:])
        )
        a = array.array(self.typecode, self.example)
        del a[-1]
        self.assertEqual(
            a,
            array.array(self.typecode, self.example[:-1])
        )
        a = array.array(self.typecode, self.example)
        del a[len(self.example)-1]
        self.assertEqual(
            a,
            array.array(self.typecode, self.example[:-1])
        )
        a = array.array(self.typecode, self.example)
        del a[-len(self.example)]
        self.assertEqual(
            a,
            array.array(self.typecode, self.example[1:])
        )
        self.assertRaises(TypeError, a.__delitem__)
        self.assertRaises(TypeError, a.__delitem__, None)
        self.assertRaises(IndexError, a.__delitem__, len(self.example))
        self.assertRaises(IndexError, a.__delitem__, -len(self.example)-1)
    def test_getslice(self):
        """Simple (step-1) slicing clamps out-of-range bounds like lists."""
        a = array.array(self.typecode, self.example)
        self.assertEqual(a[:], a)
        self.assertEqual(
            a[1:],
            array.array(self.typecode, self.example[1:])
        )
        self.assertEqual(
            a[:1],
            array.array(self.typecode, self.example[:1])
        )
        self.assertEqual(
            a[:-1],
            array.array(self.typecode, self.example[:-1])
        )
        self.assertEqual(
            a[-1:],
            array.array(self.typecode, self.example[-1:])
        )
        self.assertEqual(
            a[-1:-1],
            array.array(self.typecode)
        )
        self.assertEqual(
            a[2:1],
            array.array(self.typecode)
        )
        self.assertEqual(
            a[1000:],
            array.array(self.typecode)
        )
        self.assertEqual(a[-1000:], a)
        self.assertEqual(a[:1000], a)
        self.assertEqual(
            a[:-1000],
            array.array(self.typecode)
        )
        self.assertEqual(a[-1000:1000], a)
        self.assertEqual(
            a[2000:1000],
            array.array(self.typecode)
        )
def test_extended_getslice(self):
a = array.array(self.typecode, self.example)
indices = (0, None, 1, 3, 19, 100, sys.maxsize, -1, -2, -31, -100)
for start in indices:
for stop in indices:
for step in indices[1:]:
self.assertEqual(list(a[start:stop:step]),
list(a)[start:stop:step])
    def test_setslice(self):
        """Step-1 slice assignment splices (including self-assignment with
        clamped bounds); wrong value types raise TypeError."""
        a = array.array(self.typecode, self.example)
        a[:1] = a
        self.assertEqual(
            a,
            array.array(self.typecode, self.example + self.example[1:])
        )
        a = array.array(self.typecode, self.example)
        a[:-1] = a
        self.assertEqual(
            a,
            array.array(self.typecode, self.example + self.example[-1:])
        )
        a = array.array(self.typecode, self.example)
        a[-1:] = a
        self.assertEqual(
            a,
            array.array(self.typecode, self.example[:-1] + self.example)
        )
        a = array.array(self.typecode, self.example)
        a[1:] = a
        self.assertEqual(
            a,
            array.array(self.typecode, self.example[:1] + self.example)
        )
        a = array.array(self.typecode, self.example)
        a[1:-1] = a
        self.assertEqual(
            a,
            array.array(
                self.typecode,
                self.example[:1] + self.example + self.example[-1:]
            )
        )
        a = array.array(self.typecode, self.example)
        a[1000:] = a
        self.assertEqual(
            a,
            array.array(self.typecode, 2*self.example)
        )
        a = array.array(self.typecode, self.example)
        a[-1000:] = a
        self.assertEqual(
            a,
            array.array(self.typecode, self.example)
        )
        a = array.array(self.typecode, self.example)
        a[:1000] = a
        self.assertEqual(
            a,
            array.array(self.typecode, self.example)
        )
        a = array.array(self.typecode, self.example)
        a[:-1000] = a
        self.assertEqual(
            a,
            array.array(self.typecode, 2*self.example)
        )
        a = array.array(self.typecode, self.example)
        a[1:0] = a
        self.assertEqual(
            a,
            array.array(self.typecode, self.example[:1] + self.example + self.example[1:])
        )
        a = array.array(self.typecode, self.example)
        a[2000:1000] = a
        self.assertEqual(
            a,
            array.array(self.typecode, 2*self.example)
        )
        a = array.array(self.typecode, self.example)
        self.assertRaises(TypeError, a.__setitem__, slice(0, 0), None)
        self.assertRaises(TypeError, a.__setitem__, slice(0, 1), None)
        b = array.array(self.badtypecode())
        self.assertRaises(TypeError, a.__setitem__, slice(0, 0), b)
        self.assertRaises(TypeError, a.__setitem__, slice(0, 1), b)
    def test_extended_set_del_slice(self):
        """Extended slice assignment/deletion mirrors list behaviour for a
        grid of start/stop/step combinations."""
        indices = (0, None, 1, 3, 19, 100, sys.maxsize, -1, -2, -31, -100)
        for start in indices:
            for stop in indices:
                # Everything except the initial 0 (invalid step)
                for step in indices[1:]:
                    a = array.array(self.typecode, self.example)
                    L = list(a)
                    # Make sure we have a slice of exactly the right length,
                    # but with (hopefully) different data.
                    data = L[start:stop:step]
                    data.reverse()
                    L[start:stop:step] = data
                    a[start:stop:step] = array.array(self.typecode, data)
                    self.assertEqual(a, array.array(self.typecode, L))
                    del L[start:stop:step]
                    del a[start:stop:step]
                    self.assertEqual(a, array.array(self.typecode, L))
    def test_index(self):
        """index() matches list.index; missing values raise ValueError."""
        example = 2*self.example
        a = array.array(self.typecode, example)
        self.assertRaises(TypeError, a.index)
        for x in example:
            self.assertEqual(a.index(x), example.index(x))
        self.assertRaises(ValueError, a.index, None)
        self.assertRaises(ValueError, a.index, self.outside)
    def test_count(self):
        """count() matches list.count; absent values count zero."""
        example = 2*self.example
        a = array.array(self.typecode, example)
        self.assertRaises(TypeError, a.count)
        for x in example:
            self.assertEqual(a.count(x), example.count(x))
        self.assertEqual(a.count(self.outside), 0)
        self.assertEqual(a.count(None), 0)
    def test_remove(self):
        """remove() deletes only the first occurrence; missing values raise."""
        for x in self.example:
            example = 2*self.example
            a = array.array(self.typecode, example)
            pos = example.index(x)
            example2 = example[:pos] + example[pos+1:]
            a.remove(x)
            self.assertEqual(a, array.array(self.typecode, example2))
        a = array.array(self.typecode, self.example)
        self.assertRaises(ValueError, a.remove, self.outside)
        self.assertRaises(ValueError, a.remove, None)
def test_pop(self):
    """pop() honours positive/negative indices and defaults to the last item."""
    a = array.array(self.typecode)
    # Popping from an empty array raises IndexError.
    self.assertRaises(IndexError, a.pop)
    a = array.array(self.typecode, 2*self.example)
    # Bad argument counts/types and out-of-range indices are rejected.
    self.assertRaises(TypeError, a.pop, 42, 42)
    self.assertRaises(TypeError, a.pop, None)
    self.assertRaises(IndexError, a.pop, len(a))
    self.assertRaises(IndexError, a.pop, -len(a)-1)
    # Pop selected elements and verify the remaining contents each time.
    self.assertEntryEqual(a.pop(0), self.example[0])
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[1:] + self.example)
    )
    self.assertEntryEqual(a.pop(1), self.example[2])
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[1:2] + self.example[3:] + self.example)
    )
    self.assertEntryEqual(a.pop(0), self.example[1])
    # Without an argument, pop() removes and returns the last element.
    self.assertEntryEqual(a.pop(), self.example[-1])
    self.assertEqual(
        a,
        array.array(self.typecode, self.example[3:] + self.example[:-1])
    )
def test_reverse(self):
a = array.array(self.typecode, self.example)
self.assertRaises(TypeError, a.reverse, 42)
a.reverse()
self.assertEqual(
a,
array.array(self.typecode, self.example[::-1])
)
def test_extend(self):
    """extend() accepts same-typecode arrays (including the array itself)
    and arbitrary iterables, but rejects mismatched typecodes."""
    a = array.array(self.typecode, self.example)
    self.assertRaises(TypeError, a.extend)
    a.extend(array.array(self.typecode, self.example[::-1]))
    self.assertEqual(
        a,
        array.array(self.typecode, self.example + self.example[::-1])
    )
    # Extending an array with itself must not loop forever.
    a = array.array(self.typecode, self.example)
    a.extend(a)
    self.assertEqual(
        a,
        array.array(self.typecode, self.example + self.example)
    )
    # An array of a different typecode is rejected.
    b = array.array(self.badtypecode())
    self.assertRaises(TypeError, a.extend, b)
    # Plain iterables of compatible values are accepted too.
    a = array.array(self.typecode, self.example)
    a.extend(self.example[::-1])
    self.assertEqual(
        a,
        array.array(self.typecode, self.example + self.example[::-1])
    )
def test_constructor_with_iterable_argument(self):
    """The constructor consumes iterables and propagates their exceptions."""
    a = array.array(self.typecode, iter(self.example))
    b = array.array(self.typecode, self.example)
    self.assertEqual(a, b)
    # A non-iterable initializer is a TypeError.
    self.assertRaises(TypeError, array.array, self.typecode, 10)
    # Exceptions raised while iterating must propagate unchanged.
    class A:
        def __iter__(self):
            raise UnicodeError
    self.assertRaises(UnicodeError, array.array, self.typecode, A())
    def B():
        raise UnicodeError
        yield None  # unreachable; makes B a generator function
    self.assertRaises(UnicodeError, array.array, self.typecode, B())
def test_coveritertraverse(self):
    """Exercise the GC traversal of the array iterator (coverage test)."""
    try:
        import gc
    except ImportError:
        self.skipTest('gc module not available')
    a = array.array(self.typecode)
    # Put an array iterator inside a self-referential cycle so that
    # gc.collect() has to traverse the iterator object.
    l = [iter(a)]
    l.append(l)
    gc.collect()
def test_buffer(self):
    """While a memoryview is exported, every operation that could resize or
    reallocate the array's buffer must raise BufferError and leave the
    exported data unchanged."""
    a = array.array(self.typecode, self.example)
    m = memoryview(a)
    expected = m.tobytes()
    self.assertEqual(a.tobytes(), expected)
    self.assertEqual(a.tobytes()[0], expected[0])
    # Each mutating call below would resize the buffer: all must fail,
    # and the memoryview contents must be untouched afterwards.
    self.assertRaises(BufferError, a.append, a[0])
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, a.extend, a[0:1])
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, a.remove, a[0])
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, a.pop, 0)
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, a.fromlist, a.tolist())
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, a.frombytes, a.tobytes())
    self.assertEqual(m.tobytes(), expected)
    if self.typecode == 'u':
        self.assertRaises(BufferError, a.fromunicode, a.tounicode())
        self.assertEqual(m.tobytes(), expected)
    # In-place repeat and slice/item deletion also resize the buffer.
    self.assertRaises(BufferError, operator.imul, a, 2)
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, operator.imul, a, 0)
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, operator.setitem, a, slice(0, 0), a)
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, operator.delitem, a, 0)
    self.assertEqual(m.tobytes(), expected)
    self.assertRaises(BufferError, operator.delitem, a, slice(0, 1))
    self.assertEqual(m.tobytes(), expected)
def test_weakref(self):
    """Arrays must be weak-referenceable, and the proxy must die with them."""
    s = array.array(self.typecode, self.example)
    p = weakref.proxy(s)
    self.assertEqual(p.tobytes(), s.tobytes())
    s = None
    # Dropping the last strong reference frees the array immediately only
    # on refcounting builds; force a collection so the ReferenceError
    # check also holds on GC-based implementations (same fix as upstream
    # CPython's test_array).
    support.gc_collect()
    self.assertRaises(ReferenceError, len, p)
@unittest.skipUnless(hasattr(sys, 'getrefcount'),
                     'test needs sys.getrefcount()')
def test_bug_782369(self):
    """Regression test: array creation must not leak references (bug 782369)."""
    # First round warms up any internal caches so the refcount of the
    # unrelated int object 10 stabilises before we sample it.
    for i in range(10):
        b = array.array('B', range(64))
    rc = sys.getrefcount(10)
    # Creating more arrays must leave that refcount unchanged.
    for i in range(10):
        b = array.array('B', range(64))
    self.assertEqual(rc, sys.getrefcount(10))
def test_subclass_with_kwargs(self):
ef test_create_from_bytes(self):
a = array.array('H', b"1234")
self.assertEqual(len(a) * a.itemsize, 4)
@support.cpython_only
def test_sizeof_with_buffer(self):
    """sys.getsizeof must account for the allocated item buffer."""
    a = array.array(self.typecode, self.example)
    # 'Pn2Pi' is presumably the CPython arrayobject struct layout --
    # TODO confirm against Modules/arrayobject.c if this ever changes.
    basesize = support.calcvobjsize('Pn2Pi')
    buffer_size = a.buffer_info()[1] * a.itemsize
    support.check_sizeof(self, a, basesize + buffer_size)
@support.cpython_only
def test_sizeof_without_buffer(self):
    """An empty array's size is just the object header (no item buffer)."""
    a = array.array(self.typecode)
    basesize = support.calcvobjsize('Pn2Pi')
    support.check_sizeof(self, a, basesize)
def test_initialize_with_unicode(self):
    """Only the 'u' typecode may be initialized from str or 'u' arrays."""
    if self.typecode != 'u':
        # Non-text typecodes must reject str initializers with a clear
        # error message.
        with self.assertRaises(TypeError) as cm:
            a = array.array(self.typecode, 'foo')
        self.assertIn("cannot use a str", str(cm.exception))
        with self.assertRaises(TypeError) as cm:
            a = array.array(self.typecode, array.array('u', 'foo'))
        self.assertIn("cannot use a unicode array", str(cm.exception))
    else:
        # The 'u' typecode accepts both forms.
        a = array.array(self.typecode, "foo")
        a = array.array(self.typecode, array.array('u', 'foo'))
@support.cpython_only
def test_obsolete_write_lock(self):
    """Requesting a buffer with a NULL view must raise BufferError."""
    # _testcapi is a CPython-internal test helper module.
    from _testcapi import getbuffer_with_null_view
    a = array.array('B', b"")
    self.assertRaises(BufferError, getbuffer_with_null_view, a)
def test_free_after_iterating(self):
    """Deallocating the array right after (reverse) iteration must not crash."""
    support.check_free_after_iterating(self, iter, array.array,
                                       (self.typecode,))
    support.check_free_after_iterating(self, reversed, array.array,
                                       (self.typecode,))
class StringTest(BaseTest):
    """Extra checks shared by text-typed array tests."""

    def test_setitem(self):
        super().test_setitem()
        a = array.array(self.typecode, self.example)
        # Assigning a multi-character string to a single slot must fail.
        self.assertRaises(TypeError, a.__setitem__, 0, self.example[:2])
class UnicodeTest(StringTest, unittest.TestCase):
    """Tests specific to the 'u' (wchar_t-based) typecode."""

    typecode = 'u'
    example = '\x01\u263a\x00\ufeff'
    smallerexample = '\x01\u263a\x00\ufefe'
    biggerexample = '\x01\u263a\x01\ufeff'
    outside = str('\x33')
    minitemsize = 2

    def test_unicode(self):
        # Non-'u' arrays must reject str initializers.
        self.assertRaises(TypeError, array.array, 'b', 'foo')
        a = array.array('u', '\xa0\xc2\u1234')
        a.fromunicode(' ')
        a.fromunicode('')
        a.fromunicode('')
        a.fromunicode('\x11abc\xff\u1234')
        s = a.tounicode()
        self.assertEqual(s, '\xa0\xc2\u1234 \x11abc\xff\u1234')
        self.assertEqual(a.itemsize, sizeof_wchar)
        # repr() must correctly escape quotes, backslashes and controls.
        s = '\x00="\'a\\b\x80\xff\u0000\u0001\u1234'
        a = array.array('u', s)
        self.assertEqual(
            repr(a),
            "array('u', '\\x00=\"\\'a\\\\b\\x80\xff\\x00\\x01\u1234')")
        self.assertRaises(TypeError, a.fromunicode)

    def test_issue17223(self):
        """tounicode()/str() on an invalid code point must raise, not crash."""
        if sizeof_wchar == 4:
            # U+FFFFFFFF is not a valid code point.
            invalid_str = b'\xff\xff\xff\xff'
        else:
            self.skipTest("specific to 32-bit wchar_t")
        a = array.array('u', invalid_str)
        self.assertRaises(ValueError, a.tounicode)
        self.assertRaises(ValueError, str, a)
class NumberTest(BaseTest):
    """Checks shared by all numeric typecodes (integer and floating point)."""

    def test_extslice(self):
        a = array.array(self.typecode, range(5))
        self.assertEqual(a[::], a)
        self.assertEqual(a[::2], array.array(self.typecode, [0,2,4]))
        self.assertEqual(a[1::2], array.array(self.typecode, [1,3]))
        self.assertEqual(a[::-1], array.array(self.typecode, [4,3,2,1,0]))
        self.assertEqual(a[::-2], array.array(self.typecode, [4,2,0]))
        self.assertEqual(a[3::-2], array.array(self.typecode, [3,1]))
        # Out-of-range bounds are clamped, exactly as for lists.
        self.assertEqual(a[-100:100:], a)
        self.assertEqual(a[100:-100:-1], a[::-1])
        self.assertEqual(a[-100:100:2], array.array(self.typecode, [0,2,4]))
        self.assertEqual(a[1000:2000:2], array.array(self.typecode, []))
        self.assertEqual(a[-1000:-2000:-2], array.array(self.typecode, []))

    def test_delslice(self):
        a = array.array(self.typecode, range(5))
        del a[::2]
        self.assertEqual(a, array.array(self.typecode, [1,3]))
        a = array.array(self.typecode, range(5))
        del a[1::2]
        self.assertEqual(a, array.array(self.typecode, [0,2,4]))
        a = array.array(self.typecode, range(5))
        del a[1::-2]
        self.assertEqual(a, array.array(self.typecode, [0,2,3,4]))
        a = array.array(self.typecode, range(10))
        del a[::1000]
        self.assertEqual(a, array.array(self.typecode, [1,2,3,4,5,6,7,8,9]))
        # A huge step value must be handled without crashing.
        a = array.array(self.typecode, range(10))
        del a[9::1<<333]

    def test_assignment(self):
        a = array.array(self.typecode, range(10))
        a[::2] = array.array(self.typecode, [42]*5)
        self.assertEqual(a, array.array(self.typecode, [42, 1, 42, 3, 42, 5, 42, 7, 42, 9]))
        a = array.array(self.typecode, range(10))
        a[::-4] = array.array(self.typecode, [10]*3)
        self.assertEqual(a, array.array(self.typecode, [0, 10, 2, 3, 4, 10, 6, 7, 8 ,10]))
        # Self-assignment through a reversed slice.
        a = array.array(self.typecode, range(4))
        a[::-1] = a
        self.assertEqual(a, array.array(self.typecode, [3, 2, 1, 0]))
        # Equivalent spellings of the same simple slice assignment.
        a = array.array(self.typecode, range(10))
        b = a[:]
        c = a[:]
        ins = array.array(self.typecode, range(2))
        a[2:3] = ins
        b[slice(2,3)] = ins
        c[2:3:] = ins

    def test_iterationcontains(self):
        a = array.array(self.typecode, range(10))
        self.assertEqual(list(a), list(range(10)))
        b = array.array(self.typecode, [20])
        self.assertEqual(a[-1] in a, True)
        self.assertEqual(b[0] not in a, True)

    def check_overflow(self, lower, upper):
        # Helper: the given bounds are storable; one past them overflows.
        a = array.array(self.typecode, [lower])
        a[0] = lower
        self.assertRaises(OverflowError, array.array, self.typecode, [lower-1])
        self.assertRaises(OverflowError, a.__setitem__, 0, lower-1)
        a = array.array(self.typecode, [upper])
        a[0] = upper
        self.assertRaises(OverflowError, array.array, self.typecode, [upper+1])
        self.assertRaises(OverflowError, a.__setitem__, 0, upper+1)

    def test_subclassing(self):
        typecode = self.typecode
        class ExaggeratingArray(array.array):
            __slots__ = ['offset']
            def __new__(cls, typecode, data, offset):
                return array.array.__new__(cls, typecode, data)
            def __init__(self, typecode, data, offset):
                self.offset = offset
            def __getitem__(self, i):
                return array.array.__getitem__(self, i) + self.offset
        a = ExaggeratingArray(self.typecode, [3, 6, 7, 11], 4)
        self.assertEntryEqual(a[0], 7)
        # __slots__ prevents setting arbitrary attributes on instances.
        self.assertRaises(AttributeError, setattr, a, "color", "blue")

    def test_frombytearray(self):
        a = array.array('b', range(10))
        b = array.array(self.typecode, a)
        self.assertEqual(a, b)
class IntegerNumberTest(NumberTest):
    """Checks shared by all integer typecodes."""

    def test_type_error(self):
        """Floats must be rejected both by append() and by item assignment."""
        arr = array.array(self.typecode)
        arr.append(42)
        # Same TypeError check, spelled with the callable assertRaises form.
        self.assertRaises(TypeError, arr.append, 42.0)
        self.assertRaises(TypeError, arr.__setitem__, 0, 42.0)
class Intable:
    """Integer-like wrapper used to exercise __index__/__int__ support."""

    def __init__(self, num):
        self._num = num

    def __index__(self):
        # Lets instances be used wherever an integer index is required.
        return self._num

    def __int__(self):
        return self._num

    def __sub__(self, other):
        return type(self)(int(self) - int(other))

    def __add__(self, other):
        return type(self)(int(self) + int(other))
class SignedNumberTest(IntegerNumberTest):
    """Fixtures and range checks shared by the signed integer typecodes."""

    example = [-1, 0, 1, 42, 0x7f]
    smallerexample = [-1, 0, 1, 42, 0x7e]
    biggerexample = [-1, 0, 1, 43, 0x7f]
    outside = 23

    def test_overflow(self):
        a = array.array(self.typecode)
        # Two's-complement range: -2**(bits-1) .. 2**(bits-1) - 1.
        lower = -1 * int(pow(2, a.itemsize * 8 - 1))
        upper = int(pow(2, a.itemsize * 8 - 1)) - 1
        self.check_overflow(lower, upper)
        # The same bounds supplied through objects implementing __index__.
        self.check_overflow(Intable(lower), Intable(upper))
class UnsignedNumberTest(IntegerNumberTest):
    """Fixtures and range checks shared by the unsigned integer typecodes."""

    example = [0, 1, 17, 23, 42, 0xff]
    smallerexample = [0, 1, 17, 23, 42, 0xfe]
    biggerexample = [0, 1, 17, 23, 43, 0xff]
    outside = 0xaa

    def test_overflow(self):
        a = array.array(self.typecode)
        # Unsigned range: 0 .. 2**bits - 1.
        lower = 0
        upper = int(pow(2, a.itemsize * 8)) - 1
        self.check_overflow(lower, upper)
        self.check_overflow(Intable(lower), Intable(upper))

    def test_bytes_extend(self):
        """bytes and bytearray are valid extend() sources for these codes."""
        s = bytes(self.example)
        a = array.array(self.typecode, self.example)
        a.extend(s)
        self.assertEqual(
            a,
            array.array(self.typecode, self.example + self.example)
        )
        a = array.array(self.typecode, self.example)
        a.extend(bytearray(reversed(s)))
        self.assertEqual(
            a,
            array.array(self.typecode, self.example + self.example[::-1])
        )
# One concrete test class per integer typecode.  Each class only pins down
# the typecode letter and the minimum guaranteed item size (in bytes); all
# actual test logic is inherited from the Signed/UnsignedNumberTest bases.
class ByteTest(SignedNumberTest, unittest.TestCase):
    typecode = 'b'
    minitemsize = 1

class UnsignedByteTest(UnsignedNumberTest, unittest.TestCase):
    typecode = 'B'
    minitemsize = 1

class ShortTest(SignedNumberTest, unittest.TestCase):
    typecode = 'h'
    minitemsize = 2

class UnsignedShortTest(UnsignedNumberTest, unittest.TestCase):
    typecode = 'H'
    minitemsize = 2

# C guarantees only sizeof(int) >= 2, hence the conservative minimum here.
class IntTest(SignedNumberTest, unittest.TestCase):
    typecode = 'i'
    minitemsize = 2

class UnsignedIntTest(UnsignedNumberTest, unittest.TestCase):
    typecode = 'I'
    minitemsize = 2

class LongTest(SignedNumberTest, unittest.TestCase):
    typecode = 'l'
    minitemsize = 4

class UnsignedLongTest(UnsignedNumberTest, unittest.TestCase):
    typecode = 'L'
    minitemsize = 4

class LongLongTest(SignedNumberTest, unittest.TestCase):
    typecode = 'q'
    minitemsize = 8

class UnsignedLongLongTest(UnsignedNumberTest, unittest.TestCase):
    typecode = 'Q'
    minitemsize = 8
class FPTest(NumberTest):
    """Checks shared by the floating point typecodes ('f' and 'd')."""

    example = [-42.0, 0, 42, 1e5, -1e10]
    smallerexample = [-42.0, 0, 42, 1e5, -2e10]
    biggerexample = [-42.0, 0, 42, 1e5, 1e10]
    outside = 23

    def assertEntryEqual(self, entry1, entry2):
        # Single-precision storage loses bits, so compare approximately.
        self.assertAlmostEqual(entry1, entry2)

    def test_nan(self):
        """NaN never compares equal, not even to itself (IEEE 754)."""
        a = array.array(self.typecode, [float('nan')])
        b = array.array(self.typecode, [float('nan')])
        self.assertIs(a != b, True)
        self.assertIs(a == b, False)
        self.assertIs(a > b, False)
        self.assertIs(a >= b, False)
        self.assertIs(a < b, False)
        self.assertIs(a <= b, False)

    def test_byteswap(self):
        a = array.array(self.typecode, self.example)
        self.assertRaises(TypeError, a.byteswap, 42)
        if a.itemsize in (1, 2, 4, 8):
            b = array.array(self.typecode, self.example)
            b.byteswap()
            if a.itemsize==1:
                # One-byte items are unaffected by byte swapping.
                self.assertEqual(a, b)
            else:
                self.assertNotEqual(a.tobytes(), b.tobytes())
            # Swapping twice restores the original byte order.
            b.byteswap()
            self.assertEqual(a, b)
class FloatTest(FPTest, unittest.TestCase):
    typecode = 'f'
    minitemsize = 4

class DoubleTest(FPTest, unittest.TestCase):
    typecode = 'd'
    minitemsize = 8

    def test_alloc_overflow(self):
        """Oversized repetition must raise MemoryError, not wrap around."""
        from sys import maxsize
        a = array.array('d', [-1]*65536)
        try:
            # In-place repeat whose total size would exceed sys.maxsize.
            a *= maxsize//65536 + 1
        except MemoryError:
            pass
        else:
            self.fail("Array of size > maxsize created - MemoryError expected")
        b = array.array('d', [ 2.71828183, 3.14159265, -1])
        try:
            # Plain repeat with the same overflow condition.
            b * (maxsize//3 + 1)
        except MemoryError:
            pass
        else:
            self.fail("Array of size > maxsize created - MemoryError expected")
class LargeArrayTest(unittest.TestCase):
    """Tests on arrays of more than 2 GiB; run only with enough memory.

    ``example(size)`` yields ``size + 4`` items: a repeating 0..7 pattern,
    99-padding for the remainder, and a distinctive [8, 9, 10, 11] tail
    that the tests use to verify the end of the array.
    """
    typecode = 'b'

    def example(self, size):
        # size//8 repetitions of 0..7, padding, then the sentinel tail.
        base = array.array(self.typecode, [0, 1, 2, 3, 4, 5, 6, 7]) * (size // 8)
        base += array.array(self.typecode, [99]*(size % 8) + [8, 9, 10, 11])
        return base

    @support.bigmemtest(_2G, memuse=2.125)
    def test_example_data(self, size):
        example = self.example(size)
        self.assertEqual(len(example), size+4)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_access(self, size):
        example = self.example(size)
        self.assertEqual(example[0], 0)
        self.assertEqual(example[-(size+4)], 0)
        self.assertEqual(example[size], 8)
        self.assertEqual(example[-4], 8)
        self.assertEqual(example[size+3], 11)
        self.assertEqual(example[-1], 11)

    @support.bigmemtest(_2G, memuse=2.125+1)
    def test_slice(self, size):
        example = self.example(size)
        self.assertEqual(list(example[:4]), [0, 1, 2, 3])
        self.assertEqual(list(example[-4:]), [8, 9, 10, 11])
        part = example[1:-1]
        self.assertEqual(len(part), size+2)
        self.assertEqual(part[0], 1)
        self.assertEqual(part[-1], 10)
        del part
        part = example[::2]
        self.assertEqual(len(part), (size+5)//2)
        self.assertEqual(list(part[:4]), [0, 2, 4, 6])
        # The last two even-indexed items depend on the parity of size.
        if size % 2:
            self.assertEqual(list(part[-2:]), [9, 11])
        else:
            self.assertEqual(list(part[-2:]), [8, 10])

    @support.bigmemtest(_2G, memuse=2.125)
    def test_count(self, size):
        example = self.example(size)
        self.assertEqual(example.count(0), size//8)
        self.assertEqual(example.count(11), 1)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_append(self, size):
        example = self.example(size)
        example.append(12)
        self.assertEqual(example[-1], 12)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_extend(self, size):
        example = self.example(size)
        example.extend(iter([12, 13, 14, 15]))
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11, 12, 13, 14, 15])

    @support.bigmemtest(_2G, memuse=2.125)
    def test_frombytes(self, size):
        example = self.example(size)
        example.frombytes(b'abcd')
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11] + list(b'abcd'))

    @support.bigmemtest(_2G, memuse=2.125)
    def test_fromlist(self, size):
        example = self.example(size)
        example.fromlist([12, 13, 14, 15])
        self.assertEqual(len(example), size+8)
        self.assertEqual(list(example[-8:]), [8, 9, 10, 11, 12, 13, 14, 15])

    @support.bigmemtest(_2G, memuse=2.125)
    def test_index(self, size):
        example = self.example(size)
        self.assertEqual(example.index(0), 0)
        self.assertEqual(example.index(1), 1)
        self.assertEqual(example.index(7), 7)
        # The tail value 11 only appears once, right at the end.
        self.assertEqual(example.index(11), size+3)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_insert(self, size):
        example = self.example(size)
        example.insert(0, 12)
        example.insert(10, 13)
        example.insert(size+1, 14)
        self.assertEqual(len(example), size+7)
        self.assertEqual(example[0], 12)
        self.assertEqual(example[10], 13)
        self.assertEqual(example[size+1], 14)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_pop(self, size):
        example = self.example(size)
        self.assertEqual(example.pop(0), 0)
        self.assertEqual(example[0], 1)
        self.assertEqual(example.pop(size+1), 10)
        self.assertEqual(example[size+1], 11)
        self.assertEqual(example.pop(1), 2)
        self.assertEqual(example[1], 3)
        self.assertEqual(len(example), size+1)
        # pop() without an index removes the last remaining element.
        self.assertEqual(example.pop(), 11)
        self.assertEqual(len(example), size)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_remove(self, size):
        example = self.example(size)
        example.remove(0)
        self.assertEqual(len(example), size+3)
        self.assertEqual(example[0], 1)
        example.remove(10)
        self.assertEqual(len(example), size+2)
        self.assertEqual(example[size], 9)
        self.assertEqual(example[size+1], 11)

    @support.bigmemtest(_2G, memuse=2.125)
    def test_reverse(self, size):
        example = self.example(size)
        example.reverse()
        self.assertEqual(len(example), size+4)
        self.assertEqual(example[0], 11)
        self.assertEqual(example[3], 8)
        self.assertEqual(example[-1], 0)
        # Reversing twice restores the original order.
        example.reverse()
        self.assertEqual(len(example), size+4)
        self.assertEqual(list(example[:4]), [0, 1, 2, 3])
        self.assertEqual(list(example[-4:]), [8, 9, 10, 11])

    # Each item of the converted list costs roughly a pointer plus an int
    # object, hence the much larger memuse budget here.
    @support.bigmemtest(_2G, memuse=2.125+9)
    def test_tolist(self, size):
        example = self.example(size)
        ls = example.tolist()
        self.assertEqual(len(ls), len(example))
        self.assertEqual(ls[:8], list(example[:8]))
        self.assertEqual(ls[-8:], list(example[-8:]))
# Allow running this test file directly as a script.
if __name__ == "__main__":
    unittest.main()
| true | true |
f731b7992515f911392bdff02585fa6debdfa83b | 1,504 | py | Python | apps/contrib/models/mixins.py | vicobits/django-wise | 3fdc01eabdff459b31e016f9f6d1cafc19c5a292 | [
"MIT"
] | 5 | 2020-04-11T20:11:48.000Z | 2021-03-16T23:58:01.000Z | apps/contrib/models/mixins.py | victoraguilarc/django-wise | 3fdc01eabdff459b31e016f9f6d1cafc19c5a292 | [
"MIT"
] | 5 | 2020-04-11T20:17:56.000Z | 2021-06-16T19:18:29.000Z | apps/contrib/models/mixins.py | victoraguilarc/django-wise | 3fdc01eabdff459b31e016f9f6d1cafc19c5a292 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import uuid
from django.db import models
class TimeStampedModelMixin(models.Model):
    """Timestamp extra fields.

    An abstract base class model that provides self-updating ``created_at``
    and ``updated_at`` fields.
    https://docs.djangoproject.com/en/2.1/ref/models/fields/#django.db.models.DateField.auto_now_add
    """

    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)      # refreshed on every save

    class Meta:
        abstract = True
        # Newest records first by default.
        ordering = ['-created_at', '-updated_at']
class UUIDModelMixin(models.Model):
    """An abstract base class model that provides a unique ``uuid`` field."""

    # Random UUID generated per instance; hidden from forms/admin edits.
    uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)

    class Meta:
        abstract = True
class SlugModelMixin(models.Model):
    """An abstract base class model that provides a unique ``slug`` field."""

    slug = models.SlugField(max_length=255, unique=True)

    class Meta:
        abstract = True
class UUIDPrimaryKeyModelMixin(models.Model):
    """An abstract base class model whose primary key is a random UUID."""

    uuid = models.UUIDField(
        verbose_name='UUID',
        primary_key=True,      # replaces the default auto-increment id
        default=uuid.uuid4,
        editable=False,
    )

    class Meta:
        abstract = True
class UUIDWithTimestampMixin(UUIDPrimaryKeyModelMixin, TimeStampedModelMixin):
    """An abstract base class model that provides uuid and timestamp fields."""

    class Meta:
        abstract = True
| 25.491525 | 100 | 0.691489 |
import uuid
from django.db import models
class TimeStampedModelMixin(models.Model):
    """Abstract mixin adding self-maintained creation/modification timestamps."""

    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)      # refreshed on every save

    class Meta:
        abstract = True
        # Newest records first by default.
        ordering = ['-created_at', '-updated_at']


class UUIDModelMixin(models.Model):
    """Abstract mixin adding a non-editable, unique ``uuid`` field."""

    uuid = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)

    class Meta:
        abstract = True


class SlugModelMixin(models.Model):
    """Abstract mixin adding a unique ``slug`` field."""

    slug = models.SlugField(max_length=255, unique=True)

    class Meta:
        abstract = True


class UUIDPrimaryKeyModelMixin(models.Model):
    """Abstract mixin whose primary key is a random UUID."""

    uuid = models.UUIDField(
        verbose_name='UUID',
        primary_key=True,      # replaces the default auto-increment id
        default=uuid.uuid4,
        editable=False,
    )

    class Meta:
        abstract = True


class UUIDWithTimestampMixin(UUIDPrimaryKeyModelMixin, TimeStampedModelMixin):
    """Abstract mixin combining the UUID primary key and timestamp fields."""

    class Meta:
        abstract = True
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.