hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7d2f006479ca28ac543987f4cff4f932be0aedc4 | 5,867 | py | Python | mic_array.py | swm1718/mic_array | 9de54bf2f2351d8d14379dca2f7da53c736652f2 | [
"Apache-2.0"
] | null | null | null | mic_array.py | swm1718/mic_array | 9de54bf2f2351d8d14379dca2f7da53c736652f2 | [
"Apache-2.0"
] | null | null | null | mic_array.py | swm1718/mic_array | 9de54bf2f2351d8d14379dca2f7da53c736652f2 | [
"Apache-2.0"
] | null | null | null |
import argparse
import pyaudio
try:
import queue as Queue
except ImportError:
import Queue as Queue
import threading
import numpy as np
from gcc_phat import gcc_phat
import math
SOUND_SPEED = 343.2
MIC_DISTANCE_6P1 = 0.064
MAX_TDOA_6P1 = MIC_DISTANCE_6P1 / float(SOUND_SPEED)
MIC_DISTANCE_4 = 0.08127
MAX_TDOA_4 = MIC_DISTANCE_4 / float(SOUND_SPEED)
class MicArray(object):
    """Capture audio from a multi-channel microphone array and estimate the
    direction of arrival (DOA) of sound using GCC-PHAT on mic pairs.

    Parameters
    ----------
    rate : int
        Sampling rate in Hz (default 16000).
    channels : int
        Number of capture channels; the first PyAudio input device that
        exposes exactly this many input channels is selected.
    chunk_size : int, optional
        Frames per buffer.  Defaults to 10 ms of audio (``rate // 100``).
    """

    def __init__(self, rate=16000, channels=8, chunk_size=None):
        self.pyaudio_instance = pyaudio.PyAudio()
        self.queue = Queue.Queue()
        self.quit_event = threading.Event()
        self.channels = channels
        self.sample_rate = rate
        # Use floor division: `rate / 100` is a float on Python 3 and only
        # worked before because of the int() casts further down.
        self.chunk_size = chunk_size if chunk_size else rate // 100

        device_index = None
        for i in range(self.pyaudio_instance.get_device_count()):
            dev = self.pyaudio_instance.get_device_info_by_index(i)
            # Keep the name as str; encoding to bytes made Python 3 print b'...'
            name = dev['name']
            print(i, name, dev['maxInputChannels'], dev['maxOutputChannels'])
            if dev['maxInputChannels'] == self.channels:
                print('Use {}'.format(name))
                device_index = i
                break

        if device_index is None:
            raise Exception('can not find input device with {} channel(s)'.format(self.channels))

        self.stream = self.pyaudio_instance.open(
            input=True,
            start=False,
            format=pyaudio.paInt16,
            channels=self.channels,
            rate=int(self.sample_rate),
            frames_per_buffer=int(self.chunk_size),
            stream_callback=self._callback,
            input_device_index=device_index,
        )

    def _callback(self, in_data, frame_count, time_info, status):
        # Runs on PyAudio's callback thread: hand the raw bytes to the reader.
        self.queue.put(in_data)
        return None, pyaudio.paContinue

    def start(self):
        """Drop any stale buffered audio and start capturing."""
        self.queue.queue.clear()
        self.stream.start_stream()

    def read_chunks(self):
        """Yield interleaved int16 sample chunks until :meth:`stop` is called."""
        self.quit_event.clear()
        while not self.quit_event.is_set():
            frames = self.queue.get()
            if not frames:
                # Empty sentinel pushed by stop() to unblock the queue.get().
                break
            # np.fromstring is deprecated for binary input; frombuffer is the
            # supported (zero-copy) replacement with identical results.
            frames = np.frombuffer(frames, dtype='int16')
            yield frames

    def stop(self):
        self.quit_event.set()
        self.stream.stop_stream()
        self.queue.put('')  # sentinel: wakes a reader blocked in read_chunks()

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, type, value, traceback):
        if value:
            # Propagate the exception unchanged; leave the stream running so
            # the caller can inspect state (original behavior preserved).
            return False
        self.stop()

    @staticmethod
    def _tdoa_to_angle(tau, max_tdoa):
        """Convert a time difference of arrival into an angle in degrees.

        The ratio is clamped to [-1, 1] so that interpolation jitter from
        gcc_phat can never push it outside asin's domain (which previously
        could raise ``ValueError: math domain error``).
        """
        ratio = max(-1.0, min(1.0, tau / max_tdoa))
        return math.asin(ratio) * 180 / math.pi

    def get_direction(self, buf):
        """Estimate the sound direction in degrees ([0, 360)) from one chunk.

        Parameters
        ----------
        buf : ndarray
            1-D array of interleaved int16 samples (`channels` per frame).

        Returns
        -------
        float or None
            Bearing estimate, or None for unsupported channel counts.
        """
        best_guess = None
        if self.channels == 8:
            MIC_GROUP_N = 3
            MIC_GROUP = [[1, 4], [2, 5], [3, 6]]

            tau = [0] * MIC_GROUP_N
            theta = [0] * MIC_GROUP_N

            for i, v in enumerate(MIC_GROUP):
                tau[i], _ = gcc_phat(buf[v[0]::8], buf[v[1]::8], fs=self.sample_rate, max_tau=MAX_TDOA_6P1, interp=1)
                theta[i] = self._tdoa_to_angle(tau[i], MAX_TDOA_6P1)

            # The pair with the smallest |tau| is closest to broadside of the
            # source, where asin is most sensitive, so trust its estimate.
            min_index = np.argmin(np.abs(tau))
            if (min_index != 0 and theta[min_index - 1] >= 0) or (min_index == 0 and theta[MIC_GROUP_N - 1] < 0):
                best_guess = (theta[min_index] + 360) % 360
            else:
                best_guess = (180 - theta[min_index])

            best_guess = (best_guess + 120 + min_index * 60) % 360
        elif self.channels == 4:
            MIC_GROUP_N = 2
            MIC_GROUP = [[0, 2], [1, 3]]

            tau = [0] * MIC_GROUP_N
            theta = [0] * MIC_GROUP_N
            for i, v in enumerate(MIC_GROUP):
                tau[i], _ = gcc_phat(buf[v[0]::4], buf[v[1]::4], fs=self.sample_rate, max_tau=MAX_TDOA_4, interp=1)
                theta[i] = self._tdoa_to_angle(tau[i], MAX_TDOA_4)

            # Pick the pair whose angle is closer to broadside and use the
            # sign of the other pair to resolve the front/back ambiguity.
            if np.abs(theta[0]) < np.abs(theta[1]):
                if theta[1] > 0:
                    best_guess = (theta[0] + 360) % 360
                else:
                    best_guess = (180 - theta[0])
            else:
                if theta[0] < 0:
                    best_guess = (theta[1] + 360) % 360
                else:
                    best_guess = (180 - theta[1])

            best_guess = (best_guess + 90 + 180) % 360
            best_guess = (-best_guess + 120) % 360
        elif self.channels == 2:
            # 2-channel DOA not implemented.
            pass

        return best_guess
def test_4mic():
    """Demo loop for a 4-mic array: estimate DOA per chunk and show it on the
    pixel ring until the user presses Ctrl-C."""
    import signal
    import time
    from pixel_ring_4mic import pixels

    pixels.wakeup()
    time.sleep(3)
    is_quit = threading.Event()

    def signal_handler(sig, num):
        is_quit.set()
        print('Quit')

    signal.signal(signal.SIGINT, signal_handler)

    # 16000 // 4 keeps the chunk size an int on Python 3 (quarter-second
    # chunks); 16000 / 4 is a float and only worked via int() casts downstream.
    with MicArray(16000, 4, 16000 // 4) as mic:
        for chunk in mic.read_chunks():
            direction = mic.get_direction(chunk)
            # The 4-mic ring's LED origin is rotated 180 degrees from the
            # DOA reference frame.
            pixels.set_direction((direction + 180) % 360)
            pixels.listen()
            print(int(direction))
            if is_quit.is_set():
                break
def test_8mic():
    """Demo loop for an 8-channel (6+1) array: estimate DOA per chunk and show
    it on the pixel ring until the user presses Ctrl-C."""
    import signal
    import time
    from pixel_ring import pixel_ring

    is_quit = threading.Event()

    def signal_handler(sig, num):
        is_quit.set()
        print('Quit')

    signal.signal(signal.SIGINT, signal_handler)

    # 16000 // 4 keeps the chunk size an int on Python 3 (quarter-second
    # chunks); 16000 / 4 is a float and only worked via int() casts downstream.
    with MicArray(16000, 8, 16000 // 4) as mic:
        for chunk in mic.read_chunks():
            direction = mic.get_direction(chunk)
            pixel_ring.set_direction(direction)
            print(int(direction))
            if is_quit.is_set():
                break

    pixel_ring.off()
if __name__ == '__main__':
    # Command-line entry point: pick the demo matching the array size.
    parser = argparse.ArgumentParser(
        description='Specify the number of mics in the array, either 4 or 8 (default 8).')
    parser.add_argument('--num_mics', type=int, choices=[4, 8], default=8)
    args = parser.parse_args()
    if args.num_mics == 4:
        test_4mic()
    else:
        test_8mic()
| 28.342995 | 119 | 0.562979 |
70357d835804678b5a418e63147b69aa86a48447 | 57,814 | py | Python | numpy/testing/utils.py | immerrr/numpy | ceeeb1a65aaa508bd7097b5c39ba36fec51934b9 | [
"BSD-3-Clause"
] | null | null | null | numpy/testing/utils.py | immerrr/numpy | ceeeb1a65aaa508bd7097b5c39ba36fec51934b9 | [
"BSD-3-Clause"
] | null | null | null | numpy/testing/utils.py | immerrr/numpy | ceeeb1a65aaa508bd7097b5c39ba36fec51934b9 | [
"BSD-3-Clause"
] | null | null | null | """
Utility function to facilitate testing.
"""
from __future__ import division, absolute_import, print_function
import os
import sys
import re
import operator
import warnings
from functools import partial
from .nosetester import import_nose
from numpy.core import float32, empty, arange, array_repr, ndarray
if sys.version_info[0] >= 3:
from io import StringIO
else:
from StringIO import StringIO
__all__ = ['assert_equal', 'assert_almost_equal', 'assert_approx_equal',
'assert_array_equal', 'assert_array_less', 'assert_string_equal',
'assert_array_almost_equal', 'assert_raises', 'build_err_msg',
'decorate_methods', 'jiffies', 'memusage', 'print_assert_equal',
'raises', 'rand', 'rundocs', 'runstring', 'verbose', 'measure',
'assert_', 'assert_array_almost_equal_nulp', 'assert_raises_regex',
'assert_array_max_ulp', 'assert_warns', 'assert_no_warnings',
'assert_allclose', 'IgnoreException']
verbose = 0
def assert_(val, msg=''):
    """
    Assert that works in release mode.
    Accepts callable msg to allow deferring evaluation until failure.

    The Python built-in ``assert`` does not work when executing code in
    optimized mode (the ``-O`` flag) - no byte-code is generated for it.
    For documentation on usage, refer to the Python documentation.
    """
    if val:
        return
    # msg may be a zero-argument callable whose evaluation is deferred
    # until the assertion actually fails; a non-callable msg makes the
    # call raise TypeError and is then used verbatim.
    try:
        failure_msg = msg()
    except TypeError:
        failure_msg = msg
    raise AssertionError(failure_msg)
def gisnan(x):
    """Like isnan, but always raise an error if the type is not supported
    instead of returning a TypeError object.

    Notes
    -----
    isnan and other ufuncs sometimes return a NotImplementedType object
    instead of raising any exception.  This wrapper makes sure an exception
    is always raised.

    This should be removed once this problem is solved at the ufunc level."""
    from numpy.core import isnan
    flag = isnan(x)
    # A ufunc can hand back the NotImplemented sentinel instead of raising;
    # promote that into a real TypeError.
    if isinstance(flag, type(NotImplemented)):
        raise TypeError("isnan not supported for this type")
    return flag
def gisfinite(x):
    """Like isfinite, but always raise an error if the type is not supported
    instead of returning a TypeError object.

    Notes
    -----
    isfinite and other ufuncs sometimes return a NotImplementedType object
    instead of raising any exception.  This wrapper makes sure an exception
    is always raised.

    This should be removed once this problem is solved at the ufunc level."""
    from numpy.core import isfinite, errstate
    # Suppress invalid-value FP warnings that nan inputs would trigger.
    with errstate(invalid='ignore'):
        flag = isfinite(x)
    # Promote a NotImplemented sentinel into a real TypeError.
    if isinstance(flag, type(NotImplemented)):
        raise TypeError("isfinite not supported for this type")
    return flag
def gisinf(x):
    """Like isinf, but always raise an error if the type is not supported
    instead of returning a TypeError object.

    Notes
    -----
    isinf and other ufuncs sometimes return a NotImplementedType object
    instead of raising any exception.  This wrapper makes sure an exception
    is always raised.

    This should be removed once this problem is solved at the ufunc level."""
    from numpy.core import isinf, errstate
    # Suppress invalid-value FP warnings that nan inputs would trigger.
    with errstate(invalid='ignore'):
        flag = isinf(x)
    # Promote a NotImplemented sentinel into a real TypeError.
    if isinstance(flag, type(NotImplemented)):
        raise TypeError("isinf not supported for this type")
    return flag
def rand(*args):
    """Returns an array of random numbers with the given shape.

    This only uses the standard library, so it is useful for testing purposes.
    """
    import random
    from numpy.core import zeros, float64
    out = zeros(args, float64)
    # Fill element-wise through the flat iterator with stdlib randoms in [0, 1).
    flat = out.flat
    for idx in range(len(flat)):
        flat[idx] = random.random()
    return out
# Platform-dependent process-accounting helpers.  On Linux they read
# /proc/<pid>/stat; elsewhere they fall back to wall-clock emulation, and on
# Windows memusage is overridden with a pdh-based implementation below.
if sys.platform[:5]=='linux':
    def jiffies(_proc_pid_stat = '/proc/%s/stat'%(os.getpid()),
                _load_time=[]):
        """ Return number of jiffies (1/100ths of a second) that this
        process has been scheduled in user mode. See man 5 proc. """
        import time
        if not _load_time:
            # Remember the first-call time so the except-branch below can
            # emulate a jiffy counter if /proc becomes unreadable.
            _load_time.append(time.time())
        try:
            f=open(_proc_pid_stat, 'r')
            l = f.readline().split(' ')
            f.close()
            # Field 14 (index 13) of /proc/<pid>/stat is utime.
            return int(l[13])
        except:
            # Fall back to wall-clock emulation if /proc is unavailable.
            return int(100*(time.time()-_load_time[0]))
    def memusage(_proc_pid_stat = '/proc/%s/stat'%(os.getpid())):
        """ Return virtual memory size in bytes of the running python.
        """
        try:
            f=open(_proc_pid_stat, 'r')
            l = f.readline().split(' ')
            f.close()
            # Field 23 (index 22) of /proc/<pid>/stat is vsize, in bytes.
            return int(l[22])
        except:
            # Returns None when /proc cannot be read.
            return
else:
    # os.getpid is not in all platforms available.
    # Using time is safe but inaccurate, especially when process
    # was suspended or sleeping.
    def jiffies(_load_time=[]):
        """ Return number of jiffies (1/100ths of a second) that this
        process has been scheduled in user mode. [Emulation with time.time]. """
        import time
        if not _load_time:
            _load_time.append(time.time())
        return int(100*(time.time()-_load_time[0]))
    def memusage():
        """ Return memory usage of running python. [Not implemented]"""
        raise NotImplementedError
if os.name=='nt' and sys.version[:3] > '2.3':
    # Code "stolen" from enthought/debug/memusage.py
    def GetPerformanceAttributes(object, counter, instance = None,
                                 inum=-1, format = None, machine=None):
        # NOTE: Many counters require 2 samples to give accurate results,
        # including "% Processor Time" (as by definition, at any instant, a
        # thread's CPU usage is either 0 or 100).  To read counters like this,
        # you should copy this function, but keep the counter open, and call
        # CollectQueryData() each time you need to know.
        # See http://msdn.microsoft.com/library/en-us/dnperfmo/html/perfmonpt2.asp
        # My older explanation for this was that the "AddCounter" process forced
        # the CPU to 100%, but the above makes more sense :)
        import win32pdh
        if format is None: format = win32pdh.PDH_FMT_LONG
        path = win32pdh.MakeCounterPath( (machine, object, instance, None, inum, counter) )
        hq = win32pdh.OpenQuery()
        try:
            hc = win32pdh.AddCounter(hq, path)
            try:
                win32pdh.CollectQueryData(hq)
                type, val = win32pdh.GetFormattedCounterValue(hc, format)
                return val
            finally:
                win32pdh.RemoveCounter(hc)
        finally:
            win32pdh.CloseQuery(hq)
    def memusage(processName="python", instance=0):
        # from win32pdhutil, part of the win32all package
        import win32pdh
        return GetPerformanceAttributes("Process", "Virtual Bytes",
                                        processName, instance,
                                        win32pdh.PDH_FMT_LONG, None)
def build_err_msg(arrays, err_msg, header='Items are not equal:',
                  verbose=True, names=('ACTUAL', 'DESIRED'), precision=8):
    """Build a formatted multi-line error message for comparison failures.

    Parameters
    ----------
    arrays : sequence
        The objects being compared (typically actual and desired).
    err_msg : str
        User-supplied message.  Appended to the header line if short and
        single-line, otherwise placed on its own line.
    header : str, optional
        First line of the message.
    verbose : bool, optional
        If True, append a (truncated) repr of each object in `arrays`.
    names : tuple of str, optional
        Labels used for the entries of `arrays`.
    precision : int, optional
        Print precision passed to `array_repr` for ndarray entries.

    Returns
    -------
    str
        The assembled error message.
    """
    msg = ['\n' + header]
    if err_msg:
        if err_msg.find('\n') == -1 and len(err_msg) < 79-len(header):
            msg = [msg[0] + ' ' + err_msg]
        else:
            msg.append(err_msg)
    if verbose:
        for i, a in enumerate(arrays):
            if isinstance(a, ndarray):
                # precision argument is only needed if the objects are ndarrays
                r_func = partial(array_repr, precision=precision)
            else:
                r_func = repr
            try:
                r = r_func(a)
            # Narrowed from a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit; repr failures on exotic objects
            # must not mask the original assertion failure.
            except Exception:
                r = '[repr failed]'
            if r.count('\n') > 3:
                r = '\n'.join(r.splitlines()[:3])
                r += '...'
            msg.append(' %s: %s' % (names[i], r))
    return '\n'.join(msg)
def assert_equal(actual,desired,err_msg='',verbose=True):
    """
    Raise an assertion if two objects are not equal.
    Given two objects (scalars, lists, tuples, dictionaries or numpy arrays),
    check that all elements of these objects are equal. An exception is raised
    at the first conflicting values.
    Parameters
    ----------
    actual : array_like
        The object to check.
    desired : array_like
        The expected object.
    err_msg : str, optional
        The error message to be printed in case of failure.
    verbose : bool, optional
        If True, the conflicting values are appended to the error message.
    Raises
    ------
    AssertionError
        If actual and desired are not equal.
    Examples
    --------
    >>> np.testing.assert_equal([4,5], [4,6])
    ...
    <type 'exceptions.AssertionError'>:
    Items are not equal:
    item=1
     ACTUAL: 5
     DESIRED: 6
    """
    # Dicts are compared key-by-key, recursing into values.
    if isinstance(desired, dict):
        if not isinstance(actual, dict) :
            raise AssertionError(repr(type(actual)))
        assert_equal(len(actual), len(desired), err_msg, verbose)
        for k, i in desired.items():
            if k not in actual :
                raise AssertionError(repr(k))
            assert_equal(actual[k], desired[k], 'key=%r\n%s' % (k, err_msg), verbose)
        return
    # Lists/tuples are compared element-by-element, recursing into items.
    if isinstance(desired, (list, tuple)) and isinstance(actual, (list, tuple)):
        assert_equal(len(actual), len(desired), err_msg, verbose)
        for k in range(len(desired)):
            assert_equal(actual[k], desired[k], 'item=%r\n%s' % (k, err_msg), verbose)
        return
    from numpy.core import ndarray, isscalar, signbit
    from numpy.lib import iscomplexobj, real, imag
    # Arrays (on either side) are delegated to the array comparison helper.
    if isinstance(actual, ndarray) or isinstance(desired, ndarray):
        return assert_array_equal(actual, desired, err_msg, verbose)
    msg = build_err_msg([actual, desired], err_msg, verbose=verbose)
    # Handle complex numbers: separate into real/imag to handle
    # nan/inf/negative zero correctly
    # XXX: catch ValueError for subclasses of ndarray where iscomplex fail
    try:
        usecomplex = iscomplexobj(actual) or iscomplexobj(desired)
    except ValueError:
        usecomplex = False
    if usecomplex:
        if iscomplexobj(actual):
            actualr = real(actual)
            actuali = imag(actual)
        else:
            actualr = actual
            actuali = 0
        if iscomplexobj(desired):
            desiredr = real(desired)
            desiredi = imag(desired)
        else:
            desiredr = desired
            desiredi = 0
        try:
            assert_equal(actualr, desiredr)
            assert_equal(actuali, desiredi)
        except AssertionError:
            # Re-raise with the combined actual/desired message rather than
            # the component-wise one.
            raise AssertionError(msg)
    # Inf/nan/negative zero handling
    try:
        # isscalar test to check cases such as [np.nan] != np.nan
        if isscalar(desired) != isscalar(actual):
            raise AssertionError(msg)
        # If one of desired/actual is not finite, handle it specially here:
        # check that both are nan if any is a nan, and test for equality
        # otherwise
        if not (gisfinite(desired) and gisfinite(actual)):
            isdesnan = gisnan(desired)
            isactnan = gisnan(actual)
            if isdesnan or isactnan:
                if not (isdesnan and isactnan):
                    raise AssertionError(msg)
            else:
                if not desired == actual:
                    raise AssertionError(msg)
            return
        elif desired == 0 and actual == 0:
            # Distinguish +0.0 from -0.0 via the sign bit.
            if not signbit(desired) == signbit(actual):
                raise AssertionError(msg)
    # If TypeError or ValueError raised while using isnan and co, just handle
    # as before
    except (TypeError, ValueError, NotImplementedError):
        pass
    # Explicitly use __eq__ for comparison, ticket #2552
    if not (desired == actual):
        raise AssertionError(msg)
def print_assert_equal(test_string, actual, desired):
    """
    Test if two objects are equal, and print an error message if test fails.

    The test is performed with ``actual == desired``.

    Parameters
    ----------
    test_string : str
        The message supplied to AssertionError.
    actual : object
        The object to test for equality against `desired`.
    desired : object
        The expected result.

    Raises
    ------
    AssertionError
        If ``actual != desired``, with a pretty-printed dump of both values.
    """
    import pprint
    if actual == desired:
        return
    # Build the failure report with pprint so nested containers stay readable.
    report = StringIO()
    report.write(test_string)
    report.write(' failed\nACTUAL: \n')
    pprint.pprint(actual, report)
    report.write('DESIRED: \n')
    pprint.pprint(desired, report)
    raise AssertionError(report.getvalue())
def assert_almost_equal(actual,desired,decimal=7,err_msg='',verbose=True):
    """
    Raise an assertion if two items are not equal up to desired precision.
    .. note:: It is recommended to use one of `assert_allclose`,
              `assert_array_almost_equal_nulp` or `assert_array_max_ulp`
              instead of this function for more consistent floating point
              comparisons.
    The test is equivalent to ``abs(desired-actual) < 0.5 * 10**(-decimal)``.
    Given two objects (numbers or ndarrays), check that all elements of these
    objects are almost equal. An exception is raised at conflicting values.
    For ndarrays this delegates to assert_array_almost_equal
    Parameters
    ----------
    actual : array_like
        The object to check.
    desired : array_like
        The expected object.
    decimal : int, optional
        Desired precision, default is 7.
    err_msg : str, optional
        The error message to be printed in case of failure.
    verbose : bool, optional
        If True, the conflicting values are appended to the error message.
    Raises
    ------
    AssertionError
      If actual and desired are not equal up to specified precision.
    See Also
    --------
    assert_allclose: Compare two array_like objects for equality with desired
                     relative and/or absolute precision.
    assert_array_almost_equal_nulp, assert_array_max_ulp, assert_equal
    Examples
    --------
    >>> import numpy.testing as npt
    >>> npt.assert_almost_equal(2.3333333333333, 2.33333334)
    >>> npt.assert_almost_equal(2.3333333333333, 2.33333334, decimal=10)
    ...
    <type 'exceptions.AssertionError'>:
    Items are not equal:
     ACTUAL: 2.3333333333333002
     DESIRED: 2.3333333399999998
    >>> npt.assert_almost_equal(np.array([1.0,2.3333333333333]),
    ...                         np.array([1.0,2.33333334]), decimal=9)
    ...
    <type 'exceptions.AssertionError'>:
    Arrays are not almost equal
    <BLANKLINE>
    (mismatch 50.0%)
     x: array([ 1.        ,  2.33333333])
     y: array([ 1.        ,  2.33333334])
    """
    from numpy.core import ndarray
    from numpy.lib import iscomplexobj, real, imag
    # Handle complex numbers: separate into real/imag to handle
    # nan/inf/negative zero correctly
    # XXX: catch ValueError for subclasses of ndarray where iscomplex fail
    try:
        usecomplex = iscomplexobj(actual) or iscomplexobj(desired)
    except ValueError:
        usecomplex = False
    def _build_err_msg():
        # Deferred message construction: only built when a check fails.
        header = ('Arrays are not almost equal to %d decimals' % decimal)
        return build_err_msg([actual, desired], err_msg, verbose=verbose,
                             header=header)
    if usecomplex:
        # Compare real and imaginary parts independently, recursing.
        if iscomplexobj(actual):
            actualr = real(actual)
            actuali = imag(actual)
        else:
            actualr = actual
            actuali = 0
        if iscomplexobj(desired):
            desiredr = real(desired)
            desiredi = imag(desired)
        else:
            desiredr = desired
            desiredi = 0
        try:
            assert_almost_equal(actualr, desiredr, decimal=decimal)
            assert_almost_equal(actuali, desiredi, decimal=decimal)
        except AssertionError:
            raise AssertionError(_build_err_msg())
    # Sequences and arrays are delegated to the array variant.
    if isinstance(actual, (ndarray, tuple, list)) \
            or isinstance(desired, (ndarray, tuple, list)):
        return assert_array_almost_equal(actual, desired, decimal, err_msg)
    try:
        # If one of desired/actual is not finite, handle it specially here:
        # check that both are nan if any is a nan, and test for equality
        # otherwise
        if not (gisfinite(desired) and gisfinite(actual)):
            if gisnan(desired) or gisnan(actual):
                if not (gisnan(desired) and gisnan(actual)):
                    raise AssertionError(_build_err_msg())
            else:
                if not desired == actual:
                    raise AssertionError(_build_err_msg())
            return
    except (NotImplementedError, TypeError):
        pass
    # Scalar path: compare rounded absolute difference against zero.
    if round(abs(desired - actual), decimal) != 0 :
        raise AssertionError(_build_err_msg())
def assert_approx_equal(actual,desired,significant=7,err_msg='',verbose=True):
    """
    Raise an assertion if two items are not equal up to significant digits.
    .. note:: It is recommended to use one of `assert_allclose`,
              `assert_array_almost_equal_nulp` or `assert_array_max_ulp`
              instead of this function for more consistent floating point
              comparisons.
    Given two numbers, check that they are approximately equal.
    Approximately equal is defined as the number of significant digits
    that agree.
    Parameters
    ----------
    actual : scalar
        The object to check.
    desired : scalar
        The expected object.
    significant : int, optional
        Desired precision, default is 7.
    err_msg : str, optional
        The error message to be printed in case of failure.
    verbose : bool, optional
        If True, the conflicting values are appended to the error message.
    Raises
    ------
    AssertionError
      If actual and desired are not equal up to specified precision.
    See Also
    --------
    assert_allclose: Compare two array_like objects for equality with desired
                     relative and/or absolute precision.
    assert_array_almost_equal_nulp, assert_array_max_ulp, assert_equal
    Examples
    --------
    >>> np.testing.assert_approx_equal(0.12345677777777e-20, 0.1234567e-20)
    >>> np.testing.assert_approx_equal(0.12345670e-20, 0.12345671e-20,
                                       significant=8)
    >>> np.testing.assert_approx_equal(0.12345670e-20, 0.12345672e-20,
                                       significant=8)
    ...
    <type 'exceptions.AssertionError'>:
    Items are not equal to 8 significant digits:
     ACTUAL: 1.234567e-021
     DESIRED: 1.2345672000000001e-021
    the evaluated condition that raises the exception is
    >>> abs(0.12345670e-20/1e-21 - 0.12345672e-20/1e-21) >= 10**-(8-1)
    True
    """
    import numpy as np
    # Both operands are coerced to float; exact equality short-circuits.
    (actual, desired) = map(float, (actual, desired))
    if desired==actual:
        return
    # Normalized the numbers to be in range (-10.0,10.0)
    # scale = float(pow(10,math.floor(math.log10(0.5*(abs(desired)+abs(actual))))))
    with np.errstate(invalid='ignore'):
        scale = 0.5*(np.abs(desired) + np.abs(actual))
        scale = np.power(10, np.floor(np.log10(scale)))
    try:
        sc_desired = desired/scale
    except ZeroDivisionError:
        sc_desired = 0.0
    try:
        sc_actual = actual/scale
    except ZeroDivisionError:
        sc_actual = 0.0
    msg = build_err_msg([actual, desired], err_msg,
                header='Items are not equal to %d significant digits:' %
                                 significant,
                verbose=verbose)
    try:
        # If one of desired/actual is not finite, handle it specially here:
        # check that both are nan if any is a nan, and test for equality
        # otherwise
        if not (gisfinite(desired) and gisfinite(actual)):
            if gisnan(desired) or gisnan(actual):
                if not (gisnan(desired) and gisnan(actual)):
                    raise AssertionError(msg)
            else:
                if not desired == actual:
                    raise AssertionError(msg)
            return
    except (TypeError, NotImplementedError):
        pass
    # Compare the scale-normalized values against the significant-digit bound.
    if np.abs(sc_desired - sc_actual) >= np.power(10., -(significant-1)) :
        raise AssertionError(msg)
def assert_array_compare(comparison, x, y, err_msg='', verbose=True,
                         header='', precision=6):
    """Shared engine for the array assertion helpers.

    Applies `comparison` (an element-wise predicate such as ``operator.__eq__``
    or ``operator.__lt__``) to `x` and `y` after checking shapes and handling
    nan/inf specially, and raises AssertionError with a formatted message on
    any mismatch.
    """
    from numpy.core import array, isnan, isinf, any, all, inf
    x = array(x, copy=False, subok=True)
    y = array(y, copy=False, subok=True)
    def isnumber(x):
        # True for bool, integer, float and complex dtypes (by dtype charcode).
        return x.dtype.char in '?bhilqpBHILQPefdgFDG'
    def chk_same_position(x_id, y_id, hasval='nan'):
        """Handling nan/inf: check that x and y have the nan/inf at the same
        locations."""
        try:
            assert_array_equal(x_id, y_id)
        except AssertionError:
            msg = build_err_msg([x, y],
                                err_msg + '\nx and y %s location mismatch:' \
                                % (hasval), verbose=verbose, header=header,
                                names=('x', 'y'), precision=precision)
            raise AssertionError(msg)
    try:
        # Scalars (shape ()) broadcast against anything; otherwise shapes
        # must match exactly.
        cond = (x.shape==() or y.shape==()) or x.shape == y.shape
        if not cond:
            msg = build_err_msg([x, y],
                                err_msg
                                + '\n(shapes %s, %s mismatch)' % (x.shape,
                                                                  y.shape),
                                verbose=verbose, header=header,
                                names=('x', 'y'), precision=precision)
            if not cond :
                raise AssertionError(msg)
        if isnumber(x) and isnumber(y):
            x_isnan, y_isnan = isnan(x), isnan(y)
            x_isinf, y_isinf = isinf(x), isinf(y)
            # Validate that the special values are in the same place
            if any(x_isnan) or any(y_isnan):
                chk_same_position(x_isnan, y_isnan, hasval='nan')
            if any(x_isinf) or any(y_isinf):
                # Check +inf and -inf separately, since they are different
                chk_same_position(x == +inf, y == +inf, hasval='+inf')
                chk_same_position(x == -inf, y == -inf, hasval='-inf')
            # Combine all the special values
            x_id, y_id = x_isnan, y_isnan
            x_id |= x_isinf
            y_id |= y_isinf
            # Only do the comparison if actual values are left
            if all(x_id):
                return
            if any(x_id):
                # Mask out the nan/inf positions before comparing.
                val = comparison(x[~x_id], y[~y_id])
            else:
                val = comparison(x, y)
        else:
            val = comparison(x, y)
        if isinstance(val, bool):
            cond = val
            reduced = [0]
        else:
            reduced = val.ravel()
            cond = reduced.all()
            reduced = reduced.tolist()
        if not cond:
            # Mismatch percentage: fraction of elements failing the predicate.
            match = 100-100.0*reduced.count(1)/len(reduced)
            msg = build_err_msg([x, y],
                                err_msg
                                + '\n(mismatch %s%%)' % (match,),
                                verbose=verbose, header=header,
                                names=('x', 'y'), precision=precision)
            if not cond :
                raise AssertionError(msg)
    except ValueError as e:
        # A ValueError inside the comparison is re-raised with full context
        # (original traceback text plus the formatted x/y dump).
        import traceback
        efmt = traceback.format_exc()
        header = 'error during assertion:\n\n%s\n\n%s' % (efmt, header)
        msg = build_err_msg([x, y], err_msg, verbose=verbose, header=header,
                            names=('x', 'y'), precision=precision)
        raise ValueError(msg)
def assert_array_equal(x, y, err_msg='', verbose=True):
    """
    Raise an assertion if two array_like objects are not equal.
    Given two array_like objects, check that the shape is equal and all
    elements of these objects are equal. An exception is raised at
    shape mismatch or conflicting values. In contrast to the standard usage
    in numpy, NaNs are compared like numbers, no assertion is raised if
    both objects have NaNs in the same positions.
    The usual caution for verifying equality with floating point numbers is
    advised.
    Parameters
    ----------
    x : array_like
        The actual object to check.
    y : array_like
        The desired, expected object.
    err_msg : str, optional
        The error message to be printed in case of failure.
    verbose : bool, optional
        If True, the conflicting values are appended to the error message.
    Raises
    ------
    AssertionError
        If actual and desired objects are not equal.
    See Also
    --------
    assert_allclose: Compare two array_like objects for equality with desired
                     relative and/or absolute precision.
    assert_array_almost_equal_nulp, assert_array_max_ulp, assert_equal
    Examples
    --------
    The first assert does not raise an exception:
    >>> np.testing.assert_array_equal([1.0,2.33333,np.nan],
    ...                               [np.exp(0),2.33333, np.nan])
    Assert fails with numerical inprecision with floats:
    >>> np.testing.assert_array_equal([1.0,np.pi,np.nan],
    ...                               [1, np.sqrt(np.pi)**2, np.nan])
    ...
    <type 'exceptions.ValueError'>:
    AssertionError:
    Arrays are not equal
    <BLANKLINE>
    (mismatch 50.0%)
     x: array([ 1.        ,  3.14159265,         NaN])
     y: array([ 1.        ,  3.14159265,         NaN])
    Use `assert_allclose` or one of the nulp (number of floating point values)
    functions for these cases instead:
    >>> np.testing.assert_allclose([1.0,np.pi,np.nan],
    ...                            [1, np.sqrt(np.pi)**2, np.nan],
    ...                            rtol=1e-10, atol=0)
    """
    # Thin wrapper: element-wise __eq__ delegated to the shared engine,
    # which supplies the shape check and nan/inf handling.
    assert_array_compare(operator.__eq__, x, y, err_msg=err_msg,
                         verbose=verbose, header='Arrays are not equal')
def assert_array_almost_equal(x, y, decimal=6, err_msg='', verbose=True):
    """
    Raise an assertion if two objects are not equal up to desired precision.
    .. note:: It is recommended to use one of `assert_allclose`,
              `assert_array_almost_equal_nulp` or `assert_array_max_ulp`
              instead of this function for more consistent floating point
              comparisons.
    The test verifies identical shapes and verifies values with
    ``abs(desired-actual) < 0.5 * 10**(-decimal)``.
    Given two array_like objects, check that the shape is equal and all
    elements of these objects are almost equal. An exception is raised at
    shape mismatch or conflicting values. In contrast to the standard usage
    in numpy, NaNs are compared like numbers, no assertion is raised if
    both objects have NaNs in the same positions.
    Parameters
    ----------
    x : array_like
        The actual object to check.
    y : array_like
        The desired, expected object.
    decimal : int, optional
        Desired precision, default is 6.
    err_msg : str, optional
      The error message to be printed in case of failure.
    verbose : bool, optional
        If True, the conflicting values are appended to the error message.
    Raises
    ------
    AssertionError
        If actual and desired are not equal up to specified precision.
    See Also
    --------
    assert_allclose: Compare two array_like objects for equality with desired
                     relative and/or absolute precision.
    assert_array_almost_equal_nulp, assert_array_max_ulp, assert_equal
    Examples
    --------
    the first assert does not raise an exception
    >>> np.testing.assert_array_almost_equal([1.0,2.333,np.nan],
                                             [1.0,2.333,np.nan])
    >>> np.testing.assert_array_almost_equal([1.0,2.33333,np.nan],
    ...                                      [1.0,2.33339,np.nan], decimal=5)
    ...
    <type 'exceptions.AssertionError'>:
    AssertionError:
    Arrays are not almost equal
    <BLANKLINE>
    (mismatch 50.0%)
     x: array([ 1.     ,  2.33333,      NaN])
     y: array([ 1.     ,  2.33339,      NaN])
    >>> np.testing.assert_array_almost_equal([1.0,2.33333,np.nan],
    ...                                      [1.0,2.33333, 5], decimal=5)
    <type 'exceptions.ValueError'>:
    ValueError:
    Arrays are not almost equal
     x: array([ 1.     ,  2.33333,      NaN])
     y: array([ 1.     ,  2.33333,  5.     ])
    """
    from numpy.core import around, number, float_, result_type, array
    from numpy.core.numerictypes import issubdtype
    from numpy.core.fromnumeric import any as npany
    def compare(x, y):
        # Element-wise predicate handed to assert_array_compare.
        try:
            # Strip matching inf positions first so the rounded-difference
            # test below only sees finite values.
            if npany(gisinf(x)) or npany( gisinf(y)):
                xinfid = gisinf(x)
                yinfid = gisinf(y)
                if not xinfid == yinfid:
                    return False
                # if one item, x and y is +- inf
                if x.size == y.size == 1:
                    return x == y
                x = x[~xinfid]
                y = y[~yinfid]
        except (TypeError, NotImplementedError):
            pass
        # make sure y is an inexact type to avoid abs(MIN_INT); will cause
        # casting of x later.
        dtype = result_type(y, 1.)
        y = array(y, dtype=dtype, copy=False)
        z = abs(x-y)
        if not issubdtype(z.dtype, number):
            z = z.astype(float_) # handle object arrays
        return around(z, decimal) <= 10.0**(-decimal)
    assert_array_compare(compare, x, y, err_msg=err_msg, verbose=verbose,
             header=('Arrays are not almost equal to %d decimals' % decimal),
             precision=decimal)
def assert_array_less(x, y, err_msg='', verbose=True):
    """
    Raise an assertion if two array_like objects are not ordered by less than.
    Given two array_like objects, check that the shape is equal and all
    elements of the first object are strictly smaller than those of the
    second object. An exception is raised at shape mismatch or incorrectly
    ordered values. Shape mismatch does not raise if an object has zero
    dimension. In contrast to the standard usage in numpy, NaNs are
    compared, no assertion is raised if both objects have NaNs in the same
    positions.
    Parameters
    ----------
    x : array_like
      The smaller object to check.
    y : array_like
      The larger object to compare.
    err_msg : string
      The error message to be printed in case of failure.
    verbose : bool
        If True, the conflicting values are appended to the error message.
    Raises
    ------
    AssertionError
      If actual and desired objects are not equal.
    See Also
    --------
    assert_array_equal: tests objects for equality
    assert_array_almost_equal: test objects for equality up to precision
    Examples
    --------
    >>> np.testing.assert_array_less([1.0, 1.0, np.nan], [1.1, 2.0, np.nan])
    >>> np.testing.assert_array_less([1.0, 1.0, np.nan], [1, 2.0, np.nan])
    ...
    <type 'exceptions.ValueError'>:
    Arrays are not less-ordered
    (mismatch 50.0%)
     x: array([  1.,   1.,  NaN])
     y: array([  1.,   2.,  NaN])
    >>> np.testing.assert_array_less([1.0, 4.0], 3)
    ...
    <type 'exceptions.ValueError'>:
    Arrays are not less-ordered
    (mismatch 50.0%)
     x: array([ 1.,  4.])
     y: array(3)
    >>> np.testing.assert_array_less([1.0, 2.0, 3.0], [4])
    ...
    <type 'exceptions.ValueError'>:
    Arrays are not less-ordered
    (shapes (3,), (1,) mismatch)
     x: array([ 1.,  2.,  3.])
     y: array([4])
    """
    # Thin wrapper: element-wise __lt__ delegated to the shared engine,
    # which supplies the shape check and nan/inf handling.
    assert_array_compare(operator.__lt__, x, y, err_msg=err_msg,
                         verbose=verbose,
                         header='Arrays are not less-ordered')
def runstring(astr, dict):
    # Execute the given source string inside the supplied namespace dict.
    # NOTE(review): the parameter named ``dict`` shadows the builtin; renaming
    # it would change the keyword-call interface, so it is left as-is.
    exec(astr, dict)
def assert_string_equal(actual, desired):
    """
    Test if two strings are equal.
    If the given strings are equal, `assert_string_equal` does nothing.
    If they are not equal, an AssertionError is raised, and the diff
    between the strings is shown.
    Parameters
    ----------
    actual : str
        The string to test for equality against the expected string.
    desired : str
        The expected string.
    Examples
    --------
    >>> np.testing.assert_string_equal('abc', 'abc')
    >>> np.testing.assert_string_equal('abc', 'abcd')
    Traceback (most recent call last):
      File "<stdin>", line 1, in <module>
    ...
    AssertionError: Differences in strings:
    - abc+ abcd?    +
    """
    # delay import of difflib to reduce startup time
    import difflib
    if not isinstance(actual, str) :
        raise AssertionError(repr(type(actual)))
    if not isinstance(desired, str):
        raise AssertionError(repr(type(desired)))
    # NOTE(review): ``desired`` is embedded into a regular expression pattern
    # here (and paired diff lines are regex-matched below), so regex
    # metacharacters in the expected string are interpreted, not literal.
    if re.match(r'\A'+desired+r'\Z', actual, re.M):
        return
    diff = list(difflib.Differ().compare(actual.splitlines(1), desired.splitlines(1)))
    diff_list = []
    # Walk the Differ output and keep only the genuinely differing groups.
    while diff:
        d1 = diff.pop(0)
        if d1.startswith('  '):
            # Unchanged line -- skip it.
            continue
        if d1.startswith('- '):
            l = [d1]
            d2 = diff.pop(0)
            if d2.startswith('? '):
                # Intraline-change hint belonging to the removed line.
                l.append(d2)
                d2 = diff.pop(0)
            if not d2.startswith('+ ') :
                raise AssertionError(repr(d2))
            l.append(d2)
            d3 = diff.pop(0)
            if d3.startswith('? '):
                # Intraline-change hint belonging to the added line.
                l.append(d3)
            else:
                # Not a hint -- push it back for the next loop iteration.
                diff.insert(0, d3)
            if re.match(r'\A'+d2[2:]+r'\Z', d1[2:]):
                # Removed/added pair match (as regex): not a real difference.
                continue
            diff_list.extend(l)
            continue
        raise AssertionError(repr(d1))
    if not diff_list:
        return
    msg = 'Differences in strings:\n%s' % (''.join(diff_list)).rstrip()
    if actual != desired :
        raise AssertionError(msg)
def rundocs(filename=None, raise_on_error=True):
    """
    Run doctests found in the given file.
    By default `rundocs` raises an AssertionError on failure.
    Parameters
    ----------
    filename : str
        The path to the file for which the doctests are run.
    raise_on_error : bool
        Whether to raise an AssertionError when a doctest fails. Default is
        True.
    Notes
    -----
    The doctests can be run by the user/developer by adding the ``doctests``
    argument to the ``test()`` call. For example, to run all tests (including
    doctests) for `numpy.lib`:
    >>> np.lib.test(doctests=True) #doctest: +SKIP
    """
    # Delayed imports to keep module import time low.
    import doctest
    import importlib.util
    if filename is None:
        # Default to the file of the caller.
        f = sys._getframe(1)
        filename = f.f_globals['__file__']
    name = os.path.splitext(os.path.basename(filename))[0]
    # BUG FIX: the ``imp`` module (imp.find_module / imp.load_module) is
    # deprecated and removed in Python 3.12; load the module from its path
    # with importlib instead (same observable behavior).
    spec = importlib.util.spec_from_file_location(name, filename)
    if spec is None or spec.loader is None:
        raise ImportError("Could not load module from %r" % (filename,))
    m = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(m)
    tests = doctest.DocTestFinder().find(m)
    runner = doctest.DocTestRunner(verbose=False)
    msg = []
    if raise_on_error:
        # Capture the runner's report output so it can go into the error.
        out = lambda s: msg.append(s)
    else:
        out = None
    for test in tests:
        runner.run(test, out=out)
    if runner.failures > 0 and raise_on_error:
        raise AssertionError("Some doctests failed:\n%s" % "\n".join(msg))
def raises(*args,**kwargs):
    # Thin pass-through to nose's ``raises`` decorator; nose is imported
    # lazily so it is only required when the decorator is actually used.
    nose = import_nose()
    return nose.tools.raises(*args,**kwargs)
def assert_raises(*args,**kwargs):
    """
    assert_raises(exception_class, callable, *args, **kwargs)
    Fail unless an exception of class exception_class is thrown
    by callable when invoked with arguments args and keyword
    arguments kwargs. If a different type of exception is
    thrown, it will not be caught, and the test case will be
    deemed to have suffered an error, exactly as for an
    unexpected exception.
    """
    # Delegates entirely to nose; imported lazily to avoid a hard dependency
    # at module import time.
    nose = import_nose()
    return nose.tools.assert_raises(*args,**kwargs)
# Module-level cache: the concrete implementation is resolved once on first
# use and reused for subsequent calls.
assert_raises_regex_impl = None
def assert_raises_regex(exception_class, expected_regexp,
                        callable_obj=None, *args, **kwargs):
    """
    Fail unless an exception of class exception_class and with message that
    matches expected_regexp is thrown by callable when invoked with arguments
    args and keyword arguments kwargs.
    Name of this function adheres to Python 3.2+ reference, but should work in
    all versions down to 2.6.
    """
    nose = import_nose()
    global assert_raises_regex_impl
    if assert_raises_regex_impl is None:
        try:
            # Python 3.2+
            assert_raises_regex_impl = nose.tools.assert_raises_regex
        except AttributeError:
            try:
                # 2.7+
                assert_raises_regex_impl = nose.tools.assert_raises_regexp
            except AttributeError:
                # 2.6
                # This class is copied from Python2.7 stdlib almost verbatim
                class _AssertRaisesContext(object):
                    """A context manager used to implement TestCase.assertRaises* methods."""
                    def __init__(self, expected, expected_regexp=None):
                        self.expected = expected
                        self.expected_regexp = expected_regexp
                    def failureException(self, msg):
                        return AssertionError(msg)
                    def __enter__(self):
                        return self
                    def __exit__(self, exc_type, exc_value, tb):
                        if exc_type is None:
                            try:
                                exc_name = self.expected.__name__
                            except AttributeError:
                                exc_name = str(self.expected)
                            raise self.failureException(
                                "{0} not raised".format(exc_name))
                        if not issubclass(exc_type, self.expected):
                            # let unexpected exceptions pass through
                            return False
                        self.exception = exc_value # store for later retrieval
                        if self.expected_regexp is None:
                            return True
                        expected_regexp = self.expected_regexp
                        # BUG FIX: ``basestring`` does not exist on Python 3
                        # and raised NameError here; patterns passed in are
                        # text, so test against ``str``.
                        if isinstance(expected_regexp, str):
                            expected_regexp = re.compile(expected_regexp)
                        if not expected_regexp.search(str(exc_value)):
                            raise self.failureException(
                                '"%s" does not match "%s"' %
                                (expected_regexp.pattern, str(exc_value)))
                        return True
                def impl(cls, regex, callable_obj, *a, **kw):
                    mgr = _AssertRaisesContext(cls, regex)
                    if callable_obj is None:
                        return mgr
                    with mgr:
                        callable_obj(*a, **kw)
                assert_raises_regex_impl = impl
    return assert_raises_regex_impl(exception_class, expected_regexp,
                                    callable_obj, *args, **kwargs)
def decorate_methods(cls, decorator, testmatch=None):
    """
    Apply a decorator to all methods in a class matching a regular expression.
    The given decorator is applied to all public methods of `cls` that are
    matched by the regular expression `testmatch`
    (``testmatch.search(methodname)``). Methods that are private, i.e. start
    with an underscore, are ignored.
    Parameters
    ----------
    cls : class
        Class whose methods to decorate.
    decorator : function
        Decorator to apply to methods
    testmatch : compiled regexp or str, optional
        The regular expression. Default value is None, in which case the
        nose default (``re.compile(r'(?:^|[\\b_\\.%s-])[Tt]est' % os.sep)``)
        is used.
        If `testmatch` is a string, it is compiled to a regular expression
        first.
    """
    if testmatch is None:
        # NOTE(review): in a raw string ``\\b`` is a literal backslash
        # followed by 'b', not a word boundary.  This mirrors the docstring
        # above, but looks suspicious -- confirm against nose's default
        # ``testMatch`` pattern before changing.
        testmatch = re.compile(r'(?:^|[\\b_\\.%s-])[Tt]est' % os.sep)
    else:
        testmatch = re.compile(testmatch)
    cls_attr = cls.__dict__
    # delayed import to reduce startup time
    from inspect import isfunction
    methods = [_m for _m in cls_attr.values() if isfunction(_m)]
    for function in methods:
        try:
            if hasattr(function, 'compat_func_name'):
                # Name assigned by nose-style compatibility wrappers.
                funcname = function.compat_func_name
            else:
                funcname = function.__name__
        except AttributeError:
            # not a function
            continue
        # Decorate in place: only public methods whose name matches.
        if testmatch.search(funcname) and not funcname.startswith('_'):
            setattr(cls, funcname, decorator(function))
    return
def measure(code_str,times=1,label=None):
    """
    Return elapsed time for executing code in the namespace of the caller.
    The supplied code string is compiled with the Python builtin ``compile``.
    The precision of the timing is 10 milli-seconds. If the code will execute
    fast on this timescale, it can be executed many times to get reasonable
    timing accuracy.
    Parameters
    ----------
    code_str : str
        The code to be timed.
    times : int, optional
        The number of times the code is executed. Default is 1. The code is
        only compiled once.
    label : str, optional
        A label to identify `code_str` with. This is passed into ``compile``
        as the second argument (for run-time error messages).
    Returns
    -------
    elapsed : float
        Total elapsed time in seconds for executing `code_str` `times` times.
    Examples
    --------
    >>> etime = np.testing.measure('for i in range(1000): np.sqrt(i**2)',
    ...                            times=times)
    >>> print "Time for a single execution : ", etime / times, "s"
    Time for a single execution :  0.005 s
    """
    # Compile once, then execute in the caller's own namespaces so the
    # snippet sees the same names as code written at the call site.
    frame = sys._getframe(1)
    locs, globs = frame.f_locals, frame.f_globals
    code = compile(code_str,
                   'Test name: %s ' % label,
                   'exec')
    i = 0
    elapsed = jiffies()
    while i < times:
        i += 1
        exec(code, globs, locs)
    elapsed = jiffies() - elapsed
    # Convert tick count to seconds; assumes jiffies() ticks at 100 Hz,
    # consistent with the documented 10 ms precision -- TODO confirm.
    return 0.01*elapsed
def _assert_valid_refcount(op):
    """
    Check that ufuncs don't mishandle refcount of object `1`.
    Used in a few regression tests.
    """
    import numpy as np
    a = np.arange(100 * 100)
    b = np.arange(100*100).reshape(100, 100)
    c = b
    i = 1
    # CPython-specific: record the interpreter-wide reference count of the
    # small-int singleton ``1`` before repeatedly applying ``op``.
    rc = sys.getrefcount(i)
    for j in range(15):
        d = op(b, c)
        # The operation must not leak away references to the singleton.
        assert_(sys.getrefcount(i) >= rc)
def assert_allclose(actual, desired, rtol=1e-7, atol=0,
                    err_msg='', verbose=True):
    """
    Raise an assertion if two objects are not equal up to desired tolerance.
    The test is equivalent to ``allclose(actual, desired, rtol, atol)``.
    It compares the difference between `actual` and `desired` to
    ``atol + rtol * abs(desired)``.
    .. versionadded:: 1.5.0
    Parameters
    ----------
    actual : array_like
        Array obtained.
    desired : array_like
        Array desired.
    rtol : float, optional
        Relative tolerance.
    atol : float, optional
        Absolute tolerance.
    err_msg : str, optional
        The error message to be printed in case of failure.
    verbose : bool, optional
        If True, the conflicting values are appended to the error message.
    Raises
    ------
    AssertionError
        If actual and desired are not equal up to specified precision.
    See Also
    --------
    assert_array_almost_equal_nulp, assert_array_max_ulp
    Examples
    --------
    >>> x = [1e-5, 1e-3, 1e-1]
    >>> y = np.arccos(np.cos(x))
    >>> assert_allclose(x, y, rtol=1e-5, atol=0)
    """
    import numpy as np
    # Closure captures rtol/atol; assert_array_compare supplies x and y.
    def compare(x, y):
        return np.allclose(x, y, rtol=rtol, atol=atol)
    # asanyarray preserves ndarray subclasses in the failure report.
    actual, desired = np.asanyarray(actual), np.asanyarray(desired)
    header = 'Not equal to tolerance rtol=%g, atol=%g' % (rtol, atol)
    assert_array_compare(compare, actual, desired, err_msg=str(err_msg),
                         verbose=verbose, header=header)
def assert_array_almost_equal_nulp(x, y, nulp=1):
    """
    Assert that two arrays are equal to within `nulp` units in the last place.

    A robust comparison for arrays of widely varying magnitude: each element
    pair is compared relative to the floating-point spacing at the larger of
    the two magnitudes.

    Parameters
    ----------
    x, y : array_like
        Input arrays.
    nulp : int, optional
        The maximum number of unit in the last place for tolerance (see
        Notes).  Default is 1.

    Returns
    -------
    None

    Raises
    ------
    AssertionError
        If the spacing between `x` and `y` for one or more elements is larger
        than `nulp`.

    See Also
    --------
    assert_array_max_ulp : Check that all items of arrays differ in at most
        N Units in the Last Place.
    spacing : Return the distance between x and the nearest adjacent number.

    Notes
    -----
    An assertion is raised if the following condition is not met::

        abs(x - y) <= nulps * spacing(max(abs(x), abs(y)))
    """
    import numpy as np
    ax, ay = np.abs(x), np.abs(y)
    # One ULP at the larger magnitude of each element pair, scaled by nulp.
    tolerance = nulp * np.spacing(np.where(ax > ay, ax, ay))
    if np.all(np.abs(x - y) <= tolerance):
        return
    # For complex data nulp_diff is not defined, so report without the max.
    if np.iscomplexobj(x) or np.iscomplexobj(y):
        raise AssertionError("X and Y are not equal to %d ULP" % nulp)
    max_nulp = np.max(nulp_diff(x, y))
    raise AssertionError(
        "X and Y are not equal to %d ULP (max is %g)" % (nulp, max_nulp))
def assert_array_max_ulp(a, b, maxulp=1, dtype=None):
    """
    Check that all items of arrays differ in at most N Units in the Last Place.
    Parameters
    ----------
    a, b : array_like
        Input arrays to be compared.
    maxulp : int, optional
        The maximum number of units in the last place that elements of `a` and
        `b` can differ. Default is 1.
    dtype : dtype, optional
        Data-type to convert `a` and `b` to if given. Default is None.
    Returns
    -------
    ret : ndarray
        Array containing number of representable floating point numbers between
        items in `a` and `b`.
    Raises
    ------
    AssertionError
        If one or more elements differ by more than `maxulp`.
    See Also
    --------
    assert_array_almost_equal_nulp : Compare two arrays relatively to their
        spacing.
    Examples
    --------
    >>> a = np.linspace(0., 1., 100)
    >>> res = np.testing.assert_array_max_ulp(a, np.arcsin(np.sin(a)))
    """
    import numpy as np
    # nulp_diff returns the per-element count of representable floats
    # between a and b; the whole array is returned for further inspection.
    ret = nulp_diff(a, b, dtype)
    if not np.all(ret <= maxulp):
        raise AssertionError("Arrays are not almost equal up to %g ULP" % \
                             maxulp)
    return ret
def nulp_diff(x, y, dtype=None):
    """For each item in x and y, return the number of representable floating
    points between them.
    Parameters
    ----------
    x : array_like
        first input array
    y : array_like
        second input array
    dtype : dtype, optional
        Data-type to convert `x` and `y` to before comparing. Default is
        None (use the common type of the inputs).
    Returns
    -------
    nulp : array_like
        number of representable floating point numbers between each item in x
        and y.
    Examples
    --------
    # By definition, epsilon is the smallest number such as 1 + eps != 1, so
    # there should be exactly one ULP between 1 and 1 + eps
    >>> nulp_diff(1, 1 + np.finfo(x.dtype).eps)
    1.0
    """
    import numpy as np
    if dtype:
        x = np.array(x, dtype=dtype)
        y = np.array(y, dtype=dtype)
    else:
        x = np.array(x)
        y = np.array(y)
    # Work in the common floating type of both inputs.
    t = np.common_type(x, y)
    if np.iscomplexobj(x) or np.iscomplexobj(y):
        raise NotImplementedError("_nulp not implemented for complex array")
    x = np.array(x, dtype=t)
    y = np.array(y, dtype=t)
    if not x.shape == y.shape:
        raise ValueError("x and y do not have the same shape: %s - %s" % \
                         (x.shape, y.shape))
    def _diff(rx, ry, vdt):
        diff = np.array(rx-ry, dtype=vdt)
        return np.abs(diff)
    # In sign-magnitude integer space, the distance between two floats is
    # exactly the number of representable values between them.
    rx = integer_repr(x)
    ry = integer_repr(y)
    return _diff(rx, ry, t)
def _integer_repr(x, vdt, comp):
# Reinterpret binary representation of the float as sign-magnitude:
# take into account two-complement representation
# See also
# http://www.cygnus-software.com/papers/comparingfloats/comparingfloats.htm
rx = x.view(vdt)
if not (rx.size == 1):
rx[rx < 0] = comp - rx[rx<0]
else:
if rx < 0:
rx = comp - rx
return rx
def integer_repr(x):
    """Return the signed-magnitude interpretation of the binary representation of
    x."""
    import numpy as np
    # Dispatch on dtype: the companion integer type and the two's-complement
    # conversion constant (-2**(bits-1)) depend on the float width.
    if x.dtype == np.float32:
        return _integer_repr(x, np.int32, np.int32(-2**31))
    elif x.dtype == np.float64:
        return _integer_repr(x, np.int64, np.int64(-2**63))
    else:
        raise ValueError("Unsupported dtype %s" % x.dtype)
# The following two classes are copied from python 2.6 warnings module (context
# manager)
class WarningMessage(object):
    """
    Record of a single ``showwarning()`` call.

    Deprecated in 1.8.0

    Notes
    -----
    `WarningMessage` is copied from the Python 2.6 warnings module,
    so it can be used in NumPy with older Python versions.
    """
    _WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
                        "line")
    def __init__(self, message, category, filename, lineno, file=None,
                 line=None):
        # Store each showwarning() argument under the attribute of the same
        # name (the names are listed in _WARNING_DETAILS).
        self.message = message
        self.category = category
        self.filename = filename
        self.lineno = lineno
        self.file = file
        self.line = line
        # Cache the category's class name for __str__.
        self._category_name = category.__name__ if category else None
    def __str__(self):
        details = (self.message, self._category_name, self.filename,
                   self.lineno, self.line)
        return ("{message : %r, category : %r, filename : %r, lineno : %s, "
                "line : %r}" % details)
class WarningManager(object):
    """
    A context manager that copies and restores the warnings filter upon
    exiting the context.
    The 'record' argument specifies whether warnings should be captured by a
    custom implementation of ``warnings.showwarning()`` and be appended to a
    list returned by the context manager. Otherwise None is returned by the
    context manager. The objects appended to the list are arguments whose
    attributes mirror the arguments to ``showwarning()``.
    The 'module' argument is to specify an alternative module to the module
    named 'warnings' and imported under that name. This argument is only useful
    when testing the warnings module itself.
    Deprecated in 1.8.0
    Notes
    -----
    `WarningManager` is a copy of the ``catch_warnings`` context manager
    from the Python 2.6 warnings module, with slight modifications.
    It is copied so it can be used in NumPy with older Python versions.
    """
    def __init__(self, record=False, module=None):
        self._record = record
        # Default to the live ``warnings`` module; an alternative module is
        # only useful when testing the warnings machinery itself.
        if module is None:
            self._module = sys.modules['warnings']
        else:
            self._module = module
        self._entered = False
    def __enter__(self):
        if self._entered:
            raise RuntimeError("Cannot enter %r twice" % self)
        self._entered = True
        # Save the filter list and install a copy, so changes made inside
        # the context are undone on exit.
        self._filters = self._module.filters
        self._module.filters = self._filters[:]
        self._showwarning = self._module.showwarning
        if self._record:
            log = []
            def showwarning(*args, **kwargs):
                log.append(WarningMessage(*args, **kwargs))
            self._module.showwarning = showwarning
            return log
        else:
            return None
    def __exit__(self, *exc_info):
        # BUG FIX: the context-manager protocol always calls ``__exit__``
        # with (exc_type, exc_value, traceback); the previous zero-argument
        # signature raised TypeError whenever a ``with`` block ended.
        if not self._entered:
            raise RuntimeError("Cannot exit %r without entering first" % self)
        self._module.filters = self._filters
        self._module.showwarning = self._showwarning
def assert_warns(warning_class, func, *args, **kw):
    """
    Fail unless calling ``func(*args, **kw)`` emits a warning of exactly
    the class `warning_class` as its first warning.

    A different warning class on the first emitted warning, or no warning
    at all, raises AssertionError.

    .. versionadded:: 1.4.0

    Parameters
    ----------
    warning_class : class
        The class defining the warning that `func` is expected to throw.
    func : callable
        The callable to test.
    \\*args : Arguments
        Arguments passed to `func`.
    \\*\\*kwargs : Kwargs
        Keyword arguments passed to `func`.

    Returns
    -------
    The value returned by `func`.
    """
    with warnings.catch_warnings(record=True) as caught:
        # Make sure every warning is delivered, even previously-seen ones.
        warnings.simplefilter('always')
        result = func(*args, **kw)
        if len(caught) == 0:
            raise AssertionError("No warning raised when calling %s"
                    % func.__name__)
        if caught[0].category is not warning_class:
            raise AssertionError("First warning for %s is not a " \
                    "%s( is %s)" % (func.__name__, warning_class, caught[0]))
    return result
def assert_no_warnings(func, *args, **kw):
    """
    Fail if calling ``func(*args, **kw)`` emits any warning.

    .. versionadded:: 1.7.0

    Parameters
    ----------
    func : callable
        The callable to test.
    \\*args : Arguments
        Arguments passed to `func`.
    \\*\\*kwargs : Kwargs
        Keyword arguments passed to `func`.

    Returns
    -------
    The value returned by `func`.
    """
    with warnings.catch_warnings(record=True) as caught:
        # Deliver every warning so none can hide behind a filter.
        warnings.simplefilter('always')
        result = func(*args, **kw)
    if len(caught) > 0:
        raise AssertionError("Got warnings when calling %s: %s"
                % (func.__name__, caught))
    return result
def _gen_alignment_data(dtype=float32, type='binary', max_size=24):
    """
    Generator producing data with different alignment and offsets
    to test simd vectorization.
    Parameters
    ----------
    dtype : dtype
        data type to produce
    type : string
        'unary': create data for unary operations, creates one input
        and output array
        'binary': create data for binary operations, creates two input
        and output array
    max_size : integer
        maximum size of data to produce
    Yields
    ------
    if type is 'unary' yields one output, one input array and a message
    containing information on the data
    if type is 'binary' yields one output array, two input array and a message
    containing information on the data
    Notes
    -----
    The yield order is part of the contract: out-of-place, in-place, then
    shifted/aliased variants for every (offset, size) combination.
    """
    ufmt = 'unary offset=(%d, %d), size=%d, dtype=%r, %s'
    bfmt = 'binary offset=(%d, %d, %d), size=%d, dtype=%r, %s'
    # o is the element offset into a freshly allocated array; slicing by o
    # changes the memory alignment of the data actually handed out.
    for o in range(3):
        for s in range(o + 2, max(o + 3, max_size)):
            if type == 'unary':
                # inp is a factory so aliased variants get fresh buffers.
                inp = lambda : arange(s, dtype=dtype)[o:]
                out = empty((s,), dtype=dtype)[o:]
                yield out, inp(), ufmt % (o, o, s, dtype, 'out of place')
                yield inp(), inp(), ufmt % (o, o, s, dtype, 'in place')
                yield out[1:], inp()[:-1], ufmt % \
                    (o + 1, o, s - 1, dtype, 'out of place')
                yield out[:-1], inp()[1:], ufmt % \
                    (o, o + 1, s - 1, dtype, 'out of place')
                yield inp()[:-1], inp()[1:], ufmt % \
                    (o, o + 1, s - 1, dtype, 'aliased')
                yield inp()[1:], inp()[:-1], ufmt % \
                    (o + 1, o, s - 1, dtype, 'aliased')
            if type == 'binary':
                inp1 = lambda :arange(s, dtype=dtype)[o:]
                inp2 = lambda :arange(s, dtype=dtype)[o:]
                out = empty((s,), dtype=dtype)[o:]
                yield out, inp1(), inp2(),  bfmt % \
                    (o, o, o, s, dtype, 'out of place')
                yield inp1(), inp1(), inp2(), bfmt % \
                    (o, o, o, s, dtype, 'in place1')
                yield inp2(), inp1(), inp2(), bfmt % \
                    (o, o, o, s, dtype, 'in place2')
                yield out[1:], inp1()[:-1], inp2()[:-1], bfmt % \
                    (o + 1, o, o, s - 1, dtype, 'out of place')
                yield out[:-1], inp1()[1:], inp2()[:-1], bfmt % \
                    (o, o + 1, o, s - 1, dtype, 'out of place')
                yield out[:-1], inp1()[:-1], inp2()[1:], bfmt % \
                    (o, o, o + 1, s - 1, dtype, 'out of place')
                yield inp1()[1:], inp1()[:-1], inp2()[:-1], bfmt % \
                    (o + 1, o, o, s - 1, dtype, 'aliased')
                yield inp1()[:-1], inp1()[1:], inp2()[:-1], bfmt % \
                    (o, o + 1, o, s - 1, dtype, 'aliased')
                yield inp1()[:-1], inp1()[:-1], inp2()[1:], bfmt % \
                    (o, o, o + 1, s - 1, dtype, 'aliased')
class IgnoreException(Exception):
    """Exception that callers are expected to ignore, raised when the
    feature being exercised is disabled."""
| 34.088443 | 93 | 0.587072 |
67006b67a2509a30a3044282b493798a4473e26b | 3,743 | py | Python | app.py | janmasarik/xssable | 60857905d808710bb5ffa6e80c71e3c971177a51 | [
"MIT"
] | 2 | 2020-03-26T01:20:48.000Z | 2020-07-21T21:56:45.000Z | app.py | janmasarik/xssable | 60857905d808710bb5ffa6e80c71e3c971177a51 | [
"MIT"
] | null | null | null | app.py | janmasarik/xssable | 60857905d808710bb5ffa6e80c71e3c971177a51 | [
"MIT"
] | null | null | null | import re
import secrets
import pickle
# import os
import yaml
from flask import (
Flask,
redirect,
render_template,
render_template_string,
request,
session,
jsonify,
)
from flask_session import Session
from flask_seasurf import SeaSurf
from usermanager import UserManager
from blogmanager import BlogManager
SESSION_USERNAME = "username"
app = Flask(__name__)
app.config.update(
SECRET_KEY=secrets.token_hex(32),
SESSION_TYPE="filesystem",
SESSION_COOKIE_HTTPONLY=False,
)
Session(app)
usermanager = UserManager("users.json")
blogmanager = BlogManager("blogs.json")
csrf = SeaSurf(app)
def login_user(username):
session[SESSION_USERNAME] = username
return redirect("/", code=302)
def logged_in():
return session.get(SESSION_USERNAME)
def sanitize(text):
    # Prevent XSS
    # SECURITY(review): removing the literal substring "alert" is NOT an XSS
    # defense.  A single-pass replace is trivially bypassed (e.g.
    # "alalertert" collapses back to "alert"), and payloads need not contain
    # "alert" at all (prompt(), location=, event handlers, ...).  Proper
    # contextual output escaping (Jinja autoescaping / markupsafe.escape)
    # should be used instead.
    return text.replace("alert", "")
@app.route("/login", methods=["GET", "POST"])
def login():
if request.method == "GET":
return render_template("login.html")
# POST
username = request.form.get("username")
password = request.form.get("password")
if not (username and re.match(r"^\w+$", username)):
return redirect("/login#Please%20specify%20a%20valid%20username!", code=302)
# Login existing user
if usermanager.user_exists(username):
if usermanager.check_password(username, password):
return login_user(username)
else:
return redirect(
"/login#The%20specified%20password%20was%20incorrect!", code=302
)
# Register a new user
else:
if password:
usermanager.add(username, password)
return login_user(username)
else:
return redirect("/login#Please%20specify%20a%20password!", code=302)
@app.route("/logout")
def logout():
if logged_in():
session[SESSION_USERNAME] = None
return redirect("/", code=302)
@app.route("/users")
def users():
return render_template("users.html", users=usermanager.users.keys())
@app.route("/blogs")
def blogs():
    # Render the blog list for ?u=<username>; defaults to the logged-in user.
    if not logged_in():
        return redirect("/login", code=302)
    username = request.args.get("u")
    try:
        # SECURITY(review): pickle.loads() on a request parameter is insecure
        # deserialization -- a crafted payload executes arbitrary code during
        # unpickling.  Never unpickle untrusted input; this call's result is
        # not even used, so it should be removed entirely.
        pickle.loads(username)
    except Exception:
        pass
    if not username:
        username = session[SESSION_USERNAME]
    # Private posts are only included when viewing your own blog list.
    blogs = (
        blogmanager.get(username)
        if username != session[SESSION_USERNAME]
        else blogmanager.get(username, True)
    )
    return render_template("blogs.tpl", username=sanitize(username), blogs=blogs)
@app.route("/blogs/add", methods=["GET", "POST"])
def blogs_add():
if not logged_in():
return redirect("/login", code=302)
if request.method == "GET":
return render_template("blog_add.html")
# POST
title = request.form.get("title")
html = request.form.get("html")
if title and len(title) <= 24 and html:
blogmanager.add(
session[SESSION_USERNAME], title, html, request.form.get("private", False)
)
return redirect(f"/blogs?u={session[SESSION_USERNAME]}")
@app.route("/config")
def configure():
    # SECURITY(review): yaml.load() without an explicit SafeLoader can
    # construct arbitrary Python objects from user-supplied YAML (remote
    # code execution with PyYAML's full loader).  Use yaml.safe_load() for
    # untrusted input.
    yaml_config = yaml.load(request.args["config"])
    return render_template_string(
        "Config was loaded! \n {{ config }}", config=yaml_config
    )
@app.route("/pickled")
def pickled():
    # Load a pre-built pickle from the app's working directory and return it
    # as JSON.  NOTE(review): safe only while hello.pickle is trusted and
    # shipped with the app -- pickle.load executes code on malicious data,
    # so this file must never be writable by users.
    with open("hello.pickle", "rb") as pickled_file:
        lodaded_pickle = pickle.load(pickled_file)
    return jsonify(lodaded_pickle)
@app.route("/")
def home():
if not logged_in():
return redirect("/login", code=302)
return render_template("index.html", username=session[SESSION_USERNAME])
if __name__ == "__main__":
print(yaml.__version__)
app.run("0.0.0.0", debug=True)
| 22.548193 | 86 | 0.652952 |
aac5f2c3c7155bdb3e10de6d2369c420499727e3 | 1,334 | py | Python | engine/azure/vision_detect.py | furukawaMassiveReborn777/ChildChatbot | ebbe4fc94eb0a86db84ecb090c5240391ff244cf | [
"MIT"
] | 4 | 2019-06-26T10:16:47.000Z | 2022-01-27T08:13:57.000Z | engine/azure/vision_detect.py | furukawaMassiveReborn777/ChildChatbot | ebbe4fc94eb0a86db84ecb090c5240391ff244cf | [
"MIT"
] | null | null | null | engine/azure/vision_detect.py | furukawaMassiveReborn777/ChildChatbot | ebbe4fc94eb0a86db84ecb090c5240391ff244cf | [
"MIT"
] | null | null | null | # coding: utf-8
'''
AZURE PROC 2
object detection using azure api
'''
import requests, json, glob, sys
#import matplotlib.pyplot as plt
from PIL import Image
import settingaz, time
key_dict = json.load(open(settingaz.SECRET_FILE, 'r'))
#SEARCH_WORD = " ".join(settingaz.SEARCH_WORD_LIST)## USEAPP
search_word_underb = sys.argv[1]
SAVE_PAR_DIR = settingaz.SAVE_PAR_DIR
SAVE_DIR = SAVE_PAR_DIR + "/" + search_word_underb
OBJINFO_PATH = settingaz.OBJINFO_PATH
subscription_key = key_dict["vision_key"]
vision_base_url = key_dict["vision_endpoint"]+"vision/v2.0/"# "vision_endpoint" must end with '/'
analyze_url = vision_base_url + "analyze"
image_list = glob.glob(SAVE_DIR+"/*")
image_dict = {}
for image_path in image_list:
try:
image_data = open(image_path, "rb").read()
headers = {'Ocp-Apim-Subscription-Key': subscription_key,
'Content-Type': 'application/octet-stream'}
params = {'visualFeatures': 'Objects'}
response = requests.post(
analyze_url, headers=headers, params=params, data=image_data)
response.raise_for_status()
analysis = response.json()
image_dict[image_path] = analysis
except:
print("error exception")
time.sleep(1)
time.sleep(0.5)
json.dump(image_dict, open(OBJINFO_PATH, "w")) | 28.382979 | 97 | 0.687406 |
7f224e91e59465104a8da70112cd1daa0c0ecf0e | 2,234 | py | Python | azure-mgmt-network/azure/mgmt/network/v2018_01_01/models/azure_reachability_report_parameters_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 1 | 2021-09-07T18:36:04.000Z | 2021-09-07T18:36:04.000Z | azure-mgmt-network/azure/mgmt/network/v2018_01_01/models/azure_reachability_report_parameters_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 54 | 2016-03-25T17:25:01.000Z | 2018-10-22T17:27:54.000Z | azure-mgmt-network/azure/mgmt/network/v2018_01_01/models/azure_reachability_report_parameters_py3.py | JonathanGailliez/azure-sdk-for-python | f0f051bfd27f8ea512aea6fc0c3212ee9ee0029b | [
"MIT"
] | 2 | 2017-01-20T18:25:46.000Z | 2017-05-12T21:31:47.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AzureReachabilityReportParameters(Model):
"""Geographic and time constraints for Azure reachability report.
All required parameters must be populated in order to send to Azure.
:param provider_location: Required.
:type provider_location:
~azure.mgmt.network.v2018_01_01.models.AzureReachabilityReportLocation
:param providers: List of Internet service providers.
:type providers: list[str]
:param azure_locations: Optional Azure regions to scope the query to.
:type azure_locations: list[str]
:param start_time: Required. The start time for the Azure reachability
report.
:type start_time: datetime
:param end_time: Required. The end time for the Azure reachability report.
:type end_time: datetime
"""
_validation = {
'provider_location': {'required': True},
'start_time': {'required': True},
'end_time': {'required': True},
}
_attribute_map = {
'provider_location': {'key': 'providerLocation', 'type': 'AzureReachabilityReportLocation'},
'providers': {'key': 'providers', 'type': '[str]'},
'azure_locations': {'key': 'azureLocations', 'type': '[str]'},
'start_time': {'key': 'startTime', 'type': 'iso-8601'},
'end_time': {'key': 'endTime', 'type': 'iso-8601'},
}
def __init__(self, *, provider_location, start_time, end_time, providers=None, azure_locations=None, **kwargs) -> None:
super(AzureReachabilityReportParameters, self).__init__(**kwargs)
self.provider_location = provider_location
self.providers = providers
self.azure_locations = azure_locations
self.start_time = start_time
self.end_time = end_time
| 40.618182 | 123 | 0.645927 |
2dfce027383f35c6319998030a38486b3c18a64b | 557 | py | Python | src/data/descriptions/uniques/describe.py | dmitry-s-danilov/kaggle-house-prices-advanced-regression-techniques | ab5130ea6d5b1c7373a886b9289cf0fde4f7c27d | [
"MIT"
] | 1 | 2022-02-08T11:54:16.000Z | 2022-02-08T11:54:16.000Z | src/data/descriptions/uniques/describe.py | dmitry-s-danilov/kaggle-house-prices-advanced-regression-techniques | ab5130ea6d5b1c7373a886b9289cf0fde4f7c27d | [
"MIT"
] | null | null | null | src/data/descriptions/uniques/describe.py | dmitry-s-danilov/kaggle-house-prices-advanced-regression-techniques | ab5130ea6d5b1c7373a886b9289cf0fde4f7c27d | [
"MIT"
] | null | null | null | from typing import Union, Callable
from pandas import DataFrame
from ...tools import (
describe as describe__,
transform,
)
from .params import descriptors as descriptors__
def describe(
data: Union[DataFrame, dict],
descriptors: dict = descriptors__,
inverse: bool = False,
transformers: list[Union[Callable, tuple]] = None,
) -> DataFrame:
return transform(
data=describe__(
data=data,
descriptors=descriptors,
inverse=inverse,
),
transformers=transformers,
)
| 22.28 | 54 | 0.648115 |
8ba6c6931e7f9340214fed2bfcc51eb0a56dee77 | 821 | py | Python | setup.py | martius-lab/dynamic-rl-benchmarks | 25ed14dc88f5b0cc3733cb69fd4ac9508eb3106e | [
"MIT"
] | 3 | 2021-12-08T14:08:54.000Z | 2021-12-08T16:38:47.000Z | setup.py | martius-lab/dynamic-rl-benchmarks | 25ed14dc88f5b0cc3733cb69fd4ac9508eb3106e | [
"MIT"
] | null | null | null | setup.py | martius-lab/dynamic-rl-benchmarks | 25ed14dc88f5b0cc3733cb69fd4ac9508eb3106e | [
"MIT"
] | null | null | null | import pathlib
from setuptools import setup, find_packages
HERE = pathlib.Path(__file__).parent
README = (HERE / "README.md").read_text()
setup(name="dyn_rl_benchmarks",
version="1.0.3",
description="Reinforcement learning benchmark problems set in dynamic environments.",
author="Nico Gürtler",
author_email="nico.guertler@tuebingen.mpg.de",
long_description=README,
long_description_content_type="text/markdown",
license="MIT",
url="https://github.com/martius-lab/dynamic-rl-benchmarks",
keywords=["reinforcement learning", "reinforcement learning environmnts"],
packages=find_packages(),
package_data={'dyn_rl_benchmarks.envs.assets': ['drawbridge.obj', 'drawbridge.mtl']},
install_requires=["numpy", "gym", "roboball2d", "wavefront_reader"]
)
| 39.095238 | 91 | 0.714982 |
c60174564018162e0820ba56858cd38eb39116db | 27,762 | py | Python | sdk/python/pulumi_kubernetes/authorization/v1/outputs.py | polivbr/pulumi-kubernetes | 36a5fb34240a38a60b52a5f4e55e66e248d9305f | [
"Apache-2.0"
] | 277 | 2018-06-18T14:57:09.000Z | 2022-03-29T04:05:06.000Z | sdk/python/pulumi_kubernetes/authorization/v1/outputs.py | polivbr/pulumi-kubernetes | 36a5fb34240a38a60b52a5f4e55e66e248d9305f | [
"Apache-2.0"
] | 1,447 | 2018-06-20T00:58:34.000Z | 2022-03-31T21:28:43.000Z | sdk/python/pulumi_kubernetes/authorization/v1/outputs.py | polivbr/pulumi-kubernetes | 36a5fb34240a38a60b52a5f4e55e66e248d9305f | [
"Apache-2.0"
] | 95 | 2018-06-30T03:30:05.000Z | 2022-03-29T04:05:09.000Z | # coding=utf-8
# *** WARNING: this file was generated by pulumigen. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'NonResourceAttributes',
'NonResourceRule',
'ResourceAttributes',
'ResourceRule',
'SelfSubjectAccessReviewSpec',
'SelfSubjectRulesReviewSpec',
'SubjectAccessReviewSpec',
'SubjectAccessReviewStatus',
'SubjectRulesReviewStatus',
]
@pulumi.output_type
class NonResourceAttributes(dict):
"""
NonResourceAttributes includes the authorization attributes available for non-resource requests to the Authorizer interface
"""
def __init__(__self__, *,
path: Optional[str] = None,
verb: Optional[str] = None):
"""
NonResourceAttributes includes the authorization attributes available for non-resource requests to the Authorizer interface
:param str path: Path is the URL path of the request
:param str verb: Verb is the standard HTTP verb
"""
if path is not None:
pulumi.set(__self__, "path", path)
if verb is not None:
pulumi.set(__self__, "verb", verb)
@property
@pulumi.getter
def path(self) -> Optional[str]:
"""
Path is the URL path of the request
"""
return pulumi.get(self, "path")
@property
@pulumi.getter
def verb(self) -> Optional[str]:
"""
Verb is the standard HTTP verb
"""
return pulumi.get(self, "verb")
@pulumi.output_type
class NonResourceRule(dict):
"""
NonResourceRule holds information that describes a rule for the non-resource
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "nonResourceURLs":
suggest = "non_resource_urls"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NonResourceRule. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NonResourceRule.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NonResourceRule.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
verbs: Sequence[str],
non_resource_urls: Optional[Sequence[str]] = None):
"""
NonResourceRule holds information that describes a rule for the non-resource
:param Sequence[str] verbs: Verb is a list of kubernetes non-resource API verbs, like: get, post, put, delete, patch, head, options. "*" means all.
:param Sequence[str] non_resource_urls: NonResourceURLs is a set of partial urls that a user should have access to. *s are allowed, but only as the full, final step in the path. "*" means all.
"""
pulumi.set(__self__, "verbs", verbs)
if non_resource_urls is not None:
pulumi.set(__self__, "non_resource_urls", non_resource_urls)
@property
@pulumi.getter
def verbs(self) -> Sequence[str]:
"""
Verb is a list of kubernetes non-resource API verbs, like: get, post, put, delete, patch, head, options. "*" means all.
"""
return pulumi.get(self, "verbs")
@property
@pulumi.getter(name="nonResourceURLs")
def non_resource_urls(self) -> Optional[Sequence[str]]:
"""
NonResourceURLs is a set of partial urls that a user should have access to. *s are allowed, but only as the full, final step in the path. "*" means all.
"""
return pulumi.get(self, "non_resource_urls")
@pulumi.output_type
class ResourceAttributes(dict):
"""
ResourceAttributes includes the authorization attributes available for resource requests to the Authorizer interface
"""
def __init__(__self__, *,
group: Optional[str] = None,
name: Optional[str] = None,
namespace: Optional[str] = None,
resource: Optional[str] = None,
subresource: Optional[str] = None,
verb: Optional[str] = None,
version: Optional[str] = None):
"""
ResourceAttributes includes the authorization attributes available for resource requests to the Authorizer interface
:param str group: Group is the API Group of the Resource. "*" means all.
:param str name: Name is the name of the resource being requested for a "get" or deleted for a "delete". "" (empty) means all.
:param str namespace: Namespace is the namespace of the action being requested. Currently, there is no distinction between no namespace and all namespaces "" (empty) is defaulted for LocalSubjectAccessReviews "" (empty) is empty for cluster-scoped resources "" (empty) means "all" for namespace scoped resources from a SubjectAccessReview or SelfSubjectAccessReview
:param str resource: Resource is one of the existing resource types. "*" means all.
:param str subresource: Subresource is one of the existing resource types. "" means none.
:param str verb: Verb is a kubernetes resource API verb, like: get, list, watch, create, update, delete, proxy. "*" means all.
:param str version: Version is the API Version of the Resource. "*" means all.
"""
if group is not None:
pulumi.set(__self__, "group", group)
if name is not None:
pulumi.set(__self__, "name", name)
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
if resource is not None:
pulumi.set(__self__, "resource", resource)
if subresource is not None:
pulumi.set(__self__, "subresource", subresource)
if verb is not None:
pulumi.set(__self__, "verb", verb)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def group(self) -> Optional[str]:
"""
Group is the API Group of the Resource. "*" means all.
"""
return pulumi.get(self, "group")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
Name is the name of the resource being requested for a "get" or deleted for a "delete". "" (empty) means all.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def namespace(self) -> Optional[str]:
"""
Namespace is the namespace of the action being requested. Currently, there is no distinction between no namespace and all namespaces "" (empty) is defaulted for LocalSubjectAccessReviews "" (empty) is empty for cluster-scoped resources "" (empty) means "all" for namespace scoped resources from a SubjectAccessReview or SelfSubjectAccessReview
"""
return pulumi.get(self, "namespace")
@property
@pulumi.getter
def resource(self) -> Optional[str]:
"""
Resource is one of the existing resource types. "*" means all.
"""
return pulumi.get(self, "resource")
@property
@pulumi.getter
def subresource(self) -> Optional[str]:
"""
Subresource is one of the existing resource types. "" means none.
"""
return pulumi.get(self, "subresource")
@property
@pulumi.getter
def verb(self) -> Optional[str]:
"""
Verb is a kubernetes resource API verb, like: get, list, watch, create, update, delete, proxy. "*" means all.
"""
return pulumi.get(self, "verb")
@property
@pulumi.getter
def version(self) -> Optional[str]:
"""
Version is the API Version of the Resource. "*" means all.
"""
return pulumi.get(self, "version")
@pulumi.output_type
class ResourceRule(dict):
"""
ResourceRule is the list of actions the subject is allowed to perform on resources. The list ordering isn't significant, may contain duplicates, and possibly be incomplete.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "apiGroups":
suggest = "api_groups"
elif key == "resourceNames":
suggest = "resource_names"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ResourceRule. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ResourceRule.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ResourceRule.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
verbs: Sequence[str],
api_groups: Optional[Sequence[str]] = None,
resource_names: Optional[Sequence[str]] = None,
resources: Optional[Sequence[str]] = None):
"""
ResourceRule is the list of actions the subject is allowed to perform on resources. The list ordering isn't significant, may contain duplicates, and possibly be incomplete.
:param Sequence[str] verbs: Verb is a list of kubernetes resource API verbs, like: get, list, watch, create, update, delete, proxy. "*" means all.
:param Sequence[str] api_groups: APIGroups is the name of the APIGroup that contains the resources. If multiple API groups are specified, any action requested against one of the enumerated resources in any API group will be allowed. "*" means all.
:param Sequence[str] resource_names: ResourceNames is an optional white list of names that the rule applies to. An empty set means that everything is allowed. "*" means all.
:param Sequence[str] resources: Resources is a list of resources this rule applies to. "*" means all in the specified apiGroups.
"*/foo" represents the subresource 'foo' for all resources in the specified apiGroups.
"""
pulumi.set(__self__, "verbs", verbs)
if api_groups is not None:
pulumi.set(__self__, "api_groups", api_groups)
if resource_names is not None:
pulumi.set(__self__, "resource_names", resource_names)
if resources is not None:
pulumi.set(__self__, "resources", resources)
@property
@pulumi.getter
def verbs(self) -> Sequence[str]:
"""
Verb is a list of kubernetes resource API verbs, like: get, list, watch, create, update, delete, proxy. "*" means all.
"""
return pulumi.get(self, "verbs")
@property
@pulumi.getter(name="apiGroups")
def api_groups(self) -> Optional[Sequence[str]]:
"""
APIGroups is the name of the APIGroup that contains the resources. If multiple API groups are specified, any action requested against one of the enumerated resources in any API group will be allowed. "*" means all.
"""
return pulumi.get(self, "api_groups")
@property
@pulumi.getter(name="resourceNames")
def resource_names(self) -> Optional[Sequence[str]]:
"""
ResourceNames is an optional white list of names that the rule applies to. An empty set means that everything is allowed. "*" means all.
"""
return pulumi.get(self, "resource_names")
@property
@pulumi.getter
def resources(self) -> Optional[Sequence[str]]:
"""
Resources is a list of resources this rule applies to. "*" means all in the specified apiGroups.
"*/foo" represents the subresource 'foo' for all resources in the specified apiGroups.
"""
return pulumi.get(self, "resources")
@pulumi.output_type
class SelfSubjectAccessReviewSpec(dict):
"""
SelfSubjectAccessReviewSpec is a description of the access request. Exactly one of ResourceAuthorizationAttributes and NonResourceAuthorizationAttributes must be set
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "nonResourceAttributes":
suggest = "non_resource_attributes"
elif key == "resourceAttributes":
suggest = "resource_attributes"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SelfSubjectAccessReviewSpec. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SelfSubjectAccessReviewSpec.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SelfSubjectAccessReviewSpec.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
non_resource_attributes: Optional['outputs.NonResourceAttributes'] = None,
resource_attributes: Optional['outputs.ResourceAttributes'] = None):
"""
SelfSubjectAccessReviewSpec is a description of the access request. Exactly one of ResourceAuthorizationAttributes and NonResourceAuthorizationAttributes must be set
:param 'NonResourceAttributesArgs' non_resource_attributes: NonResourceAttributes describes information for a non-resource access request
:param 'ResourceAttributesArgs' resource_attributes: ResourceAuthorizationAttributes describes information for a resource access request
"""
if non_resource_attributes is not None:
pulumi.set(__self__, "non_resource_attributes", non_resource_attributes)
if resource_attributes is not None:
pulumi.set(__self__, "resource_attributes", resource_attributes)
@property
@pulumi.getter(name="nonResourceAttributes")
def non_resource_attributes(self) -> Optional['outputs.NonResourceAttributes']:
"""
NonResourceAttributes describes information for a non-resource access request
"""
return pulumi.get(self, "non_resource_attributes")
@property
@pulumi.getter(name="resourceAttributes")
def resource_attributes(self) -> Optional['outputs.ResourceAttributes']:
"""
ResourceAuthorizationAttributes describes information for a resource access request
"""
return pulumi.get(self, "resource_attributes")
@pulumi.output_type
class SelfSubjectRulesReviewSpec(dict):
"""
SelfSubjectRulesReviewSpec defines the specification for SelfSubjectRulesReview.
"""
def __init__(__self__, *,
namespace: Optional[str] = None):
"""
SelfSubjectRulesReviewSpec defines the specification for SelfSubjectRulesReview.
:param str namespace: Namespace to evaluate rules for. Required.
"""
if namespace is not None:
pulumi.set(__self__, "namespace", namespace)
@property
@pulumi.getter
def namespace(self) -> Optional[str]:
"""
Namespace to evaluate rules for. Required.
"""
return pulumi.get(self, "namespace")
@pulumi.output_type
class SubjectAccessReviewSpec(dict):
"""
SubjectAccessReviewSpec is a description of the access request. Exactly one of ResourceAuthorizationAttributes and NonResourceAuthorizationAttributes must be set
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "nonResourceAttributes":
suggest = "non_resource_attributes"
elif key == "resourceAttributes":
suggest = "resource_attributes"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SubjectAccessReviewSpec. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SubjectAccessReviewSpec.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SubjectAccessReviewSpec.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
extra: Optional[Mapping[str, Sequence[str]]] = None,
groups: Optional[Sequence[str]] = None,
non_resource_attributes: Optional['outputs.NonResourceAttributes'] = None,
resource_attributes: Optional['outputs.ResourceAttributes'] = None,
uid: Optional[str] = None,
user: Optional[str] = None):
"""
SubjectAccessReviewSpec is a description of the access request. Exactly one of ResourceAuthorizationAttributes and NonResourceAuthorizationAttributes must be set
:param Mapping[str, Sequence[str]] extra: Extra corresponds to the user.Info.GetExtra() method from the authenticator. Since that is input to the authorizer it needs a reflection here.
:param Sequence[str] groups: Groups is the groups you're testing for.
:param 'NonResourceAttributesArgs' non_resource_attributes: NonResourceAttributes describes information for a non-resource access request
:param 'ResourceAttributesArgs' resource_attributes: ResourceAuthorizationAttributes describes information for a resource access request
:param str uid: UID information about the requesting user.
:param str user: User is the user you're testing for. If you specify "User" but not "Groups", then is it interpreted as "What if User were not a member of any groups
"""
if extra is not None:
pulumi.set(__self__, "extra", extra)
if groups is not None:
pulumi.set(__self__, "groups", groups)
if non_resource_attributes is not None:
pulumi.set(__self__, "non_resource_attributes", non_resource_attributes)
if resource_attributes is not None:
pulumi.set(__self__, "resource_attributes", resource_attributes)
if uid is not None:
pulumi.set(__self__, "uid", uid)
if user is not None:
pulumi.set(__self__, "user", user)
@property
@pulumi.getter
def extra(self) -> Optional[Mapping[str, Sequence[str]]]:
"""
Extra corresponds to the user.Info.GetExtra() method from the authenticator. Since that is input to the authorizer it needs a reflection here.
"""
return pulumi.get(self, "extra")
@property
@pulumi.getter
def groups(self) -> Optional[Sequence[str]]:
"""
Groups is the groups you're testing for.
"""
return pulumi.get(self, "groups")
@property
@pulumi.getter(name="nonResourceAttributes")
def non_resource_attributes(self) -> Optional['outputs.NonResourceAttributes']:
"""
NonResourceAttributes describes information for a non-resource access request
"""
return pulumi.get(self, "non_resource_attributes")
@property
@pulumi.getter(name="resourceAttributes")
def resource_attributes(self) -> Optional['outputs.ResourceAttributes']:
"""
ResourceAuthorizationAttributes describes information for a resource access request
"""
return pulumi.get(self, "resource_attributes")
@property
@pulumi.getter
def uid(self) -> Optional[str]:
"""
UID information about the requesting user.
"""
return pulumi.get(self, "uid")
@property
@pulumi.getter
def user(self) -> Optional[str]:
"""
User is the user you're testing for. If you specify "User" but not "Groups", then is it interpreted as "What if User were not a member of any groups
"""
return pulumi.get(self, "user")
@pulumi.output_type
class SubjectAccessReviewStatus(dict):
"""
SubjectAccessReviewStatus
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "evaluationError":
suggest = "evaluation_error"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SubjectAccessReviewStatus. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SubjectAccessReviewStatus.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SubjectAccessReviewStatus.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
allowed: bool,
denied: Optional[bool] = None,
evaluation_error: Optional[str] = None,
reason: Optional[str] = None):
"""
SubjectAccessReviewStatus
:param bool allowed: Allowed is required. True if the action would be allowed, false otherwise.
:param bool denied: Denied is optional. True if the action would be denied, otherwise false. If both allowed is false and denied is false, then the authorizer has no opinion on whether to authorize the action. Denied may not be true if Allowed is true.
:param str evaluation_error: EvaluationError is an indication that some error occurred during the authorization check. It is entirely possible to get an error and be able to continue determine authorization status in spite of it. For instance, RBAC can be missing a role, but enough roles are still present and bound to reason about the request.
:param str reason: Reason is optional. It indicates why a request was allowed or denied.
"""
pulumi.set(__self__, "allowed", allowed)
if denied is not None:
pulumi.set(__self__, "denied", denied)
if evaluation_error is not None:
pulumi.set(__self__, "evaluation_error", evaluation_error)
if reason is not None:
pulumi.set(__self__, "reason", reason)
@property
@pulumi.getter
def allowed(self) -> bool:
"""
Allowed is required. True if the action would be allowed, false otherwise.
"""
return pulumi.get(self, "allowed")
@property
@pulumi.getter
def denied(self) -> Optional[bool]:
"""
Denied is optional. True if the action would be denied, otherwise false. If both allowed is false and denied is false, then the authorizer has no opinion on whether to authorize the action. Denied may not be true if Allowed is true.
"""
return pulumi.get(self, "denied")
@property
@pulumi.getter(name="evaluationError")
def evaluation_error(self) -> Optional[str]:
"""
EvaluationError is an indication that some error occurred during the authorization check. It is entirely possible to get an error and be able to continue determine authorization status in spite of it. For instance, RBAC can be missing a role, but enough roles are still present and bound to reason about the request.
"""
return pulumi.get(self, "evaluation_error")
@property
@pulumi.getter
def reason(self) -> Optional[str]:
"""
Reason is optional. It indicates why a request was allowed or denied.
"""
return pulumi.get(self, "reason")
@pulumi.output_type
class SubjectRulesReviewStatus(dict):
"""
SubjectRulesReviewStatus contains the result of a rules check. This check can be incomplete depending on the set of authorizers the server is configured with and any errors experienced during evaluation. Because authorization rules are additive, if a rule appears in a list it's safe to assume the subject has that permission, even if that list is incomplete.
"""
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "nonResourceRules":
suggest = "non_resource_rules"
elif key == "resourceRules":
suggest = "resource_rules"
elif key == "evaluationError":
suggest = "evaluation_error"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in SubjectRulesReviewStatus. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
SubjectRulesReviewStatus.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
SubjectRulesReviewStatus.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
incomplete: bool,
non_resource_rules: Sequence['outputs.NonResourceRule'],
resource_rules: Sequence['outputs.ResourceRule'],
evaluation_error: Optional[str] = None):
"""
SubjectRulesReviewStatus contains the result of a rules check. This check can be incomplete depending on the set of authorizers the server is configured with and any errors experienced during evaluation. Because authorization rules are additive, if a rule appears in a list it's safe to assume the subject has that permission, even if that list is incomplete.
:param bool incomplete: Incomplete is true when the rules returned by this call are incomplete. This is most commonly encountered when an authorizer, such as an external authorizer, doesn't support rules evaluation.
:param Sequence['NonResourceRuleArgs'] non_resource_rules: NonResourceRules is the list of actions the subject is allowed to perform on non-resources. The list ordering isn't significant, may contain duplicates, and possibly be incomplete.
:param Sequence['ResourceRuleArgs'] resource_rules: ResourceRules is the list of actions the subject is allowed to perform on resources. The list ordering isn't significant, may contain duplicates, and possibly be incomplete.
:param str evaluation_error: EvaluationError can appear in combination with Rules. It indicates an error occurred during rule evaluation, such as an authorizer that doesn't support rule evaluation, and that ResourceRules and/or NonResourceRules may be incomplete.
"""
pulumi.set(__self__, "incomplete", incomplete)
pulumi.set(__self__, "non_resource_rules", non_resource_rules)
pulumi.set(__self__, "resource_rules", resource_rules)
if evaluation_error is not None:
pulumi.set(__self__, "evaluation_error", evaluation_error)
@property
@pulumi.getter
def incomplete(self) -> bool:
"""
Incomplete is true when the rules returned by this call are incomplete. This is most commonly encountered when an authorizer, such as an external authorizer, doesn't support rules evaluation.
"""
return pulumi.get(self, "incomplete")
@property
@pulumi.getter(name="nonResourceRules")
def non_resource_rules(self) -> Sequence['outputs.NonResourceRule']:
"""
NonResourceRules is the list of actions the subject is allowed to perform on non-resources. The list ordering isn't significant, may contain duplicates, and possibly be incomplete.
"""
return pulumi.get(self, "non_resource_rules")
@property
@pulumi.getter(name="resourceRules")
def resource_rules(self) -> Sequence['outputs.ResourceRule']:
"""
ResourceRules is the list of actions the subject is allowed to perform on resources. The list ordering isn't significant, may contain duplicates, and possibly be incomplete.
"""
return pulumi.get(self, "resource_rules")
@property
@pulumi.getter(name="evaluationError")
def evaluation_error(self) -> Optional[str]:
"""
EvaluationError can appear in combination with Rules. It indicates an error occurred during rule evaluation, such as an authorizer that doesn't support rule evaluation, and that ResourceRules and/or NonResourceRules may be incomplete.
"""
return pulumi.get(self, "evaluation_error")
| 45.141463 | 374 | 0.666018 |
6f2e6617b6139f24c14ff060adfb7a392cfd814a | 56,243 | py | Python | abs_templates_ec/serdes/base.py | boblinchuan/BAG2_TEMPLATES_EC | e0e4a41c1780edb035cd619b9cea2e27e3fc5f51 | [
"BSD-3-Clause"
] | 1 | 2020-06-02T22:41:46.000Z | 2020-06-02T22:41:46.000Z | abs_templates_ec/serdes/base.py | boblinchuan/BAG2_TEMPLATES_EC | e0e4a41c1780edb035cd619b9cea2e27e3fc5f51 | [
"BSD-3-Clause"
] | 12 | 2018-10-23T18:08:37.000Z | 2022-02-24T10:51:34.000Z | abs_templates_ec/serdes/base.py | boblinchuan/BAG2_TEMPLATES_EC | e0e4a41c1780edb035cd619b9cea2e27e3fc5f51 | [
"BSD-3-Clause"
] | 18 | 2018-07-14T01:36:09.000Z | 2021-05-25T18:38:00.000Z | # -*- coding: utf-8 -*-
"""This module defines SerdesRXBase, the base class of all analog high speed link templates.
"""
import abc
from typing import Dict, Any, List, Optional, Tuple, Union
from bag.layout.routing import WireArray, RoutingGrid
from ..analog_core import AnalogBase, AnalogBaseInfo
wtype = Union[float, int]
class SerdesRXBaseInfo(AnalogBaseInfo):
"""A class that calculates informations to assist in SerdesRXBase layout calculations.
Parameters
----------
grid : RoutingGrid
the RoutingGrid object.
lch : float
the channel length of AnalogBase, in meters.
guard_ring_nf : int
guard ring width in number of fingers. 0 to disable.
top_layer : Optional[int]
the top level routing layer ID.
end_mode : int
right/left/top/bottom end mode flag. This is a 4-bit integer. If bit 0 (LSB) is 1, then
we assume there are no blocks abutting the bottom. If bit 1 is 1, we assume there are no
blocks abutting the top. bit 2 and bit 3 (MSB) corresponds to left and right, respectively.
The default value is 15, which means we assume this AnalogBase is surrounded by empty spaces.
min_fg_sep : int
minimum number of separation fingers.
"""
def __init__(self, grid, lch, guard_ring_nf, top_layer=None, end_mode=15, min_fg_sep=0):
# type: (RoutingGrid, float, int, Optional[int], int, int) -> None
super(SerdesRXBaseInfo, self).__init__(grid, lch, guard_ring_nf,
top_layer=top_layer, end_mode=end_mode, min_fg_sep=min_fg_sep)
def get_gm_info(self, fg_params, flip_sd=False):
# type: (Dict[str, int]) -> Dict[str, Any]
"""Return Gm layout information dictionary.
Parameters
----------
fg_params : Dict[str, int]
a dictionary containing number of fingers per transistor type.
Possible entries are:
but :
number of fingers of butterfly transistor.
casc :
number of fingers of cascode transistor.
in :
nummber of fingers of input transistor.
sw :
number of fingers of tail switch transistor.
en :
number of fingers of enable transistor.
tail :
number of fingers of tail bias transistor.
sep :
number of fingers used as separation between P and N side.
min :
minimum number of fingers for this circuit.
flip_sd : bool
True to flip source/drain connections.
Returns
-------
info : Dict[str, Any]
the Gm stage layout information dictionary.
"""
fg_min = fg_params.get('min', 0)
valid_keys = ['but', 'casc', 'in', 'sw', 'en', 'tail']
fg_max = max((fg_params.get(key, 0) for key in valid_keys))
fg_ref = fg_params.get('ref', 0)
if fg_ref > 0:
fg_max = max(fg_max, fg_ref + fg_params['tail'])
fg_tot = fg_max * 2 + self.min_fg_sep
if fg_tot < fg_min:
# add dummies to get to fg_min
# TODO: figure out when to even/not even depending on technology
if (fg_min - fg_tot) % 4 != 0:
# this code makes sure number of dummies is always even
fg_min = fg_min + 4 - ((fg_min - fg_tot) % 4)
nduml = ndumr = (fg_min - fg_tot) // 2
fg_tot = fg_min
else:
nduml = ndumr = 0
# determine output source/drain type.
fg_but = fg_params.get('but', 0)
if (fg_but // 2) % 2 == 1:
out_type = 's'
else:
out_type = 'd'
if flip_sd:
out_type = 's' if out_type == 'd' else 's'
results = dict(
fg_tot=fg_tot,
fg_max=fg_max,
fg_sep=self.min_fg_sep,
nduml=nduml,
ndumr=ndumr,
out_type=out_type,
)
# calculate column offsets.
col_offsets = {}
for name in ('but', 'casc', 'in', 'sw', 'en', 'tail'):
fg = fg_params.get(name, 0)
if fg > 0:
col_offsets[name] = (fg_max - fg) + nduml
results['col_offsets'] = col_offsets
return results
def get_diffamp_info(self, fg_params, flip_sd=False):
# type: (Dict[str, int]) -> Dict[str, Any]
"""Return DiffAmp layout information dictionary.
Parameters
----------
fg_params : Dict[str, int]
a dictionary containing number of fingers per transistor type.
Possible entries are:
load
number of fingers of load transistor. Only one of load/offset can be nonzero.
offset
number of fingers of offset cancellation transistor. Only one of load/offset can be nonzero.
but
number of fingers of butterfly transistor.
casc
number of fingers of cascode transistor.
in
nummber of fingers of input transistor.
sw
number of fingers of tail switch transistor.
en
number of fingers of enable transistor.
tail
number of fingers of tail bias transistor.
sep
number of fingers used as separation between P and N side.
min
minimum number of fingers for this circuit.
flip_sd : bool
True to flip source/drain connections.
Returns
-------
info : Dict[str, Any]
the DiffAmp stage layout information dictionary.
"""
fg_min = fg_params.get('min', 0)
fg_load = fg_params.get('load', 0)
fg_offset = fg_params.get('offset', 0)
fg_pmos = max(fg_load, fg_offset)
fg_pmos_tot = 2 * fg_pmos + self.min_fg_sep
# this guarantees fg_gm_tot >= fg_load_tot
fg_min = max(fg_min, fg_pmos_tot)
gm_fg_params = fg_params.copy()
gm_fg_params['min'] = fg_min
gm_info = self.get_gm_info(gm_fg_params, flip_sd=flip_sd)
fg_gm_tot = gm_info['fg_tot']
nduml_pmos = (fg_gm_tot - fg_pmos_tot) // 2
ndumr_pmos = fg_gm_tot - fg_pmos_tot - nduml_pmos
results = dict(
fg_tot=fg_gm_tot,
fg_sep=self.min_fg_sep,
fg_min=fg_min,
nduml_pmos=nduml_pmos,
ndumr_pmos=ndumr_pmos,
out_type=gm_info['out_type'],
)
return results
def get_sampler_info(self, fg_params):
# type: (Dict[str, int]) -> Dict[str, Any]
"""Return sampler layout information dictionary.
Parameters
----------
fg_params : Dict[str, int]
a dictionary containing number of fingers per transistor type.
Possible entries are:
sample :
number of fingers of sample transistor.
min :
minimum number of fingers for this circuit.
Returns
-------
info : Dict[str, Any]
the DiffAmp stage layout information dictionary.
"""
fg_min = fg_params.get('min', 0)
fg_samp = fg_params['sample']
fg_pmos_tot = 2 * fg_samp + self.min_fg_sep
fg_tot = max(fg_min, fg_pmos_tot)
nduml = (fg_tot - fg_pmos_tot) // 2
results = dict(
nduml=nduml,
ndumr=fg_tot - fg_pmos_tot - nduml,
fg_tot=fg_tot,
fg_sep=self.min_fg_sep,
fg_min=fg_min,
)
return results
def get_summer_info(self, fg_load, gm_fg_list, gm_sep_list=None, flip_sd_list=None):
# type: (int, List[Dict[str, int]], Optional[List[int]], Optional[List[bool]]) -> Dict[str, Any]
"""Return GmSummer layout information dictionary.
Parameters
----------
fg_load : int
number of pmos load fingers (single-sided).
gm_fg_list : List[Dict[str, int]]
list of Gm parameter dictionaries.
gm_sep_list : Optional[List[int]]
list of number of separator fingers between Gm stages.
Defaults to minimum.
flip_sd_list : Optional[List[bool]]
list of whether to flip source/drain connections for each Gm cell.
Defaults to False.
Returns
-------
info : Dict[str, Any]
the GmSummer stage layout information dictionary.
"""
if flip_sd_list is None:
flip_sd_list = [False] * (len(gm_fg_list))
elif len(flip_sd_list) != len(gm_fg_list):
raise ValueError('flip_sd_list length mismatch')
if gm_sep_list is None:
gm_sep_list = [self.min_fg_sep] * (len(gm_fg_list) - 1)
else:
# error checking
if len(gm_sep_list) != len(gm_fg_list) - 1:
raise ValueError('gm_sep_list length mismatch')
gm_sep_list = [max(self.min_fg_sep, val) for val in gm_sep_list]
# append dummy value so we can use zip later.
gm_sep_list.append(0)
gm_fg_cum_list = []
gm_fg_tot = 0
fg_load_list = []
for gm_fg_dict, flip_sd in zip(gm_fg_list, flip_sd_list):
gm_info = self.get_gm_info(gm_fg_dict, flip_sd=flip_sd)
cur_fg_max = (gm_info['fg_max'] - gm_info['fg_sep']) // 2
gm_fg_tot += cur_fg_max
gm_fg_cum_list.append(cur_fg_max)
cur_fg_tot = (gm_info['fg_tot'] - gm_info['fg_sep']) // 2
if fg_load > 0:
cur_fg_load = min(fg_load, cur_fg_tot)
fg_load_list.append(cur_fg_load)
fg_load -= cur_fg_load
else:
fg_load_list.append(0)
# get each diffamp info and calculate total number of fingers.
fg_tot = 0
amp_info_list = []
gm_offsets = []
for gm_fg_dict, fg_load, fg_sep_gm, flip_sd in zip(gm_fg_list, fg_load_list, gm_sep_list, flip_sd_list):
gm_offsets.append(fg_tot)
amp_fg_dict = gm_fg_dict.copy()
amp_fg_dict['load'] = fg_load
amp_info = self.get_diffamp_info(amp_fg_dict, flip_sd=flip_sd)
fg_tot += amp_info['fg_tot'] + fg_sep_gm
amp_info_list.append(amp_info)
results = dict(
fg_tot=fg_tot,
gm_sep_list=gm_sep_list,
gm_offsets=gm_offsets,
fg_load_list=fg_load_list,
amp_info_list=amp_info_list,
)
return results
    def get_summer_offset_info(self, fg_load, fg_offset, gm_fg_list, gm_sep_list=None, flip_sd_list=None):
        # type: (int, int, List[Dict[str, int]], Optional[List[int]], Optional[List[bool]]) -> Dict[str, Any]
        """Return GmSummerOffset layout information dictionary.

        Distributes the single-sided pmos offset-cancellation fingers and load fingers
        greedily across the Gm stages (offset fingers first, then load fingers), then
        computes per-stage diffamp information and the total summer width.

        Parameters
        ----------
        fg_load : int
            number of pmos load fingers (single-sided).
        fg_offset : int
            number of pmos offset cancellation fingers (single-sided).
        gm_fg_list : List[Dict[str, int]]
            list of Gm parameter dictionaries.
        gm_sep_list : Optional[List[int]]
            list of number of separator fingers between Gm stages.
            Defaults to minimum.
        flip_sd_list : Optional[List[bool]]
            list of whether to flip source/drain connections for each Gm cell.
            Defaults to False.

        Returns
        -------
        info : Dict[str, Any]
            the GmSummerOffset stage layout information dictionary.
        """
        if flip_sd_list is None:
            flip_sd_list = [False] * (len(gm_fg_list))
        elif len(flip_sd_list) != len(gm_fg_list):
            raise ValueError('flip_sd_list length mismatch')
        if gm_sep_list is None:
            gm_sep_list = [self.min_fg_sep] * (len(gm_fg_list) - 1)
        else:
            # error checking
            if len(gm_sep_list) != len(gm_fg_list) - 1:
                raise ValueError('gm_sep_list length mismatch')
            gm_sep_list = [max(self.min_fg_sep, val) for val in gm_sep_list]
        # append dummy value so we can use zip later.
        gm_sep_list.append(0)
        # use all offset cancellation fingers first, then load fingers
        # (greedy per-stage allocation: fg_offset and fg_load are depleted in place)
        fg_load_list = []
        fg_offset_list = []
        for gm_fg_dict, flip_sd in zip(gm_fg_list, flip_sd_list):
            gm_info = self.get_gm_info(gm_fg_dict, flip_sd=flip_sd)
            # single-sided number of fingers available in this Gm stage
            cur_fg_tot = (gm_info['fg_tot'] - gm_info['fg_sep']) // 2
            if fg_offset > 0:
                cur_fg_offset = min(fg_offset, cur_fg_tot)
                fg_offset_list.append(cur_fg_offset)
                fg_offset -= cur_fg_offset
                fg_load_list.append(0)
            elif fg_load > 0:
                cur_fg_load = min(fg_load, cur_fg_tot)
                fg_load_list.append(cur_fg_load)
                fg_load -= cur_fg_load
                fg_offset_list.append(0)
            else:
                fg_load_list.append(0)
                fg_offset_list.append(0)
        # get each diffamp info and calculate total number of fingers.
        fg_tot = 0
        amp_info_list = []
        gm_offsets = []
        for gm_fg_dict, fg_load, fg_offset, fg_sep_gm, flip_sd in \
                zip(gm_fg_list, fg_load_list, fg_offset_list, gm_sep_list, flip_sd_list):
            gm_offsets.append(fg_tot)
            amp_fg_dict = gm_fg_dict.copy()
            # by construction a stage has either offset fingers or load fingers, never both
            if fg_offset == 0:
                amp_fg_dict['load'] = fg_load
            else:
                amp_fg_dict['offset'] = fg_offset
            amp_info = self.get_diffamp_info(amp_fg_dict, flip_sd=flip_sd)
            fg_tot += amp_info['fg_tot'] + fg_sep_gm
            amp_info_list.append(amp_info)
        results = dict(
            fg_tot=fg_tot,
            gm_sep_list=gm_sep_list,
            gm_offsets=gm_offsets,
            fg_load_list=fg_load_list,
            fg_offset_list=fg_offset_list,
            amp_info_list=amp_info_list,
        )
        return results
# noinspection PyAbstractClass
class SerdesRXBase(AnalogBase, metaclass=abc.ABCMeta):
"""Subclass of AmplifierBase that draws serdes circuits.
To use this class, :py:meth:`draw_rows` must be the first function called,
which will call :py:meth:`draw_base` for you with the right arguments.
Parameters
----------
temp_db : :class:`bag.layout.template.TemplateDB`
the template database.
lib_name : str
the layout library name.
params : dict[str, any]
the parameter values.
used_names : set[str]
a set of already used cell names.
**kwargs
optional parameters. See documentation of
:class:`bag.layout.template.TemplateBase` for details.
"""
def __init__(self, temp_db, lib_name, params, used_names, **kwargs):
super(SerdesRXBase, self).__init__(temp_db, lib_name, params, used_names, **kwargs)
self._nrow_idx = None
self._serdes_info = None # type: SerdesRXBaseInfo
    @property
    def layout_info(self):
        # type: () -> SerdesRXBaseInfo
        """The SerdesRXBaseInfo object for this template; None until draw_rows() is called."""
        return self._serdes_info
def get_nmos_row_index(self, name):
"""Returns the index of the given nmos row type."""
return self._nrow_idx.get(name, -1)
def _get_gm_input_track_index(self, gate_locs, track_width, diff_space):
in_ntr = self.get_num_tracks('nch', self._nrow_idx['in'], 'g')
inp_tr = in_ntr - (track_width + 1) / 2
inn_tr = inp_tr - track_width - diff_space
ptr_idx = self.get_track_index('nch', self._nrow_idx['in'], 'g', gate_locs.get('inp', inp_tr))
ntr_idx = self.get_track_index('nch', self._nrow_idx['in'], 'g', gate_locs.get('inn', inn_tr))
return ptr_idx, ntr_idx
def _get_diffamp_output_track_index(self, track_width, diff_space):
out_ntr = self.get_num_tracks('pch', 0, 'ds')
outn_tr = out_ntr - (track_width + 1) / 2
outp_tr = outn_tr - track_width - diff_space
ptr_idx = self.get_track_index('pch', 0, 'ds', outp_tr)
ntr_idx = self.get_track_index('pch', 0, 'ds', outn_tr)
return ptr_idx, ntr_idx
    def draw_gm(self,  # type: SerdesRXBase
                col_idx,  # type: int
                fg_params,  # type: Dict[str, int]
                hm_width=1,  # type: int
                hm_cur_width=-1,  # type: int
                diff_space=1,  # type: int
                gate_locs=None,  # type: Optional[Dict[str, int]]
                flip_sd=False,  # type: bool
                tail_decap=False,  # type: bool
                ):
        # type: (...) -> Tuple[int, Dict[str, List[WireArray]]]
        """Draw a differential gm stage.

        a separator is used to separate the positive half and the negative half of the gm stage.
        For tail/switch/enable devices, the g/d/s of both halves are shorted together.

        Parameters
        ----------
        col_idx : int
            the left-most transistor index.  0 is the left-most transistor.
        fg_params : Dict[str, int]
            a dictionary containing number of fingers per transistor type.
            Possible entries are:

            but
                number of fingers of butterfly transistor.
            casc
                number of fingers of cascode transistor.
            in
                number of fingers of input transistor.
            sw
                number of fingers of tail switch transistor.
            en
                number of fingers of enable transistor.
            tail
                number of fingers of tail bias transistor.
            min
                minimum number of fingers for this circuit.
        hm_width : int
            width of horizontal tracks.
        hm_cur_width : int
            width of horizontal current-carrying tracks.  If negative, defaults to hm_width.
        diff_space : int
            number of tracks to reserve as space between differential wires.
        gate_locs : Optional[Dict[str, int]]
            dictionary from gate names to relative track index.  If None uses default.
        flip_sd : bool
            True to flip source/drain.  This is to help draw layout where certain configuration
            of number of fingers and source/drain directions may not be possible.
        tail_decap : bool
            True to draw mos decap for tail gate bias.

        Returns
        -------
        fg_gm : int
            width of Gm stage in number of fingers.
        port_dict : Dict[str, List[WireArray]]
            a dictionary from connection name to WireArrays.  Outputs are on vertical layer,
            and rests are on the horizontal layer above that.
        """
        fg_in = fg_params['in']
        fg_tail = fg_params['tail']
        fg_but = fg_params.get('but', 0)
        fg_casc = fg_params.get('casc', 0)
        fg_sw = fg_params.get('sw', 0)
        fg_en = fg_params.get('en', 0)
        fg_ref = fg_params.get('ref', 0)
        if fg_ref > 0:
            # enable tail decap if using reference
            tail_decap = True
        # error checking
        if fg_in <= 0 or fg_tail <= 0:
            raise ValueError('tail/input number of fingers must be positive.')
        if fg_but > 0:
            # override fg_casc
            fg_casc = 0
            if fg_but % 2 == 1:
                raise ValueError('fg_but must be even.')
        for name in ('but', 'casc', 'en', 'sw'):
            fg = fg_params.get(name, 0)
            if fg > 0 and name not in self._nrow_idx:
                raise ValueError('nmos %s row is not drawn.' % name)
        if hm_cur_width < 0:
            hm_cur_width = hm_width
        gate_locs = gate_locs or {}
        # find number of fingers per row
        gm_info = self._serdes_info.get_gm_info(fg_params, flip_sd=flip_sd)
        out_type = gm_info['out_type']
        fg_sep = gm_info['fg_sep']
        fg_gm_tot = gm_info['fg_tot']
        # figure out source/drain directions and intermediate connections
        # load always drain down.
        # NOTE(review): the sdir/ddir codes (0, 1, 2) follow AnalogBase.draw_mos_conn's
        # direction convention -- confirm against that method's documentation.
        sd_dir = {}
        # conn maps net name -> list of (instance name, terminal) pairs to short together
        conn = {}
        # track maps net name -> (mos type, row index, track type, relative track index)
        track = {}
        # butterfly, cascode and input
        if fg_but > 0:
            if out_type == 's':
                # for diff mode, 'drain' direction always mean output direction, so
                # it always goes up.
                sd_dir['but'] = (0, 2)
                # output on source wire
                sd_dir['in'] = (0, 2)
                btail_type = 'd'
            else:
                sd_dir['but'] = (0, 2)
                sd_dir['in'] = (2, 0)
                btail_type = 's'
            conn['butp'] = [('butp', 's'), ('inp', btail_type)]
            conn['butn'] = [('butn', 's'), ('inn', btail_type)]
            track['butp'] = ('nch', self._nrow_idx['but'], 'ds', (hm_cur_width - 1) / 2)
            track['butn'] = ('nch', self._nrow_idx['but'], 'ds', (hm_cur_width - 1) / 2)
            # the input terminal not used by the butterfly connection goes to the tail node
            itail_type = 'd' if btail_type == 's' else 's'
            conn['tail'] = [('inp', itail_type), ('inn', itail_type)]
        elif fg_casc > 0:
            # if cascode, flip input source/drain
            if out_type == 'd':
                sd_dir['casc'] = (0, 2)
                sd_dir['in'] = (2, 0)
                mid_type = 's'
            else:
                sd_dir['casc'] = (2, 0)
                sd_dir['in'] = (0, 2)
                mid_type = 'd'
            conn['midp'] = [('cascp', mid_type), ('inp', mid_type)]
            conn['midn'] = [('cascn', mid_type), ('inn', mid_type)]
            track['midp'] = ('nch', self._nrow_idx['casc'], 'ds', (hm_cur_width - 1) / 2)
            track['midn'] = ('nch', self._nrow_idx['casc'], 'ds', (hm_cur_width - 1) / 2)
            conn['tail'] = [('inp', out_type), ('inn', out_type)]
            casc_ntr = self.get_num_tracks('nch', self._nrow_idx['casc'], 'g')
            conn['bias_casc'] = [('cascp', 'g'), ('cascn', 'g')]
            track['bias_casc'] = ('nch', self._nrow_idx['casc'], 'g',
                                  gate_locs.get('bias_casc', casc_ntr - (hm_width + 1) / 2))
        else:
            # no butterfly/cascode: input connects straight to output and tail
            if out_type == 'd':
                sd_dir['in'] = (0, 2)
                tail_type = 's'
            else:
                sd_dir['in'] = (2, 0)
                tail_type = 'd'
            conn['tail'] = [('inp', tail_type), ('inn', tail_type)]
        # switch
        if fg_sw > 0:
            inst_g = [('swp', 'g'), ('swn', 'g')]
            inst_d = [('swp', 'd'), ('swn', 'd')]
            inst_s = [('swp', 's'), ('swn', 's')]
            # switch follows input direction
            conn['sw'] = inst_g
            if sd_dir['in'][0] == 0:
                sd_dir['sw'] = (0, 1)
                conn['vddt'] = inst_d
                conn['tail'].extend(inst_s)
            else:
                sd_dir['sw'] = (1, 0)
                conn['vddt'] = inst_s
                conn['tail'].extend(inst_d)
            track['vddt'] = ('nch', self._nrow_idx['sw'], 'ds', (hm_cur_width - 1) / 2)
            track['sw'] = ('nch', self._nrow_idx['sw'], 'g', gate_locs.get('sw', (hm_width - 1) / 2))
        # enable
        if fg_en > 0:
            inst_g = [('enp', 'g'), ('enn', 'g')]
            inst_d = [('enp', 'd'), ('enn', 'd')]
            inst_s = [('enp', 's'), ('enn', 's')]
            # enable is opposite of input direction
            conn['enable'] = inst_g
            if sd_dir['in'][0] == 0:
                sd_dir['en'] = (2, 0)
                conn['tail'].extend(inst_s)
                conn['foot'] = inst_d
            else:
                sd_dir['en'] = (0, 2)
                conn['tail'].extend(inst_d)
                conn['foot'] = inst_s
            track['enable'] = ('nch', self._nrow_idx['en'], 'g', gate_locs.get('enable', (hm_width - 1) / 2))
            track['tail'] = ('nch', self._nrow_idx['en'], 'ds', (hm_cur_width - 1) / 2)
        # tail
        if 'foot' in conn:
            # enable exists. direction opposite of enable
            key = 'foot'
            comp = 'en'
        else:
            # direction opposite of in.
            key = 'tail'
            comp = 'in'
        inst_g = [('tailp', 'g'), ('tailn', 'g')]
        inst_d = [('tailp', 'd'), ('tailn', 'd')]
        inst_s = [('tailp', 's'), ('tailn', 's')]
        conn['bias_tail'] = inst_g
        if sd_dir[comp][0] == 0:
            sd_dir['tail'] = (2, 0)
            conn[key].extend(inst_s)
            conn['VSS'] = inst_d
        else:
            sd_dir['tail'] = (0, 2)
            conn[key].extend(inst_d)
            conn['VSS'] = inst_s
        if fg_ref > 0:
            # diode-connected reference shares the tail gate bias and VSS
            conn['VSS'].append(('ref', 's'))
            conn['bias_tail'].append(('ref', 'g'))
        track['bias_tail'] = ('nch', self._nrow_idx['tail'], 'g', gate_locs.get('bias_tail', (hm_width - 1) / 2))
        track[key] = ('nch', self._nrow_idx['tail'], 'ds', (hm_cur_width - 1) / 2)
        # create mos connections
        mos_dict = {}
        col_offsets = gm_info['col_offsets']
        for name, fg in zip(('but', 'casc', 'in', 'sw', 'en', 'tail'),
                            (fg_but, fg_casc, fg_in, fg_sw, fg_en, fg_tail)):
            if fg > 0:
                col_start = col_idx + col_offsets[name]
                sdir, ddir = sd_dir[name]
                ridx = self._nrow_idx[name]
                is_diff = (name == 'but')
                # NOTE(review): gate_ext_mode appears to be a 2-bit flag
                # (bit 0 = extend left, bit 1 = extend right) -- confirm against draw_mos_conn.
                lgate_ext_mode = rgate_ext_mode = 0
                if tail_decap and name == 'tail':
                    fgr = col_offsets[name]
                    min_fg_decap = self.min_fg_decap
                    # determine whether to draw left decap, and left tail transistor gate extension mode
                    fgl = fgr - fg_ref if fg_ref > 0 else fgr
                    if fgl < 0:
                        raise ValueError('Do not have room for reference current mirror.')
                    if fgl >= min_fg_decap:
                        self.draw_mos_decap('nch', ridx, col_idx, fgl, 2)
                        lgate_ext_mode += 1
                    elif fg_ref > 0:
                        lgate_ext_mode += 1
                    # draw reference if needed
                    if fg_ref > 0:
                        mos_dict['ref'] = self.draw_mos_conn('nch', ridx, col_idx + fgl, fg_ref, 0, 0, diode_conn=True,
                                                             gate_ext_mode=3 if fgl > 0 else 2)
                    # middle decap between the positive and negative halves
                    if fg_sep >= min_fg_decap:
                        self.draw_mos_decap('nch', ridx, col_start + fg, fg_sep, 3)
                        lgate_ext_mode += 2
                        rgate_ext_mode += 1
                    # right-side decap
                    if fgr >= min_fg_decap:
                        self.draw_mos_decap('nch', ridx, col_start + 2 * fg + fg_sep, fgr, 1)
                        rgate_ext_mode += 2
                mos_dict['%sp' % name] = self.draw_mos_conn('nch', ridx, col_start, fg, sdir, ddir,
                                                            is_diff=is_diff, gate_ext_mode=lgate_ext_mode)
                mos_dict['%sn' % name] = self.draw_mos_conn('nch', ridx, col_start + fg + fg_sep,
                                                            fg, sdir, ddir, is_diff=is_diff,
                                                            gate_ext_mode=rgate_ext_mode)
        # get output WireArrays
        port_dict = {}
        if fg_but > 0:
            op_sd = out_type + 'p'
            on_sd = out_type + 'n'
            port_dict['outp'] = [mos_dict['butp'][op_sd], mos_dict['butn'][op_sd]]
            port_dict['outn'] = [mos_dict['butp'][on_sd], mos_dict['butn'][on_sd]]
            # draw differential butterfly connection
            but_ntr = self.get_num_tracks('nch', self._nrow_idx['but'], 'g')
            ptr_idx = self.get_track_index('nch', self._nrow_idx['but'], 'g',
                                           gate_locs.get('sgnp', but_ntr - (hm_width + 1) / 2))
            ntr_idx = self.get_track_index('nch', self._nrow_idx['but'], 'g',
                                           gate_locs.get('sgnn', but_ntr - (hm_width + 1) / 2 - hm_width - diff_space))
            # cross-couple the butterfly gates: sgnp drives butp/gp and butn/gn
            p_tr, n_tr = self.connect_differential_tracks([mos_dict['butp']['gp'], mos_dict['butn']['gn']],
                                                          [mos_dict['butp']['gn'], mos_dict['butn']['gp']],
                                                          self.mos_conn_layer + 1, ptr_idx, ntr_idx,
                                                          width=hm_width)
            port_dict['sgnp'] = [p_tr, ]
            port_dict['sgnn'] = [n_tr, ]
        elif fg_casc > 0:
            # note the p/n swap: the gm stage is inverting
            port_dict['outp'] = [mos_dict['cascn'][out_type], ]
            port_dict['outn'] = [mos_dict['cascp'][out_type], ]
        else:
            port_dict['outp'] = [mos_dict['inn'][out_type], ]
            port_dict['outn'] = [mos_dict['inp'][out_type], ]
        # draw differential input connection
        inp_warr = mos_dict['inp']['g']
        inn_warr = mos_dict['inn']['g']
        ptr_idx, ntr_idx = self._get_gm_input_track_index(gate_locs, hm_width, diff_space)
        p_tr, n_tr = self.connect_differential_tracks(inp_warr, inn_warr, self.mos_conn_layer + 1, ptr_idx, ntr_idx,
                                                      width=hm_width)
        port_dict['inp'] = [p_tr, ]
        port_dict['inn'] = [n_tr, ]
        # draw intermediate connections
        for conn_name, conn_list in conn.items():
            warr_list = [mos_dict[mos][sd] for mos, sd in conn_list]
            if conn_name == 'VSS':
                self.connect_to_substrate('ptap', warr_list)
            else:
                # gate nets use signal width; source/drain nets use current-carrying width
                if conn_list[0][1] == 'g':
                    tr_width = hm_width
                else:
                    tr_width = hm_cur_width
                mos_type, ridx, tr_type, tr_idx = track[conn_name]
                tr_id = self.make_track_id(mos_type, ridx, tr_type, tr_idx, width=tr_width)
                sig_warr = self.connect_to_tracks(warr_list, tr_id)
                port_dict[conn_name] = [sig_warr, ]
        return fg_gm_tot, port_dict
def draw_pmos_sampler(self, # type: SerdesRXBase
col_idx, # type: int
fg_params, # type: Dict[str, int]
hm_width=1, # type: int
hm_cur_width=-1, # type: int
diff_space=1, # type: int
gate_locs=None, # type: Optional[Dict[str, float]]
io_space=1, # type: int
to_gm_input=False, # type: bool
):
# type: (...) -> Tuple[int, Dict[str, List[WireArray]]]
"""Draw a differential amplifier/dynamic latch.
a separator is used to separate the positive half and the negative half of the latch.
For tail/switch/enable devices, the g/d/s of both halves are shorted together.
Parameters
----------
col_idx : int
the left-most transistor index. 0 is the left-most transistor.
fg_params : Dict[str, int]
a dictionary containing number of fingers per transistor type.
Possible entries are:
sample
number of sampler fingers of butterfly transistor.
min
minimum number of fingers for this circuit.
hm_width : int
width of horizontal tracks.
hm_cur_width : int
width of horizontal current-carrying tracks. If negative, defaults to hm_width.
diff_space : int
number of tracks to reserve as space between differential wires.
gate_locs : Optional[Dict[string, int]]
dictionary from gate names to relative track index. If None uses default.
True to use load dummy transistors as load decaps.
io_space : int
space between input and output differential tracks.
to_gm_input : bool
True to connect output directly to gm input tracks.
Returns
-------
fg_samp : int
width of the sampler in number of fingers.
port_dict : Dict[str, List[WireArray]]
a dictionary from connection name to the horizontal track associated
with the connection.
"""
gate_locs = gate_locs or {}
# get layout information
results = self._serdes_info.get_sampler_info(fg_params)
col_idx += results['nduml']
fg_samp = fg_params['sample']
fg_tot = results['fg_tot'] # type: int
fg_sep = results['fg_sep']
# get input/output tracks
inp_tr, inn_tr = self._get_diffamp_output_track_index(hm_cur_width, diff_space)
if to_gm_input:
out_width = hm_width
outp_tr, outn_tr = self._get_gm_input_track_index(gate_locs, hm_width, diff_space)
else:
out_width = hm_cur_width
outp_tr = inn_tr - io_space - hm_cur_width
outn_tr = outp_tr - diff_space - hm_cur_width
# draw load transistors
sdir, ddir = 2, 0
loadp = self.draw_mos_conn('pch', 0, col_idx, fg_samp, sdir, ddir)
loadn = self.draw_mos_conn('pch', 0, col_idx + fg_samp + fg_sep, fg_samp, sdir, ddir)
# connect wires
pgbot_tr = (hm_width - 1) / 2
tr_id = self.make_track_id('pch', 0, 'g', gate_locs.get('sample_clk', pgbot_tr), width=hm_width)
clk_warr = self.connect_to_tracks([loadp['g'], loadn['g']], tr_id)
hm_layer = self.mos_conn_layer + 1
inp_warr, inn_warr = self.connect_differential_tracks(loadp['s'], loadn['s'], hm_layer,
inp_tr, inn_tr, width=hm_cur_width)
outp_warr, outn_warr = self.connect_differential_tracks(loadp['d'], loadn['d'], hm_layer,
outp_tr, outn_tr, width=out_width)
# type checking
if clk_warr is None:
raise ValueError('no clock connection made.')
if inp_warr is None:
raise ValueError('no inp connection made.')
if inn_warr is None:
raise ValueError('no inn connection made.')
if outp_warr is None:
raise ValueError('no outp connection made.')
if outn_warr is None:
raise ValueError('no outn connection made.')
return fg_tot, {'sample_clk': [clk_warr], 'inp': [inp_warr], 'inn': [inn_warr],
'outp': [outp_warr], 'outn': [outn_warr]}
def draw_diffamp(self, # type: SerdesRXBase
col_idx, # type: int
fg_params, # type: Dict[str, int]
hm_width=1, # type: int
hm_cur_width=-1, # type: int
diff_space=1, # type: int
gate_locs=None, # type: Optional[Dict[str, float]]
sign=1, # type: int
flip_sd=False, # type: bool
tail_decap=False, # type: bool
load_decap=False, # type: bool
):
# type: (...) -> Tuple[int, Dict[str, List[WireArray]]]
"""Draw a differential amplifier/dynamic latch.
a separator is used to separate the positive half and the negative half of the latch.
For tail/switch/enable devices, the g/d/s of both halves are shorted together.
Parameters
----------
col_idx : int
the left-most transistor index. 0 is the left-most transistor.
fg_params : Dict[str, int]
a dictionary containing number of fingers per transistor type.
Possible entries are:
load
number of fingers of load transistor. Only one of load/offset can be nonzero.
offset
number of fingers of offset cancellation transistor. Only one of load/offset can be nonzero.
but
number of fingers of butterfly transistor.
casc
number of fingers of cascode transistor.
in
nummber of fingers of input transistor.
sw
number of fingers of tail switch transistor.
en
number of fingers of enable transistor.
tail
number of fingers of tail bias transistor.
sep
number of fingers used as separation between P and N side.
min
minimum number of fingers for this circuit.
hm_width : int
width of horizontal tracks.
hm_cur_width : int
width of horizontal current-carrying tracks. If negative, defaults to hm_width.
diff_space : int
number of tracks to reserve as space between differential wires.
gate_locs : Optional[Dict[string, int]]
dictionary from gate names to relative track index. If None uses default.
sign : int
the sign of the gain. If negative, flip output connection.
flip_sd : bool
True to flip source/drain. This is to help draw layout where certain configuration
of number of fingers and source/drain directions may not be possible.
tail_decap : bool
True to use tail dummy transistors as tail decaps.
load_decap : bool
True to use load dummy transistors as load decaps.
Returns
-------
fg_amp : int
width of amplifier in number of fingers.
port_dict : Dict[str, List[WireArray]]
a dictionary from connection name to the horizontal track associated
with the connection.
"""
fg_load = fg_params.get('load', 0)
fg_offset = fg_params.get('offset', 0)
fg_pmos = max(fg_load, fg_offset)
fg_but = fg_params.get('but', 0)
if fg_pmos > fg_but > 0:
raise ValueError('fg_pmos > fg_but > 0 case not supported yet.')
gate_locs = gate_locs or {}
# compute Gm stage column index.
results = self._serdes_info.get_diffamp_info(fg_params, flip_sd=flip_sd)
# import pprint
# print('draw diffamp at column %d, fg_tot = %d, fg_min = %d' % (col_idx, results['fg_tot'], results['fg_min']))
# pprint.pprint(fg_params)
fg_min = results['fg_min']
offset_load = results['nduml_pmos']
out_type = results['out_type']
fg_sep = results['fg_sep']
# draw Gm.
gm_params = fg_params.copy()
gm_params['min'] = max(gm_params.get('min', fg_min), fg_min)
fg_amp_tot, port_dict = self.draw_gm(col_idx, gm_params, hm_width=hm_width, hm_cur_width=hm_cur_width,
diff_space=diff_space, gate_locs=gate_locs, flip_sd=flip_sd,
tail_decap=tail_decap)
outp_warrs = port_dict['outp']
outn_warrs = port_dict['outn']
if fg_pmos > 0:
if load_decap:
# TODO: implement this feature
raise ValueError('do not support load decap with nonzero load yet.')
# draw load transistors
load_col_idx = col_idx + offset_load
if out_type == 'd':
sdir, ddir = 2, 0
sup_type = 's'
else:
sdir, ddir = 0, 2
sup_type = 'd'
loadn = self.draw_mos_conn('pch', 0, load_col_idx, fg_pmos, sdir, ddir)
loadp = self.draw_mos_conn('pch', 0, load_col_idx + fg_pmos + fg_sep, fg_pmos, sdir, ddir)
pgbot_tr = (hm_width - 1) / 2
if fg_offset > 0:
# connect offset cancellation gate bias
pgtop_tr = pgbot_tr + hm_width
if sign < 0:
opg_tr = pgbot_tr
ong_tr = pgtop_tr
else:
opg_tr = pgtop_tr
ong_tr = pgbot_tr
optr_id = self.make_track_id('pch', 0, 'g', gate_locs.get('bias_offp', opg_tr), width=hm_width)
ontr_id = self.make_track_id('pch', 0, 'g', gate_locs.get('bias_offn', ong_tr), width=hm_width)
pwarr = self.connect_to_tracks([loadp['g']], optr_id)
nwarr = self.connect_to_tracks([loadn['g']], ontr_id)
if sign < 0:
port_dict['bias_offp'] = [nwarr, ]
port_dict['bias_offn'] = [pwarr, ]
else:
port_dict['bias_offp'] = [pwarr, ]
port_dict['bias_offn'] = [nwarr, ]
else:
# connect load gate bias
tr_id = self.make_track_id('pch', 0, 'g', gate_locs.get('bias_load', pgbot_tr), width=hm_width)
warr = self.connect_to_tracks([loadp['g'], loadn['g']], tr_id)
port_dict['bias_load'] = [warr, ]
# connect VDD
self.connect_to_substrate('ntap', [loadp[sup_type], loadn[sup_type]])
# collect pmos outputs
outp_warrs.append(loadp[out_type])
outn_warrs.append(loadn[out_type])
elif load_decap:
# use all load dummies as decaps
load_decap = self.draw_mos_decap('pch', 0, col_idx, fg_amp_tot, 0, export_gate=True)
tr_id = self.make_track_id('pch', 0, 'g', gate_locs.get('bias_load', (hm_width - 1) / 2), width=hm_width)
warr = self.connect_to_tracks(load_decap['g'], tr_id)
port_dict['bias_load'] = [warr, ]
# connect differential outputs
ptr_idx, ntr_idx = self._get_diffamp_output_track_index(hm_cur_width, diff_space)
if sign < 0:
# flip positive/negative wires.
p_tr, n_tr = self.connect_differential_tracks(outn_warrs, outp_warrs, self.mos_conn_layer + 1,
ptr_idx, ntr_idx, width=hm_cur_width)
else:
p_tr, n_tr = self.connect_differential_tracks(outp_warrs, outn_warrs, self.mos_conn_layer + 1,
ptr_idx, ntr_idx, width=hm_cur_width)
port_dict['outp'] = [p_tr, ]
port_dict['outn'] = [n_tr, ]
return fg_amp_tot, port_dict
def draw_gm_summer(self, # type: SerdesRXBase
col_idx, # type: int
fg_load, # type: int
gm_fg_list, # type: List[Dict[str, int]]
gm_sep_list=None, # type: Optional[List[int]]
sgn_list=None, # type: Optional[List[int]]
hm_width=1, # type: int
hm_cur_width=-1, # type: int
diff_space=1, # type: int
gate_locs=None, # type: Optional[Dict[str, float]]
flip_sd_list=None, # type: Optional[List[bool]]
decap_list=None, # type: Optional[List[bool]]
load_decap_list=None, # type: Optional[List[bool]]
):
# type: (...) -> Tuple[int, Dict[Tuple[str, int], List[WireArray]]]
"""Draw a differential Gm summer (multiple Gm stage connected to same load).
a separator is used to separate the positive half and the negative half of the latch.
For tail/switch/enable devices, the g/d/s of both halves are shorted together.
Parameters
----------
col_idx : int
the left-most transistor index. 0 is the left-most transistor.
fg_load : int
number of pmos load fingers (single-sided).
gm_fg_list : List[Dict[str, int]]
a list of finger dictionaries for each Gm stage, from left to right.
gm_sep_list : Optional[List[int]]
list of number of separator fingers between Gm stages.
Defaults to minimum.
sgn_list : Optional[List[int]]
a list of 1s or -1s representing the sign of each gm stage. If None, defautls to all 1s.
hm_width : int
width of horizontal tracks.
hm_cur_width : int
width of horizontal current-carrying tracks. If negative, defaults to hm_width.
diff_space : int
number of tracks to reserve as space between differential wires.
gate_locs : Optional[Dict[str, int]]
dictionary from gate names to relative track index. If None uses default.
flip_sd_list : Optional[List[bool]]
list of whether to flip source/drain connections for each Gm cell.
Defaults to False.
decap_list : Optional[List[bool]]
list of whether to draw tail decap for each Gm cell.
Defaults to False.
load_decap_list : Optional[List[bool]]
list of whether to draw load decap for each Gm cell.
Defaults to False.
Returns
-------
fg_summer : int
width of Gm summer in number of fingers.
port_dict : dict[(str, int), :class:`~bag.layout.routing.WireArray`]
a dictionary from connection name/index pair to the horizontal track associated
with the connection.
"""
if flip_sd_list is None:
flip_sd_list = [False] * (len(gm_fg_list))
elif len(flip_sd_list) != len(gm_fg_list):
raise ValueError('flip_sd_list length mismatch')
if decap_list is None:
decap_list = [False] * (len(gm_fg_list))
elif len(decap_list) != len(gm_fg_list):
raise ValueError('decap_list length mismatch')
if load_decap_list is None:
load_decap_list = [False] * (len(gm_fg_list))
elif len(load_decap_list) != len(gm_fg_list):
raise ValueError('load_decap_list length mismatch')
if sgn_list is None:
sgn_list = [1] * len(gm_fg_list)
# error checking
if fg_load <= 0:
raise ValueError('load transistors num. fingers must be positive.')
summer_info = self._serdes_info.get_summer_info(fg_load, gm_fg_list, gm_sep_list=gm_sep_list,
flip_sd_list=flip_sd_list)
if len(sgn_list) != len(gm_fg_list):
raise ValueError('sign list and number of GM stages mistach.')
fg_load_list = summer_info['fg_load_list']
gm_offsets = summer_info['gm_offsets']
# print('summer col: %d' % col_idx)
# print('summer gm offsets: %s' % repr(gm_offsets))
# draw each Gm stage and load.
conn_dict = {'vddt': [], 'bias_load': [], 'outp': [], 'outn': [], 'bias_load_decap': []}
port_dict = {}
for idx, (cur_fg_load, gm_off, gm_fg_dict, sgn, flip_sd, tail_decap, load_decap) in \
enumerate(zip(fg_load_list, gm_offsets, gm_fg_list, sgn_list,
flip_sd_list, decap_list, load_decap_list)):
cur_amp_params = gm_fg_dict.copy()
cur_amp_params['load'] = cur_fg_load
_, cur_ports = self.draw_diffamp(col_idx + gm_off, cur_amp_params, hm_width=hm_width,
hm_cur_width=hm_cur_width, diff_space=diff_space,
gate_locs=gate_locs, sign=sgn, flip_sd=flip_sd,
tail_decap=tail_decap, load_decap=load_decap)
# register port
for name, warr_list in cur_ports.items():
if name == 'bias_load' and cur_fg_load == 0 and load_decap:
# separate bias_load and bias_load_decap
conn_dict['bias_load_decap'].extend(warr_list)
elif name in conn_dict:
conn_dict[name].extend(warr_list)
else:
port_dict[(name, idx)] = warr_list
# connect tracks together
for name, warr_list in conn_dict.items():
if warr_list:
conn_list = self.connect_wires(warr_list)
if len(conn_list) != 1:
# error checking
raise ValueError('%s wire are on different tracks.' % name)
port_dict[(name, -1)] = conn_list
return summer_info['fg_tot'], port_dict
def draw_gm_summer_offset(self, # type: SerdesRXBase
col_idx, # type: int
fg_load, # type: int
fg_offset, # type: int
gm_fg_list, # type: List[Dict[str, int]]
gm_sep_list=None, # type: Optional[List[int]]
sgn_list=None, # type: Optional[List[int]]
hm_width=1, # type: int
hm_cur_width=-1, # type: int
diff_space=1, # type: int
gate_locs=None # type: Optional[Dict[str, float]]
):
# type: (...) -> Tuple[int, Dict[Tuple[str, int], List[WireArray]]]
"""Draw a differential Gm summer (multiple Gm stage connected to same load).
a separator is used to separate the positive half and the negative half of the latch.
For tail/switch/enable devices, the g/d/s of both halves are shorted together.
Parameters
----------
col_idx : int
the left-most transistor index. 0 is the left-most transistor.
fg_load : int
number of pmos load fingers (single-sided).
fg_offset : int
number of pmos offset cancellation fingers (single-sided).
gm_fg_list : List[Dict[str, int]]
a list of finger dictionaries for each Gm stage, from left to right.
gm_sep_list : Optional[List[int]]
list of number of separator fingers between Gm stages.
Defaults to minimum.
sgn_list : Optional[List[int]]
a list of 1s or -1s representing the sign of each gm stage. If None, defautls to all 1s.
hm_width : int
width of horizontal tracks.
hm_cur_width : int
width of horizontal current-carrying tracks. If negative, defaults to hm_width.
diff_space : int
number of tracks to reserve as space between differential wires.
gate_locs : Optional[Dict[str, int]]
dictionary from gate names to relative track index. If None uses default.
Returns
-------
fg_summer : int
width of Gm summer in number of fingers.
port_dict : dict[(str, int), :class:`~bag.layout.routing.WireArray`]
a dictionary from connection name/index pair to the horizontal track associated
with the connection.
"""
if sgn_list is None:
sgn_list = [1] * len(gm_fg_list)
# error checking
if fg_load <= 0:
raise ValueError('load transistors num. fingers must be positive.')
summer_info = self._serdes_info.get_summer_offset_info(fg_load, fg_offset, gm_fg_list, gm_sep_list=gm_sep_list)
if len(sgn_list) != len(gm_fg_list):
raise ValueError('sign list and number of GM stages mistach.')
fg_load_list = summer_info['fg_load_list']
fg_offset_list = summer_info['fg_offset_list']
gm_offsets = summer_info['gm_offsets']
# print('summer col: %d' % col_idx)
# print('summer gm offsets: %s' % repr(gm_offsets))
# draw each Gm stage and load.
conn_dict = {'vddt': [], 'bias_load': [], 'outp': [], 'outn': [], 'bias_offp': [], 'bias_offn': []}
port_dict = {}
for idx, (cur_fg_load, cur_fg_offset, gm_off, gm_fg_dict, sgn) in enumerate(zip(fg_load_list, fg_offset_list,
gm_offsets, gm_fg_list,
sgn_list)):
cur_amp_params = gm_fg_dict.copy()
cur_amp_params['load'] = cur_fg_load
cur_amp_params['offset'] = cur_fg_offset
_, cur_ports = self.draw_diffamp(col_idx + gm_off, cur_amp_params, hm_width=hm_width,
hm_cur_width=hm_cur_width, diff_space=diff_space,
gate_locs=gate_locs, sign=sgn)
# register port
for name, warr_list in cur_ports.items():
if name in conn_dict:
conn_dict[name].extend(warr_list)
else:
port_dict[(name, idx)] = warr_list
# connect tracks together
for name, warr_list in conn_dict.items():
if warr_list:
conn_list = self.connect_wires(warr_list)
if len(conn_list) != 1:
# error checking
raise ValueError('%s wire are on different tracks.' % name)
port_dict[(name, -1)] = conn_list
return summer_info['fg_tot'], port_dict
def draw_rows(self, lch, fg_tot, ptap_w, ntap_w, w_dict, th_dict, **kwargs):
    # type: (float, int, wtype, wtype, Dict[str, wtype], Dict[str, str], **Any) -> None
    """Draw the transistors and substrate rows.

    Parameters
    ----------
    lch : float
        the transistor channel length, in meters
    fg_tot : int
        total number of fingers for each row.
    ptap_w : Union[float, int]
        pwell substrate contact width.
    ntap_w : Union[float, int]
        nwell substrate contact width.
    w_dict : Dict[str, Union[float, int]]
        dictionary from transistor type to row width.  Possible entries are:

        load
            width of load transistor.
        casc
            width of butterfly/cascode transistor.
        in
            width of input transistor.
        sw
            width of tail switch transistor.
        en
            width of enable transistor.
        tail
            width of tail bias transistor.
    th_dict : Dict[str, str]
        dictionary from transistor type to threshold flavor.  Possible entries are:

        load
            threshold of load transistor.
        casc
            threshold of butterfly/cascode transistor.
        in
            threshold of input transistor.
        sw
            threshold of tail switch transistor.
        en
            threshold of enable transistor.
        tail
            threshold of tail bias transistor.
    **kwargs
        any additional parameters for AnalogBase's draw_base() method.
    """
    # error checking: tail/input/load rows are mandatory and must have positive width.
    w_tail = w_dict['tail']
    w_in = w_dict['in']
    w_load = w_dict['load']
    th_load = th_dict['load']
    if w_tail <= 0 or w_in <= 0 or w_load <= 0:
        raise ValueError('tail/input/load transistors width must be positive.')

    # cache the layout-info helper; other draw_* methods read self._serdes_info.
    self._serdes_info = SerdesRXBaseInfo(self.grid, lch, kwargs.get('guard_ring_nf', 0),
                                         min_fg_sep=kwargs.get('min_fg_sep', 0))

    # figure out row indices for each nmos row type,
    # and build nw_list/nth_list
    self._nrow_idx = {}
    nw_list = []
    nth_list = []
    cur_idx = 0
    # rows are stacked bottom-up in this fixed order; a row is only drawn
    # when its width is positive (optional rows may be omitted from w_dict).
    for name in ('tail', 'en', 'sw', 'in', 'casc'):
        width = w_dict.get(name, 0)
        if width > 0:
            thres = th_dict[name]
            self._nrow_idx[name] = cur_idx
            nw_list.append(width)
            nth_list.append(thres)
            cur_idx += 1

    if 'casc' in self._nrow_idx:
        # butterfly switch and cascode share the same row.
        self._nrow_idx['but'] = self._nrow_idx['casc']

    # draw base: nmos rows as computed above, a single pmos (load) row.
    self.draw_base(lch, fg_tot, ptap_w, ntap_w, nw_list,
                   nth_list, [w_load], [th_load], **kwargs)
| 42.511716 | 120 | 0.54245 |
a6f893d9a38ecf373f369c122f432517de05aa84 | 388 | py | Python | cryptograph/rot13.py | slowy07/pythonApps | 22f9766291dbccd8185035745950c5ee4ebd6a3e | [
"MIT"
] | 10 | 2020-10-09T11:05:18.000Z | 2022-02-13T03:22:10.000Z | cryptograph/rot13.py | khairanabila/pythonApps | f90b8823f939b98f7bf1dea7ed35fe6e22e2f730 | [
"MIT"
] | null | null | null | cryptograph/rot13.py | khairanabila/pythonApps | f90b8823f939b98f7bf1dea7ed35fe6e22e2f730 | [
"MIT"
] | 6 | 2020-11-26T12:49:43.000Z | 2022-03-06T06:46:43.000Z | from string import maketrans
# Translation table mapping every ASCII letter to the letter 13 places
# further along the alphabet (wrapping, case preserved).
# Fix: the original wrote ``maketrans{...}`` with curly braces (a
# SyntaxError) and relied on Python 2's ``string.maketrans``; the
# Python 3 ``str.maketrans`` classmethod needs no import.
rot13trans = str.maketrans('ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',
                           'NOPQRSTUVWXYZABCDEFGHIJKLMnopqrstuvwxyzabcdefghijklm')


def rot13(text):
    """Return *text* with every ASCII letter ROT13-rotated.

    Non-letter characters pass through unchanged; applying rot13 twice
    returns the original string.
    """
    return text.translate(rot13trans)
def main():
    """Demo entry point: print a fixed sample message and its ROT13 encoding."""
    message = "besok saya pergi ke amerika"
    print("message :", message)
    print(rot13(message))
if __name__== '__main__':
main() | 25.866667 | 79 | 0.762887 |
a1d514b25b602e3a4962bfac608e87e8f66bcccc | 10,241 | py | Python | tests/test_process_functions.py | veo-ibd/Genie | 735e3aa0dc71aab0c404fd0cb3a34c8e1d9784c2 | [
"MIT"
] | null | null | null | tests/test_process_functions.py | veo-ibd/Genie | 735e3aa0dc71aab0c404fd0cb3a34c8e1d9784c2 | [
"MIT"
] | null | null | null | tests/test_process_functions.py | veo-ibd/Genie | 735e3aa0dc71aab0c404fd0cb3a34c8e1d9784c2 | [
"MIT"
] | 1 | 2022-01-20T16:33:19.000Z | 2022-01-20T16:33:19.000Z | import pytest
import mock
import pandas as pd
import synapseclient
import genie.process_functions
# Shared Synapse client stub; autospec'd so attribute/method access is
# validated against the real synapseclient.Synapse interface.
syn = mock.create_autospec(synapseclient.Synapse)

# Canonical "database" fixture used throughout these tests.  The index
# encodes Synapse table row identity as "<ROW_ID>_<ROW_VERSION>" (the
# update/delete tests below expect ROW_IDs 1-3 at versions 3/3/5).
DATABASE_DF = pd.DataFrame({
    'UNIQUE_KEY': ['test1', 'test2', 'test3'],
    "test": ['test1', 'test2', 'test3'],
    "foo": [1, 2, 3],
    "baz": [float('nan'), float('nan'), float('nan')]})
DATABASE_DF.index = ['1_3', '2_3', '3_5']
def test_valid__check_valid_df():
    # A dataframe that contains the requested column passes silently.
    genie.process_functions._check_valid_df(DATABASE_DF, "test")


def test_invalid__check_valid_df():
    # Non-dataframe input is rejected outright.
    with pytest.raises(ValueError, match="Must pass in pandas dataframe"):
        genie.process_functions._check_valid_df("foo", "test")
    # A dataframe missing the requested column is rejected.
    with pytest.raises(
            ValueError,
            match="'error' column must exist in dataframe"):
        genie.process_functions._check_valid_df(DATABASE_DF, "error")
def test__get_left_diff_df():
    # new_datadf adds one row ('test4') on top of DATABASE_DF; the left
    # diff must contain exactly that row.
    new_datadf = pd.DataFrame({
        'UNIQUE_KEY': ['test1', 'test2', 'test3', 'test4'],
        "test": ['test1', 'test2', 'test3', 'test4'],
        "foo": [1, 2, 3, 4],
        "baz": [float('nan'), float('nan'), float('nan'), 3.2]})
    get_diff = genie.process_functions._get_left_diff_df(
        new_datadf, DATABASE_DF, 'UNIQUE_KEY')
    expecteddf = new_datadf.loc[[3]]
    assert get_diff.equals(expecteddf[get_diff.columns])


def test_norows_get_left_diff_df():
    # Diffing a dataframe against itself yields nothing to append.
    append_rows = genie.process_functions._get_left_diff_df(
        DATABASE_DF, DATABASE_DF, 'UNIQUE_KEY')
    assert append_rows.empty


def test_first_validation_get_left_diff_df():
    '''
    This checks to make sure that validation is called
    - In a situation where someone comments out the validation
    line, this will cause an error
    '''
    # 'FOO' exists in neither dataframe -> first validation fires.
    with pytest.raises(
            ValueError,
            match="'FOO' column must exist in dataframe"):
        genie.process_functions._get_left_diff_df(
            DATABASE_DF, DATABASE_DF, 'FOO')


def test_second_validation_get_left_diff_df():
    '''
    This checks to make sure that validation is called
    - In a situation where someone comments out the validation
    line, this will cause an error
    '''
    # 'FOO' exists only in the new dataframe -> second validation fires.
    testing = DATABASE_DF.copy()
    testing['FOO'] = float('nan')
    with pytest.raises(
            ValueError,
            match="'FOO' column must exist in dataframe"):
        genie.process_functions._get_left_diff_df(
            testing, DATABASE_DF, 'FOO')
def test_first_validation_get_left_union_df():
    '''
    This checks to make sure that validation is called
    - In a situation where someone comments out the 1st validation
    line, this will cause an error
    '''
    # 'FOO' exists in neither dataframe -> first validation fires.
    with pytest.raises(
            ValueError,
            match="'FOO' column must exist in dataframe"):
        genie.process_functions._get_left_union_df(
            DATABASE_DF, DATABASE_DF, 'FOO')


def test_second_validation_get_left_union_df():
    '''
    This checks to make sure that validation is called
    - In a situation where someone comments out the 2nd validation
    line, this will cause an error
    '''
    # 'FOO' exists only in the new dataframe -> second validation fires.
    testing = DATABASE_DF.copy()
    testing['FOO'] = float('nan')
    with pytest.raises(
            ValueError,
            match="'FOO' column must exist in dataframe"):
        genie.process_functions._get_left_union_df(
            testing, DATABASE_DF, 'FOO')
def test_append__append_rows():
    # Only the row whose UNIQUE_KEY ('test4') is absent from DATABASE_DF
    # should be returned, without the UNIQUE_KEY column itself.
    new_datadf = pd.DataFrame({
        'UNIQUE_KEY': ['test1', 'test2', 'test3', 'test4'],
        "test": ['test1', 'test2', 'test3', 'test4'],
        "foo": [1, 2, 3, 4],
        "baz": [float('nan'), float('nan'), float('nan'), 3.2]})
    expecteddf = pd.DataFrame({
        'test': ['test4'],
        'foo': [4],
        'baz': [3.2]})
    append_rows = genie.process_functions._append_rows(
        new_datadf, DATABASE_DF, 'UNIQUE_KEY')
    # NaNs are blanked on both sides so equals() compares cleanly.
    append_rows.fillna('', inplace=True)
    expecteddf.fillna('', inplace=True)
    assert append_rows.equals(expecteddf[append_rows.columns])
def test___create_update_rowsdf():
    # Rows flagged True in differentrows are turned into Synapse update
    # rows, with ROW_ID/ROW_VERSION split out of DATABASE_DF's index.
    differentrows = [True, True, False]
    database = pd.DataFrame({
        "test": ['test', 'test2', 'test3'],
        "foo": [1, 3, 3],
        "baz": [float('nan'), 5, float('nan')]},
        index=['test1', 'test5', 'test4'])
    new_datadf = pd.DataFrame({
        "test": ['test1', 'test4', 'test3'],
        "foo": [2, 3, 3],
        "baz": [3, 5, float('nan')]},
        index=['test1', 'test5', 'test4'])
    to_update_rowsdf = genie.process_functions._create_update_rowsdf(
        database, new_datadf, DATABASE_DF.index, differentrows)
    expecteddf = pd.DataFrame({
        "test": ['test1', 'test4'],
        "foo": [2, 3],
        "baz": [3.0, 5.0],
        "ROW_ID": ["1", "2"],
        "ROW_VERSION": ["3", "3"]})
    assert to_update_rowsdf.equals(expecteddf[to_update_rowsdf.columns])


def test_none__create_update_rowsdf():
    # With no row flagged as different, nothing should be updated.
    differentrows = [False, False, False]
    database = pd.DataFrame({
        "test": ['test', 'test2', 'test3'],
        "foo": [1, 3, 3],
        "baz": [float('nan'), 5, float('nan')]},
        index=['test1', 'test5', 'test4'])
    new_datadf = pd.DataFrame({
        "test": ['test1', 'test4', 'test3'],
        "foo": [2, 3, 3],
        "baz": [3, 5, float('nan')]},
        index=['test1', 'test5', 'test4'])
    to_update_rowsdf = genie.process_functions._create_update_rowsdf(
        database, new_datadf, DATABASE_DF.index, differentrows)
    assert to_update_rowsdf.empty
def test___get_left_union_df():
    # Only 'test1' is shared with DATABASE_DF, so the left union keeps
    # exactly that row (values taken from new_datadf).
    new_datadf = pd.DataFrame({
        'UNIQUE_KEY': ['test1', 'test5', 'test4'],
        "test": ['test', 'test2', 'test3'],
        "foo": [1, 3, 3],
        "baz": [float('nan'), 5, float('nan')]})
    left_union = genie.process_functions._get_left_union_df(
        new_datadf, DATABASE_DF, 'UNIQUE_KEY')
    expecteddf = pd.DataFrame({
        'UNIQUE_KEY': ['test1'],
        'test': ['test'],
        'foo': [1],
        'baz': [float('nan')]})
    assert left_union.equals(expecteddf[left_union.columns])


def test_none__get_left_union_df():
    # No key overlaps DATABASE_DF -> empty union.
    new_datadf = pd.DataFrame({
        'UNIQUE_KEY': ['test7', 'test5', 'test4'],
        "test": ['test', 'test2', 'test3'],
        "foo": [1, 3, 3],
        "baz": [float('nan'), 5, float('nan')]})
    left_union = genie.process_functions._get_left_union_df(
        new_datadf, DATABASE_DF, 'UNIQUE_KEY')
    assert left_union.empty
def test_update__update_rows():
    '''
    Tests index comparison for updating rows
    '''
    # 'test1' and 'test2' differ from DATABASE_DF; 'test3' is unchanged.
    new_datadf = pd.DataFrame({
        'UNIQUE_KEY': ['test1', 'test2', 'test3'],
        "test": ['test', 'test2', 'test3'],
        "foo": [1, 3, 3],
        "baz": [float('nan'), 5, float('nan')]})
    # ROW_ID/ROW_VERSION come from DATABASE_DF's '<id>_<version>' index.
    expecteddf = pd.DataFrame({
        "test": ['test', 'test2'],
        "foo": [1, 3],
        "baz": ['', 5],
        'ROW_ID': ['1', '2'],
        'ROW_VERSION': ['3', '3']})
    update_rows = genie.process_functions._update_rows(
        new_datadf, DATABASE_DF, 'UNIQUE_KEY')
    assert update_rows.equals(expecteddf[update_rows.columns])


def test_maintaintype__update_rows():
    '''
    Test pandas behavior. Integer -> Float if NA exists
    '''
    new_datadf = pd.DataFrame({
        'UNIQUE_KEY': ['test1', 'test2', 'test3'],
        "test": ['test1', 'test2', 'test3'],
        "foo": [1, 3, 3],
        "baz": [float('nan'), 5, float('nan')]})
    # Test that the datatype passed into from new_datadf gets preserved
    expecteddf = pd.DataFrame({
        "test": ['test2'],
        "foo": [3],
        "baz": [5],
        'ROW_ID': ['2'],
        'ROW_VERSION': ['3']})
    expecteddf = expecteddf.astype({'baz': object})
    update_rows = genie.process_functions._update_rows(
        new_datadf, DATABASE_DF, 'UNIQUE_KEY')
    assert update_rows.equals(expecteddf[update_rows.columns])


def test_noupdate__update_rows():
    '''
    Tests the index comparison to get no updates
    '''
    # 'test4' is not in DATABASE_DF, so there is nothing to update.
    new_datadf = pd.DataFrame({
        'UNIQUE_KEY': ['test4'],
        "test": ['test'],
        "foo": [1],
        "baz": [float('nan')]})
    update_rows = genie.process_functions._update_rows(
        new_datadf, DATABASE_DF, 'UNIQUE_KEY')
    assert update_rows.empty
def test_delete__delete_rows():
    # Keys missing from new_datadf ('test2', 'test3') must be deleted;
    # the result is (ROW_ID, ROW_VERSION) pairs from DATABASE_DF's index.
    new_datadf = pd.DataFrame({
        'UNIQUE_KEY': ['test1'],
        "test": ['test1'],
        "foo": [1],
        "baz": [float('nan')]})
    expecteddf = pd.DataFrame({
        0: ['2', '3'],
        1: ['3', '5']})
    delete_rows = genie.process_functions._delete_rows(
        new_datadf, DATABASE_DF, 'UNIQUE_KEY')
    assert delete_rows.equals(expecteddf)


def test_norows__delete_rows():
    # Identical dataframes -> nothing to delete.
    delete_rows = genie.process_functions._delete_rows(
        DATABASE_DF, DATABASE_DF, 'UNIQUE_KEY')
    assert delete_rows.empty
@pytest.fixture(params=[
    # tuple with (test flag, staging flag, expected mapping-table synid)
    (False, False, "syn10967259"),
    (False, True, "syn12094210"),
    (True, False, "syn11600968")])
def database_map(request):
    # Parametrized fixture: yields one (test, staging, synid) combination
    # per run of each test that requests it.
    return request.param


class argparser:
    # Minimal stand-in for a Synapse tableQuery result: only the
    # asDataFrame() method is needed by the code under test.
    def asDataFrame(self):
        database_dict = {"Database": ["centerMapping"],
                         "Id": ["syn123"]}
        databasetosynid_mappingdf = pd.DataFrame(database_dict)
        return(databasetosynid_mappingdf)
def test_get_synid_database_mappingdf(database_map):
    '''
    Test getting database mapping config
    no flags
    staging flag
    test flag
    '''
    (test, staging, synid) = database_map
    arg = argparser()
    # Patch the table-query helper and check the expected mapping-table
    # synid is queried for each flag combination.
    with mock.patch(
            "genie.process_functions.get_syntabledf",
            return_value=arg.asDataFrame()) as patch_gettabledf:
        df = genie.process_functions.get_synid_database_mappingdf(
            syn, test=test, staging=staging)
        patch_gettabledf.assert_called_once_with(
            syn, "SELECT * FROM {}".format(synid))
        assert df.equals(arg.asDataFrame())


def test_get_syntabledf():
    '''
    Test helper function that queries synapse tables and returns dataframes
    '''
    arg = argparser()
    # syn.tableQuery returns the argparser stub, whose asDataFrame() output
    # must be passed straight through.
    with mock.patch.object(
            syn, "tableQuery", return_value=arg) as patch_syn_tablequery:
        querystring = "select * from foo"
        df = genie.process_functions.get_syntabledf(syn, querystring)
        patch_syn_tablequery.assert_called_once_with(querystring)
        assert df.equals(arg.asDataFrame())
58292d201618a150a8d52c7ec608de421875a45c | 14,378 | py | Python | tests/test_geo_utilities.py | CSHS-CWRA/RavenPy | 279505d7270c3f796500f2cb992af1cd66dfb44c | [
"MIT"
] | 12 | 2020-12-07T23:07:13.000Z | 2022-03-08T20:50:58.000Z | tests/test_geo_utilities.py | CSHS-CWRA/RavenPy | 279505d7270c3f796500f2cb992af1cd66dfb44c | [
"MIT"
] | 119 | 2020-08-25T08:17:17.000Z | 2022-03-30T16:12:19.000Z | tests/test_geo_utilities.py | CSHS-CWRA/RavenPy | 279505d7270c3f796500f2cb992af1cd66dfb44c | [
"MIT"
] | 3 | 2020-12-02T17:33:13.000Z | 2021-08-31T15:39:26.000Z | import tempfile
from pathlib import Path
import numpy as np
import pytest
from ravenpy.utilities.testdata import get_local_testdata
class TestOperations:
    """Tests for generic file/angle helpers in ravenpy.utilities."""

    # importorskip: skip the whole class if the optional GIS stack is absent.
    analysis = pytest.importorskip("ravenpy.utilities.analysis")
    io = pytest.importorskip("ravenpy.utilities.io")

    zipped_file = get_local_testdata("polygons/mars.zip")
    non_zipped_file = get_local_testdata("polygons/mars.geojson")

    def test_circular_mean_aspect(self):
        northern_angles = np.array([330, 30, 15, 345])
        slight_northeast_angles = np.append(northern_angles, [0.000001])
        eastern_angles = np.arange(45, 125, 1.25)
        southwest_angles = np.array([181, 182.25, 183.5, 222])

        assert self.analysis.circular_mean_aspect(northern_angles) == 360
        np.testing.assert_almost_equal(
            self.analysis.circular_mean_aspect(slight_northeast_angles), 0, decimal=3
        )
        assert self.analysis.circular_mean_aspect(eastern_angles) == 84.375
        np.testing.assert_almost_equal(
            self.analysis.circular_mean_aspect(southwest_angles), 191.88055987
        )

    def test_address_append(self):
        # Archive paths get a fsspec-style protocol prefix; plain files don't.
        non_existing_tarred_file = "polygons.tar"

        assert "zip://" in self.io.address_append(self.zipped_file)
        assert "tar://" in self.io.address_append(non_existing_tarred_file)
        assert not self.io.address_append(self.non_zipped_file).startswith(
            ("zip://", "tar://")
        )

    def test_archive_sniffer(self, tmp_path):
        probable_shp = self.io.archive_sniffer(self.zipped_file)
        assert Path(probable_shp[0]).name == "mars.shp"

        probable_shp = self.io.archive_sniffer(self.zipped_file, working_dir=tmp_path)
        assert Path(probable_shp[0]).name == "mars.shp"

    def test_archive_extract(self, tmp_path):
        assert self.zipped_file.exists()

        files = list()
        with tempfile.TemporaryDirectory(dir=tmp_path) as tdir:
            files.extend(
                self.io.generic_extract_archive(self.zipped_file, output_dir=tdir)
            )
            assert len(files) == 5
            for f in files:
                assert Path(f).exists()
        # leaving the context removes the temporary directory, so every
        # extracted path must now be gone.
        assert not np.any([Path(f).exists() for f in files])

        # without output_dir the extraction target persists.
        files = self.io.generic_extract_archive(self.zipped_file)
        assert np.all([Path(f).exists() for f in files])
class TestFileInfoFuncs:
    """Tests for raster/vector metadata sniffing and sanity checks."""

    checks = pytest.importorskip("ravenpy.utilities.checks")
    io = pytest.importorskip("ravenpy.utilities.io")

    zipped_file = get_local_testdata("polygons/mars.zip")
    geojson_file = get_local_testdata("polygons/mars.geojson")
    raster_file = get_local_testdata(
        "nasa/Mars_MGS_MOLA_DEM_georeferenced_region_compressed.tiff"
    )
    non_existing_file = "unreal.zip"

    def test_raster_datatype_sniffer(self):
        datatype = self.io.raster_datatype_sniffer(self.raster_file)
        assert datatype.lower() == "uint8"

    def test_crs_sniffer(self):
        # Both test fixtures are in EPSG:4326 (WGS84).
        assert self.io.crs_sniffer(self.zipped_file) == 4326
        assert set(self.io.crs_sniffer(self.geojson_file, self.raster_file)) == {4326}

    def test_single_file_check(self):
        one = [Path(__file__).parent / "__init__.py"]
        zero = list()
        three = [1, Path().root, 2.333]

        assert self.checks.single_file_check(one) == one[0]

        with pytest.raises(FileNotFoundError):
            self.checks.single_file_check(zero)

        with pytest.raises(NotImplementedError):
            self.checks.single_file_check(three)

    def test_boundary_check(self):
        # NOTE: does not presently accept zipped files.
        # max_y=80 keeps the fixtures inside bounds (no warning expected);
        # max_y=15 pushes them out of bounds (UserWarning expected).
        with pytest.warns(None):
            self.checks.boundary_check([self.geojson_file, self.raster_file], max_y=80)

        with pytest.warns(UserWarning):
            self.checks.boundary_check([self.geojson_file, self.raster_file], max_y=15)

        with pytest.raises(FileNotFoundError):
            self.checks.boundary_check([self.non_existing_file])

    @pytest.mark.skip(reason="Not presently testable")
    def test_multipolygon_check(self):
        pass
class TestGdalOgrFunctions:
    """Tests for GDAL/OGR-backed slope, aspect and geometry analyses."""

    analysis = pytest.importorskip("ravenpy.utilities.analysis")
    fiona = pytest.importorskip("fiona")
    sgeo = pytest.importorskip("shapely.geometry")

    geojson_file = get_local_testdata("polygons/mars.geojson")
    raster_file = get_local_testdata(
        "nasa/Mars_MGS_MOLA_DEM_georeferenced_region_compressed.tiff"
    )

    def test_gdal_aspect_not_projected(self, tmp_path):
        aspect_grid = self.analysis.gdal_aspect_analysis(self.raster_file)
        np.testing.assert_almost_equal(
            self.analysis.circular_mean_aspect(aspect_grid), 10.9119033
        )

        # test with creation of a temporary file
        aspect_tempfile = tempfile.NamedTemporaryFile(
            prefix="aspect_", suffix=".tiff", delete=False, dir=tmp_path
        ).name
        aspect_grid = self.analysis.gdal_aspect_analysis(
            self.raster_file, set_output=aspect_tempfile
        )
        np.testing.assert_almost_equal(
            self.analysis.circular_mean_aspect(aspect_grid), 10.9119033
        )
        assert Path(aspect_tempfile).stat().st_size > 0

    # Slope values are high due to data values using Geographic CRS
    def test_gdal_slope_not_projected(self, tmp_path):
        slope_grid = self.analysis.gdal_slope_analysis(self.raster_file)
        np.testing.assert_almost_equal(slope_grid.min(), 0.0)
        np.testing.assert_almost_equal(slope_grid.mean(), 64.4365427)
        np.testing.assert_almost_equal(slope_grid.max(), 89.71747, 5)

        slope_tempfile = tempfile.NamedTemporaryFile(
            prefix="slope_", suffix=".tiff", delete=False, dir=tmp_path
        ).name
        slope_grid = self.analysis.gdal_slope_analysis(
            self.raster_file, set_output=slope_tempfile
        )
        np.testing.assert_almost_equal(slope_grid.mean(), 64.4365427)
        assert Path(slope_tempfile).stat().st_size > 0

    # Slope values are high due to data values using Geographic CRS
    def test_dem_properties(self):
        # whole-raster properties first ...
        dem_properties = self.analysis.dem_prop(self.raster_file)
        np.testing.assert_almost_equal(dem_properties["aspect"], 10.911, 3)
        np.testing.assert_almost_equal(dem_properties["elevation"], 79.0341, 4)
        np.testing.assert_almost_equal(dem_properties["slope"], 64.43654, 5)

        # ... then restricted to the first polygon of the geojson fixture.
        with self.fiona.open(self.geojson_file) as gj:
            feature = next(iter(gj))
            geom = self.sgeo.shape(feature["geometry"])

        region_dem_properties = self.analysis.dem_prop(self.raster_file, geom=geom)
        np.testing.assert_almost_equal(region_dem_properties["aspect"], 280.681, 3)
        np.testing.assert_almost_equal(region_dem_properties["elevation"], 145.8899, 4)
        np.testing.assert_almost_equal(region_dem_properties["slope"], 61.26508, 5)

    # Slope values are high due to data values using Geographic CRS
    def test_geom_properties(self):
        with self.fiona.open(self.geojson_file) as gj:
            iterable = iter(gj)
            feature_1 = next(iterable)
            feature_2 = next(iterable)
            geom_1 = self.sgeo.shape(feature_1["geometry"])
            geom_2 = self.sgeo.shape(feature_2["geometry"])

        geom_1_properties = self.analysis.geom_prop(geom_1)
        np.testing.assert_almost_equal(geom_1_properties["area"], 357.9811899)
        np.testing.assert_almost_equal(
            geom_1_properties["centroid"], (-128.3959836, 19.1572278)
        )
        np.testing.assert_almost_equal(geom_1_properties["perimeter"], 68.4580077)
        np.testing.assert_almost_equal(geom_1_properties["gravelius"], 1.0206790)

        geom_2_properties = self.analysis.geom_prop(geom_2)
        np.testing.assert_almost_equal(geom_2_properties["area"], 361.5114221)
        np.testing.assert_almost_equal(
            geom_2_properties["centroid"], (-70.2394629, 45.7698029)
        )
        np.testing.assert_almost_equal(geom_2_properties["perimeter"], 96.1035859)
        np.testing.assert_almost_equal(geom_2_properties["gravelius"], 1.4258493)
class TestGenericGeoOperations:
    """Tests for reprojection, warping and clipping of vectors/rasters."""

    analysis = pytest.importorskip("ravenpy.utilities.analysis")
    geo = pytest.importorskip("ravenpy.utilities.geo")
    fiona = pytest.importorskip("fiona")
    rasterio = pytest.importorskip("rasterio")
    sgeo = pytest.importorskip("shapely.geometry")

    geojson_file = get_local_testdata("polygons/mars.geojson")
    raster_file = get_local_testdata(
        "nasa/Mars_MGS_MOLA_DEM_georeferenced_region_compressed.tiff"
    )

    def test_vector_reprojection(self, tmp_path):
        # TODO: It would be awesome if this returned a temporary filepath if no file given.
        reproj_file = tempfile.NamedTemporaryFile(
            prefix="reproj_", suffix=".geojson", delete=False, dir=tmp_path
        ).name
        self.geo.generic_vector_reproject(
            self.geojson_file, projected=reproj_file, target_crs="EPSG:3348"
        )
        with self.fiona.open(reproj_file) as gj:
            iterable = iter(gj)
            feature = next(iterable)
            geom = self.sgeo.shape(feature["geometry"])

        geom_properties = self.analysis.geom_prop(geom)
        np.testing.assert_almost_equal(geom_properties["area"], 6450001762792, 0)
        np.testing.assert_almost_equal(
            geom_properties["centroid"], (1645777.7589835, -933242.1203143)
        )
        np.testing.assert_almost_equal(geom_properties["perimeter"], 9194343.1759303)
        np.testing.assert_almost_equal(geom_properties["gravelius"], 1.0212589)

    def test_raster_warp(self, tmp_path):
        # TODO: It would be awesome if this returned a temporary filepath if no file given.
        # TODO: either use `output` or `reprojected/warped` for these functions.
        reproj_file = tempfile.NamedTemporaryFile(
            prefix="reproj_", suffix=".tiff", delete=False, dir=tmp_path
        ).name
        self.geo.generic_raster_warp(
            self.raster_file, output=reproj_file, target_crs="EPSG:3348"
        )

        # EPSG:3348 is a very general transformation; Some tolerance should be allowed.
        with self.rasterio.open(reproj_file) as gt:
            assert gt.crs.to_epsg() == 3348
            np.testing.assert_allclose(gt.bounds.left, -2077535, atol=3)
            np.testing.assert_allclose(gt.bounds.right, 15591620, atol=3)
            np.testing.assert_allclose(gt.bounds.bottom, -4167898, atol=3)
            np.testing.assert_allclose(gt.bounds.top, 5817014, atol=3)

            data = gt.read(1)  # read band 1 (red)
            assert data.min() == 0
            assert data.max() == 255
            np.testing.assert_almost_equal(data.mean(), 60.729, 3)

    def test_warped_raster_slope(self, tmp_path):
        reproj_file = tempfile.NamedTemporaryFile(
            prefix="reproj_", suffix=".tiff", delete=False, dir=tmp_path
        ).name
        self.geo.generic_raster_warp(
            self.raster_file, output=reproj_file, target_crs="EPSG:3348"
        )
        slope_grid = self.analysis.gdal_slope_analysis(reproj_file)

        np.testing.assert_almost_equal(slope_grid.min(), 0.0)
        np.testing.assert_almost_equal(slope_grid.mean(), 0.0034991)
        np.testing.assert_almost_equal(slope_grid.max(), 0.3523546)

    def test_warped_raster_aspect(self, tmp_path):
        reproj_file = tempfile.NamedTemporaryFile(
            prefix="reproj_", suffix=".tiff", delete=False, dir=tmp_path
        ).name
        self.geo.generic_raster_warp(
            self.raster_file, output=reproj_file, target_crs="EPSG:3348"
        )
        aspect_grid = self.analysis.gdal_aspect_analysis(reproj_file)

        np.testing.assert_almost_equal(
            self.analysis.circular_mean_aspect(aspect_grid), 7.780, decimal=3
        )

    def test_raster_clip(self, tmp_path):
        # clip the DEM to the first polygon of the geojson fixture.
        with self.fiona.open(self.geojson_file) as gj:
            feature = next(iter(gj))
            geom = self.sgeo.shape(feature["geometry"])

        clipped_file = tempfile.NamedTemporaryFile(
            prefix="reproj_", suffix=".tiff", delete=False, dir=tmp_path
        ).name
        self.geo.generic_raster_clip(self.raster_file, clipped_file, geometry=geom)

        with self.rasterio.open(clipped_file) as gt:
            assert gt.crs.to_epsg() == 4326

            data = gt.read(1)  # read band 1 (red)
            assert data.min() == 0
            assert data.max() == 255
            np.testing.assert_almost_equal(data.mean(), 102.8222965)

    def test_shapely_pyproj_transform(self):
        with self.fiona.open(self.geojson_file) as gj:
            feature = next(iter(gj))
            geom = self.sgeo.shape(feature["geometry"])

        transformed = self.geo.geom_transform(geom, target_crs="EPSG:3348")
        np.testing.assert_almost_equal(
            transformed.bounds,
            (188140.3820599, -2374936.1363096, 3086554.0207066, 409691.2180337),
        )
        np.testing.assert_almost_equal(transformed.centroid.x, 1645777.7589835)
        np.testing.assert_almost_equal(transformed.centroid.y, -933242.1203143)
        np.testing.assert_almost_equal(transformed.area, 6450001762792, 0)
class TestGIS:
    """Tests for bounding-box extraction and point-in-feature lookup."""

    checks = pytest.importorskip("ravenpy.utilities.checks")
    io = pytest.importorskip("ravenpy.utilities.io")
    sgeo = pytest.importorskip("shapely.geometry")

    vector_file = get_local_testdata("polygons/mars.geojson")

    # NOTE(review): get_bbox appears to return (min_x, min_y, max_x, max_y);
    # the local names below unpack it as w, s, n, e, so "n" actually holds
    # max longitude and "e" max latitude -- confirm against the io.get_bbox
    # implementation before relying on the names.
    def test_get_bbox_single(self):
        w, s, n, e = self.io.get_bbox(self.vector_file, all_features=False)
        np.testing.assert_almost_equal(w, -139.8514262)
        np.testing.assert_almost_equal(s, 8.3754794)
        np.testing.assert_almost_equal(n, -117.4753973)
        np.testing.assert_almost_equal(e, 29.6327068)

    def test_get_bbox_all(self):
        w, s, n, e = self.io.get_bbox(self.vector_file)
        np.testing.assert_almost_equal(w, -139.8514262)
        np.testing.assert_almost_equal(s, 8.3754794)
        np.testing.assert_almost_equal(n, -38.7397456)
        np.testing.assert_almost_equal(e, 64.1757015)

    def test_feature_contains(self):
        # both a raw (lon, lat) tuple and a shapely Point are accepted.
        point = -69.0, 45
        assert isinstance(self.checks.feature_contains(point, self.vector_file), dict)
        assert isinstance(
            self.checks.feature_contains(self.sgeo.Point(point), self.vector_file), dict
        )
| 41.675362 | 91 | 0.678398 |
0440f547aa29f8ff1d1413d481ee44388426766f | 2,178 | py | Python | docs/source/conf.py | 4DNucleome/big-fish | 5512b6e3274872793ef4365a6dc423c72add91f9 | [
"BSD-3-Clause"
] | 17 | 2020-03-04T10:46:37.000Z | 2022-03-10T13:15:16.000Z | docs/source/conf.py | 4DNucleome/big-fish | 5512b6e3274872793ef4365a6dc423c72add91f9 | [
"BSD-3-Clause"
] | 48 | 2020-03-16T13:39:44.000Z | 2022-03-31T17:26:50.000Z | docs/source/conf.py | 4DNucleome/big-fish | 5512b6e3274872793ef4365a6dc423c72add91f9 | [
"BSD-3-Clause"
] | 15 | 2020-03-04T16:02:31.000Z | 2022-02-17T14:11:15.000Z | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys

# imported for side effects only: makes the 'sphinx_rtd_theme'
# theme/extension importable by Sphinx.
import sphinx_rtd_theme

# import Big-FISH
# Put the repository root on sys.path so autodoc can import the package.
sys.path.insert(0, os.path.abspath('../..'))
import bigfish

# -- Project information -----------------------------------------------------

project = 'big-fish'
copyright = '2020, Arthur Imbert'
author = 'Arthur Imbert'

# The short X.Y version
version = ".".join(bigfish.__version__.split(".")[:2])
# The full version, including alpha/beta/rc tags
release = bigfish.__version__

# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'numpydoc',
    'sphinx_rtd_theme',
    'sphinx.ext.autosectionlabel'
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []

# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
| 32.029412 | 79 | 0.671258 |
ecdc3824ebe0c10ad7f08c182caa5ca847b3886c | 3,875 | py | Python | Expt/stats_scripts/stats_s.py | utahnlp/therapist-observer | 31eaf9a5c82c6d0f9a62427ac5df030d81547472 | [
"Apache-2.0"
] | 7 | 2019-07-02T19:32:05.000Z | 2022-01-23T19:11:55.000Z | Expt/stats_scripts/stats_s.py | utahnlp/therapist-observer | 31eaf9a5c82c6d0f9a62427ac5df030d81547472 | [
"Apache-2.0"
] | 1 | 2020-10-06T05:14:16.000Z | 2020-11-05T22:39:19.000Z | Expt/stats_scripts/stats_s.py | utahnlp/therapist-observer | 31eaf9a5c82c6d0f9a62427ac5df030d81547472 | [
"Apache-2.0"
] | 6 | 2019-06-29T23:34:41.000Z | 2022-03-16T22:19:39.000Z | import sys, re
# Python 2 script (print statements below): parse a training log given as
# argv[1] and report, per recall@K value and per class key, the best
# recall/precision/F1 seen across evaluation checkpoints, plus the
# checkpoint with the lowest dev loss.
#
# NOTE(review): precision/recall/F1 captured from the log are kept as
# *strings*, so after the first assignment the >= comparisons below are
# lexicographic -- fine for fixed-width "0.xxx" values, but worth
# confirming if the log can contain values >= 10.

best_dev_loss = 1000.0
best_dev_loss_state = ''
dev_losses = {}
p_r_f1 = {}
recalls = {}
best_recall = {}
best_recall_state = {}
best_precision = {}
best_precision_state = {}
best_f1 = {}
best_f1_state = {}
support = {}

# best_recall is keyed both by the integer K values and by the class keys.
topK = [1, 2, 3, 5, 10]
for k in topK:
    best_recall[k] = -1.0

#keys = ['brownie','changetalk','sustain','neutral', 'reflection_complex','reflection_simple','affirm','facilitate','givinginfo','question_open','question_closed','advise_wp','advise_wop', 'confront', 'structure', 'other', 'avg / total']
keys = ['P', 'T']
for key in keys:
    best_recall[key] = -1.0
    best_recall_state[key] = (0, 0)
    best_precision[key] = -1.0
    best_precision_state[key] = (0, 0)
    best_f1[key] = -1.0
    best_f1_state[key] = (0, 0)
    p_r_f1[key] = {}

input_log = sys.argv[1]
for l in open(input_log):
    # remember the current checkpoint; later matches attribute their
    # metrics to the most recent (epoch, steps) pair.
    # NOTE(review): epoch/steps are undefined until the first
    # "Evaluating the model" line is seen -- confirm log format guarantees this.
    res = re.findall('Evaluating the model in the epoch (\d{1,}), after steps (\d{1,})', l)
    if res:
        epoch, steps = res[0]
    res3 = re.findall('Dev_eval_loss = (\d{1,}\.\d{1,})', l)
    if res3:
        dev_loss = float(res3[0])
        dev_losses[(epoch, steps)] = dev_loss
        if dev_loss < best_dev_loss:
            best_dev_loss = dev_loss
            best_dev_loss_state = (epoch, steps)
    # recall@K lines look like "[(1, 0.5), (2, 0.7), ...]".
    res4 = re.findall('recall@K = \[(.*)\]', l)
    if res4:
        recalls[(epoch, steps)] = res4[0]
        for t in res4[0].split(', ('):
            p, r = t.replace('(', '').replace(')', '').split(', ')
            p = int(p)
            r = float(r)
            if r > best_recall[p]:
                best_recall[p] = r
                best_recall_state[p] = (epoch, steps)
    # classification-report rows: "<key>  prec  recall  f1  support".
    for key in keys:
        prf_res = re.findall('^\s*'+key+'\s+(\d{1,}\.\d{1,})\s+(\d{1,}\.\d{1,})\s+(\d{1,}\.\d{1,})\s+(\d{1,})', l)
        if prf_res:
            p, r, f1, s = prf_res[0]
            p_r_f1[key][(epoch, steps)] = (p, r, f1)
            support[key] = s
            if p >= best_precision[key]:
                best_precision[key] = p
                best_precision_state[key] = (epoch, steps)
            if r >= best_recall[key]:
                best_recall[key] = r
                best_recall_state[key] = (epoch, steps)
            if f1 >= best_f1[key]:
                best_f1[key] = f1
                best_f1_state[key] = (epoch, steps)

# ---- report: per-K best recalls ----
# NOTE(review): best_recall_state[i] raises KeyError if the log contained
# no recall@K lines -- confirm logs always include them.
for i in topK:
    print "Best recall@%s: %s" % (i, best_recall[i])
    epoch, steps = best_recall_state[i]
    print "epoch: %s, %s, dev loss = %s" % (epoch, steps, dev_losses[best_recall_state[i]])
    print "all recalls : %s" % (recalls[best_recall_state[i]])
    print ''

print "Best dev loss: %s" % best_dev_loss
epoch, steps = best_dev_loss_state
print "epoch: %s, steps : %s" % (epoch, steps)
print "recalls: %s" % recalls[best_dev_loss_state]
print ''

# ---- report: per-key best precision/recall/F1 table ----
headers = ["best_precision", "best_recall", "best_f1-score"]
width = max(3, max([len(key) for key in keys]))
head_fmt = u'{:>{width}s} ' + u' {:>24}' * len(headers) + u' support \n'
report = head_fmt.format(u'', *headers, width=width)
row_summary_fmt = u'{:>{width}s} ' + u' ({:>4}, {:>6}, {:>8})' * 3 + u'\n'
row_fmt = u'{:>{width}s} ' + u' ({:>4}, {:>6}, {:>8})' * 3 + u' {:>5}\n'
for key in keys:
    pepoch, psteps = best_precision_state[key]
    repoch, rsteps = best_recall_state[key]
    f1epoch, f1steps = best_f1_state[key]
    # first row: best metric values with the checkpoints they came from.
    row1 = (key, best_precision[key], pepoch, psteps, best_recall[key], repoch, rsteps, best_f1[key], f1epoch, f1steps)
    report += row_summary_fmt.format(*row1, width = width)
    # second row: full (p, r, f1) triples at each of those checkpoints.
    row2 = ('', p_r_f1[key][(pepoch, psteps)][0], p_r_f1[key][(pepoch, psteps)][1], p_r_f1[key][(pepoch, psteps)][2], p_r_f1[key][(repoch, rsteps)][0], p_r_f1[key][(repoch, rsteps)][1], p_r_f1[key][(repoch, rsteps)][2], p_r_f1[key][(f1epoch, f1steps)][0], p_r_f1[key][(f1epoch, f1steps)][1], p_r_f1[key][(f1epoch, f1steps)][2], support[key])
    report += row_fmt.format(*row2, width = width)
print(report)
| 36.904762 | 341 | 0.572645 |
7f1510f2c779e37a4dbccbcc5bfbd1f86823101a | 260 | py | Python | nodes/1.x/python/Object.Members.py | jdehotin/Clockworkfordynamo | 59226ea8292c57acfa1aa476efd40f0e78c9b965 | [
"MIT"
] | 147 | 2016-02-24T16:37:03.000Z | 2022-02-18T12:10:34.000Z | nodes/1.x/python/Object.Members.py | jdehotin/Clockworkfordynamo | 59226ea8292c57acfa1aa476efd40f0e78c9b965 | [
"MIT"
] | 269 | 2016-02-25T14:04:14.000Z | 2022-03-26T07:30:53.000Z | nodes/1.x/python/Object.Members.py | jdehotin/Clockworkfordynamo | 59226ea8292c57acfa1aa476efd40f0e78c9b965 | [
"MIT"
] | 89 | 2016-03-16T18:21:56.000Z | 2022-02-03T14:34:30.000Z | import clr
# Dynamo (IronPython) node body: IN and OUT are injected by Dynamo.
# Lists the members of each input object.
items = IN[0]
elementlist = list()
try:
    # Inside Revit: unwrap the Dynamo wrappers so dir() exposes the
    # underlying Revit API members.
    clr.AddReference('RevitAPI')
    from Autodesk.Revit.DB import *
    for item in items:
        elementlist.append(dir(UnwrapElement(item)))
except:
    # Fallback outside Revit (or on unwrap failure): plain Python members.
    # NOTE(review): if the failure happens mid-loop, the results gathered
    # so far stay in elementlist and every item is appended again --
    # confirm this duplication is intended.
    for item in items:
        elementlist.append(dir(item))
OUT = elementlist | 18.571429 | 46 | 0.738462 |
18b91f8fd5ee9d847e843850a2ac970d50282450 | 108,127 | py | Python | common/fonts.py | davidstutz/disentangling-robustness-generalization | 220ff8a79a5f08665f4755dc76d4e592ae66c6d6 | [
"Unlicense"
] | 43 | 2019-05-22T13:58:59.000Z | 2022-03-15T03:02:02.000Z | common/fonts.py | davidstutz/disentangling-robustness-generalization | 220ff8a79a5f08665f4755dc76d4e592ae66c6d6 | [
"Unlicense"
] | 2 | 2019-11-23T14:51:41.000Z | 2020-11-30T20:59:42.000Z | common/fonts.py | davidstutz/disentangling-robustness-generalization | 220ff8a79a5f08665f4755dc76d4e592ae66c6d6 | [
"Unlicense"
] | 10 | 2019-06-19T08:28:34.000Z | 2021-06-16T10:24:16.000Z | def get_fonts():
files = """
ufl/ubuntu/Ubuntu-Medium.ttf
ufl/ubuntu/Ubuntu-MediumItalic.ttf
ufl/ubuntu/Ubuntu-Regular.ttf
ufl/ubuntu/Ubuntu-Light.ttf
ufl/ubuntu/Ubuntu-Bold.ttf
ufl/ubuntu/Ubuntu-BoldItalic.ttf
ufl/ubuntu/Ubuntu-Italic.ttf
ufl/ubuntu/Ubuntu-LightItalic.ttf
ufl/ubuntucondensed/UbuntuCondensed-Regular.ttf
ufl/ubuntumono/UbuntuMono-BoldItalic.ttf
ufl/ubuntumono/UbuntuMono-Bold.ttf
ufl/ubuntumono/UbuntuMono-Italic.ttf
ufl/ubuntumono/UbuntuMono-Regular.ttf
apache/cousine/Cousine-Italic.ttf
apache/cousine/Cousine-Bold.ttf
apache/cousine/Cousine-BoldItalic.ttf
apache/cousine/Cousine-Regular.ttf
apache/jsmathcmbx10/jsMath-cmbx10.ttf
apache/aclonica/Aclonica-Regular.ttf
apache/rocksalt/RockSalt-Regular.ttf
apache/ultra/Ultra-Regular.ttf
apache/rochester/Rochester-Regular.ttf
apache/montez/Montez-Regular.ttf
apache/permanentmarker/PermanentMarker-Regular.ttf
apache/robotomono/RobotoMono-Regular.ttf
apache/robotomono/RobotoMono-Light.ttf
apache/robotomono/RobotoMono-Bold.ttf
apache/robotomono/RobotoMono-Italic.ttf
apache/robotomono/RobotoMono-Medium.ttf
apache/robotomono/RobotoMono-BoldItalic.ttf
apache/robotomono/RobotoMono-ThinItalic.ttf
apache/robotomono/RobotoMono-Thin.ttf
apache/robotomono/RobotoMono-MediumItalic.ttf
apache/robotomono/RobotoMono-LightItalic.ttf
apache/comingsoon/ComingSoon-Regular.ttf
apache/robotocondensed/RobotoCondensed-Bold.ttf
apache/robotocondensed/RobotoCondensed-LightItalic.ttf
apache/robotocondensed/RobotoCondensed-Italic.ttf
apache/robotocondensed/RobotoCondensed-Regular.ttf
apache/robotocondensed/RobotoCondensed-BoldItalic.ttf
apache/robotocondensed/RobotoCondensed-Light.ttf
apache/smokum/Smokum-Regular.ttf
apache/roboto/Roboto-Black.ttf
apache/roboto/Roboto-Regular.ttf
apache/roboto/Roboto-LightItalic.ttf
apache/roboto/Roboto-BlackItalic.ttf
apache/roboto/Roboto-Medium.ttf
apache/roboto/Roboto-Thin.ttf
apache/roboto/Roboto-Italic.ttf
apache/roboto/Roboto-MediumItalic.ttf
apache/roboto/Roboto-BoldItalic.ttf
apache/roboto/Roboto-ThinItalic.ttf
apache/roboto/Roboto-Bold.ttf
apache/roboto/Roboto-Light.ttf
apache/homemadeapple/HomemadeApple-Regular.ttf
apache/slackey/Slackey-Regular.ttf
apache/jsmathcmex10/jsMath-cmex10.ttf
apache/jsmathcmmi10/jsMath-cmmi10.ttf
apache/crushed/Crushed-Regular.ttf
apache/chewy/Chewy-Regular.ttf
apache/jsmathcmsy10/jsMath-cmsy10.ttf
apache/luckiestguy/LuckiestGuy-Regular.ttf
apache/mountainsofchristmas/MountainsofChristmas-Bold.ttf
apache/mountainsofchristmas/MountainsofChristmas-Regular.ttf
apache/opensanshebrew/OpenSansHebrew-Regular.ttf
apache/opensanshebrew/OpenSansHebrew-BoldItalic.ttf
apache/opensanshebrew/OpenSansHebrew-ExtraBold.ttf
apache/opensanshebrew/OpenSansHebrew-Light.ttf
apache/opensanshebrew/OpenSansHebrew-Bold.ttf
apache/opensanshebrew/OpenSansHebrew-ExtraBoldItalic.ttf
apache/opensanshebrew/OpenSansHebrew-LightItalic.ttf
apache/opensanshebrew/OpenSansHebrew-Italic.ttf
apache/jsmathcmr10/jsMath-cmr10.ttf
apache/satisfy/Satisfy-Regular.ttf
apache/robotoslab/RobotoSlab-Regular.ttf
apache/robotoslab/RobotoSlab-Thin.ttf
apache/robotoslab/RobotoSlab-Light.ttf
apache/robotoslab/RobotoSlab-Bold.ttf
apache/sunshiney/Sunshiney-Regular.ttf
apache/maidenorange/MaidenOrange-Regular.ttf
apache/schoolbell/Schoolbell-Regular.ttf
apache/justanotherhand/JustAnotherHand-Regular.ttf
apache/rancho/Rancho-Regular.ttf
apache/cherrycreamsoda/CherryCreamSoda-Regular.ttf
apache/specialelite/SpecialElite-Regular.ttf
apache/opensans/OpenSans-SemiBoldItalic.ttf
apache/opensans/OpenSans-ExtraBold.ttf
apache/opensans/OpenSans-Regular.ttf
apache/opensans/OpenSans-ExtraBoldItalic.ttf
apache/opensans/OpenSans-Italic.ttf
apache/opensans/OpenSans-Bold.ttf
apache/opensans/OpenSans-LightItalic.ttf
apache/opensans/OpenSans-BoldItalic.ttf
apache/opensans/OpenSans-Light.ttf
apache/opensans/OpenSans-SemiBold.ttf
apache/arimo/Arimo-Regular.ttf
apache/arimo/Arimo-BoldItalic.ttf
apache/arimo/Arimo-Bold.ttf
apache/arimo/Arimo-Italic.ttf
apache/redressed/Redressed-Regular.ttf
apache/yellowtail/Yellowtail-Regular.ttf
apache/fontdinerswanky/FontdinerSwanky-Regular.ttf
apache/creepstercaps/CreepsterCaps-Regular.ttf
apache/opensanscondensed/OpenSansCondensed-Light.ttf
apache/opensanscondensed/OpenSansCondensed-Bold.ttf
apache/opensanscondensed/OpenSansCondensed-LightItalic.ttf
apache/craftygirls/CraftyGirls-Regular.ttf
apache/tinos/Tinos-BoldItalic.ttf
apache/tinos/Tinos-Bold.ttf
apache/tinos/Tinos-Regular.ttf
apache/tinos/Tinos-Italic.ttf
apache/irishgrover/IrishGrover-Regular.ttf
apache/opensanshebrewcondensed/OpenSansHebrewCondensed-Regular.ttf
apache/opensanshebrewcondensed/OpenSansHebrewCondensed-Light.ttf
apache/opensanshebrewcondensed/OpenSansHebrewCondensed-BoldItalic.ttf
apache/opensanshebrewcondensed/OpenSansHebrewCondensed-ExtraBoldItalic.ttf
apache/opensanshebrewcondensed/OpenSansHebrewCondensed-ExtraBold.ttf
apache/opensanshebrewcondensed/OpenSansHebrewCondensed-LightItalic.ttf
apache/opensanshebrewcondensed/OpenSansHebrewCondensed-Bold.ttf
apache/opensanshebrewcondensed/OpenSansHebrewCondensed-Italic.ttf
apache/nokora/Nokora-Bold.ttf
apache/nokora/Nokora-Regular.ttf
apache/jsmathcmti10/jsMath-cmti10.ttf
apache/walterturncoat/WalterTurncoat-Regular.ttf
apache/calligraffitti/Calligraffitti-Regular.ttf
apache/syncopate/Syncopate-Bold.ttf
apache/syncopate/Syncopate-Regular.ttf
apache/unkempt/Unkempt-Bold.ttf
apache/unkempt/Unkempt-Regular.ttf
apache/kranky/Kranky-Regular.ttf
ofl/adobeblank/AdobeBlank-Regular.ttf
ofl/novacut/NovaCut.ttf
ofl/federant/Federant-Regular.ttf
ofl/rugeboogie/RugeBoogie-Regular.ttf
ofl/actor/Actor-Regular.ttf
ofl/amarante/Amarante-Regular.ttf
ofl/seymourone/SeymourOne-Regular.ttf
ofl/baumans/Baumans-Regular.ttf
ofl/yantramanav/Yantramanav-Black.ttf
ofl/yantramanav/Yantramanav-Thin.ttf
ofl/yantramanav/Yantramanav-Regular.ttf
ofl/yantramanav/Yantramanav-Bold.ttf
ofl/yantramanav/Yantramanav-Light.ttf
ofl/yantramanav/Yantramanav-Medium.ttf
ofl/sawarabimincho/SawarabiMincho-Regular.ttf
ofl/petrona/Petrona-Regular.ttf
ofl/rubikmonoone/RubikMonoOne-Regular.ttf
ofl/yanonekaffeesatz/YanoneKaffeesatz-Regular.ttf
ofl/yanonekaffeesatz/YanoneKaffeesatz-Bold.ttf
ofl/yanonekaffeesatz/YanoneKaffeesatz-Light.ttf
ofl/yanonekaffeesatz/YanoneKaffeesatz-ExtraLight.ttf
ofl/amaticasc/AmaticaSC-Regular.ttf
ofl/amaticasc/AmaticaSC-Bold.ttf
ofl/waitingforthesunrise/WaitingfortheSunrise.ttf
ofl/ribeyemarrow/RibeyeMarrow-Regular.ttf
ofl/sedan/Sedan-Regular.ttf
ofl/sedan/Sedan-Italic.ttf
ofl/flamenco/Flamenco-Regular.ttf
ofl/flamenco/Flamenco-Light.ttf
ofl/flavors/Flavors-Regular.ttf
ofl/blackopsone/BlackOpsOne-Regular.ttf
ofl/asap/Asap-Bold.ttf
ofl/asap/Asap-SemiBold.ttf
ofl/asap/Asap-Regular.ttf
ofl/asap/Asap-MediumItalic.ttf
ofl/asap/Asap-BoldItalic.ttf
ofl/asap/Asap-Medium.ttf
ofl/asap/Asap-Italic.ttf
ofl/asap/Asap-SemiBoldItalic.ttf
ofl/bungee/Bungee-Regular.ttf
ofl/sumana/Sumana-Regular.ttf
ofl/sumana/Sumana-Bold.ttf
ofl/meriendaone/MeriendaOne-Regular.ttf
ofl/itim/Itim-Regular.ttf
ofl/sahitya/Sahitya-Regular.ttf
ofl/sahitya/Sahitya-Bold.ttf
ofl/croissantone/CroissantOne-Regular.ttf
ofl/armata/Armata-Regular.ttf
ofl/kadwa/Kadwa-Regular.ttf
ofl/kadwa/Kadwa-Bold.ttf
ofl/vt323/VT323-Regular.ttf
ofl/originalsurfer/OriginalSurfer-Regular.ttf
ofl/francoisone/FrancoisOne-Regular.ttf
ofl/federo/Federo-Regular.ttf
ofl/audiowide/Audiowide-Regular.ttf
ofl/kronaone/KronaOne-Regular.ttf
ofl/baloobhaijaan/BalooBhaijaan-Regular.ttf
ofl/titilliumweb/TitilliumWeb-SemiBoldItalic.ttf
ofl/titilliumweb/TitilliumWeb-Light.ttf
ofl/titilliumweb/TitilliumWeb-Regular.ttf
ofl/titilliumweb/TitilliumWeb-SemiBold.ttf
ofl/titilliumweb/TitilliumWeb-ExtraLight.ttf
ofl/titilliumweb/TitilliumWeb-ExtraLightItalic.ttf
ofl/titilliumweb/TitilliumWeb-BoldItalic.ttf
ofl/titilliumweb/TitilliumWeb-Italic.ttf
ofl/titilliumweb/TitilliumWeb-Black.ttf
ofl/titilliumweb/TitilliumWeb-Bold.ttf
ofl/titilliumweb/TitilliumWeb-LightItalic.ttf
ofl/baloopaaji/BalooPaaji-Regular.ttf
ofl/prostoone/ProstoOne-Regular.ttf
ofl/alegreyasans/AlegreyaSans-MediumItalic.ttf
ofl/alegreyasans/AlegreyaSans-LightItalic.ttf
ofl/alegreyasans/AlegreyaSans-Black.ttf
ofl/alegreyasans/AlegreyaSans-Italic.ttf
ofl/alegreyasans/AlegreyaSans-BlackItalic.ttf
ofl/alegreyasans/AlegreyaSans-ExtraBoldItalic.ttf
ofl/alegreyasans/AlegreyaSans-Regular.ttf
ofl/alegreyasans/AlegreyaSans-Thin.ttf
ofl/alegreyasans/AlegreyaSans-ThinItalic.ttf
ofl/alegreyasans/AlegreyaSans-ExtraBold.ttf
ofl/alegreyasans/AlegreyaSans-Light.ttf
ofl/alegreyasans/AlegreyaSans-Bold.ttf
ofl/alegreyasans/AlegreyaSans-Medium.ttf
ofl/alegreyasans/AlegreyaSans-BoldItalic.ttf
ofl/oxygenmono/OxygenMono-Regular.ttf
ofl/englebert/Englebert-Regular.ttf
ofl/gudea/Gudea-Italic.ttf
ofl/gudea/Gudea-Regular.ttf
ofl/gudea/Gudea-Bold.ttf
ofl/almendra/Almendra-Regular.ttf
ofl/almendra/Almendra-Bold.ttf
ofl/almendra/Almendra-Italic.ttf
ofl/almendra/Almendra-BoldItalic.ttf
ofl/timmana/Timmana-Regular.ttf
ofl/badscript/BadScript-Regular.ttf
ofl/happymonkey/HappyMonkey-Regular.ttf
ofl/shadowsintolight/ShadowsIntoLight.ttf
ofl/fugazone/FugazOne-Regular.ttf
ofl/play/Play-Regular.ttf
ofl/play/Play-Bold.ttf
ofl/lifesavers/LifeSavers-Bold.ttf
ofl/lifesavers/LifeSavers-ExtraBold.ttf
ofl/lifesavers/LifeSavers-Regular.ttf
ofl/medulaone/MedulaOne-Regular.ttf
ofl/mrssheppards/MrsSheppards-Regular.ttf
ofl/wellfleet/Wellfleet-Regular.ttf
ofl/neucha/Neucha.ttf
ofl/kokoro/Kokoro-Regular.ttf
ofl/butchermancaps/ButchermanCaps-Regular.ttf
ofl/reemkufi/ReemKufi-Regular.ttf
ofl/alexbrush/AlexBrush-Regular.ttf
ofl/sarala/Sarala-Regular.ttf
ofl/sarala/Sarala-Bold.ttf
ofl/terminaldosislight/TerminalDosis-Light.ttf
ofl/koulen/Koulen.ttf
ofl/rajdhani/Rajdhani-Medium.ttf
ofl/rajdhani/Rajdhani-Light.ttf
ofl/rajdhani/Rajdhani-Bold.ttf
ofl/rajdhani/Rajdhani-Regular.ttf
ofl/rajdhani/Rajdhani-SemiBold.ttf
ofl/belgrano/Belgrano-Regular.ttf
ofl/radley/Radley-Italic.ttf
ofl/radley/Radley-Regular.ttf
ofl/keaniaone/KeaniaOne-Regular.ttf
ofl/fondamento/Fondamento-Regular.ttf
ofl/fondamento/Fondamento-Italic.ttf
ofl/faustinavfbeta/FaustinaVFBeta.ttf
ofl/faustinavfbeta/FaustinaVFBeta-Italic.ttf
ofl/jaldi/Jaldi-Regular.ttf
ofl/jaldi/Jaldi-Bold.ttf
ofl/comfortaa/Comfortaa-Light.ttf
ofl/comfortaa/Comfortaa-Regular.ttf
ofl/comfortaa/Comfortaa-Bold.ttf
ofl/martelsans/MartelSans-SemiBold.ttf
ofl/martelsans/MartelSans-ExtraBold.ttf
ofl/martelsans/MartelSans-Black.ttf
ofl/martelsans/MartelSans-Light.ttf
ofl/martelsans/MartelSans-ExtraLight.ttf
ofl/martelsans/MartelSans-Regular.ttf
ofl/martelsans/MartelSans-Bold.ttf
ofl/astloch/Astloch-Regular.ttf
ofl/astloch/Astloch-Bold.ttf
ofl/changaone/ChangaOne-Italic.ttf
ofl/changaone/ChangaOne-Regular.ttf
ofl/baloobhai/BalooBhai-Regular.ttf
ofl/gfsdidot/GFSDidot-Regular.ttf
ofl/oldstandardtt/OldStandard-Bold.ttf
ofl/oldstandardtt/OldStandard-Italic.ttf
ofl/oldstandardtt/OldStandard-Regular.ttf
ofl/griffy/Griffy-Regular.ttf
ofl/lateef/LateefRegOT.ttf
ofl/hindmysuru/HindMysuru-Light.ttf
ofl/hindmysuru/HindMysuru-Regular.ttf
ofl/hindmysuru/HindMysuru-Bold.ttf
ofl/hindmysuru/HindMysuru-SemiBold.ttf
ofl/hindmysuru/HindMysuru-Medium.ttf
ofl/rationale/Rationale-Regular.ttf
ofl/seoulnamsancondensed/SeoulNamsanCondensed-Black.ttf
ofl/seoulnamsancondensed/SeoulNamsanCondensed-ExtraBold.ttf
ofl/seoulnamsancondensed/SeoulNamsanCondensed-Bold.ttf
ofl/seoulnamsancondensed/SeoulNamsanCondensed-Medium.ttf
ofl/seoulnamsancondensed/SeoulNamsanCondensed-Light.ttf
ofl/trykker/Trykker-Regular.ttf
ofl/stalinone/StalinOne-Regular.ttf
ofl/almendradisplay/AlmendraDisplay-Regular.ttf
ofl/asul/Asul-Regular.ttf
ofl/asul/Asul-Bold.ttf
ofl/aldrich/Aldrich-Regular.ttf
ofl/josefinslab/JosefinSlab-LightItalic.ttf
ofl/josefinslab/JosefinSlab-Regular.ttf
ofl/josefinslab/JosefinSlab-SemiBold.ttf
ofl/josefinslab/JosefinSlab-Bold.ttf
ofl/josefinslab/JosefinSlab-Light.ttf
ofl/josefinslab/JosefinSlab-BoldItalic.ttf
ofl/josefinslab/JosefinSlab-SemiBoldItalic.ttf
ofl/josefinslab/JosefinSlab-Italic.ttf
ofl/josefinslab/JosefinSlab-ThinItalic.ttf
ofl/josefinslab/JosefinSlab-Thin.ttf
ofl/portlligatsans/PortLligatSans-Regular.ttf
ofl/vollkornsc/VollkornSC-Bold.ttf
ofl/vollkornsc/VollkornSC-SemiBold.ttf
ofl/vollkornsc/VollkornSC-Black.ttf
ofl/vollkornsc/VollkornSC-Regular.ttf
ofl/zillaslabhighlight/ZillaSlabHighlight-Bold.ttf
ofl/zillaslabhighlight/ZillaSlabHighlight-Regular.ttf
ofl/anaheim/Anaheim-Regular.ttf
ofl/cairo/Cairo-Light.ttf
ofl/cairo/Cairo-ExtraLight.ttf
ofl/cairo/Cairo-SemiBold.ttf
ofl/cairo/Cairo-Regular.ttf
ofl/cairo/Cairo-Bold.ttf
ofl/cairo/Cairo-Black.ttf
ofl/imfelldwpicasc/IMFePIsc28P.ttf
ofl/hammersmithone/HammersmithOne-Regular.ttf
ofl/notosanstamil/NotoSansTamil-Regular.ttf
ofl/notosanstamil/NotoSansTamil-Bold.ttf
ofl/changa/Changa-ExtraBold.ttf
ofl/changa/Changa-ExtraLight.ttf
ofl/changa/Changa-Light.ttf
ofl/changa/Changa-SemiBold.ttf
ofl/changa/Changa-Medium.ttf
ofl/changa/Changa-Regular.ttf
ofl/changa/Changa-Bold.ttf
ofl/ewert/Ewert-Regular.ttf
ofl/pompiere/Pompiere-Regular.ttf
ofl/worksans/WorkSans-SemiBold.ttf
ofl/worksans/WorkSans-Regular.ttf
ofl/worksans/WorkSans-Thin.ttf
ofl/worksans/WorkSans-Light.ttf
ofl/worksans/WorkSans-ExtraBold.ttf
ofl/worksans/WorkSans-ExtraLight.ttf
ofl/worksans/WorkSans-Bold.ttf
ofl/worksans/WorkSans-Black.ttf
ofl/worksans/WorkSans-Medium.ttf
ofl/khmer/Khmer.ttf
ofl/warnes/Warnes-Regular.ttf
ofl/inknutantiqua/InknutAntiqua-ExtraBold.ttf
ofl/inknutantiqua/InknutAntiqua-Black.ttf
ofl/inknutantiqua/InknutAntiqua-Bold.ttf
ofl/inknutantiqua/InknutAntiqua-Light.ttf
ofl/inknutantiqua/InknutAntiqua-Medium.ttf
ofl/inknutantiqua/InknutAntiqua-SemiBold.ttf
ofl/inknutantiqua/InknutAntiqua-Regular.ttf
ofl/nanumgothiccoding/NanumGothicCoding-Regular.ttf
ofl/nanumgothiccoding/NanumGothicCoding-Bold.ttf
ofl/frankruhllibre/FrankRuhlLibre-Black.ttf
ofl/frankruhllibre/FrankRuhlLibre-Regular.ttf
ofl/frankruhllibre/FrankRuhlLibre-Bold.ttf
ofl/frankruhllibre/FrankRuhlLibre-Medium.ttf
ofl/frankruhllibre/FrankRuhlLibre-Light.ttf
ofl/miniver/Miniver-Regular.ttf
ofl/geo/Geo-Regular.ttf
ofl/geo/Geo-Oblique.ttf
ofl/hennypenny/HennyPenny-Regular.ttf
ofl/underdog/Underdog-Regular.ttf
ofl/cantoraone/CantoraOne-Regular.ttf
ofl/hind/Hind-Bold.ttf
ofl/hind/Hind-Regular.ttf
ofl/hind/Hind-Medium.ttf
ofl/hind/Hind-Light.ttf
ofl/hind/Hind-SemiBold.ttf
ofl/oxygen/Oxygen-Light.ttf
ofl/oxygen/Oxygen-Bold.ttf
ofl/oxygen/Oxygen-Regular.ttf
ofl/kumarone/KumarOne-Regular.ttf
ofl/tharlon/Tharlon-Regular.ttf
ofl/mitr/Mitr-SemiBold.ttf
ofl/mitr/Mitr-Medium.ttf
ofl/mitr/Mitr-Regular.ttf
ofl/mitr/Mitr-ExtraLight.ttf
ofl/mitr/Mitr-Bold.ttf
ofl/mitr/Mitr-Light.ttf
ofl/offside/Offside-Regular.ttf
ofl/strait/Strait-Regular.ttf
ofl/junge/Junge-Regular.ttf
ofl/sansation/Sansation-Italic.ttf
ofl/sansation/Sansation-Light.ttf
ofl/sansation/Sansation-Bold.ttf
ofl/sansation/Sansation-LightItalic.ttf
ofl/sansation/Sansation-BoldItalic.ttf
ofl/sansation/Sansation-Regular.ttf
ofl/baloo/Baloo-Regular.ttf
ofl/amstelvaralpha/AmstelvarAlpha-VF.ttf
ofl/cabinsketch/CabinSketch-Regular.ttf
ofl/cabinsketch/CabinSketch-Bold.ttf
ofl/antic/Antic-Regular.ttf
ofl/herrvonmuellerhoff/HerrVonMuellerhoff-Regular.ttf
ofl/hindmadurai/HindMadurai-Regular.ttf
ofl/hindmadurai/HindMadurai-Light.ttf
ofl/hindmadurai/HindMadurai-Medium.ttf
ofl/hindmadurai/HindMadurai-SemiBold.ttf
ofl/hindmadurai/HindMadurai-Bold.ttf
ofl/benchnine/BenchNine-Regular.ttf
ofl/benchnine/BenchNine-Light.ttf
ofl/benchnine/BenchNine-Bold.ttf
ofl/fjallaone/FjallaOne-Regular.ttf
ofl/coda/Coda-Regular.ttf
ofl/coda/Coda-ExtraBold.ttf
ofl/kottaone/KottaOne-Regular.ttf
ofl/kiteone/KiteOne-Regular.ttf
ofl/baloochettan/BalooChettan-Regular.ttf
ofl/stalinistone/StalinistOne-Regular.ttf
ofl/mogra/Mogra-Regular.ttf
ofl/sreekrushnadevaraya/SreeKrushnadevaraya-Regular.ttf
ofl/ralewaydots/RalewayDots-Regular.ttf
ofl/angkor/Angkor-Regular.ttf
ofl/hindcolombo/HindColombo-Regular.ttf
ofl/hindcolombo/HindColombo-Light.ttf
ofl/hindcolombo/HindColombo-SemiBold.ttf
ofl/hindcolombo/HindColombo-Bold.ttf
ofl/hindcolombo/HindColombo-Medium.ttf
ofl/amethysta/Amethysta-Regular.ttf
ofl/rozhaone/RozhaOne-Regular.ttf
ofl/caveatbrush/CaveatBrush-Regular.ttf
ofl/chivo/Chivo-Black.ttf
ofl/chivo/Chivo-BlackItalic.ttf
ofl/chivo/Chivo-BoldItalic.ttf
ofl/chivo/Chivo-Bold.ttf
ofl/chivo/Chivo-LightItalic.ttf
ofl/chivo/Chivo-Regular.ttf
ofl/chivo/Chivo-Italic.ttf
ofl/chivo/Chivo-Light.ttf
ofl/arefruqaa/ArefRuqaa-Regular.ttf
ofl/arefruqaa/ArefRuqaa-Bold.ttf
ofl/grandhotel/GrandHotel-Regular.ttf
ofl/amita/Amita-Bold.ttf
ofl/amita/Amita-Regular.ttf
ofl/lato/Lato-Light.ttf
ofl/lato/Lato-ExtraLightItalic.ttf
ofl/lato/Lato-SemiBoldItalic.ttf
ofl/lato/Lato-Bold.ttf
ofl/lato/Lato-SemiBold.ttf
ofl/lato/Lato-Italic.ttf
ofl/lato/Lato-ExtraBold.ttf
ofl/lato/Lato-ExtraBoldItalic.ttf
ofl/lato/Lato-Regular.ttf
ofl/lato/Lato-Thin.ttf
ofl/lato/Lato-LightItalic.ttf
ofl/lato/Lato-MediumItalic.ttf
ofl/lato/Lato-ThinItalic.ttf
ofl/lato/Lato-ExtraLight.ttf
ofl/lato/Lato-Medium.ttf
ofl/lato/Lato-Black.ttf
ofl/lato/Lato-BoldItalic.ttf
ofl/lato/Lato-BlackItalic.ttf
ofl/kaushanscript/KaushanScript-Regular.ttf
ofl/cormorantunicase/CormorantUnicase-Bold.ttf
ofl/cormorantunicase/CormorantUnicase-SemiBold.ttf
ofl/cormorantunicase/CormorantUnicase-Light.ttf
ofl/cormorantunicase/CormorantUnicase-Medium.ttf
ofl/cormorantunicase/CormorantUnicase-Regular.ttf
ofl/novaoval/NovaOval.ttf
ofl/noticiatext/NoticiaText-BoldItalic.ttf
ofl/noticiatext/NoticiaText-Regular.ttf
ofl/noticiatext/NoticiaText-Bold.ttf
ofl/noticiatext/NoticiaText-Italic.ttf
ofl/trocchi/Trocchi-Regular.ttf
ofl/wendyone/WendyOne-Regular.ttf
ofl/inconsolata/Inconsolata-Regular.ttf
ofl/inconsolata/Inconsolata-Bold.ttf
ofl/londrinasolid/LondrinaSolid-Thin.ttf
ofl/londrinasolid/LondrinaSolid-Light.ttf
ofl/londrinasolid/LondrinaSolid-Black.ttf
ofl/londrinasolid/LondrinaSolid-Regular.ttf
ofl/share/Share-Bold.ttf
ofl/share/Share-Regular.ttf
ofl/share/Share-BoldItalic.ttf
ofl/share/Share-Italic.ttf
ofl/molle/Molle-Regular.ttf
ofl/quantico/Quantico-Italic.ttf
ofl/quantico/Quantico-Regular.ttf
ofl/quantico/Quantico-BoldItalic.ttf
ofl/quantico/Quantico-Bold.ttf
ofl/adventpro/AdventPro-ExtraLight.ttf
ofl/adventpro/AdventPro-Light.ttf
ofl/adventpro/AdventPro-Medium.ttf
ofl/adventpro/AdventPro-Regular.ttf
ofl/adventpro/AdventPro-Bold.ttf
ofl/adventpro/AdventPro-SemiBold.ttf
ofl/adventpro/AdventPro-Thin.ttf
ofl/gruppo/Gruppo-Regular.ttf
ofl/biorhymeexpanded/BioRhymeExpanded-Regular.ttf
ofl/biorhymeexpanded/BioRhymeExpanded-Bold.ttf
ofl/biorhymeexpanded/BioRhymeExpanded-ExtraLight.ttf
ofl/biorhymeexpanded/BioRhymeExpanded-ExtraBold.ttf
ofl/biorhymeexpanded/BioRhymeExpanded-Light.ttf
ofl/firasanscondensed/FiraSansCondensed-SemiBoldItalic.ttf
ofl/firasanscondensed/FiraSansCondensed-ExtraLight.ttf
ofl/firasanscondensed/FiraSansCondensed-Medium.ttf
ofl/firasanscondensed/FiraSansCondensed-MediumItalic.ttf
ofl/firasanscondensed/FiraSansCondensed-BoldItalic.ttf
ofl/firasanscondensed/FiraSansCondensed-Light.ttf
ofl/firasanscondensed/FiraSansCondensed-ThinItalic.ttf
ofl/firasanscondensed/FiraSansCondensed-ExtraLightItalic.ttf
ofl/firasanscondensed/FiraSansCondensed-Bold.ttf
ofl/firasanscondensed/FiraSansCondensed-SemiBold.ttf
ofl/firasanscondensed/FiraSansCondensed-ExtraBoldItalic.ttf
ofl/firasanscondensed/FiraSansCondensed-Regular.ttf
ofl/firasanscondensed/FiraSansCondensed-BlackItalic.ttf
ofl/firasanscondensed/FiraSansCondensed-Black.ttf
ofl/firasanscondensed/FiraSansCondensed-ExtraBold.ttf
ofl/firasanscondensed/FiraSansCondensed-Thin.ttf
ofl/firasanscondensed/FiraSansCondensed-LightItalic.ttf
ofl/firasanscondensed/FiraSansCondensed-Italic.ttf
ofl/sansitaone/SansitaOne-Regular.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-BoldItalic.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-BlackItalic.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-MediumItalic.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-SemiBoldItalic.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-ExtraLight.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-ExtraLightItalic.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-Bold.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-Italic.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-Black.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-SemiBold.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-ThinItalic.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-Regular.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-LightItalic.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-Light.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-ExtraBold.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-Medium.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-Thin.ttf
ofl/firasansextracondensed/FiraSansExtraCondensed-ExtraBoldItalic.ttf
ofl/istokweb/IstokWeb-Bold.ttf
ofl/istokweb/IstokWeb-Regular.ttf
ofl/istokweb/IstokWeb-BoldItalic.ttf
ofl/istokweb/IstokWeb-Italic.ttf
ofl/coveredbyyourgrace/CoveredByYourGrace.ttf
ofl/bowlbyonesc/BowlbyOneSC-Regular.ttf
ofl/forum/Forum-Regular.ttf
ofl/thegirlnextdoor/TheGirlNextDoor.ttf
ofl/zillaslab/ZillaSlab-Medium.ttf
ofl/zillaslab/ZillaSlab-Regular.ttf
ofl/zillaslab/ZillaSlab-LightItalic.ttf
ofl/zillaslab/ZillaSlab-Light.ttf
ofl/zillaslab/ZillaSlab-SemiBold.ttf
ofl/zillaslab/ZillaSlab-SemiBoldItalic.ttf
ofl/zillaslab/ZillaSlab-Bold.ttf
ofl/zillaslab/ZillaSlab-Italic.ttf
ofl/zillaslab/ZillaSlab-MediumItalic.ttf
ofl/zillaslab/ZillaSlab-BoldItalic.ttf
ofl/kenia/Kenia-Regular.ttf
ofl/sansita/Sansita-BlackItalic.ttf
ofl/sansita/Sansita-Regular.ttf
ofl/sansita/Sansita-Black.ttf
ofl/sansita/Sansita-Bold.ttf
ofl/sansita/Sansita-Italic.ttf
ofl/sansita/Sansita-ExtraBoldItalic.ttf
ofl/sansita/Sansita-BoldItalic.ttf
ofl/sansita/Sansita-ExtraBold.ttf
ofl/kameron/Kameron-Bold.ttf
ofl/kameron/Kameron-Regular.ttf
ofl/mate/Mate-Italic.ttf
ofl/mate/Mate-Regular.ttf
ofl/pinyonscript/PinyonScript-Regular.ttf
ofl/cwtexhei/cwTeXHei-zhonly.ttf
ofl/brunoacesc/BrunoAceSC-Regular.ttf
ofl/quicksand/Quicksand-Light.ttf
ofl/quicksand/Quicksand-Medium.ttf
ofl/quicksand/Quicksand-Bold.ttf
ofl/quicksand/Quicksand-Regular.ttf
ofl/assistant/Assistant-ExtraLight.ttf
ofl/assistant/Assistant-Regular.ttf
ofl/assistant/Assistant-SemiBold.ttf
ofl/assistant/Assistant-ExtraBold.ttf
ofl/assistant/Assistant-Light.ttf
ofl/assistant/Assistant-Bold.ttf
ofl/anonymouspro/AnonymousPro-Bold.ttf
ofl/anonymouspro/AnonymousPro-Italic.ttf
ofl/anonymouspro/AnonymousPro-Regular.ttf
ofl/anonymouspro/AnonymousPro-BoldItalic.ttf
ofl/alef/Alef-Regular.ttf
ofl/alef/Alef-Bold.ttf
ofl/kavivanar/Kavivanar-Regular.ttf
ofl/fingerpaint/FingerPaint-Regular.ttf
ofl/metamorphous/Metamorphous-Regular.ttf
ofl/bigshotone/BigshotOne-Regular.ttf
ofl/monda/Monda-Bold.ttf
ofl/monda/Monda-Regular.ttf
ofl/belleza/Belleza-Regular.ttf
ofl/mrssaintdelafield/MrsSaintDelafield-Regular.ttf
ofl/questrial/Questrial-Regular.ttf
ofl/macondo/Macondo-Regular.ttf
ofl/brunoace/BrunoAce-Regular.ttf
ofl/halant/Halant-SemiBold.ttf
ofl/halant/Halant-Bold.ttf
ofl/halant/Halant-Light.ttf
ofl/halant/Halant-Medium.ttf
ofl/halant/Halant-Regular.ttf
ofl/ponnala/Ponnala-Regular.ttf
ofl/miriamlibre/MiriamLibre-Regular.ttf
ofl/miriamlibre/MiriamLibre-Bold.ttf
ofl/sail/Sail-Regular.ttf
ofl/mako/Mako-Regular.ttf
ofl/pollerone/PollerOne.ttf
ofl/iceberg/Iceberg-Regular.ttf
ofl/hindsiliguri/HindSiliguri-Medium.ttf
ofl/hindsiliguri/HindSiliguri-Light.ttf
ofl/hindsiliguri/HindSiliguri-SemiBold.ttf
ofl/hindsiliguri/HindSiliguri-Regular.ttf
ofl/hindsiliguri/HindSiliguri-Bold.ttf
ofl/poppins/Poppins-Black.ttf
ofl/poppins/Poppins-ExtraBold.ttf
ofl/poppins/Poppins-BlackItalic.ttf
ofl/poppins/Poppins-Italic.ttf
ofl/poppins/Poppins-LightItalic.ttf
ofl/poppins/Poppins-MediumItalic.ttf
ofl/poppins/Poppins-Thin.ttf
ofl/poppins/Poppins-SemiBoldItalic.ttf
ofl/poppins/Poppins-ExtraLightItalic.ttf
ofl/poppins/Poppins-BoldItalic.ttf
ofl/poppins/Poppins-ExtraLight.ttf
ofl/poppins/Poppins-Light.ttf
ofl/poppins/Poppins-Medium.ttf
ofl/poppins/Poppins-ThinItalic.ttf
ofl/poppins/Poppins-SemiBold.ttf
ofl/poppins/Poppins-ExtraBoldItalic.ttf
ofl/poppins/Poppins-Bold.ttf
ofl/poppins/Poppins-Regular.ttf
ofl/pangolin/Pangolin-Regular.ttf
ofl/convergence/Convergence-Regular.ttf
ofl/amaranth/Amaranth-Regular.ttf
ofl/amaranth/Amaranth-BoldItalic.ttf
ofl/amaranth/Amaranth-Italic.ttf
ofl/amaranth/Amaranth-Bold.ttf
ofl/nosifer/Nosifer-Regular.ttf
ofl/deliusswashcaps/DeliusSwashCaps-Regular.ttf
ofl/spicyrice/SpicyRice-Regular.ttf
ofl/headlandone/HeadlandOne-Regular.ttf
ofl/sourcecodepro/SourceCodePro-Regular.ttf
ofl/sourcecodepro/SourceCodePro-SemiBold.ttf
ofl/sourcecodepro/SourceCodePro-Medium.ttf
ofl/sourcecodepro/SourceCodePro-Light.ttf
ofl/sourcecodepro/SourceCodePro-ExtraLight.ttf
ofl/sourcecodepro/SourceCodePro-Black.ttf
ofl/sourcecodepro/SourceCodePro-Bold.ttf
ofl/cwtexming/cwTeXMing-zhonly.ttf
ofl/suranna/Suranna-Regular.ttf
ofl/alike/Alike-Regular.ttf
ofl/peddana/Peddana-Regular.ttf
ofl/sourceserifpro/SourceSerifPro-Black.ttf
ofl/sourceserifpro/SourceSerifPro-Bold.ttf
ofl/sourceserifpro/SourceSerifPro-Regular.ttf
ofl/sourceserifpro/SourceSerifPro-ExtraLight.ttf
ofl/sourceserifpro/SourceSerifPro-SemiBold.ttf
ofl/sourceserifpro/SourceSerifPro-Light.ttf
ofl/pressstart2p/PressStart2P-Regular.ttf
ofl/gorditas/Gorditas-Bold.ttf
ofl/gorditas/Gorditas-Regular.ttf
ofl/gentiumbookbasic/GenBkBasBI.ttf
ofl/gentiumbookbasic/GenBkBasB.ttf
ofl/gentiumbookbasic/GenBkBasR.ttf
ofl/gentiumbookbasic/GenBkBasI.ttf
ofl/dosis/Dosis-ExtraBold.ttf
ofl/dosis/Dosis-Bold.ttf
ofl/dosis/Dosis-ExtraLight.ttf
ofl/dosis/Dosis-Regular.ttf
ofl/dosis/Dosis-Light.ttf
ofl/dosis/Dosis-SemiBold.ttf
ofl/dosis/Dosis-Medium.ttf
ofl/prata/Prata-Regular.ttf
ofl/mukta/Mukta-ExtraLight.ttf
ofl/mukta/Mukta-Light.ttf
ofl/mukta/Mukta-Regular.ttf
ofl/mukta/Mukta-ExtraBold.ttf
ofl/mukta/Mukta-Medium.ttf
ofl/mukta/Mukta-Bold.ttf
ofl/mukta/Mukta-SemiBold.ttf
ofl/encodesanssemicondensed/EncodeSansSemiCondensed-SemiBold.ttf
ofl/encodesanssemicondensed/EncodeSansSemiCondensed-Light.ttf
ofl/encodesanssemicondensed/EncodeSansSemiCondensed-Thin.ttf
ofl/encodesanssemicondensed/EncodeSansSemiCondensed-Medium.ttf
ofl/encodesanssemicondensed/EncodeSansSemiCondensed-ExtraBold.ttf
ofl/encodesanssemicondensed/EncodeSansSemiCondensed-Black.ttf
ofl/encodesanssemicondensed/EncodeSansSemiCondensed-Regular.ttf
ofl/encodesanssemicondensed/EncodeSansSemiCondensed-Bold.ttf
ofl/encodesanssemicondensed/EncodeSansSemiCondensed-ExtraLight.ttf
ofl/khyay/Khyay-Regular.ttf
ofl/economica/Economica-Italic.ttf
ofl/economica/Economica-Regular.ttf
ofl/economica/Economica-BoldItalic.ttf
ofl/economica/Economica-Bold.ttf
ofl/overlock/Overlock-BoldItalic.ttf
ofl/overlock/Overlock-Bold.ttf
ofl/overlock/Overlock-Black.ttf
ofl/overlock/Overlock-BlackItalic.ttf
ofl/overlock/Overlock-Italic.ttf
ofl/overlock/Overlock-Regular.ttf
ofl/alefhebrew/Alef-Bold.ttf
ofl/alefhebrew/Alef-Regular.ttf
ofl/allertastencil/AllertaStencil-Regular.ttf
ofl/balooda/BalooDa-Regular.ttf
ofl/sharetech/ShareTech-Regular.ttf
ofl/cinzel/Cinzel-Regular.ttf
ofl/cinzel/Cinzel-Black.ttf
ofl/cinzel/Cinzel-Bold.ttf
ofl/taviraj/Taviraj-BlackItalic.ttf
ofl/taviraj/Taviraj-Light.ttf
ofl/taviraj/Taviraj-MediumItalic.ttf
ofl/taviraj/Taviraj-ExtraLightItalic.ttf
ofl/taviraj/Taviraj-Thin.ttf
ofl/taviraj/Taviraj-ExtraBoldItalic.ttf
ofl/taviraj/Taviraj-Medium.ttf
ofl/taviraj/Taviraj-LightItalic.ttf
ofl/taviraj/Taviraj-Bold.ttf
ofl/taviraj/Taviraj-Italic.ttf
ofl/taviraj/Taviraj-ThinItalic.ttf
ofl/taviraj/Taviraj-ExtraBold.ttf
ofl/taviraj/Taviraj-BoldItalic.ttf
ofl/taviraj/Taviraj-SemiBold.ttf
ofl/taviraj/Taviraj-SemiBoldItalic.ttf
ofl/taviraj/Taviraj-Black.ttf
ofl/taviraj/Taviraj-ExtraLight.ttf
ofl/taviraj/Taviraj-Regular.ttf
ofl/faunaone/FaunaOne-Regular.ttf
ofl/arbutus/Arbutus-Regular.ttf
ofl/rokkitt/Rokkitt-Light.ttf
ofl/rokkitt/Rokkitt-SemiBold.ttf
ofl/rokkitt/Rokkitt-Bold.ttf
ofl/rokkitt/Rokkitt-Medium.ttf
ofl/rokkitt/Rokkitt-ExtraLight.ttf
ofl/rokkitt/Rokkitt-Thin.ttf
ofl/rokkitt/Rokkitt-Regular.ttf
ofl/rokkitt/Rokkitt-Black.ttf
ofl/rokkitt/Rokkitt-ExtraBold.ttf
ofl/pirataone/PirataOne-Regular.ttf
ofl/maitree/Maitree-Medium.ttf
ofl/maitree/Maitree-Regular.ttf
ofl/maitree/Maitree-SemiBold.ttf
ofl/maitree/Maitree-Bold.ttf
ofl/maitree/Maitree-ExtraLight.ttf
ofl/maitree/Maitree-Light.ttf
ofl/palanquin/Palanquin-SemiBold.ttf
ofl/palanquin/Palanquin-Regular.ttf
ofl/palanquin/Palanquin-Light.ttf
ofl/palanquin/Palanquin-Bold.ttf
ofl/palanquin/Palanquin-Medium.ttf
ofl/palanquin/Palanquin-ExtraLight.ttf
ofl/palanquin/Palanquin-Thin.ttf
ofl/laosanspro/LaoSansPro-Regular.ttf
ofl/cantarell/Cantarell-BoldOblique.ttf
ofl/cantarell/Cantarell-Regular.ttf
ofl/cantarell/Cantarell-Oblique.ttf
ofl/cantarell/Cantarell-Bold.ttf
ofl/seaweedscript/SeaweedScript-Regular.ttf
ofl/cuprum/Cuprum-BoldItalic.ttf
ofl/cuprum/Cuprum-Bold.ttf
ofl/cuprum/Cuprum-Regular.ttf
ofl/cuprum/Cuprum-Italic.ttf
ofl/exo2/Exo2-SemiBold.ttf
ofl/exo2/Exo2-ExtraBoldItalic.ttf
ofl/exo2/Exo2-SemiBoldItalic.ttf
ofl/exo2/Exo2-BlackItalic.ttf
ofl/exo2/Exo2-Regular.ttf
ofl/exo2/Exo2-MediumItalic.ttf
ofl/exo2/Exo2-ExtraLightItalic.ttf
ofl/exo2/Exo2-Light.ttf
ofl/exo2/Exo2-LightItalic.ttf
ofl/exo2/Exo2-ExtraBold.ttf
ofl/exo2/Exo2-Italic.ttf
ofl/exo2/Exo2-Thin.ttf
ofl/exo2/Exo2-Medium.ttf
ofl/exo2/Exo2-ExtraLight.ttf
ofl/exo2/Exo2-BoldItalic.ttf
ofl/exo2/Exo2-Black.ttf
ofl/exo2/Exo2-Bold.ttf
ofl/exo2/Exo2-ThinItalic.ttf
ofl/londrinaoutline/LondrinaOutline-Regular.ttf
ofl/cormorantgaramond/CormorantGaramond-Italic.ttf
ofl/cormorantgaramond/CormorantGaramond-SemiBoldItalic.ttf
ofl/cormorantgaramond/CormorantGaramond-Regular.ttf
ofl/cormorantgaramond/CormorantGaramond-MediumItalic.ttf
ofl/cormorantgaramond/CormorantGaramond-SemiBold.ttf
ofl/cormorantgaramond/CormorantGaramond-Medium.ttf
ofl/cormorantgaramond/CormorantGaramond-BoldItalic.ttf
ofl/cormorantgaramond/CormorantGaramond-Bold.ttf
ofl/cormorantgaramond/CormorantGaramond-LightItalic.ttf
ofl/cormorantgaramond/CormorantGaramond-Light.ttf
ofl/parisienne/Parisienne-Regular.ttf
ofl/imfelldoublepicasc/IMFeDPsc28P.ttf
ofl/gabriela/Gabriela-Regular.ttf
ofl/firasans/FiraSans-Regular.ttf
ofl/firasans/FiraSans-Bold.ttf
ofl/firasans/FiraSans-MediumItalic.ttf
ofl/firasans/FiraSans-SemiBoldItalic.ttf
ofl/firasans/FiraSans-Light.ttf
ofl/firasans/FiraSans-ExtraLightItalic.ttf
ofl/firasans/FiraSans-Italic.ttf
ofl/firasans/FiraSans-Medium.ttf
ofl/firasans/FiraSans-BlackItalic.ttf
ofl/firasans/FiraSans-Black.ttf
ofl/firasans/FiraSans-ThinItalic.ttf
ofl/firasans/FiraSans-Thin.ttf
ofl/firasans/FiraSans-ExtraBold.ttf
ofl/firasans/FiraSans-SemiBold.ttf
ofl/firasans/FiraSans-BoldItalic.ttf
ofl/firasans/FiraSans-LightItalic.ttf
ofl/firasans/FiraSans-ExtraLight.ttf
ofl/firasans/FiraSans-ExtraBoldItalic.ttf
ofl/jotione/JotiOne-Regular.ttf
ofl/bowlbyone/BowlbyOne-Regular.ttf
ofl/goudybookletter1911/GoudyBookletter1911.ttf
ofl/bilbo/Bilbo-Regular.ttf
ofl/missfajardose/MissFajardose-Regular.ttf
ofl/eatercaps/EaterCaps-Regular.ttf
ofl/quando/Quando-Regular.ttf
ofl/kantumruy/Kantumruy-Light.ttf
ofl/kantumruy/Kantumruy-Bold.ttf
ofl/kantumruy/Kantumruy-Regular.ttf
ofl/cedarvillecursive/Cedarville-Cursive.ttf
ofl/moulpali/Moulpali.ttf
ofl/nobile/Nobile-Italic.ttf
ofl/nobile/Nobile-Medium.ttf
ofl/nobile/Nobile-MediumItalic.ttf
ofl/nobile/Nobile-BoldItalic.ttf
ofl/nobile/Nobile-Regular.ttf
ofl/nobile/Nobile-Bold.ttf
ofl/chelseamarket/ChelseaMarket-Regular.ttf
ofl/rubik/Rubik-Bold.ttf
ofl/rubik/Rubik-Light.ttf
ofl/rubik/Rubik-Italic.ttf
ofl/rubik/Rubik-Medium.ttf
ofl/rubik/Rubik-BlackItalic.ttf
ofl/rubik/Rubik-Black.ttf
ofl/rubik/Rubik-LightItalic.ttf
ofl/rubik/Rubik-MediumItalic.ttf
ofl/rubik/Rubik-BoldItalic.ttf
ofl/rubik/Rubik-Regular.ttf
ofl/postnobillsjaffna/PostNoBillsJaffna-ExtraBold.ttf
ofl/postnobillsjaffna/PostNoBillsJaffna-Regular.ttf
ofl/postnobillsjaffna/PostNoBillsJaffna-Bold.ttf
ofl/postnobillsjaffna/PostNoBillsJaffna-Medium.ttf
ofl/postnobillsjaffna/PostNoBillsJaffna-SemiBold.ttf
ofl/postnobillsjaffna/PostNoBillsJaffna-Light.ttf
ofl/sueellenfrancisco/SueEllenFrancisco.ttf
ofl/shrikhand/Shrikhand-Regular.ttf
ofl/rougescript/RougeScript-Regular.ttf
ofl/arimamadurai/ArimaMadurai-Medium.ttf
ofl/arimamadurai/ArimaMadurai-ExtraBold.ttf
ofl/arimamadurai/ArimaMadurai-Thin.ttf
ofl/arimamadurai/ArimaMadurai-Regular.ttf
ofl/arimamadurai/ArimaMadurai-Black.ttf
ofl/arimamadurai/ArimaMadurai-Light.ttf
ofl/arimamadurai/ArimaMadurai-ExtraLight.ttf
ofl/arimamadurai/ArimaMadurai-Bold.ttf
ofl/jejugothic/JejuGothic-Regular.ttf
ofl/laomuangkhong/LaoMuangKhong-Regular.ttf
ofl/merriweathersans/MerriweatherSans-ExtraBoldItalic.ttf
ofl/merriweathersans/MerriweatherSans-ExtraBold.ttf
ofl/merriweathersans/MerriweatherSans-LightItalic.ttf
ofl/merriweathersans/MerriweatherSans-Regular.ttf
ofl/merriweathersans/MerriweatherSans-Light.ttf
ofl/merriweathersans/MerriweatherSans-Italic.ttf
ofl/merriweathersans/MerriweatherSans-BoldItalic.ttf
ofl/merriweathersans/MerriweatherSans-Bold.ttf
ofl/spectral/Spectral-ExtraBoldItalic.ttf
ofl/spectral/Spectral-Medium.ttf
ofl/spectral/Spectral-ExtraLightItalic.ttf
ofl/spectral/Spectral-MediumItalic.ttf
ofl/spectral/Spectral-Bold.ttf
ofl/spectral/Spectral-ExtraBold.ttf
ofl/spectral/Spectral-LightItalic.ttf
ofl/spectral/Spectral-ExtraLight.ttf
ofl/spectral/Spectral-BoldItalic.ttf
ofl/spectral/Spectral-Italic.ttf
ofl/spectral/Spectral-Light.ttf
ofl/spectral/Spectral-Regular.ttf
ofl/spectral/Spectral-SemiBold.ttf
ofl/spectral/Spectral-SemiBoldItalic.ttf
ofl/tillana/Tillana-SemiBold.ttf
ofl/tillana/Tillana-ExtraBold.ttf
ofl/tillana/Tillana-Regular.ttf
ofl/tillana/Tillana-Medium.ttf
ofl/tillana/Tillana-Bold.ttf
ofl/spirax/Spirax-Regular.ttf
ofl/cabin/Cabin-SemiBold.ttf
ofl/cabin/Cabin-Bold.ttf
ofl/cabin/Cabin-SemiBoldItalic.ttf
ofl/cabin/Cabin-Regular.ttf
ofl/cabin/Cabin-Italic.ttf
ofl/cabin/Cabin-MediumItalic.ttf
ofl/cabin/Cabin-Medium.ttf
ofl/cabin/Cabin-BoldItalic.ttf
ofl/tuffy/Tuffy-Regular.ttf
ofl/tuffy/Tuffy-Bold.ttf
ofl/tuffy/Tuffy-BoldItalic.ttf
ofl/tuffy/Tuffy-Italic.ttf
ofl/metalmania/MetalMania-Regular.ttf
ofl/sitara/Sitara-Italic.ttf
ofl/sitara/Sitara-Bold.ttf
ofl/sitara/Sitara-Regular.ttf
ofl/sitara/Sitara-BoldItalic.ttf
ofl/alice/Alice-Regular.ttf
ofl/merriweather/Merriweather-BoldItalic.ttf
ofl/merriweather/Merriweather-LightItalic.ttf
ofl/merriweather/Merriweather-Light.ttf
ofl/merriweather/Merriweather-Italic.ttf
ofl/merriweather/Merriweather-BlackItalic.ttf
ofl/merriweather/Merriweather-Regular.ttf
ofl/merriweather/Merriweather-Black.ttf
ofl/merriweather/Merriweather-Bold.ttf
ofl/nicomoji/NicoMoji-Regular.ttf
ofl/mandali/Mandali-Regular.ttf
ofl/artifika/Artifika-Regular.ttf
ofl/coustard/Coustard-Black.ttf
ofl/coustard/Coustard-Regular.ttf
ofl/ptserif/PT_Serif-Web-Italic.ttf
ofl/ptserif/PT_Serif-Web-Regular.ttf
ofl/ptserif/PT_Serif-Web-BoldItalic.ttf
ofl/ptserif/PT_Serif-Web-Bold.ttf
ofl/greatvibes/GreatVibes-Regular.ttf
ofl/portlligatslab/PortLligatSlab-Regular.ttf
ofl/voces/Voces-Regular.ttf
ofl/sairasemicondensed/SairaSemiCondensed-Regular.ttf
ofl/sairasemicondensed/SairaSemiCondensed-SemiBold.ttf
ofl/sairasemicondensed/SairaSemiCondensed-Black.ttf
ofl/sairasemicondensed/SairaSemiCondensed-ExtraBold.ttf
ofl/sairasemicondensed/SairaSemiCondensed-Bold.ttf
ofl/sairasemicondensed/SairaSemiCondensed-Light.ttf
ofl/sairasemicondensed/SairaSemiCondensed-Medium.ttf
ofl/sairasemicondensed/SairaSemiCondensed-ExtraLight.ttf
ofl/sairasemicondensed/SairaSemiCondensed-Thin.ttf
ofl/bitter/Bitter-Italic.ttf
ofl/bitter/Bitter-Bold.ttf
ofl/bitter/Bitter-Regular.ttf
ofl/josefinsans/JosefinSans-ThinItalic.ttf
ofl/josefinsans/JosefinSans-LightItalic.ttf
ofl/josefinsans/JosefinSans-Bold.ttf
ofl/josefinsans/JosefinSans-SemiBoldItalic.ttf
ofl/josefinsans/JosefinSans-SemiBold.ttf
ofl/josefinsans/JosefinSans-BoldItalic.ttf
ofl/josefinsans/JosefinSans-Light.ttf
ofl/josefinsans/JosefinSans-Italic.ttf
ofl/josefinsans/JosefinSans-Regular.ttf
ofl/josefinsans/JosefinSans-Thin.ttf
ofl/montserratalternates/MontserratAlternates-Light.ttf
ofl/montserratalternates/MontserratAlternates-Medium.ttf
ofl/montserratalternates/MontserratAlternates-SemiBold.ttf
ofl/montserratalternates/MontserratAlternates-MediumItalic.ttf
ofl/montserratalternates/MontserratAlternates-Bold.ttf
ofl/montserratalternates/MontserratAlternates-BoldItalic.ttf
ofl/montserratalternates/MontserratAlternates-ExtraLightItalic.ttf
ofl/montserratalternates/MontserratAlternates-BlackItalic.ttf
ofl/montserratalternates/MontserratAlternates-ExtraLight.ttf
ofl/montserratalternates/MontserratAlternates-Regular.ttf
ofl/montserratalternates/MontserratAlternates-Italic.ttf
ofl/montserratalternates/MontserratAlternates-Black.ttf
ofl/montserratalternates/MontserratAlternates-LightItalic.ttf
ofl/montserratalternates/MontserratAlternates-ExtraBold.ttf
ofl/montserratalternates/MontserratAlternates-ExtraBoldItalic.ttf
ofl/montserratalternates/MontserratAlternates-SemiBoldItalic.ttf
ofl/montserratalternates/MontserratAlternates-Thin.ttf
ofl/montserratalternates/MontserratAlternates-ThinItalic.ttf
ofl/yinmar/Yinmar-Regular.ttf
ofl/aguafinascript/AguafinaScript-Regular.ttf
ofl/sharetechmono/ShareTechMono-Regular.ttf
ofl/cwtexfangsong/cwTeXFangSong-zhonly.ttf
ofl/averagesans/AverageSans-Regular.ttf
ofl/suezone/SuezOne-Regular.ttf
ofl/alegreyasanssc/AlegreyaSansSC-Black.ttf
ofl/alegreyasanssc/AlegreyaSansSC-Bold.ttf
ofl/alegreyasanssc/AlegreyaSansSC-Italic.ttf
ofl/alegreyasanssc/AlegreyaSansSC-Regular.ttf
ofl/alegreyasanssc/AlegreyaSansSC-ThinItalic.ttf
ofl/alegreyasanssc/AlegreyaSansSC-Thin.ttf
ofl/alegreyasanssc/AlegreyaSansSC-BlackItalic.ttf
ofl/alegreyasanssc/AlegreyaSansSC-MediumItalic.ttf
ofl/alegreyasanssc/AlegreyaSansSC-LightItalic.ttf
ofl/alegreyasanssc/AlegreyaSansSC-Medium.ttf
ofl/alegreyasanssc/AlegreyaSansSC-Light.ttf
ofl/alegreyasanssc/AlegreyaSansSC-ExtraBold.ttf
ofl/alegreyasanssc/AlegreyaSansSC-ExtraBoldItalic.ttf
ofl/alegreyasanssc/AlegreyaSansSC-BoldItalic.ttf
ofl/philosopher/Philosopher-Regular.ttf
ofl/philosopher/Philosopher-Bold.ttf
ofl/philosopher/Philosopher-BoldItalic.ttf
ofl/philosopher/Philosopher-Italic.ttf
ofl/yesteryear/Yesteryear-Regular.ttf
ofl/spacemono/SpaceMono-Bold.ttf
ofl/spacemono/SpaceMono-Italic.ttf
ofl/spacemono/SpaceMono-BoldItalic.ttf
ofl/spacemono/SpaceMono-Regular.ttf
ofl/andadasc/AndadaSC-Regular.ttf
ofl/andadasc/AndadaSC-BoldItalic.ttf
ofl/andadasc/AndadaSC-Bold.ttf
ofl/andadasc/AndadaSC-Italic.ttf
ofl/autourone/AutourOne-Regular.ttf
ofl/unifrakturcook/UnifrakturCook-Bold.ttf
ofl/inika/Inika-Regular.ttf
ofl/inika/Inika-Bold.ttf
ofl/chonburi/Chonburi-Regular.ttf
ofl/scopeone/ScopeOne-Regular.ttf
ofl/italianno/Italianno-Regular.ttf
ofl/jacquesfrancoisshadow/JacquesFrancoisShadow-Regular.ttf
ofl/oregano/Oregano-Italic.ttf
ofl/oregano/Oregano-Regular.ttf
ofl/pacifico/Pacifico-Regular.ttf
ofl/gemunulibre/GemunuLibre-ExtraLight.ttf
ofl/gemunulibre/GemunuLibre-Medium.ttf
ofl/gemunulibre/GemunuLibre-Bold.ttf
ofl/gemunulibre/GemunuLibre-ExtraBold.ttf
ofl/gemunulibre/GemunuLibre-Regular.ttf
ofl/gemunulibre/GemunuLibre-Light.ttf
ofl/gemunulibre/GemunuLibre-SemiBold.ttf
ofl/ekmukta/EkMukta-Bold.ttf
ofl/ekmukta/EkMukta-Light.ttf
ofl/ekmukta/EkMukta-Regular.ttf
ofl/ekmukta/EkMukta-SemiBold.ttf
ofl/ekmukta/EkMukta-ExtraBold.ttf
ofl/ekmukta/EkMukta-Medium.ttf
ofl/ekmukta/EkMukta-ExtraLight.ttf
ofl/lovedbytheking/LovedbytheKing.ttf
ofl/telex/Telex-Regular.ttf
ofl/newscycle/NewsCycle-Bold.ttf
ofl/newscycle/NewsCycle-Regular.ttf
ofl/salsa/Salsa-Regular.ttf
ofl/breeserif/BreeSerif-Regular.ttf
ofl/barrio/Barrio-Regular.ttf
ofl/pecita/Pecita.ttf
ofl/damion/Damion-Regular.ttf
ofl/dancingscript/DancingScript-Bold.ttf
ofl/dancingscript/DancingScript-Regular.ttf
ofl/bungeeshade/BungeeShade-Regular.ttf
ofl/slabo13px/Slabo13px-Regular.ttf
ofl/signikanegative/SignikaNegative-Light.ttf
ofl/signikanegative/SignikaNegative-SemiBold.ttf
ofl/signikanegative/SignikaNegative-Bold.ttf
ofl/signikanegative/SignikaNegative-Regular.ttf
ofl/ribeye/Ribeye-Regular.ttf
ofl/sacramento/Sacramento-Regular.ttf
ofl/ptsansnarrow/PT_Sans-Narrow-Web-Bold.ttf
ofl/ptsansnarrow/PT_Sans-Narrow-Web-Regular.ttf
ofl/shadowsintolighttwo/ShadowsIntoLightTwo-Regular.ttf
ofl/bevan/Bevan-Regular.ttf
ofl/akronim/Akronim-Regular.ttf
ofl/butterflykids/ButterflyKids-Regular.ttf
ofl/fruktur/Fruktur-Regular.ttf
ofl/kurale/Kurale-Regular.ttf
ofl/pavanam/Pavanam-Regular.ttf
ofl/asapvfbeta/AsapVFBeta-Italic.ttf
ofl/asapvfbeta/AsapVFBeta-Condensed.ttf
ofl/asapvfbeta/AsapVFBeta.ttf
ofl/amiko/Amiko-Bold.ttf
ofl/amiko/Amiko-SemiBold.ttf
ofl/amiko/Amiko-Regular.ttf
ofl/kopubbatang/KoPubBatang-Light.ttf
ofl/kopubbatang/KoPubBatang-Regular.ttf
ofl/kopubbatang/KoPubBatang-Bold.ttf
ofl/gafata/Gafata-Regular.ttf
ofl/cabinvfbeta/CabinVFBeta.ttf
ofl/cabinvfbeta/CabinVFBeta-Italic.ttf
ofl/imfellfrenchcanonsc/IMFeFCsc28P.ttf
ofl/jura/Jura-SemiBold.ttf
ofl/jura/Jura-Regular.ttf
ofl/jura/Jura-Light.ttf
ofl/jura/Jura-Bold.ttf
ofl/jura/Jura-Medium.ttf
ofl/marckscript/MarckScript-Regular.ttf
ofl/playfairdisplay/PlayfairDisplay-Italic.ttf
ofl/playfairdisplay/PlayfairDisplay-Bold.ttf
ofl/playfairdisplay/PlayfairDisplay-BlackItalic.ttf
ofl/playfairdisplay/PlayfairDisplay-BoldItalic.ttf
ofl/playfairdisplay/PlayfairDisplay-Regular.ttf
ofl/playfairdisplay/PlayfairDisplay-Black.ttf
ofl/piedra/Piedra-Regular.ttf
ofl/imfellenglish/IMFeENrm28P.ttf
ofl/imfellenglish/IMFeENit28P.ttf
ofl/metrophobic/Metrophobic-Regular.ttf
ofl/lalezar/Lalezar-Regular.ttf
ofl/zeyada/Zeyada.ttf
ofl/montserratsubrayada/MontserratSubrayada-Regular.ttf
ofl/montserratsubrayada/MontserratSubrayada-Bold.ttf
ofl/miltonian/Miltonian-Regular.ttf
ofl/eater/Eater-Regular.ttf
ofl/lancelot/Lancelot-Regular.ttf
ofl/annieuseyourtelescope/AnnieUseYourTelescope-Regular.ttf
ofl/playball/Playball-Regular.ttf
ofl/karlatamilinclined/KarlaTamilInclined-Bold.ttf
ofl/karlatamilinclined/KarlaTamilInclined-Regular.ttf
ofl/shanti/Shanti-Regular.ttf
ofl/prozalibre/ProzaLibre-Italic.ttf
ofl/prozalibre/ProzaLibre-SemiBold.ttf
ofl/prozalibre/ProzaLibre-Regular.ttf
ofl/prozalibre/ProzaLibre-Bold.ttf
ofl/prozalibre/ProzaLibre-MediumItalic.ttf
ofl/prozalibre/ProzaLibre-BoldItalic.ttf
ofl/prozalibre/ProzaLibre-ExtraBold.ttf
ofl/prozalibre/ProzaLibre-ExtraBoldItalic.ttf
ofl/prozalibre/ProzaLibre-SemiBoldItalic.ttf
ofl/prozalibre/ProzaLibre-Medium.ttf
ofl/habibi/Habibi-Regular.ttf
ofl/battambang/Battambang-Bold.ttf
ofl/battambang/Battambang-Regular.ttf
ofl/overpassmono/OverpassMono-Regular.ttf
ofl/overpassmono/OverpassMono-Light.ttf
ofl/overpassmono/OverpassMono-Bold.ttf
ofl/overpassmono/OverpassMono-SemiBold.ttf
ofl/chango/Chango-Regular.ttf
ofl/abeezee/ABeeZee-Italic.ttf
ofl/abeezee/ABeeZee-Regular.ttf
ofl/vidaloka/Vidaloka-Regular.ttf
ofl/mergeone/MergeOne-Regular.ttf
ofl/combo/Combo-Regular.ttf
ofl/pragatinarrow/PragatiNarrow-Bold.ttf
ofl/pragatinarrow/PragatiNarrow-Regular.ttf
ofl/encodesansexpanded/EncodeSansExpanded-Black.ttf
ofl/encodesansexpanded/EncodeSansExpanded-Regular.ttf
ofl/encodesansexpanded/EncodeSansExpanded-SemiBold.ttf
ofl/encodesansexpanded/EncodeSansExpanded-Thin.ttf
ofl/encodesansexpanded/EncodeSansExpanded-ExtraLight.ttf
ofl/encodesansexpanded/EncodeSansExpanded-Bold.ttf
ofl/encodesansexpanded/EncodeSansExpanded-Medium.ttf
ofl/encodesansexpanded/EncodeSansExpanded-Light.ttf
ofl/encodesansexpanded/EncodeSansExpanded-ExtraBold.ttf
ofl/librebarcode39extendedtext/LibreBarcode39ExtendedText-Regular.ttf
ofl/pathwaygothicone/PathwayGothicOne-Regular.ttf
ofl/labelleaurore/LaBelleAurore.ttf
ofl/average/Average-Regular.ttf
ofl/ptmono/PTM55FT.ttf
ofl/biryani/Biryani-ExtraBold.ttf
ofl/biryani/Biryani-Regular.ttf
ofl/biryani/Biryani-Light.ttf
ofl/biryani/Biryani-ExtraLight.ttf
ofl/biryani/Biryani-SemiBold.ttf
ofl/biryani/Biryani-Bold.ttf
ofl/biryani/Biryani-Black.ttf
ofl/heebo/Heebo-Light.ttf
ofl/heebo/Heebo-Regular.ttf
ofl/heebo/Heebo-Bold.ttf
ofl/heebo/Heebo-ExtraBold.ttf
ofl/heebo/Heebo-Black.ttf
ofl/heebo/Heebo-Thin.ttf
ofl/heebo/Heebo-Medium.ttf
ofl/plaster/Plaster-Regular.ttf
ofl/laila/Laila-Medium.ttf
ofl/laila/Laila-SemiBold.ttf
ofl/laila/Laila-Regular.ttf
ofl/laila/Laila-Bold.ttf
ofl/laila/Laila-Light.ttf
ofl/mousememoirs/MouseMemoirs-Regular.ttf
ofl/concertone/ConcertOne-Regular.ttf
ofl/condiment/Condiment-Regular.ttf
ofl/jimnightshade/JimNightshade-Regular.ttf
ofl/uncialantiqua/UncialAntiqua-Regular.ttf
ofl/rammettoone/RammettoOne-Regular.ttf
ofl/hermeneusone/HermeneusOne-Regular.ttf
ofl/fredokaone/FredokaOne-Regular.ttf
ofl/arsenal/Arsenal-BoldItalic.ttf
ofl/arsenal/Arsenal-Italic.ttf
ofl/arsenal/Arsenal-Regular.ttf
ofl/arsenal/Arsenal-Bold.ttf
ofl/dorsa/Dorsa-Regular.ttf
ofl/fjordone/FjordOne-Regular.ttf
ofl/copse/Copse-Regular.ttf
ofl/nothingyoucoulddo/NothingYouCouldDo.ttf
ofl/postnobillscolombo/PostNoBillsColombo-SemiBold.ttf
ofl/postnobillscolombo/PostNoBillsColombo-Medium.ttf
ofl/postnobillscolombo/PostNoBillsColombo-ExtraBold.ttf
ofl/postnobillscolombo/PostNoBillsColombo-Regular.ttf
ofl/postnobillscolombo/PostNoBillsColombo-Bold.ttf
ofl/postnobillscolombo/PostNoBillsColombo-Light.ttf
ofl/lobstertwo/LobsterTwo-Regular.ttf
ofl/lobstertwo/LobsterTwo-BoldItalic.ttf
ofl/lobstertwo/LobsterTwo-Italic.ttf
ofl/lobstertwo/LobsterTwo-Bold.ttf
ofl/bungeeoutline/BungeeOutline-Regular.ttf
ofl/poetsenone/PoetsenOne-Regular.ttf
ofl/ramabhadra/Ramabhadra-Regular.ttf
ofl/mirza/Mirza-Bold.ttf
ofl/mirza/Mirza-Medium.ttf
ofl/mirza/Mirza-Regular.ttf
ofl/mirza/Mirza-SemiBold.ttf
ofl/mada/Mada-Regular.ttf
ofl/mada/Mada-Light.ttf
ofl/mada/Mada-SemiBold.ttf
ofl/mada/Mada-Black.ttf
ofl/mada/Mada-Bold.ttf
ofl/mada/Mada-ExtraLight.ttf
ofl/mada/Mada-Medium.ttf
ofl/rhodiumlibre/RhodiumLibre-Regular.ttf
ofl/sedgwickavedisplay/SedgwickAveDisplay-Regular.ttf
ofl/eczar/Eczar-Medium.ttf
ofl/eczar/Eczar-SemiBold.ttf
ofl/eczar/Eczar-ExtraBold.ttf
ofl/eczar/Eczar-Regular.ttf
ofl/eczar/Eczar-Bold.ttf
ofl/sriracha/Sriracha-Regular.ttf
ofl/jejuhallasan/JejuHallasan-Regular.ttf
ofl/gloriahallelujah/GloriaHallelujah.ttf
ofl/dekko/Dekko-Regular.ttf
ofl/kellyslab/KellySlab-Regular.ttf
ofl/nixieone/NixieOne-Regular.ttf
ofl/ranchers/Ranchers-Regular.ttf
ofl/oleoscript/OleoScript-Bold.ttf
ofl/oleoscript/OleoScript-Regular.ttf
ofl/architectsdaughter/ArchitectsDaughter-Regular.ttf
ofl/limelight/Limelight-Regular.ttf
ofl/poiretone/PoiretOne-Regular.ttf
ofl/raviprakash/RaviPrakash-Regular.ttf
ofl/hannari/Hannari-Regular.ttf
ofl/sevillana/Sevillana-Regular.ttf
ofl/biorhyme/BioRhyme-Regular.ttf
ofl/biorhyme/BioRhyme-ExtraBold.ttf
ofl/biorhyme/BioRhyme-ExtraLight.ttf
ofl/biorhyme/BioRhyme-Bold.ttf
ofl/biorhyme/BioRhyme-Light.ttf
ofl/germaniaone/GermaniaOne-Regular.ttf
ofl/stoke/Stoke-Light.ttf
ofl/stoke/Stoke-Regular.ttf
ofl/fascinate/Fascinate-Regular.ttf
ofl/ptsanscaption/PT_Sans-Caption-Web-Bold.ttf
ofl/ptsanscaption/PT_Sans-Caption-Web-Regular.ttf
ofl/anton/Anton-Regular.ttf
ofl/sofia/Sofia-Regular.ttf
ofl/librebarcode39extended/LibreBarcode39Extended-Regular.ttf
ofl/engagement/Engagement-Regular.ttf
ofl/teko/Teko-Regular.ttf
ofl/teko/Teko-Medium.ttf
ofl/teko/Teko-Light.ttf
ofl/teko/Teko-SemiBold.ttf
ofl/teko/Teko-Bold.ttf
ofl/jacquesfrancois/JacquesFrancois-Regular.ttf
ofl/mavenpro/MavenPro-Black.ttf
ofl/mavenpro/MavenPro-Medium.ttf
ofl/mavenpro/MavenPro-Bold.ttf
ofl/mavenpro/MavenPro-Regular.ttf
ofl/taprom/Taprom.ttf
ofl/yesevaone/YesevaOne-Regular.ttf
ofl/asapcondensed/AsapCondensed-Italic.ttf
ofl/asapcondensed/AsapCondensed-SemiBoldItalic.ttf
ofl/asapcondensed/AsapCondensed-SemiBold.ttf
ofl/asapcondensed/AsapCondensed-MediumItalic.ttf
ofl/asapcondensed/AsapCondensed-Bold.ttf
ofl/asapcondensed/AsapCondensed-Medium.ttf
ofl/asapcondensed/AsapCondensed-BoldItalic.ttf
ofl/asapcondensed/AsapCondensed-Regular.ttf
ofl/fasthand/Fasthand-Regular.ttf
ofl/newrocker/NewRocker-Regular.ttf
ofl/barlowcondensed/BarlowCondensed-Bold.ttf
ofl/barlowcondensed/BarlowCondensed-ExtraBoldItalic.ttf
ofl/barlowcondensed/BarlowCondensed-ExtraBold.ttf
ofl/barlowcondensed/BarlowCondensed-MediumItalic.ttf
ofl/barlowcondensed/BarlowCondensed-Black.ttf
ofl/barlowcondensed/BarlowCondensed-Medium.ttf
ofl/barlowcondensed/BarlowCondensed-SemiBold.ttf
ofl/barlowcondensed/BarlowCondensed-Regular.ttf
ofl/barlowcondensed/BarlowCondensed-BlackItalic.ttf
ofl/barlowcondensed/BarlowCondensed-ThinItalic.ttf
ofl/barlowcondensed/BarlowCondensed-Italic.ttf
ofl/barlowcondensed/BarlowCondensed-Thin.ttf
ofl/barlowcondensed/BarlowCondensed-Light.ttf
ofl/barlowcondensed/BarlowCondensed-ExtraLightItalic.ttf
ofl/barlowcondensed/BarlowCondensed-ExtraLight.ttf
ofl/barlowcondensed/BarlowCondensed-BoldItalic.ttf
ofl/barlowcondensed/BarlowCondensed-SemiBoldItalic.ttf
ofl/barlowcondensed/BarlowCondensed-LightItalic.ttf
ofl/fascinateinline/FascinateInline-Regular.ttf
ofl/gravitasone/GravitasOne.ttf
ofl/bokor/Bokor-Regular.ttf
ofl/mplus1p/Mplus1p-Regular.ttf
ofl/mplus1p/Mplus1p-Bold.ttf
ofl/mplus1p/Mplus1p-Light.ttf
ofl/mplus1p/Mplus1p-Black.ttf
ofl/mplus1p/Mplus1p-ExtraBold.ttf
ofl/mplus1p/Mplus1p-Thin.ttf
ofl/mplus1p/Mplus1p-Medium.ttf
ofl/tauri/Tauri-Regular.ttf
ofl/digitalnumbers/DigitalNumbers-Regular.ttf
ofl/marcellus/Marcellus-Regular.ttf
ofl/viga/Viga-Regular.ttf
ofl/delius/Delius-Regular.ttf
ofl/cookie/Cookie-Regular.ttf
ofl/buenard/Buenard-Bold.ttf
ofl/buenard/Buenard-Regular.ttf
ofl/ntr/NTR-Regular.ttf
ofl/hanalei/Hanalei-Regular.ttf
ofl/jomolhari/Jomolhari-alpha3c-0605331.ttf
ofl/graduate/Graduate-Regular.ttf
ofl/swankyandmoomoo/SwankyandMooMoo.ttf
ofl/slabo27px/Slabo27px-Regular.ttf
ofl/numans/Numans-Regular.ttf
ofl/fasterone/FasterOne-Regular.ttf
ofl/rasa/Rasa-Bold.ttf
ofl/rasa/Rasa-Medium.ttf
ofl/rasa/Rasa-SemiBold.ttf
ofl/rasa/Rasa-Light.ttf
ofl/rasa/Rasa-Regular.ttf
ofl/chenla/Chenla.ttf
ofl/alegreyasc/AlegreyaSC-Italic.ttf
ofl/alegreyasc/AlegreyaSC-ExtraBold.ttf
ofl/alegreyasc/AlegreyaSC-ExtraBoldItalic.ttf
ofl/alegreyasc/AlegreyaSC-BlackItalic.ttf
ofl/alegreyasc/AlegreyaSC-Black.ttf
ofl/alegreyasc/AlegreyaSC-Regular.ttf
ofl/alegreyasc/AlegreyaSC-BoldItalic.ttf
ofl/alegreyasc/AlegreyaSC-MediumItalic.ttf
ofl/alegreyasc/AlegreyaSC-Bold.ttf
ofl/alegreyasc/AlegreyaSC-Medium.ttf
ofl/spectralsc/SpectralSC-LightItalic.ttf
ofl/spectralsc/SpectralSC-Italic.ttf
ofl/spectralsc/SpectralSC-ExtraBoldItalic.ttf
ofl/spectralsc/SpectralSC-ExtraLightItalic.ttf
ofl/spectralsc/SpectralSC-SemiBoldItalic.ttf
ofl/spectralsc/SpectralSC-ExtraBold.ttf
ofl/spectralsc/SpectralSC-BoldItalic.ttf
ofl/spectralsc/SpectralSC-Regular.ttf
ofl/spectralsc/SpectralSC-SemiBold.ttf
ofl/spectralsc/SpectralSC-ExtraLight.ttf
ofl/spectralsc/SpectralSC-MediumItalic.ttf
ofl/spectralsc/SpectralSC-Medium.ttf
ofl/spectralsc/SpectralSC-Bold.ttf
ofl/spectralsc/SpectralSC-Light.ttf
ofl/rubikone/RubikOne-Regular.ttf
ofl/crimsontext/CrimsonText-Italic.ttf
ofl/crimsontext/CrimsonText-SemiBold.ttf
ofl/crimsontext/CrimsonText-SemiBoldItalic.ttf
ofl/crimsontext/CrimsonText-Bold.ttf
ofl/crimsontext/CrimsonText-Regular.ttf
ofl/crimsontext/CrimsonText-BoldItalic.ttf
ofl/capriola/Capriola-Regular.ttf
ofl/tangerine/Tangerine-Bold.ttf
ofl/tangerine/Tangerine-Regular.ttf
ofl/sixcaps/SixCaps.ttf
ofl/encodesanssemiexpanded/EncodeSansSemiExpanded-SemiBold.ttf
ofl/encodesanssemiexpanded/EncodeSansSemiExpanded-Medium.ttf
ofl/encodesanssemiexpanded/EncodeSansSemiExpanded-Light.ttf
ofl/encodesanssemiexpanded/EncodeSansSemiExpanded-ExtraLight.ttf
ofl/encodesanssemiexpanded/EncodeSansSemiExpanded-Thin.ttf
ofl/encodesanssemiexpanded/EncodeSansSemiExpanded-ExtraBold.ttf
ofl/encodesanssemiexpanded/EncodeSansSemiExpanded-Black.ttf
ofl/encodesanssemiexpanded/EncodeSansSemiExpanded-Regular.ttf
ofl/encodesanssemiexpanded/EncodeSansSemiExpanded-Bold.ttf
ofl/gentiumbasic/GenBasB.ttf
ofl/gentiumbasic/GenBasBI.ttf
ofl/gentiumbasic/GenBasR.ttf
ofl/gentiumbasic/GenBasI.ttf
ofl/corben/Corben-Regular.ttf
ofl/corben/Corben-Bold.ttf
ofl/hindkochi/HindKochi-Light.ttf
ofl/hindkochi/HindKochi-Bold.ttf
ofl/hindkochi/HindKochi-Medium.ttf
ofl/hindkochi/HindKochi-SemiBold.ttf
ofl/hindkochi/HindKochi-Regular.ttf
ofl/frijole/Frijole-Regular.ttf
ofl/arapey/Arapey-Italic.ttf
ofl/arapey/Arapey-Regular.ttf
ofl/andika/Andika-Regular.ttf
ofl/russoone/RussoOne-Regular.ttf
ofl/bayon/Bayon-Regular.ttf
ofl/muktamahee/MuktaMahee-SemiBold.ttf
ofl/muktamahee/MuktaMahee-Medium.ttf
ofl/muktamahee/MuktaMahee-Light.ttf
ofl/muktamahee/MuktaMahee-ExtraBold.ttf
ofl/muktamahee/MuktaMahee-ExtraLight.ttf
ofl/muktamahee/MuktaMahee-Regular.ttf
ofl/muktamahee/MuktaMahee-Bold.ttf
ofl/magra/Magra-Regular.ttf
ofl/magra/Magra-Bold.ttf
ofl/barlow/Barlow-LightItalic.ttf
ofl/barlow/Barlow-Medium.ttf
ofl/barlow/Barlow-Thin.ttf
ofl/barlow/Barlow-Light.ttf
ofl/barlow/Barlow-SemiBold.ttf
ofl/barlow/Barlow-MediumItalic.ttf
ofl/barlow/Barlow-BlackItalic.ttf
ofl/barlow/Barlow-ExtraBold.ttf
ofl/barlow/Barlow-SemiBoldItalic.ttf
ofl/barlow/Barlow-Bold.ttf
ofl/barlow/Barlow-ExtraBoldItalic.ttf
ofl/barlow/Barlow-Regular.ttf
ofl/barlow/Barlow-ThinItalic.ttf
ofl/barlow/Barlow-Black.ttf
ofl/barlow/Barlow-BoldItalic.ttf
ofl/barlow/Barlow-ExtraLight.ttf
ofl/barlow/Barlow-ExtraLightItalic.ttf
ofl/barlow/Barlow-Italic.ttf
ofl/antonio/Antonio-Light.ttf
ofl/antonio/Antonio-Bold.ttf
ofl/antonio/Antonio-Regular.ttf
ofl/hanuman/Hanuman-Regular.ttf
ofl/hanuman/Hanuman-Bold.ttf
ofl/metal/Metal.ttf
ofl/pontanosans/PontanoSans-Regular.ttf
ofl/lohitdevanagari/Lohit-Devanagari.ttf
ofl/lusitana/Lusitana-Regular.ttf
ofl/lusitana/Lusitana-Bold.ttf
ofl/nanummyeongjo/NanumMyeongjo-Bold.ttf
ofl/nanummyeongjo/NanumMyeongjo-Regular.ttf
ofl/nanummyeongjo/NanumMyeongjo-ExtraBold.ttf
ofl/voltaire/Voltaire-Regular.ttf
ofl/arbutusslab/ArbutusSlab-Regular.ttf
ofl/gurajada/Gurajada-Regular.ttf
ofl/allan/Allan-Bold.ttf
ofl/allan/Allan-Regular.ttf
ofl/suravaram/Suravaram-Regular.ttf
ofl/librecaslontext/LibreCaslonText-Bold.ttf
ofl/librecaslontext/LibreCaslonText-Italic.ttf
ofl/librecaslontext/LibreCaslonText-Regular.ttf
ofl/dellarespira/DellaRespira-Regular.ttf
ofl/meerainimai/MeeraInimai-Regular.ttf
ofl/lohittamil/Lohit-Tamil.ttf
ofl/alfaslabone/AlfaSlabOne-Regular.ttf
ofl/balthazar/Balthazar-Regular.ttf
ofl/sortsmillgoudy/SortsMillGoudy-Italic.ttf
ofl/sortsmillgoudy/SortsMillGoudy-Regular.ttf
ofl/creepster/Creepster-Regular.ttf
ofl/manuale/Manuale-Italic.ttf
ofl/manuale/Manuale-SemiBoldItalic.ttf
ofl/manuale/Manuale-SemiBold.ttf
ofl/manuale/Manuale-MediumItalic.ttf
ofl/manuale/Manuale-Regular.ttf
ofl/manuale/Manuale-Bold.ttf
ofl/manuale/Manuale-BoldItalic.ttf
ofl/manuale/Manuale-Medium.ttf
ofl/daysone/DaysOne-Regular.ttf
ofl/clara/Clara-Regular.ttf
ofl/neuton/Neuton-Bold.ttf
ofl/neuton/Neuton-ExtraBold.ttf
ofl/neuton/Neuton-ExtraLight.ttf
ofl/neuton/Neuton-Italic.ttf
ofl/neuton/Neuton-Regular.ttf
ofl/neuton/Neuton-Light.ttf
ofl/imprima/Imprima-Regular.ttf
ofl/patrickhand/PatrickHand-Regular.ttf
ofl/shortstack/ShortStack-Regular.ttf
ofl/bonbon/Bonbon-Regular.ttf
ofl/librebaskerville/LibreBaskerville-Italic.ttf
ofl/librebaskerville/LibreBaskerville-Bold.ttf
ofl/librebaskerville/LibreBaskerville-Regular.ttf
ofl/markoone/MarkoOne-Regular.ttf
ofl/preahvihear/Preahvihear.ttf
ofl/archivoblack/ArchivoBlack-Regular.ttf
ofl/redactedscript/RedactedScript-Regular.ttf
ofl/redactedscript/RedactedScript-Bold.ttf
ofl/puritan/Puritan-Bold.ttf
ofl/puritan/Puritan-Regular.ttf
ofl/puritan/Puritan-Italic.ttf
ofl/puritan/Puritan-BoldItalic.ttf
ofl/denkone/DenkOne-Regular.ttf
ofl/cevicheone/CevicheOne-Regular.ttf
ofl/sairacondensed/SairaCondensed-ExtraBold.ttf
ofl/sairacondensed/SairaCondensed-Bold.ttf
ofl/sairacondensed/SairaCondensed-ExtraLight.ttf
ofl/sairacondensed/SairaCondensed-Light.ttf
ofl/sairacondensed/SairaCondensed-Medium.ttf
ofl/sairacondensed/SairaCondensed-Thin.ttf
ofl/sairacondensed/SairaCondensed-Black.ttf
ofl/sairacondensed/SairaCondensed-Regular.ttf
ofl/sairacondensed/SairaCondensed-SemiBold.ttf
ofl/holtwoodonesc/HoltwoodOneSC.ttf
ofl/arvo/Arvo-Bold.ttf
ofl/arvo/Arvo-Regular.ttf
ofl/arvo/Arvo-Italic.ttf
ofl/arvo/Arvo-BoldItalic.ttf
ofl/novaflat/NovaFlat.ttf
ofl/karla/Karla-Regular.ttf
ofl/karla/Karla-Bold.ttf
ofl/karla/Karla-BoldItalic.ttf
ofl/karla/Karla-Italic.ttf
ofl/norican/Norican-Regular.ttf
ofl/michroma/Michroma.ttf
ofl/sniglet/Sniglet-ExtraBold.ttf
ofl/sniglet/Sniglet-Regular.ttf
ofl/gochihand/GochiHand-Regular.ttf
ofl/tulpenone/TulpenOne-Regular.ttf
ofl/quintessential/Quintessential-Regular.ttf
ofl/novascript/NovaScript-Regular.ttf
ofl/podkovavfbeta/PodkovaVFBeta.ttf
ofl/oldenburg/Oldenburg-Regular.ttf
ofl/khand/Khand-Light.ttf
ofl/khand/Khand-Medium.ttf
ofl/khand/Khand-Regular.ttf
ofl/khand/Khand-Bold.ttf
ofl/khand/Khand-SemiBold.ttf
ofl/nosifercaps/NosiferCaps-Regular.ttf
ofl/merienda/Merienda-Regular.ttf
ofl/merienda/Merienda-Bold.ttf
ofl/vollkorn/Vollkorn-SemiBold.ttf
ofl/vollkorn/Vollkorn-Black.ttf
ofl/vollkorn/Vollkorn-BlackItalic.ttf
ofl/vollkorn/Vollkorn-Regular.ttf
ofl/vollkorn/Vollkorn-SemiBoldItalic.ttf
ofl/vollkorn/Vollkorn-Italic.ttf
ofl/vollkorn/Vollkorn-BoldItalic.ttf
ofl/vollkorn/Vollkorn-Bold.ttf
ofl/quattrocentosans/QuattrocentoSans-Italic.ttf
ofl/quattrocentosans/QuattrocentoSans-Regular.ttf
ofl/quattrocentosans/QuattrocentoSans-BoldItalic.ttf
ofl/quattrocentosans/QuattrocentoSans-Bold.ttf
ofl/balootammudu/BalooTammudu-Regular.ttf
ofl/leckerlione/LeckerliOne-Regular.ttf
ofl/sirinstencil/SirinStencil-Regular.ttf
ofl/margarine/Margarine-Regular.ttf
ofl/clickerscript/ClickerScript-Regular.ttf
ofl/racingsansone/RacingSansOne-Regular.ttf
ofl/nanumpenscript/NanumPenScript-Regular.ttf
ofl/stardosstencil/StardosStencil-Bold.ttf
ofl/stardosstencil/StardosStencil-Regular.ttf
ofl/handlee/Handlee-Regular.ttf
ofl/misssaintdelafield/MrsSaintDelafield-Regular.ttf
ofl/cutivemono/CutiveMono-Regular.ttf
ofl/alegreya/Alegreya-Italic.ttf
ofl/alegreya/Alegreya-BlackItalic.ttf
ofl/alegreya/Alegreya-BoldItalic.ttf
ofl/alegreya/Alegreya-MediumItalic.ttf
ofl/alegreya/Alegreya-ExtraBold.ttf
ofl/alegreya/Alegreya-ExtraBoldItalic.ttf
ofl/alegreya/Alegreya-Bold.ttf
ofl/alegreya/Alegreya-Black.ttf
ofl/alegreya/Alegreya-Medium.ttf
ofl/alegreya/Alegreya-Regular.ttf
ofl/hanaleifill/HanaleiFill-Regular.ttf
ofl/geostar/Geostar-Regular.ttf
ofl/squadaone/SquadaOne-Regular.ttf
ofl/ranga/Ranga-Bold.ttf
ofl/ranga/Ranga-Regular.ttf
ofl/oleoscriptswashcaps/OleoScriptSwashCaps-Bold.ttf
ofl/oleoscriptswashcaps/OleoScriptSwashCaps-Regular.ttf
ofl/laomuangdon/LaoMuangDon-Regular.ttf
ofl/librefranklin/LibreFranklin-Regular.ttf
ofl/librefranklin/LibreFranklin-BlackItalic.ttf
ofl/librefranklin/LibreFranklin-Black.ttf
ofl/librefranklin/LibreFranklin-Thin.ttf
ofl/librefranklin/LibreFranklin-ExtraLightItalic.ttf
ofl/librefranklin/LibreFranklin-ThinItalic.ttf
ofl/librefranklin/LibreFranklin-BoldItalic.ttf
ofl/librefranklin/LibreFranklin-SemiBoldItalic.ttf
ofl/librefranklin/LibreFranklin-LightItalic.ttf
ofl/librefranklin/LibreFranklin-Italic.ttf
ofl/librefranklin/LibreFranklin-ExtraBold.ttf
ofl/librefranklin/LibreFranklin-ExtraLight.ttf
ofl/librefranklin/LibreFranklin-SemiBold.ttf
ofl/librefranklin/LibreFranklin-Bold.ttf
ofl/librefranklin/LibreFranklin-Medium.ttf
ofl/librefranklin/LibreFranklin-Light.ttf
ofl/librefranklin/LibreFranklin-ExtraBoldItalic.ttf
ofl/librefranklin/LibreFranklin-MediumItalic.ttf
ofl/archivonarrow/ArchivoNarrow-SemiBoldItalic.ttf
ofl/archivonarrow/ArchivoNarrow-MediumItalic.ttf
ofl/archivonarrow/ArchivoNarrow-Regular.ttf
ofl/archivonarrow/ArchivoNarrow-Medium.ttf
ofl/archivonarrow/ArchivoNarrow-BoldItalic.ttf
ofl/archivonarrow/ArchivoNarrow-Italic.ttf
ofl/archivonarrow/ArchivoNarrow-Bold.ttf
ofl/archivonarrow/ArchivoNarrow-SemiBold.ttf
ofl/varela/Varela-Regular.ttf
ofl/hanna/BM-HANNA.ttf
ofl/jejumyeongjo/JejuMyeongjo-Regular.ttf
ofl/scheherazade/Scheherazade-Regular.ttf
ofl/scheherazade/Scheherazade-Bold.ttf
ofl/dhyana/Dhyana-Bold.ttf
ofl/dhyana/Dhyana-Regular.ttf
ofl/aladin/Aladin-Regular.ttf
ofl/novaslim/NovaSlim.ttf
ofl/sigmarone/SigmarOne-Regular.ttf
ofl/cabincondensed/CabinCondensed-Bold.ttf
ofl/cabincondensed/CabinCondensed-SemiBold.ttf
ofl/cabincondensed/CabinCondensed-Regular.ttf
ofl/cabincondensed/CabinCondensed-Medium.ttf
ofl/chelaone/ChelaOne-Regular.ttf
ofl/ptsans/PT_Sans-Web-Italic.ttf
ofl/ptsans/PT_Sans-Web-Regular.ttf
ofl/ptsans/PT_Sans-Web-BoldItalic.ttf
ofl/ptsans/PT_Sans-Web-Bold.ttf
ofl/odormeanchey/OdorMeanChey.ttf
ofl/bubblegumsans/BubblegumSans-Regular.ttf
ofl/nikukyu/Nikukyu-Regular.ttf
ofl/orbitron/Orbitron-Medium.ttf
ofl/orbitron/Orbitron-Bold.ttf
ofl/orbitron/Orbitron-Black.ttf
ofl/orbitron/Orbitron-Regular.ttf
ofl/muli/Muli-BoldItalic.ttf
ofl/muli/Muli-LightItalic.ttf
ofl/muli/Muli-Regular.ttf
ofl/muli/Muli-ExtraLight.ttf
ofl/muli/Muli-ExtraBold.ttf
ofl/muli/Muli-Bold.ttf
ofl/muli/Muli-Italic.ttf
ofl/muli/Muli-Light.ttf
ofl/muli/Muli-SemiBold.ttf
ofl/muli/Muli-BlackItalic.ttf
ofl/muli/Muli-Black.ttf
ofl/muli/Muli-ExtraLightItalic.ttf
ofl/muli/Muli-SemiBoldItalic.ttf
ofl/muli/Muli-ExtraBoldItalic.ttf
ofl/juliussansone/JuliusSansOne-Regular.ttf
ofl/trirong/Trirong-Bold.ttf
ofl/trirong/Trirong-Medium.ttf
ofl/trirong/Trirong-ExtraLightItalic.ttf
ofl/trirong/Trirong-MediumItalic.ttf
ofl/trirong/Trirong-Light.ttf
ofl/trirong/Trirong-SemiBold.ttf
ofl/trirong/Trirong-ExtraBoldItalic.ttf
ofl/trirong/Trirong-SemiBoldItalic.ttf
ofl/trirong/Trirong-Black.ttf
ofl/trirong/Trirong-BoldItalic.ttf
ofl/trirong/Trirong-Thin.ttf
ofl/trirong/Trirong-ExtraBold.ttf
ofl/trirong/Trirong-ExtraLight.ttf
ofl/trirong/Trirong-BlackItalic.ttf
ofl/trirong/Trirong-Italic.ttf
ofl/trirong/Trirong-LightItalic.ttf
ofl/trirong/Trirong-ThinItalic.ttf
ofl/trirong/Trirong-Regular.ttf
ofl/dawningofanewday/DawningofaNewDay.ttf
ofl/almendrasc/AlmendraSC-Regular.ttf
ofl/felipa/Felipa-Regular.ttf
ofl/lekton/Lekton-Regular.ttf
ofl/lekton/Lekton-Italic.ttf
ofl/lekton/Lekton-Bold.ttf
ofl/textmeone/TextMeOne-Regular.ttf
ofl/archivo/Archivo-Italic.ttf
ofl/archivo/Archivo-MediumItalic.ttf
ofl/archivo/Archivo-Regular.ttf
ofl/archivo/Archivo-Bold.ttf
ofl/archivo/Archivo-SemiBoldItalic.ttf
ofl/archivo/Archivo-BoldItalic.ttf
ofl/archivo/Archivo-Medium.ttf
ofl/archivo/Archivo-SemiBold.ttf
ofl/unna/Unna-BoldItalic.ttf
ofl/unna/Unna-Bold.ttf
ofl/unna/Unna-Regular.ttf
ofl/unna/Unna-Italic.ttf
ofl/marcellussc/MarcellusSC-Regular.ttf
ofl/ropasans/RopaSans-Italic.ttf
ofl/ropasans/RopaSans-Regular.ttf
ofl/skranji/Skranji-Regular.ttf
ofl/skranji/Skranji-Bold.ttf
ofl/athiti/Athiti-Medium.ttf
ofl/athiti/Athiti-ExtraLight.ttf
ofl/athiti/Athiti-Bold.ttf
ofl/athiti/Athiti-Light.ttf
ofl/athiti/Athiti-Regular.ttf
ofl/athiti/Athiti-SemiBold.ttf
ofl/thabit/Thabit-BoldOblique.ttf
ofl/thabit/Thabit-Bold.ttf
ofl/thabit/Thabit.ttf
ofl/thabit/Thabit-Oblique.ttf
ofl/homenaje/Homenaje-Regular.ttf
ofl/leaguescript/LeagueScript.ttf
ofl/atma/Atma-Regular.ttf
ofl/atma/Atma-Bold.ttf
ofl/atma/Atma-Light.ttf
ofl/atma/Atma-Medium.ttf
ofl/atma/Atma-SemiBold.ttf
ofl/nunito/Nunito-BlackItalic.ttf
ofl/nunito/Nunito-Black.ttf
ofl/nunito/Nunito-Bold.ttf
ofl/nunito/Nunito-ExtraLightItalic.ttf
ofl/nunito/Nunito-ExtraBoldItalic.ttf
ofl/nunito/Nunito-Italic.ttf
ofl/nunito/Nunito-LightItalic.ttf
ofl/nunito/Nunito-ExtraLight.ttf
ofl/nunito/Nunito-BoldItalic.ttf
ofl/nunito/Nunito-SemiBold.ttf
ofl/nunito/Nunito-SemiBoldItalic.ttf
ofl/nunito/Nunito-Regular.ttf
ofl/nunito/Nunito-Light.ttf
ofl/nunito/Nunito-ExtraBold.ttf
ofl/donegalone/DonegalOne-Regular.ttf
ofl/kreon/Kreon-Light.ttf
ofl/kreon/Kreon-Bold.ttf
ofl/kreon/Kreon-Regular.ttf
ofl/sancreek/Sancreek-Regular.ttf
ofl/kalam/Kalam-Light.ttf
ofl/kalam/Kalam-Bold.ttf
ofl/kalam/Kalam-Regular.ttf
ofl/meiescript/MeieScript-Regular.ttf
ofl/glegoo/Glegoo-Regular.ttf
ofl/glegoo/Glegoo-Bold.ttf
ofl/hindvadodara/HindVadodara-Bold.ttf
ofl/hindvadodara/HindVadodara-Light.ttf
ofl/hindvadodara/HindVadodara-SemiBold.ttf
ofl/hindvadodara/HindVadodara-Regular.ttf
ofl/hindvadodara/HindVadodara-Medium.ttf
ofl/bubblerone/BubblerOne-Regular.ttf
ofl/mrbedfort/MrBedfort-Regular.ttf
ofl/meddon/Meddon.ttf
ofl/dynalight/Dynalight-Regular.ttf
ofl/pattaya/Pattaya-Regular.ttf
ofl/siemreap/Siemreap.ttf
ofl/seoulnamsan/SeoulNamsan-Bold.ttf
ofl/seoulnamsan/SeoulNamsan-Medium.ttf
ofl/seoulnamsan/SeoulNamsan-Light.ttf
ofl/seoulnamsan/SeoulNamsan-ExtraBold.ttf
ofl/notoserif/NotoSerif-Bold.ttf
ofl/notoserif/NotoSerif-Regular.ttf
ofl/notoserif/NotoSerif-Italic.ttf
ofl/notoserif/NotoSerif-BoldItalic.ttf
ofl/galada/Galada-Regular.ttf
ofl/ovo/Ovo-Regular.ttf
ofl/electrolize/Electrolize-Regular.ttf
ofl/raleway/Raleway-Regular.ttf
ofl/raleway/Raleway-SemiBold.ttf
ofl/raleway/Raleway-ExtraBoldItalic.ttf
ofl/raleway/Raleway-MediumItalic.ttf
ofl/raleway/Raleway-Medium.ttf
ofl/raleway/Raleway-Thin.ttf
ofl/raleway/Raleway-Black.ttf
ofl/raleway/Raleway-ExtraBold.ttf
ofl/raleway/Raleway-Italic.ttf
ofl/raleway/Raleway-Light.ttf
ofl/raleway/Raleway-BlackItalic.ttf
ofl/raleway/Raleway-SemiBoldItalic.ttf
ofl/raleway/Raleway-ThinItalic.ttf
ofl/raleway/Raleway-Bold.ttf
ofl/raleway/Raleway-ExtraLightItalic.ttf
ofl/raleway/Raleway-BoldItalic.ttf
ofl/raleway/Raleway-LightItalic.ttf
ofl/raleway/Raleway-ExtraLight.ttf
ofl/jollylodger/JollyLodger-Regular.ttf
ofl/cardo/Cardo-Bold.ttf
ofl/cardo/Cardo-Italic.ttf
ofl/cardo/Cardo-Regular.ttf
ofl/marvel/Marvel-Regular.ttf
ofl/marvel/Marvel-Italic.ttf
ofl/marvel/Marvel-BoldItalic.ttf
ofl/marvel/Marvel-Bold.ttf
ofl/mclaren/McLaren-Regular.ttf
ofl/alikeangular/AlikeAngular-Regular.ttf
ofl/amaticsc/AmaticSC-Regular.ttf
ofl/amaticsc/AmaticSC-Bold.ttf
ofl/glassantiqua/GlassAntiqua-Regular.ttf
ofl/abyssinicasil/AbyssinicaSIL-Regular.ttf
ofl/cagliostro/Cagliostro-Regular.ttf
ofl/caudex/Caudex-BoldItalic.ttf
ofl/caudex/Caudex-Italic.ttf
ofl/caudex/Caudex-Regular.ttf
ofl/caudex/Caudex-Bold.ttf
ofl/dinah/Dinah-Bold.ttf
ofl/dinah/Dinah-Regular.ttf
ofl/muktavaani/MuktaVaani-Light.ttf
ofl/muktavaani/MuktaVaani-SemiBold.ttf
ofl/muktavaani/MuktaVaani-Bold.ttf
ofl/muktavaani/MuktaVaani-Medium.ttf
ofl/muktavaani/MuktaVaani-ExtraBold.ttf
ofl/muktavaani/MuktaVaani-Regular.ttf
ofl/muktavaani/MuktaVaani-ExtraLight.ttf
ofl/shojumaru/Shojumaru-Regular.ttf
ofl/frederickathegreat/FrederickatheGreat-Regular.ttf
ofl/rosarivo/Rosarivo-Italic.ttf
ofl/rosarivo/Rosarivo-Regular.ttf
ofl/imfelldoublepica/IMFeDPrm28P.ttf
ofl/imfelldoublepica/IMFeDPit28P.ttf
ofl/monoton/Monoton-Regular.ttf
ofl/doppioone/DoppioOne-Regular.ttf
ofl/domine/Domine-Regular.ttf
ofl/domine/Domine-Bold.ttf
ofl/portersansblock/PorterSansBlock-Regular.ttf
ofl/kdamthmor/KdamThmor-Regular.ttf
ofl/montserrat/Montserrat-LightItalic.ttf
ofl/montserrat/Montserrat-ExtraBold.ttf
ofl/montserrat/Montserrat-Thin.ttf
ofl/montserrat/Montserrat-SemiBold.ttf
ofl/montserrat/Montserrat-Regular.ttf
ofl/montserrat/Montserrat-Medium.ttf
ofl/montserrat/Montserrat-MediumItalic.ttf
ofl/montserrat/Montserrat-SemiBoldItalic.ttf
ofl/montserrat/Montserrat-Light.ttf
ofl/montserrat/Montserrat-ExtraBoldItalic.ttf
ofl/montserrat/Montserrat-BlackItalic.ttf
ofl/montserrat/Montserrat-Bold.ttf
ofl/montserrat/Montserrat-ExtraLightItalic.ttf
ofl/montserrat/Montserrat-ThinItalic.ttf
ofl/montserrat/Montserrat-Italic.ttf
ofl/montserrat/Montserrat-Black.ttf
ofl/montserrat/Montserrat-ExtraLight.ttf
ofl/montserrat/Montserrat-BoldItalic.ttf
ofl/goblinone/GoblinOne.ttf
ofl/vibur/Vibur-Regular.ttf
ofl/baloobhaina/BalooBhaina-Regular.ttf
ofl/overtherainbow/OvertheRainbow.ttf
ofl/balootamma/BalooTamma-Regular.ttf
ofl/ruda/Ruda-Bold.ttf
ofl/ruda/Ruda-Black.ttf
ofl/ruda/Ruda-Regular.ttf
ofl/cwtexkai/cwTeXKai-zhonly.ttf
ofl/butcherman/Butcherman-Regular.ttf
ofl/palanquindark/PalanquinDark-Medium.ttf
ofl/palanquindark/PalanquinDark-Regular.ttf
ofl/palanquindark/PalanquinDark-SemiBold.ttf
ofl/palanquindark/PalanquinDark-Bold.ttf
ofl/lilitaone/LilitaOne-Regular.ttf
ofl/ruslandisplay/RuslanDisplay.ttf
ofl/ramaraja/Ramaraja-Regular.ttf
ofl/bhavuka/Bhavuka-Regular.ttf
ofl/amiri/Amiri-BoldItalic.ttf
ofl/amiri/Amiri-Regular.ttf
ofl/amiri/Amiri-Bold.ttf
ofl/amiri/Amiri-Italic.ttf
ofl/sedgwickave/SedgwickAve-Regular.ttf
ofl/acme/Acme-Regular.ttf
ofl/oflsortsmillgoudytt/OFLGoudyStMTT.ttf
ofl/oflsortsmillgoudytt/OFLGoudyStMTT-Italic.ttf
ofl/playfairdisplaysc/PlayfairDisplaySC-Italic.ttf
ofl/playfairdisplaysc/PlayfairDisplaySC-Regular.ttf
ofl/playfairdisplaysc/PlayfairDisplaySC-Bold.ttf
ofl/playfairdisplaysc/PlayfairDisplaySC-Black.ttf
ofl/playfairdisplaysc/PlayfairDisplaySC-BlackItalic.ttf
ofl/playfairdisplaysc/PlayfairDisplaySC-BoldItalic.ttf
ofl/modernantiqua/ModernAntiqua-Regular.ttf
ofl/durusans/DuruSans-Regular.ttf
ofl/sourcesanspro/SourceSansPro-SemiBoldItalic.ttf
ofl/sourcesanspro/SourceSansPro-Bold.ttf
ofl/sourcesanspro/SourceSansPro-Italic.ttf
ofl/sourcesanspro/SourceSansPro-SemiBold.ttf
ofl/sourcesanspro/SourceSansPro-Light.ttf
ofl/sourcesanspro/SourceSansPro-Black.ttf
ofl/sourcesanspro/SourceSansPro-BlackItalic.ttf
ofl/sourcesanspro/SourceSansPro-LightItalic.ttf
ofl/sourcesanspro/SourceSansPro-ExtraLight.ttf
ofl/sourcesanspro/SourceSansPro-Regular.ttf
ofl/sourcesanspro/SourceSansPro-ExtraLightItalic.ttf
ofl/sourcesanspro/SourceSansPro-BoldItalic.ttf
ofl/patrickhandsc/PatrickHandSC-Regular.ttf
ofl/sonsieone/SonsieOne-Regular.ttf
ofl/harmattan/Harmattan-Regular.ttf
ofl/nanumbrushscript/NanumBrushScript-Regular.ttf
ofl/bahiana/Bahiana-Regular.ttf
ofl/emilyscandy/EmilysCandy-Regular.ttf
ofl/phetsarath/Phetsarath-Regular.ttf
ofl/phetsarath/Phetsarath-Bold.ttf
ofl/lakkireddy/LakkiReddy-Regular.ttf
ofl/emblemaone/EmblemaOne-Regular.ttf
ofl/cantataone/CantataOne-Regular.ttf
ofl/modak/Modak-Regular.ttf
ofl/tenaliramakrishna/TenaliRamakrishna-Regular.ttf
ofl/coiny/Coiny-Regular.ttf
ofl/lobster/Lobster-Regular.ttf
ofl/passeroone/PasseroOne-Regular.ttf
ofl/adamina/Adamina-Regular.ttf
ofl/tradewinds/TradeWinds-Regular.ttf
ofl/siamreap/Siemreap.ttf
ofl/mina/Mina-Regular.ttf
ofl/mina/Mina-Bold.ttf
ofl/seoulhangangcondensed/SeoulHangangCondensed-ExtraBold.ttf
ofl/seoulhangangcondensed/SeoulHangangCondensed-Bold.ttf
ofl/seoulhangangcondensed/SeoulHangangCondensed-Medium.ttf
ofl/seoulhangangcondensed/SeoulHangangCondensed-Light.ttf
ofl/seoulhangangcondensed/SeoulHangangCondensed-BoldL.ttf
ofl/unlock/Unlock-Regular.ttf
ofl/inder/Inder-Regular.ttf
ofl/yatraone/YatraOne-Regular.ttf
ofl/mrdehaviland/MrDeHaviland-Regular.ttf
ofl/hindguntur/HindGuntur-Light.ttf
ofl/hindguntur/HindGuntur-SemiBold.ttf
ofl/hindguntur/HindGuntur-Medium.ttf
ofl/hindguntur/HindGuntur-Bold.ttf
ofl/hindguntur/HindGuntur-Regular.ttf
ofl/giveyouglory/GiveYouGlory.ttf
ofl/gildadisplay/GildaDisplay-Regular.ttf
ofl/vastshadow/VastShadow-Regular.ttf
ofl/firamono/FiraMono-Regular.ttf
ofl/firamono/FiraMono-Medium.ttf
ofl/firamono/FiraMono-Bold.ttf
ofl/molengo/Molengo-Regular.ttf
ofl/righteous/Righteous-Regular.ttf
ofl/content/Content-Regular.ttf
ofl/content/Content-Bold.ttf
ofl/ruluko/Ruluko-Regular.ttf
ofl/caesardressing/CaesarDressing-Regular.ttf
ofl/ledger/Ledger-Regular.ttf
ofl/rambla/Rambla-Regular.ttf
ofl/rambla/Rambla-Italic.ttf
ofl/rambla/Rambla-Bold.ttf
ofl/rambla/Rambla-BoldItalic.ttf
ofl/gidugu/Gidugu-Regular.ttf
ofl/josefinsansstdlight/JosefinSansStd-Light.ttf
ofl/kavoon/Kavoon-Regular.ttf
ofl/vampiroone/VampiroOne-Regular.ttf
ofl/devonshire/Devonshire-Regular.ttf
ofl/rumraisin/RumRaisin-Regular.ttf
ofl/notosans/NotoSans-Regular.ttf
ofl/notosans/NotoSans-BoldItalic.ttf
ofl/notosans/NotoSans-Italic.ttf
ofl/notosans/NotoSans-Bold.ttf
ofl/ericaone/EricaOne-Regular.ttf
ofl/londrinashadow/LondrinaShadow-Regular.ttf
ofl/carroisgothic/CarroisGothic-Regular.ttf
ofl/andada/Andada-Regular.ttf
ofl/prociono/Prociono-Regular.ttf
ofl/smythe/Smythe-Regular.ttf
ofl/mallanna/Mallanna-Regular.ttf
ofl/bangers/Bangers-Regular.ttf
ofl/imfelldwpica/IMFePIrm28P.ttf
ofl/imfelldwpica/IMFePIit28P.ttf
ofl/milonga/Milonga-Regular.ttf
ofl/carterone/CarterOne.ttf
ofl/purplepurse/PurplePurse-Regular.ttf
ofl/enriqueta/Enriqueta-Regular.ttf
ofl/enriqueta/Enriqueta-Bold.ttf
ofl/cormorantsc/CormorantSC-Regular.ttf
ofl/cormorantsc/CormorantSC-SemiBold.ttf
ofl/cormorantsc/CormorantSC-Light.ttf
ofl/cormorantsc/CormorantSC-Medium.ttf
ofl/cormorantsc/CormorantSC-Bold.ttf
ofl/abrilfatface/AbrilFatface-Regular.ttf
ofl/titanone/TitanOne-Regular.ttf
ofl/rufina/Rufina-Bold.ttf
ofl/rufina/Rufina-Regular.ttf
ofl/asset/Asset-Regular.ttf
ofl/cambo/Cambo-Regular.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-BoldItalic.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-SemiBold.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-Medium.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-Bold.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-ExtraLight.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-Black.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-Regular.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-MediumItalic.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-ExtraBoldItalic.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-ThinItalic.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-SemiBoldItalic.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-BlackItalic.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-ExtraBold.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-Thin.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-Light.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-Italic.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-LightItalic.ttf
ofl/barlowsemicondensed/BarlowSemiCondensed-ExtraLightItalic.ttf
ofl/suwannaphum/Suwannaphum-Regular.ttf
ofl/karma/Karma-Bold.ttf
ofl/karma/Karma-Light.ttf
ofl/karma/Karma-Regular.ttf
ofl/karma/Karma-Medium.ttf
ofl/karma/Karma-SemiBold.ttf
ofl/monsieurladoulaise/MonsieurLaDoulaise-Regular.ttf
ofl/arya/Arya-Regular.ttf
ofl/arya/Arya-Bold.ttf
ofl/basic/Basic-Regular.ttf
ofl/exo/Exo-BoldItalic.ttf
ofl/exo/Exo-Italic.ttf
ofl/exo/Exo-ExtraLight.ttf
ofl/exo/Exo-SemiBold.ttf
ofl/exo/Exo-ExtraBold.ttf
ofl/exo/Exo-Light.ttf
ofl/exo/Exo-Thin.ttf
ofl/exo/Exo-ThinItalic.ttf
ofl/exo/Exo-Black.ttf
ofl/exo/Exo-ExtraLightItalic.ttf
ofl/exo/Exo-MediumItalic.ttf
ofl/exo/Exo-LightItalic.ttf
ofl/exo/Exo-BlackItalic.ttf
ofl/exo/Exo-ExtraBoldItalic.ttf
ofl/exo/Exo-Bold.ttf
ofl/exo/Exo-Medium.ttf
ofl/exo/Exo-Regular.ttf
ofl/exo/Exo-SemiBoldItalic.ttf
ofl/lemonada/Lemonada-Bold.ttf
ofl/lemonada/Lemonada-Regular.ttf
ofl/lemonada/Lemonada-SemiBold.ttf
ofl/lemonada/Lemonada-Light.ttf
ofl/euphoriascript/EuphoriaScript-Regular.ttf
ofl/paytoneone/PaytoneOne-Regular.ttf
ofl/encodesanscondensed/EncodeSansCondensed-ExtraLight.ttf
ofl/encodesanscondensed/EncodeSansCondensed-Light.ttf
ofl/encodesanscondensed/EncodeSansCondensed-Thin.ttf
ofl/encodesanscondensed/EncodeSansCondensed-Regular.ttf
ofl/encodesanscondensed/EncodeSansCondensed-ExtraBold.ttf
ofl/encodesanscondensed/EncodeSansCondensed-Medium.ttf
ofl/encodesanscondensed/EncodeSansCondensed-Black.ttf
ofl/encodesanscondensed/EncodeSansCondensed-SemiBold.ttf
ofl/encodesanscondensed/EncodeSansCondensed-Bold.ttf
ofl/deliusunicase/DeliusUnicase-Regular.ttf
ofl/deliusunicase/DeliusUnicase-Bold.ttf
ofl/kristi/Kristi-Regular.ttf
ofl/chathura/Chathura-Bold.ttf
ofl/chathura/Chathura-Regular.ttf
ofl/chathura/Chathura-Light.ttf
ofl/chathura/Chathura-Thin.ttf
ofl/chathura/Chathura-ExtraBold.ttf
ofl/fanwoodtext/FanwoodText-Regular.ttf
ofl/fanwoodtext/FanwoodText-Italic.ttf
ofl/oswald/Oswald-SemiBold.ttf
ofl/oswald/Oswald-ExtraLight.ttf
ofl/oswald/Oswald-Bold.ttf
ofl/oswald/Oswald-Light.ttf
ofl/oswald/Oswald-Medium.ttf
ofl/oswald/Oswald-Regular.ttf
ofl/seoulhangang/SeoulHangang-Light.ttf
ofl/seoulhangang/SeoulHangang-ExtraBold.ttf
ofl/seoulhangang/SeoulHangang-Medium.ttf
ofl/seoulhangang/SeoulHangang-Bold.ttf
ofl/lustria/Lustria-Regular.ttf
ofl/elsie/Elsie-Regular.ttf
ofl/elsie/Elsie-Black.ttf
ofl/vesperlibre/VesperLibre-Bold.ttf
ofl/vesperlibre/VesperLibre-Medium.ttf
ofl/vesperlibre/VesperLibre-Regular.ttf
ofl/vesperlibre/VesperLibre-Heavy.ttf
ofl/ruthie/Ruthie-Regular.ttf
ofl/elsieswashcaps/ElsieSwashCaps-Regular.ttf
ofl/elsieswashcaps/ElsieSwashCaps-Black.ttf
ofl/chicle/Chicle-Regular.ttf
ofl/sedansc/SedanSC-Regular.ttf
ofl/bentham/Bentham-Regular.ttf
ofl/courgette/Courgette-Regular.ttf
ofl/cutive/Cutive-Regular.ttf
ofl/tenorsans/TenorSans-Regular.ttf
ofl/aksarabaligalang/AksaraBaliGalang-Regular.ttf
ofl/podkova/Podkova-ExtraBold.ttf
ofl/podkova/Podkova-SemiBold.ttf
ofl/podkova/Podkova-Regular.ttf
ofl/podkova/Podkova-Medium.ttf
ofl/podkova/Podkova-Bold.ttf
ofl/simonetta/Simonetta-BlackItalic.ttf
ofl/simonetta/Simonetta-Italic.ttf
ofl/simonetta/Simonetta-Regular.ttf
ofl/simonetta/Simonetta-Black.ttf
ofl/volkhov/Volkhov-Bold.ttf
ofl/volkhov/Volkhov-Regular.ttf
ofl/volkhov/Volkhov-BoldItalic.ttf
ofl/volkhov/Volkhov-Italic.ttf
ofl/niconne/Niconne-Regular.ttf
ofl/moul/Moul.ttf
ofl/knewave/Knewave-Regular.ttf
ofl/snippet/Snippet.ttf
ofl/cormorantupright/CormorantUpright-Medium.ttf
ofl/cormorantupright/CormorantUpright-Bold.ttf
ofl/cormorantupright/CormorantUpright-Light.ttf
ofl/cormorantupright/CormorantUpright-Regular.ttf
ofl/cormorantupright/CormorantUpright-SemiBold.ttf
ofl/gfsneohellenic/GFSNeohellenicBold.ttf
ofl/gfsneohellenic/GFSNeohellenicItalic.ttf
ofl/gfsneohellenic/GFSNeohellenicBoldItalic.ttf
ofl/gfsneohellenic/GFSNeohellenic.ttf
ofl/saira/Saira-ExtraLight.ttf
ofl/saira/Saira-Light.ttf
ofl/saira/Saira-Thin.ttf
ofl/saira/Saira-SemiBold.ttf
ofl/saira/Saira-Medium.ttf
ofl/saira/Saira-Bold.ttf
ofl/saira/Saira-ExtraBold.ttf
ofl/saira/Saira-Regular.ttf
ofl/saira/Saira-Black.ttf
ofl/strong/Strong-Regular.ttf
ofl/geostarfill/GeostarFill-Regular.ttf
ofl/loversquarrel/LoversQuarrel-Regular.ttf
ofl/bigelowrules/BigelowRules-Regular.ttf
ofl/imfellgreatprimer/IMFeGPrm28P.ttf
ofl/imfellgreatprimer/IMFeGPit28P.ttf
ofl/quattrocento/Quattrocento-Bold.ttf
ofl/quattrocento/Quattrocento-Regular.ttf
ofl/ebgaramond/EBGaramond-Medium.ttf
ofl/ebgaramond/EBGaramond-ExtraBoldItalic.ttf
ofl/ebgaramond/EBGaramond-Bold.ttf
ofl/ebgaramond/EBGaramond-SemiBold.ttf
ofl/ebgaramond/EBGaramond-SemiBoldItalic.ttf
ofl/ebgaramond/EBGaramond-ExtraBold.ttf
ofl/ebgaramond/EBGaramond-MediumItalic.ttf
ofl/ebgaramond/EBGaramond-Regular.ttf
ofl/ebgaramond/EBGaramond-Italic.ttf
ofl/ebgaramond/EBGaramond-BoldItalic.ttf
ofl/farsan/Farsan-Regular.ttf
ofl/sarina/Sarina-Regular.ttf
ofl/atomicage/AtomicAge-Regular.ttf
ofl/iceland/Iceland-Regular.ttf
ofl/lora/Lora-Regular.ttf
ofl/lora/Lora-Bold.ttf
ofl/lora/Lora-Italic.ttf
ofl/lora/Lora-BoldItalic.ttf
ofl/sofadione/SofadiOne-Regular.ttf
ofl/martel/Martel-Regular.ttf
ofl/martel/Martel-DemiBold.ttf
ofl/martel/Martel-Bold.ttf
ofl/martel/Martel-ExtraBold.ttf
ofl/martel/Martel-Heavy.ttf
ofl/martel/Martel-UltraLight.ttf
ofl/martel/Martel-Light.ttf
ofl/elmessiri/ElMessiri-Regular.ttf
ofl/elmessiri/ElMessiri-Medium.ttf
ofl/elmessiri/ElMessiri-SemiBold.ttf
ofl/elmessiri/ElMessiri-Bold.ttf
ofl/overpass/Overpass-Thin.ttf
ofl/overpass/Overpass-ExtraBoldItalic.ttf
ofl/overpass/Overpass-BlackItalic.ttf
ofl/overpass/Overpass-LightItalic.ttf
ofl/overpass/Overpass-Regular.ttf
ofl/overpass/Overpass-Italic.ttf
ofl/overpass/Overpass-Light.ttf
ofl/overpass/Overpass-Black.ttf
ofl/overpass/Overpass-SemiBold.ttf
ofl/overpass/Overpass-Bold.ttf
ofl/overpass/Overpass-ThinItalic.ttf
ofl/overpass/Overpass-BoldItalic.ttf
ofl/overpass/Overpass-ExtraBold.ttf
ofl/overpass/Overpass-ExtraLightItalic.ttf
ofl/overpass/Overpass-ExtraLight.ttf
ofl/overpass/Overpass-SemiBoldItalic.ttf
ofl/brawler/Brawler-Regular.ttf
ofl/diplomatasc/DiplomataSC-Regular.ttf
ofl/bilboswashcaps/BilboSwashCaps-Regular.ttf
ofl/poly/Poly-Italic.ttf
ofl/poly/Poly-Regular.ttf
ofl/berkshireswash/BerkshireSwash-Regular.ttf
ofl/signika/Signika-Bold.ttf
ofl/signika/Signika-SemiBold.ttf
ofl/signika/Signika-Light.ttf
ofl/signika/Signika-Regular.ttf
ofl/bungeehairline/BungeeHairline-Regular.ttf
ofl/mrbedford/MrBedfort-Regular.ttf
ofl/snowburstone/SnowburstOne-Regular.ttf
ofl/rakkas/Rakkas-Regular.ttf
ofl/redacted/Redacted-Regular.ttf
ofl/librebarcode39text/LibreBarcode39Text-Regular.ttf
ofl/princesssofia/PrincessSofia-Regular.ttf
ofl/averialibre/AveriaLibre-LightItalic.ttf
ofl/averialibre/AveriaLibre-BoldItalic.ttf
ofl/averialibre/AveriaLibre-Regular.ttf
ofl/averialibre/AveriaLibre-Light.ttf
ofl/averialibre/AveriaLibre-Italic.ttf
ofl/averialibre/AveriaLibre-Bold.ttf
ofl/lemon/Lemon-Regular.ttf
ofl/scada/Scada-Italic.ttf
ofl/scada/Scada-Bold.ttf
ofl/scada/Scada-Regular.ttf
ofl/scada/Scada-BoldItalic.ttf
ofl/eaglelake/EagleLake-Regular.ttf
ofl/padauk/Padauk-Regular.ttf
ofl/padauk/Padauk-Bold.ttf
ofl/stintultraexpanded/StintUltraExpanded-Regular.ttf
ofl/italiana/Italiana-Regular.ttf
ofl/sawarabigothic/SawarabiGothic-Regular.ttf
ofl/varelaround/VarelaRound-Regular.ttf
ofl/lilyscriptone/LilyScriptOne-Regular.ttf
ofl/sintony/Sintony-Bold.ttf
ofl/sintony/Sintony-Regular.ttf
ofl/aubrey/Aubrey-Regular.ttf
ofl/judson/Judson-Regular.ttf
ofl/judson/Judson-Italic.ttf
ofl/judson/Judson-Bold.ttf
ofl/freckleface/FreckleFace-Regular.ttf
ofl/imfellenglishsc/IMFeENsc28P.ttf
ofl/contrailone/ContrailOne-Regular.ttf
ofl/yrsa/Yrsa-Medium.ttf
ofl/yrsa/Yrsa-Light.ttf
ofl/yrsa/Yrsa-Regular.ttf
ofl/yrsa/Yrsa-SemiBold.ttf
ofl/yrsa/Yrsa-Bold.ttf
ofl/souliyo/Souliyo-Regular.ttf
ofl/novasquare/NovaSquare.ttf
ofl/medievalsharp/MedievalSharp.ttf
ofl/diplomata/Diplomata-Regular.ttf
ofl/expletussans/ExpletusSans-Medium.ttf
ofl/expletussans/ExpletusSans-MediumItalic.ttf
ofl/expletussans/ExpletusSans-SemiBoldItalic.ttf
ofl/expletussans/ExpletusSans-Bold.ttf
ofl/expletussans/ExpletusSans-SemiBold.ttf
ofl/expletussans/ExpletusSans-BoldItalic.ttf
ofl/expletussans/ExpletusSans-Regular.ttf
ofl/expletussans/ExpletusSans-Italic.ttf
ofl/faustina/Faustina-SemiBoldItalic.ttf
ofl/faustina/Faustina-SemiBold.ttf
ofl/faustina/Faustina-Regular.ttf
ofl/faustina/Faustina-Italic.ttf
ofl/faustina/Faustina-BoldItalic.ttf
ofl/faustina/Faustina-MediumItalic.ttf
ofl/faustina/Faustina-Bold.ttf
ofl/faustina/Faustina-Medium.ttf
ofl/sairaextracondensed/SairaExtraCondensed-Light.ttf
ofl/sairaextracondensed/SairaExtraCondensed-Thin.ttf
ofl/sairaextracondensed/SairaExtraCondensed-ExtraLight.ttf
ofl/sairaextracondensed/SairaExtraCondensed-ExtraBold.ttf
ofl/sairaextracondensed/SairaExtraCondensed-SemiBold.ttf
ofl/sairaextracondensed/SairaExtraCondensed-Medium.ttf
ofl/sairaextracondensed/SairaExtraCondensed-Bold.ttf
ofl/sairaextracondensed/SairaExtraCondensed-Regular.ttf
ofl/sairaextracondensed/SairaExtraCondensed-Black.ttf
ofl/anticdidone/AnticDidone-Regular.ttf
ofl/cormorantinfant/CormorantInfant-SemiBoldItalic.ttf
ofl/cormorantinfant/CormorantInfant-BoldItalic.ttf
ofl/cormorantinfant/CormorantInfant-MediumItalic.ttf
ofl/cormorantinfant/CormorantInfant-SemiBold.ttf
ofl/cormorantinfant/CormorantInfant-Bold.ttf
ofl/cormorantinfant/CormorantInfant-Medium.ttf
ofl/cormorantinfant/CormorantInfant-Italic.ttf
ofl/cormorantinfant/CormorantInfant-LightItalic.ttf
ofl/cormorantinfant/CormorantInfant-Regular.ttf
ofl/cormorantinfant/CormorantInfant-Light.ttf
ofl/khula/Khula-ExtraBold.ttf
ofl/khula/Khula-Light.ttf
ofl/khula/Khula-Bold.ttf
ofl/khula/Khula-Regular.ttf
ofl/khula/Khula-SemiBold.ttf
ofl/abel/Abel-Regular.ttf
ofl/bungeeinline/BungeeInline-Regular.ttf
ofl/librebarcode128/LibreBarcode128-Regular.ttf
ofl/marmelad/Marmelad-Regular.ttf
ofl/passionone/PassionOne-Bold.ttf
ofl/passionone/PassionOne-Black.ttf
ofl/passionone/PassionOne-Regular.ttf
ofl/trochut/Trochut-Italic.ttf
ofl/trochut/Trochut-Regular.ttf
ofl/trochut/Trochut-Bold.ttf
ofl/justmeagaindownhere/JustMeAgainDownHere.ttf
ofl/mavenprovfbeta/MavenProVFBeta.ttf
ofl/nats/NATS-Regular.ttf
ofl/oranienbaum/Oranienbaum-Regular.ttf
ofl/sura/Sura-Bold.ttf
ofl/sura/Sura-Regular.ttf
ofl/cormorant/Cormorant-SemiBoldItalic.ttf
ofl/cormorant/Cormorant-Regular.ttf
ofl/cormorant/Cormorant-Medium.ttf
ofl/cormorant/Cormorant-MediumItalic.ttf
ofl/cormorant/Cormorant-Italic.ttf
ofl/cormorant/Cormorant-LightItalic.ttf
ofl/cormorant/Cormorant-BoldItalic.ttf
ofl/cormorant/Cormorant-SemiBold.ttf
ofl/cormorant/Cormorant-Light.ttf
ofl/cormorant/Cormorant-Bold.ttf
ofl/nanumgothic/NanumGothic-Regular.ttf
ofl/nanumgothic/NanumGothic-Bold.ttf
ofl/nanumgothic/NanumGothic-ExtraBold.ttf
ofl/averiasanslibre/AveriaSansLibre-Bold.ttf
ofl/averiasanslibre/AveriaSansLibre-LightItalic.ttf
ofl/averiasanslibre/AveriaSansLibre-Regular.ttf
ofl/averiasanslibre/AveriaSansLibre-Italic.ttf
ofl/averiasanslibre/AveriaSansLibre-BoldItalic.ttf
ofl/averiasanslibre/AveriaSansLibre-Light.ttf
ofl/fenix/Fenix-Regular.ttf
ofl/supermercadoone/SupermercadoOne-Regular.ttf
ofl/cwtexyen/cwTeXYen-zhonly.ttf
ofl/londrinasketch/LondrinaSketch-Regular.ttf
ofl/wallpoet/Wallpoet-Regular.ttf
ofl/prompt/Prompt-Bold.ttf
ofl/prompt/Prompt-MediumItalic.ttf
ofl/prompt/Prompt-Regular.ttf
ofl/prompt/Prompt-Medium.ttf
ofl/prompt/Prompt-SemiBoldItalic.ttf
ofl/prompt/Prompt-Black.ttf
ofl/prompt/Prompt-ExtraBold.ttf
ofl/prompt/Prompt-ExtraLightItalic.ttf
ofl/prompt/Prompt-BlackItalic.ttf
ofl/prompt/Prompt-SemiBold.ttf
ofl/prompt/Prompt-Light.ttf
ofl/prompt/Prompt-ThinItalic.ttf
ofl/prompt/Prompt-Thin.ttf
ofl/prompt/Prompt-BoldItalic.ttf
ofl/prompt/Prompt-Italic.ttf
ofl/prompt/Prompt-ExtraBoldItalic.ttf
ofl/prompt/Prompt-LightItalic.ttf
ofl/prompt/Prompt-ExtraLight.ttf
ofl/esteban/Esteban-Regular.ttf
ofl/hindjalandhar/HindJalandhar-Light.ttf
ofl/hindjalandhar/HindJalandhar-Medium.ttf
ofl/hindjalandhar/HindJalandhar-Bold.ttf
ofl/hindjalandhar/HindJalandhar-SemiBold.ttf
ofl/hindjalandhar/HindJalandhar-Regular.ttf
ofl/galindo/Galindo-Regular.ttf
ofl/secularone/SecularOne-Regular.ttf
ofl/terminaldosis/TerminalDosis-SemiBold.ttf
ofl/terminaldosis/TerminalDosis-Medium.ttf
ofl/terminaldosis/TerminalDosis-Regular.ttf
ofl/terminaldosis/TerminalDosis-Bold.ttf
ofl/terminaldosis/TerminalDosis-ExtraBold.ttf
ofl/terminaldosis/TerminalDosis-Light.ttf
ofl/terminaldosis/TerminalDosis-ExtraLight.ttf
ofl/paprika/Paprika-Regular.ttf
ofl/codystar/Codystar-Regular.ttf
ofl/codystar/Codystar-Light.ttf
ofl/fresca/Fresca-Regular.ttf
ofl/boogaloo/Boogaloo-Regular.ttf
ofl/varta/Varta-Regular.ttf
ofl/varta/Varta-Bold.ttf
ofl/varta/Varta-Light.ttf
ofl/varta/Varta-SemiBold.ttf
ofl/romanesco/Romanesco-Regular.ttf
ofl/miama/Miama-Regular.ttf
ofl/ptserifcaption/PT_Serif-Caption-Web-Regular.ttf
ofl/ptserifcaption/PT_Serif-Caption-Web-Italic.ttf
ofl/creteround/CreteRound-Italic.ttf
ofl/creteround/CreteRound-Regular.ttf
ofl/sarabun/Sarabun-Regular.ttf
ofl/sarabun/Sarabun-BoldItalic.ttf
ofl/sarabun/Sarabun-Bold.ttf
ofl/sarabun/Sarabun-Italic.ttf
ofl/lohitbengali/Lohit-Bengali.ttf
ofl/mrdafoe/MrDafoe-Regular.ttf
ofl/wireone/WireOne.ttf
ofl/bellefair/Bellefair-Regular.ttf
ofl/cinzeldecorative/CinzelDecorative-Regular.ttf
ofl/cinzeldecorative/CinzelDecorative-Bold.ttf
ofl/cinzeldecorative/CinzelDecorative-Black.ttf
ofl/didactgothic/DidactGothic-Regular.ttf
ofl/drsugiyama/DrSugiyama-Regular.ttf
ofl/tienne/Tienne-Black.ttf
ofl/tienne/Tienne-Regular.ttf
ofl/tienne/Tienne-Bold.ttf
ofl/roundedmplus1c/RoundedMplus1c-Light.ttf
ofl/roundedmplus1c/RoundedMplus1c-Thin.ttf
ofl/roundedmplus1c/RoundedMplus1c-Medium.ttf
ofl/roundedmplus1c/RoundedMplus1c-ExtraBold.ttf
ofl/roundedmplus1c/RoundedMplus1c-Bold.ttf
ofl/roundedmplus1c/RoundedMplus1c-Black.ttf
ofl/roundedmplus1c/RoundedMplus1c-Regular.ttf
ofl/dhurjati/Dhurjati-Regular.ttf
ofl/carme/Carme-Regular.ttf
ofl/yaldevicolombo/YaldeviColombo-Light.ttf
ofl/yaldevicolombo/YaldeviColombo-ExtraLight.ttf
ofl/yaldevicolombo/YaldeviColombo-Regular.ttf
ofl/yaldevicolombo/YaldeviColombo-Medium.ttf
ofl/yaldevicolombo/YaldeviColombo-Bold.ttf
ofl/yaldevicolombo/YaldeviColombo-SemiBold.ttf
ofl/librebarcode39/LibreBarcode39-Regular.ttf
ofl/candal/Candal.ttf
ofl/peralta/Peralta-Regular.ttf
ofl/baloothambi/BalooThambi-Regular.ttf
ofl/sarpanch/Sarpanch-ExtraBold.ttf
ofl/sarpanch/Sarpanch-Regular.ttf
ofl/sarpanch/Sarpanch-Medium.ttf
ofl/sarpanch/Sarpanch-Black.ttf
ofl/sarpanch/Sarpanch-Bold.ttf
ofl/sarpanch/Sarpanch-SemiBold.ttf
ofl/revalia/Revalia-Regular.ttf
ofl/jockeyone/JockeyOne-Regular.ttf
ofl/katibeh/Katibeh-Regular.ttf
ofl/orienta/Orienta-Regular.ttf
ofl/lindenhill/LindenHill-Regular.ttf
ofl/lindenhill/LindenHill-Italic.ttf
ofl/averiaseriflibre/AveriaSerifLibre-LightItalic.ttf
ofl/averiaseriflibre/AveriaSerifLibre-Regular.ttf
ofl/averiaseriflibre/AveriaSerifLibre-Light.ttf
ofl/averiaseriflibre/AveriaSerifLibre-Italic.ttf
ofl/averiaseriflibre/AveriaSerifLibre-Bold.ttf
ofl/averiaseriflibre/AveriaSerifLibre-BoldItalic.ttf
ofl/overlocksc/OverlockSC-Regular.ttf
ofl/loveyalikeasister/LoveYaLikeASister.ttf
ofl/chauphilomeneone/ChauPhilomeneOne-Regular.ttf
ofl/chauphilomeneone/ChauPhilomeneOne-Italic.ttf
ofl/novaround/NovaRound.ttf
ofl/unifrakturmaguntia/UnifrakturMaguntia-Book.ttf
ofl/catamaran/Catamaran-Bold.ttf
ofl/catamaran/Catamaran-Black.ttf
ofl/catamaran/Catamaran-Regular.ttf
ofl/catamaran/Catamaran-SemiBold.ttf
ofl/catamaran/Catamaran-Thin.ttf
ofl/catamaran/Catamaran-Light.ttf
ofl/catamaran/Catamaran-ExtraBold.ttf
ofl/catamaran/Catamaran-ExtraLight.ttf
ofl/catamaran/Catamaran-Medium.ttf
ofl/arizonia/Arizonia-Regular.ttf
ofl/mysteryquest/MysteryQuest-Regular.ttf
ofl/myanmarsanspro/MyanmarSansPro-Regular.ttf
ofl/encodesans/EncodeSans-Regular.ttf
ofl/encodesans/EncodeSans-Bold.ttf
ofl/encodesans/EncodeSans-Light.ttf
ofl/encodesans/EncodeSans-SemiBold.ttf
ofl/encodesans/EncodeSans-Black.ttf
ofl/encodesans/EncodeSans-ExtraBold.ttf
ofl/encodesans/EncodeSans-Medium.ttf
ofl/encodesans/EncodeSans-Thin.ttf
ofl/encodesans/EncodeSans-ExtraLight.ttf
ofl/kanit/Kanit-SemiBold.ttf
ofl/kanit/Kanit-Thin.ttf
ofl/kanit/Kanit-Black.ttf
ofl/kanit/Kanit-Medium.ttf
ofl/kanit/Kanit-Bold.ttf
ofl/kanit/Kanit-LightItalic.ttf
ofl/kanit/Kanit-ExtraLight.ttf
ofl/kanit/Kanit-ExtraBold.ttf
ofl/kanit/Kanit-ExtraBoldItalic.ttf
ofl/kanit/Kanit-Light.ttf
ofl/kanit/Kanit-Italic.ttf
ofl/kanit/Kanit-Regular.ttf
ofl/kanit/Kanit-BoldItalic.ttf
ofl/kanit/Kanit-MediumItalic.ttf
ofl/kanit/Kanit-ThinItalic.ttf
ofl/kanit/Kanit-ExtraLightItalic.ttf
ofl/kanit/Kanit-SemiBoldItalic.ttf
ofl/kanit/Kanit-BlackItalic.ttf
ofl/imfellgreatprimersc/IMFeGPsc28P.ttf
ofl/carroisgothicsc/CarroisGothicSC-Regular.ttf
ofl/freehand/Freehand.ttf
ofl/abhayalibre/AbhayaLibre-Medium.ttf
ofl/abhayalibre/AbhayaLibre-Bold.ttf
ofl/abhayalibre/AbhayaLibre-SemiBold.ttf
ofl/abhayalibre/AbhayaLibre-ExtraBold.ttf
ofl/abhayalibre/AbhayaLibre-Regular.ttf
ofl/risque/Risque-Regular.ttf
ofl/qwigley/Qwigley-Regular.ttf
ofl/nunitosans/NunitoSans-ExtraLight.ttf
ofl/nunitosans/NunitoSans-Black.ttf
ofl/nunitosans/NunitoSans-SemiBold.ttf
ofl/nunitosans/NunitoSans-SemiBoldItalic.ttf
ofl/nunitosans/NunitoSans-BlackItalic.ttf
ofl/nunitosans/NunitoSans-Bold.ttf
ofl/nunitosans/NunitoSans-ExtraLightItalic.ttf
ofl/nunitosans/NunitoSans-Regular.ttf
ofl/nunitosans/NunitoSans-BoldItalic.ttf
ofl/nunitosans/NunitoSans-ExtraBoldItalic.ttf
ofl/nunitosans/NunitoSans-LightItalic.ttf
ofl/nunitosans/NunitoSans-Italic.ttf
ofl/nunitosans/NunitoSans-Light.ttf
ofl/nunitosans/NunitoSans-ExtraBold.ttf
ofl/imfellfrenchcanon/IMFeFCrm28P.ttf
ofl/imfellfrenchcanon/IMFeFCit28P.ttf
ofl/asar/Asar-Regular.ttf
ofl/seoulnamsanvertical/SeoulNamsanVertical-Regular.ttf
ofl/dangrek/Dangrek.ttf
ofl/allura/Allura-Regular.ttf
ofl/reeniebeanie/ReenieBeanie.ttf
ofl/stalemate/Stalemate-Regular.ttf
ofl/galdeano/Galdeano-Regular.ttf
ofl/indieflower/IndieFlower-Regular.ttf
ofl/allerta/Allerta-Regular.ttf
ofl/megrim/Megrim.ttf
ofl/spinnaker/Spinnaker-Regular.ttf
ofl/codacaption/CodaCaption-ExtraBold.ttf
ofl/matesc/MateSC-Regular.ttf
ofl/librebarcode128text/LibreBarcode128Text-Regular.ttf
ofl/julee/Julee-Regular.ttf
ofl/anticslab/AnticSlab-Regular.ttf
ofl/miltoniantattoo/MiltonianTattoo-Regular.ttf
ofl/buda/Buda-Light.ttf
ofl/rye/Rye-Regular.ttf
ofl/averiagruesalibre/AveriaGruesaLibre-Regular.ttf
ofl/monofett/Monofett.ttf
ofl/decovaralpha/DecovarAlpha-VF.ttf
ofl/muktamalar/MuktaMalar-ExtraBold.ttf
ofl/muktamalar/MuktaMalar-ExtraLight.ttf
ofl/muktamalar/MuktaMalar-Medium.ttf
ofl/muktamalar/MuktaMalar-Bold.ttf
ofl/muktamalar/MuktaMalar-Light.ttf
ofl/muktamalar/MuktaMalar-Regular.ttf
ofl/muktamalar/MuktaMalar-SemiBold.ttf
ofl/sanchez/Sanchez-Regular.ttf
ofl/sanchez/Sanchez-Italic.ttf
ofl/montaga/Montaga-Regular.ttf
ofl/caveat/Caveat-Regular.ttf
ofl/caveat/Caveat-Bold.ttf
ofl/unicaone/UnicaOne-Regular.ttf
ofl/novamono/NovaMono.ttf
ofl/archivovfbeta/ArchivoVFBeta.ttf
ofl/archivovfbeta/ArchivoVFBeta-Italic.ttf
ofl/mervalescript/MervaleScript-Regular.ttf
ofl/petitformalscript/PetitFormalScript-Regular.ttf
ofl/karlatamilupright/KarlaTamilUpright-Bold.ttf
ofl/karlatamilupright/KarlaTamilUpright-Regular.ttf
ofl/jomhuria/Jomhuria-Regular.ttf
ofl/macondoswashcaps/MacondoSwashCaps-Regular.ttf
ofl/cambay/Cambay-Bold.ttf
ofl/cambay/Cambay-Regular.ttf
ofl/cambay/Cambay-Italic.ttf
ofl/cambay/Cambay-BoldItalic.ttf
ofl/stintultracondensed/StintUltraCondensed-Regular.ttf
ofl/rosario/Rosario-Italic.ttf
ofl/rosario/Rosario-Regular.ttf
ofl/rosario/Rosario-Bold.ttf
ofl/rosario/Rosario-BoldItalic.ttf
ofl/cherryswash/CherrySwash-Bold.ttf
ofl/cherryswash/CherrySwash-Regular.ttf
ofl/pridi/Pridi-Regular.ttf
ofl/pridi/Pridi-SemiBold.ttf
ofl/pridi/Pridi-Light.ttf
ofl/pridi/Pridi-ExtraLight.ttf
ofl/pridi/Pridi-Medium.ttf
ofl/pridi/Pridi-Bold.ttf
ofl/patuaone/PatuaOne-Regular.ttf
"""
return [filepath for filepath in files.split('\n') if filepath]
def get_filtered_fonts():
    """Return the list from ``get_fonts()`` with known-problematic fonts removed.

    ``filtered_fonts`` is a flat list of alternating ``name, index`` entries:
    each name is a font file-name stem to exclude, and each index is that
    font's position in the list returned by ``get_fonts()``.  Every index is
    validated against its name (case-insensitive substring match) before any
    deletion, then entries are deleted from the highest index down so earlier
    deletions do not shift the positions of later targets.

    Returns:
        list[str]: font file paths, minus the excluded entries.
    """
    filtered_fonts = [
        'jsMath-cmex10', 56,
        'Nokora-Bold', 120,
        'Nokora-Regular', 121,
        'AdobeBlank-Regular', 130,
        'Kokoro-Regular', 237,
        'Koulen', 244,
        'Astloch-Regular', 270,
        'Astloch-Bold', 271,
        'AlmendraDisplay-Regular', 294,
        'ZillaSlabHighlight-Bold', 313,
        'ZillaSlabHighlight-Regular', 314,
        'NotoSansTamil-Regular', 324,
        'NotoSansTamil-Bold', 325,
        'Ewert-Regular', 333,
        'Khmer', 344,
        'HerrVonMuellerhoff-Regular', 396,
        'RalewayDots-Regular', 414,
        'Angkor-Regular', 415,
        'PinyonScript-Regular', 564,
        'cwTeXHei-zhonly', 565,
        'MrsSaintDelafield-Regular', 590,
        'Ponnala-Regular', 599,
        'cwTeXMing-zhonly', 646,
        'LaoSansPro-Regular', 749,
        'LondrinaOutline-Regular', 777,
        'Parisienne-Regular', 788,
        'MissFajardose-Regular', 813,
        'Kantumruy-Light', 816,
        'Kantumruy-Bold', 817,
        'Kantumruy-Regular', 818,
        'Cedarville-Cursive', 819,
        'Moulpali', 820,
        'RougeScript-Regular', 846,
        'LaoMuangKhong-Regular', 856,
        'Sitara-Italic', 898,
        'Sitara-Bold', 899,
        'Sitara-Regular', 900,
        'Sitara-BoldItalic', 901,
        'GreatVibes-Regular', 920,
        'Yinmar-Regular', 963,
        'cwTeXFangSong-zhonly', 966,
        'UnifrakturCook-Bold', 997,
        'JacquesFrancoisShadow-Regular', 1003,
        'BungeeShade-Regular', 1032,
        'Sacramento-Regular', 1039,
        'ButterflyKids-Regular', 1045,
        'MarckScript-Regular', 1067,
        'Miltonian-Regular', 1082,
        'KarlaTamilInclined-Bold', 1087,
        'KarlaTamilInclined-Regular', 1088,
        'Battambang-Bold', 1101,
        'Battambang-Regular', 1102,
        'LibreBarcode39ExtendedText-Regular', 1124,
        'LaBelleAuror', 1126,
        'BungeeOutline-Regular', 1175,
        'Hannari-Regular', 1209,
        'Sevillana-Regular', 1210,
        'LibreBarcode39Extended-Regular', 1224,
        'Taprom', 1236,
        'Fasthand-Regular', 1246,
        'Bokor-Regular', 1268,
        'Hanalei-Regular', 1285,
        'FasterOne-Regular', 1291,
        'Chenla', 1297,
        'Bayon-Regular', 1358,
        'Metal', 1391,
        'Lohit-Devanagari', 1393,
        'Lohit-Tamil', 1410,
        'Bonbon-Regular', 1435,
        'Preahvihear', 1440,
        'RedactedScript-Regular', 1442,
        'RedactedScript-Bold', 1443,
        'Norican-Regular', 1469,
        'MrsSaintDelafield-Regular', 1509,
        'Geostar-Regular', 1522,
        'LaoMuangDon-Regular', 1528,
        'OdorMeanChey', 1574,
        'Nikukyu-Regular', 1576,
        'DawningofaNewDay', 1614,
        'LeagueScript', 1649,
        'MeieScript-Regular', 1677,
        'MrBedfort-Regular', 1686,
        'Meddon', 1687,
        'Siemreap', 1690,
        'FrederickatheGreat-Regular', 1749,
        'Monoton-Regular', 1754,
        'PorterSansBlock-Regular', 1758,
        'KdamThmor-Regular', 1759,
        'cwTeXKai-zhonly', 1786,
        'Phetsarath-Regular', 1830,
        'Phetsarath-Bold', 1831,
        'Siemreap', 1842,
        'MrDeHaviland-Regular', 1853,
        'Content-Regular', 1867,
        'Content-Bold', 1868,
        'LondrinaShadow-Regular', 1887,
        'Suwannaphum-Regular', 1930,
        'MonsieurLaDoulaise-Regular', 1936,
        'Ruthie-Regular', 2000,
        'AksaraBaliGalang-Regular', 2009,
        'Niconne-Regular', 2023,
        'Moul', 2024,
        'LoversQuarrel-Regular', 2047,
        'DiplomataSc-Regular', 2100,
        'BilboSwashCaps-Regular', 2101,
        'Redacted-Regular', 2113,
        'LibreBarcode39Text-Regular', 2114,
        'PrincessSofia-Regular', 2115,
        'Souliyo-Regular', 2149,
        'Diplomata-Regular', 2152,
        'LibreBarcode128-Regular', 2196,
        'cwTeXYen-zhonly', 2231,
        'LondrinaSketch-Regular', 2232,
        'Codystar-Regular', 2268,
        'Codystar-Light', 2269,
        'Lohit-Bengali', 2286,
        'LibreBarcode39-Regular', 2313,
        'UnifrakturMaguntia-Book', 2340,
        'MyanmarSansPro-Regular', 2352,
        'Freehand', 2382,
        'Dangrek', 2408,
        'LibreBarcode128Text-Regular', 2419,
        'Monofett', 2426,
        'KarlaTamilUpright-Bold', 2446,
        'KarlaTamilUpright-Regular', 2447,
    ]
    # Pair up the flat (name, index) entries instead of stepping by 2.
    pairs = list(zip(filtered_fonts[::2], filtered_fonts[1::2]))
    fonts = get_fonts()
    # Sanity-check that each recorded index still points at the expected font.
    for name, index in pairs:
        assert name.lower() in fonts[index].lower()
    # Delete from the highest index down so deletions never shift a
    # not-yet-deleted target (explicit sort — no reliance on literal order).
    for _, index in sorted(pairs, key=lambda pair: pair[1], reverse=True):
        del fonts[index]
    return fonts
def get_filtered_permutation():
numbers = """
1115
449
886
2307
1040
526
68
1224
890
945
907
1271
2191
1128
1701
1180
262
488
718
316
2051
257
1491
360
1183
2185
169
908
1669
1731
1373
677
648
1551
69
1939
1720
1833
1403
2234
2259
1813
2008
579
1982
1414
202
1254
2342
3
380
1693
2335
1566
1467
1275
869
1667
695
71
448
72
864
1907
857
1676
1241
2328
2014
1658
2094
349
578
1999
1647
2197
692
1168
754
1619
1708
1353
1806
280
1487
1688
923
1120
1144
835
675
1750
255
1185
1290
65
87
631
747
1151
1362
407
662
2004
1987
225
687
1598
385
1008
628
1997
2192
533
595
938
50
2254
1877
487
734
77
2052
1706
2287
190
2139
918
93
1596
1363
379
1735
275
927
1587
2298
1292
2251
599
581
229
1266
1608
2058
1800
1870
6
2225
1535
146
575
32
273
1124
1456
1244
484
427
1906
1474
651
139
894
2308
916
2319
968
963
1039
1834
1424
1170
1943
331
399
1214
2301
1661
181
1691
1019
2184
35
2034
2209
2317
1626
67
1495
1489
1522
998
735
8
1856
347
2220
1233
1171
2292
1452
897
1776
676
1699
92
283
2009
2265
1003
2324
152
475
813
1630
1028
178
1672
566
1869
559
1355
1346
793
1629
1858
921
268
368
1929
183
507
323
1748
2310
1246
2039
1390
1145
1707
472
504
76
977
1100
850
1746
464
2074
1752
2137
773
753
780
2100
1309
636
1954
348
937
1476
2339
99
110
609
1511
1510
2049
2194
530
1938
666
607
2063
2105
1123
1811
1940
296
2291
261
834
1259
168
1181
1386
1227
1714
849
388
1718
160
1734
1025
1719
276
1579
2038
2053
1901
154
1301
1431
113
2113
2330
356
265
1657
2257
412
1920
52
749
1032
1182
1887
496
1567
1306
469
1364
1648
528
585
1439
2047
2213
1627
2246
785
621
555
1482
2190
1641
534
1205
2219
509
171
1087
865
186
570
1500
816
1753
1941
2269
2240
973
580
1557
1583
1894
1281
1155
661
418
1435
803
642
982
1559
1930
11
419
1109
2145
2173
845
1682
1823
221
320
799
1047
438
165
2136
1086
1416
1333
122
1201
1438
2231
2162
421
1903
417
889
588
236
267
1516
2152
1687
2267
1597
2021
2116
1328
1795
1797
303
2337
217
1465
326
2212
736
1918
482
1863
506
2115
2295
1102
2189
1265
1052
928
592
9
1457
1161
233
1409
1895
541
2143
1737
2060
805
539
237
1404
1297
557
1689
2204
1475
29
584
1618
1449
2012
1726
670
188
102
226
658
655
1211
219
1560
2271
2340
446
517
697
1652
1253
1640
2056
443
315
935
1792
1164
2221
622
1051
1138
863
272
1878
497
2179
786
286
25
321
1850
1061
1900
1239
1165
2025
939
1512
104
789
2141
814
861
833
564
830
1554
2111
1493
394
1213
1158
838
873
1178
1933
962
299
2127
2283
91
82
38
531
1692
1674
266
792
1062
2068
722
251
2011
1968
540
1670
1996
1310
199
1361
1998
1684
1068
746
220
1104
970
63
1326
253
1990
1936
491
1959
586
1235
437
222
596
2169
1219
810
888
930
1209
1760
765
2109
712
1015
1798
1882
302
1006
1645
1848
290
30
295
357
1319
1027
1136
14
608
1908
1007
373
2241
1311
1705
1549
2281
1250
891
1553
2149
2224
1520
827
944
684
2218
1208
1193
1223
133
1018
1820
1033
1429
1972
300
750
1868
501
1828
182
1385
150
1911
2164
1609
1466
1842
1204
1014
370
1625
1686
1778
1835
2203
162
682
1159
327
1084
1565
1302
164
80
875
859
2320
263
2256
2315
1825
914
476
1270
2297
73
118
166
2165
986
1757
2106
1388
1134
1296
1581
24
1085
1088
120
1222
1742
804
981
1883
1186
941
1108
49
516
1754
383
782
703
242
344
1562
1157
812
173
2275
1285
21
1832
277
1755
410
2092
1114
577
1582
1308
2272
1988
1156
1915
802
1979
730
1080
2261
2110
1607
478
151
1473
340
2133
874
1632
79
382
1398
561
1615
568
801
716
1965
1252
2276
270
126
60
1257
1367
1593
211
1973
46
899
1460
961
189
2135
2302
900
486
2130
943
362
1312
967
1069
1541
1188
1653
1331
1568
1905
246
885
1853
552
498
216
2202
979
89
1683
1643
1831
170
2210
2148
298
293
1621
1099
1695
1401
1206
878
2178
442
2293
1993
381
2188
227
2316
1199
34
2024
2157
1065
1594
499
414
1316
1865
402
917
919
1389
1441
1504
224
1945
1509
727
514
462
2235
951
1203
1759
1417
2144
2069
1117
740
598
90
53
2172
1075
1234
153
659
403
1916
602
1200
2059
775
1268
1875
313
1962
36
1344
2181
909
1620
1773
228
2071
1517
1478
1440
525
688
400
1518
1216
1332
706
1891
1843
1586
612
1278
2104
1885
249
390
1126
336
1280
16
1550
1664
1852
693
1356
1660
1017
1923
40
550
702
2279
619
2153
1329
2258
197
1523
1521
1817
480
433
1812
245
2270
1335
101
1004
1336
1243
884
2140
2282
870
2080
1093
1591
1092
1197
2255
1035
57
1724
613
1589
958
1279
1320
179
2239
1791
2171
195
1861
965
1330
84
674
1450
473
297
1584
314
1793
976
992
1698
1129
1005
2262
209
193
990
1879
1314
123
2040
1023
167
503
2300
13
2252
354
1702
1956
2305
1977
933
643
2228
2138
966
143
807
1067
2322
839
1317
1471
1095
686
5
434
2325
2151
291
1530
119
1247
1966
2118
896
144
995
1859
2046
1616
829
100
1169
1074
2264
408
1572
672
393
513
481
1985
1542
1662
401
1994
58
597
679
953
705
470
1604
81
1300
377
591
626
1248
389
2229
632
1152
1960
1771
447
911
142
2108
1880
1847
1544
97
1348
1298
1448
328
1287
1747
1461
669
1294
1765
1413
48
538
2329
1612
459
1513
2331
1369
1963
2154
109
135
1453
1704
964
2032
17
180
2199
683
1730
359
903
454
301
1665
1196
1762
1946
353
1860
862
848
656
2043
54
971
912
239
177
192
560
1399
2002
492
1139
1175
149
2006
137
777
1315
456
234
2303
871
281
1089
527
1463
876
128
554
649
247
1289
2160
2208
471
1324
606
318
391
1525
364
332
806
1009
1096
1694
1304
61
85
1501
2020
725
2309
1184
2222
1078
556
1896
1286
420
860
1802
1650
439
646
2311
1590
634
203
1483
1218
2237
741
715
1020
523
1527
1921
952
387
603
767
1796
429
136
37
1808
1588
1251
926
650
1822
1245
271
2089
1228
1733
1415
1447
1486
18
1556
2333
1236
582
1150
95
999
2196
1498
1230
1118
1488
28
138
1578
902
1928
1426
1264
2159
1624
1428
1934
543
415
1225
866
825
2078
325
2226
617
630
397
458
1603
1048
1975
2236
1098
185
1340
1430
1480
117
733
1947
502
159
1295
960
701
10
2200
2318
709
2244
663
1174
2048
1874
910
1766
1370
618
477
405
2077
1132
1146
1532
1016
2
654
2082
131
855
1107
522
490
1445
431
1050
1524
794
2250
1337
2075
728
1322
1772
1377
158
214
1043
430
1464
489
858
1432
311
1745
307
1539
1639
4
826
1644
1365
2168
163
1347
1479
1649
355
831
2098
515
732
288
62
2205
769
1458
1845
319
479
1978
2217
1077
0
760
763
700
1862
172
1769
309
1425
681
1242
312
1237
815
667
1044
950
1816
1600
529
1267
252
1256
1814
583
7
406
1654
26
129
2249
2312
2026
840
1446
1700
1372
74
776
1054
1381
1217
983
714
723
842
742
819
1376
2042
230
1116
1751
461
565
460
1238
1623
1393
1383
1506
338
573
1103
1443
2066
1189
818
1325
1231
2033
594
1011
1543
2132
542
627
1839
2107
371
811
395
56
130
256
378
1821
633
2146
743
2195
1153
107
1481
1840
1352
2084
1712
948
1768
1179
882
125
901
1779
690
1303
204
369
2091
157
1293
1082
856
1173
2035
954
1955
2182
70
1655
1697
762
384
1392
868
269
867
279
1026
974
664
2016
116
800
508
1932
2176
905
112
1212
1031
1454
2273
1291
906
176
248
1111
1496
1866
549
1575
20
616
1790
1991
1537
1577
1633
2045
2027
892
817
1249
127
1758
2062
975
19
731
1777
1545
691
571
1592
1738
1384
548
1125
451
738
1038
657
652
1274
1505
75
235
366
1710
1782
625
729
510
1585
1288
1736
1177
98
2114
47
1055
140
1143
2134
1079
457
832
240
2313
678
1121
1072
537
2211
2158
243
1122
2294
1681
1162
264
1732
1190
2096
155
2215
1284
1191
111
673
1283
1952
1046
1387
1091
121
145
1614
2198
1749
428
604
1368
422
27
198
1379
2326
1846
1459
1631
1984
665
1935
1917
1983
1273
2103
1229
996
1855
39
680
2099
156
396
739
2284
1263
1226
1809
520
2201
637
1305
1406
1884
1854
545
2102
887
2003
795
710
593
2018
1944
2065
844
821
250
2299
1056
358
1357
426
1744
1922
1407
1563
2155
2230
1176
466
1528
289
1442
711
2304
1638
31
132
1154
808
1490
474
1529
1558
1727
1002
1552
1538
572
1323
1767
1395
436
843
623
994
494
880
2193
853
175
2280
1434
1468
615
148
1958
201
1605
2022
544
1220
2327
43
350
1925
1394
432
392
1969
1422
1400
647
2156
1258
2180
2079
1571
1967
1149
1725
1909
1070
1716
915
1635
205
86
1420
2019
2055
2260
2076
376
671
1327
1299
1849
536
416
88
713
783
2278
1995
398
1799
1421
2277
720
2030
774
685
2206
1261
1723
1282
2101
64
1805
372
745
1071
465
2095
823
620
1871
363
1570
55
1728
1354
1059
698
308
1531
2072
374
1058
1770
2150
1819
1764
1127
1876
1826
1041
1671
726
1937
989
2290
493
304
1140
587
1187
2163
2031
1841
1899
1824
194
324
1167
317
1913
1034
2216
2233
15
1970
1172
2227
44
1371
463
1192
1137
2013
1656
1382
1066
423
1857
409
1666
972
161
1924
1646
1912
1974
991
758
1919
1576
1949
2266
343
1110
798
947
2245
2007
1659
2253
1651
925
1462
452
210
1101
1961
2088
1350
936
1739
2123
1
1837
1741
1166
1886
2000
553
913
1888
1269
757
495
1599
1786
558
1484
1360
1444
717
2085
852
841
755
898
1133
2232
824
1419
33
2005
2023
638
535
605
2186
2121
184
737
1673
1756
1838
883
485
704
12
2124
1508
2041
1030
822
2183
1561
1519
809
1637
2126
1022
837
1514
468
957
1680
2129
1976
1898
2054
2128
467
2214
1436
1013
2268
1339
2017
1953
1628
2001
879
790
1375
41
744
1053
847
984
187
330
1207
1784
124
1210
367
797
1094
191
310
1343
1573
284
1090
292
1142
1902
924
134
1472
576
2117
836
2243
259
1663
1060
893
1926
2338
346
748
305
546
781
351
1711
1536
1931
1497
1815
45
2112
2323
969
1942
2177
1595
1617
1396
946
1787
450
524
1836
1391
1113
1130
1334
241
764
42
1927
2170
2070
635
1163
959
445
2057
1794
1001
2086
335
223
1893
1064
2336
1412
949
1740
1221
846
1083
232
1485
1914
1830
2037
511
1606
1721
1366
505
707
614
601
942
1989
1610
500
756
1024
66
2166
563
997
94
342
22
258
1547
1194
1451
1119
1262
1992
768
2274
770
334
1433
1872
532
562
455
238
660
2288
2083
1548
1580
375
2064
2050
1063
1502
1477
828
1410
724
411
1677
1010
644
611
1827
1564
1803
23
956
1892
766
895
955
791
1397
1045
1981
1950
1951
567
788
306
1499
1276
980
1455
141
2263
2248
108
940
922
2238
213
1380
1690
1408
1272
2286
721
1569
260
851
254
96
424
1147
2131
1012
610
341
2073
1106
1081
1904
2061
1515
1980
877
2036
1788
2067
1378
1160
1135
1986
1785
1713
1215
590
2161
1198
2122
1342
1780
1307
2090
1255
872
772
2242
1897
2010
985
787
1321
207
1029
1470
1957
2044
1761
2029
1873
413
1507
386
1781
1494
1073
1844
444
1807
105
287
2285
904
1021
1642
2296
1076
174
1418
1341
1534
751
1601
1546
589
1804
1948
274
796
1890
708
779
2167
1634
778
2125
2097
1829
2120
212
1789
2341
1423
1602
1260
1678
1636
2081
920
78
1037
1675
1574
1555
2175
1202
931
1351
2093
1540
640
435
1715
1358
1097
1042
294
1611
1889
1668
345
106
2334
1345
1851
1492
978
1338
329
689
453
1722
278
1964
425
2223
1349
1881
1105
1148
1036
929
2147
2247
103
761
333
696
2289
551
854
285
1469
624
2306
653
1818
361
1374
1131
752
1801
2187
282
208
521
404
639
574
1057
2174
1427
1437
231
771
352
694
1195
988
218
2321
1141
1112
932
1867
934
1000
365
1613
147
759
1533
784
699
1405
1679
1743
196
645
641
987
200
1049
115
1313
1703
322
2087
1783
1622
719
441
547
215
1240
1864
1763
1717
2314
1402
993
1526
114
2142
483
206
59
244
881
2028
1810
600
83
2332
337
51
1277
820
1696
339
1503
569
1232
1774
2207
1411
1910
518
1318
1971
2119
2015
1685
1709
1729
440
1775
629
668
1359
519
512
"""
return [int(value) for value in numbers.split('\n') if value] | 21.786621 | 112 | 0.844766 |
9f6496149b3e1105425db8cfc7bb830fbb222159 | 4,618 | py | Python | homeassistant/components/sensor/rflink.py | lupin-de-mid/home-assistant | 35f6dbc9dc0fb12d1d04837acbf09dabb325f4fe | [
"Apache-2.0"
] | 1 | 2021-01-02T14:13:46.000Z | 2021-01-02T14:13:46.000Z | homeassistant/components/sensor/rflink.py | lupin-de-mid/home-assistant | 35f6dbc9dc0fb12d1d04837acbf09dabb325f4fe | [
"Apache-2.0"
] | null | null | null | homeassistant/components/sensor/rflink.py | lupin-de-mid/home-assistant | 35f6dbc9dc0fb12d1d04837acbf09dabb325f4fe | [
"Apache-2.0"
] | null | null | null | """Support for Rflink sensors.
For more details about this platform, please refer to the documentation
at https://home-assistant.io/components/light.rflink/
"""
import asyncio
from functools import partial
import logging
from homeassistant.components import group
from homeassistant.components.rflink import (
CONF_ALIASSES, CONF_DEVICES, CONF_NEW_DEVICES_GROUP, DATA_DEVICE_REGISTER,
DATA_ENTITY_LOOKUP, DOMAIN, EVENT_KEY_ID, EVENT_KEY_SENSOR, EVENT_KEY_UNIT,
RflinkDevice, cv, vol)
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT, CONF_NAME, CONF_PLATFORM,
CONF_UNIT_OF_MEASUREMENT)
DEPENDENCIES = ['rflink']
_LOGGER = logging.getLogger(__name__)
SENSOR_ICONS = {
'humidity': 'mdi:water-percent',
'battery': 'mdi:battery',
'temperature': 'mdi:thermometer',
}
CONF_SENSOR_TYPE = 'sensor_type'
PLATFORM_SCHEMA = vol.Schema({
vol.Required(CONF_PLATFORM): DOMAIN,
vol.Optional(CONF_NEW_DEVICES_GROUP, default=None): cv.string,
vol.Optional(CONF_DEVICES, default={}): vol.Schema({
cv.string: {
vol.Optional(CONF_NAME): cv.string,
vol.Required(CONF_SENSOR_TYPE): cv.string,
vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=None): cv.string,
vol.Optional(CONF_ALIASSES, default=[]):
vol.All(cv.ensure_list, [cv.string]),
},
}),
})
def lookup_unit_for_sensor_type(sensor_type):
"""Get unit for sensor type.
Async friendly.
"""
from rflink.parser import UNITS, PACKET_FIELDS
field_abbrev = {v: k for k, v in PACKET_FIELDS.items()}
return UNITS.get(field_abbrev.get(sensor_type))
def devices_from_config(domain_config, hass=None):
"""Parse config and add rflink sensor devices."""
devices = []
for device_id, config in domain_config[CONF_DEVICES].items():
if not config[ATTR_UNIT_OF_MEASUREMENT]:
config[ATTR_UNIT_OF_MEASUREMENT] = lookup_unit_for_sensor_type(
config[CONF_SENSOR_TYPE])
device = RflinkSensor(device_id, hass, **config)
devices.append(device)
# register entity to listen to incoming rflink events
hass.data[DATA_ENTITY_LOOKUP][
EVENT_KEY_SENSOR][device_id].append(device)
return devices
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_devices, discovery_info=None):
"""Setup the Rflink platform."""
# add devices from config
yield from async_add_devices(devices_from_config(config, hass))
# add new (unconfigured) devices to user desired group
if config[CONF_NEW_DEVICES_GROUP]:
new_devices_group = yield from group.Group.async_create_group(
hass, config[CONF_NEW_DEVICES_GROUP], [], True)
else:
new_devices_group = None
@asyncio.coroutine
def add_new_device(event):
"""Check if device is known, otherwise create device entity."""
device_id = event[EVENT_KEY_ID]
rflinksensor = partial(RflinkSensor, device_id, hass)
device = rflinksensor(event[EVENT_KEY_SENSOR], event[EVENT_KEY_UNIT])
# add device entity
yield from async_add_devices([device])
# register entity to listen to incoming rflink events
hass.data[DATA_ENTITY_LOOKUP][
EVENT_KEY_SENSOR][device_id].append(device)
# make sure the event is processed by the new entity
device.handle_event(event)
# maybe add to new devices group
if new_devices_group:
yield from new_devices_group.async_update_tracked_entity_ids(
list(new_devices_group.tracking) + [device.entity_id])
hass.data[DATA_DEVICE_REGISTER][EVENT_KEY_SENSOR] = add_new_device
class RflinkSensor(RflinkDevice):
"""Representation of a Rflink sensor."""
def __init__(self, device_id, hass, sensor_type,
unit_of_measurement, **kwargs):
"""Handle sensor specific args and super init."""
self._sensor_type = sensor_type
self._unit_of_measurement = unit_of_measurement
super().__init__(device_id, hass, **kwargs)
def _handle_event(self, event):
"""Domain specific event handler."""
self._state = event['value']
@property
def unit_of_measurement(self):
"""Return measurement unit."""
return self._unit_of_measurement
@property
def state(self):
"""Return value."""
return self._state
@property
def icon(self):
"""Return possible sensor specific icon."""
if self._sensor_type in SENSOR_ICONS:
return SENSOR_ICONS[self._sensor_type]
| 32.521127 | 79 | 0.689909 |
7fc8f26175e81132ec765280b022a3fa602fe055 | 5,938 | py | Python | docs/source/conf.py | gaurapanasenko/cglm | 981af0565ea823dec8dc7e8d7d332ff65f99d7a4 | [
"MIT"
] | 1 | 2022-03-10T04:41:03.000Z | 2022-03-10T04:41:03.000Z | docs/source/conf.py | gaurapanasenko/cglm | 981af0565ea823dec8dc7e8d7d332ff65f99d7a4 | [
"MIT"
] | null | null | null | docs/source/conf.py | gaurapanasenko/cglm | 981af0565ea823dec8dc7e8d7d332ff65f99d7a4 | [
"MIT"
] | 1 | 2022-01-27T12:18:41.000Z | 2022-01-27T12:18:41.000Z | # -*- coding: utf-8 -*-
#
# cglm documentation build configuration file, created by
# sphinx-quickstart on Tue Jun 6 20:31:05 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '3.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'cglm'
copyright = u'2017, Recep Aslantas'
author = u'Recep Aslantas'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.8.0'
# The full version, including alpha/beta/rc tags.
release = u'0.8.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
html_theme_options = {
# 'github_banner': 'true',
# 'github_button': 'true',
# 'github_user': 'recp',
# 'github_repo': 'cglm',
# 'travis_button': 'true',
# 'show_related': 'true',
# 'fixed_sidebar': 'true'
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'cglmdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'cglm.tex', u'cglm Documentation',
u'Recep Aslantas', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'cglm', u'cglm Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'cglm', u'cglm Documentation',
author, 'cglm', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
# -- Options for todo extension ----------------------------------------------
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for the C domain ------------------------------------------------
c_id_attributes = ['__restrict']
| 29.107843 | 79 | 0.655945 |
a847ac37c9ceda5ec9b173556566a7cf9f986e2a | 16,682 | py | Python | sdk/python/pulumi_gitlab/get_projects.py | pulumi/pulumi-gitlab | 5627240bf718fc765d3a2068acd20621383514c8 | [
"ECL-2.0",
"Apache-2.0"
] | 11 | 2019-09-17T20:41:23.000Z | 2021-12-02T20:39:23.000Z | sdk/python/pulumi_gitlab/get_projects.py | pulumi/pulumi-gitlab | 5627240bf718fc765d3a2068acd20621383514c8 | [
"ECL-2.0",
"Apache-2.0"
] | 67 | 2019-06-21T18:30:30.000Z | 2022-03-31T21:27:20.000Z | sdk/python/pulumi_gitlab/get_projects.py | pulumi/pulumi-gitlab | 5627240bf718fc765d3a2068acd20621383514c8 | [
"ECL-2.0",
"Apache-2.0"
] | 2 | 2019-10-05T10:36:36.000Z | 2021-05-13T18:14:59.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
__all__ = [
'GetProjectsResult',
'AwaitableGetProjectsResult',
'get_projects',
]
@pulumi.output_type
class GetProjectsResult:
"""
A collection of values returned by getProjects.
"""
def __init__(__self__, archived=None, group_id=None, id=None, include_subgroups=None, max_queryable_pages=None, membership=None, min_access_level=None, order_by=None, owned=None, page=None, per_page=None, projects=None, search=None, simple=None, sort=None, starred=None, statistics=None, visibility=None, with_custom_attributes=None, with_issues_enabled=None, with_merge_requests_enabled=None, with_programming_language=None, with_shared=None):
if archived and not isinstance(archived, bool):
raise TypeError("Expected argument 'archived' to be a bool")
pulumi.set(__self__, "archived", archived)
if group_id and not isinstance(group_id, int):
raise TypeError("Expected argument 'group_id' to be a int")
pulumi.set(__self__, "group_id", group_id)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if include_subgroups and not isinstance(include_subgroups, bool):
raise TypeError("Expected argument 'include_subgroups' to be a bool")
pulumi.set(__self__, "include_subgroups", include_subgroups)
if max_queryable_pages and not isinstance(max_queryable_pages, int):
raise TypeError("Expected argument 'max_queryable_pages' to be a int")
pulumi.set(__self__, "max_queryable_pages", max_queryable_pages)
if membership and not isinstance(membership, bool):
raise TypeError("Expected argument 'membership' to be a bool")
pulumi.set(__self__, "membership", membership)
if min_access_level and not isinstance(min_access_level, int):
raise TypeError("Expected argument 'min_access_level' to be a int")
pulumi.set(__self__, "min_access_level", min_access_level)
if order_by and not isinstance(order_by, str):
raise TypeError("Expected argument 'order_by' to be a str")
pulumi.set(__self__, "order_by", order_by)
if owned and not isinstance(owned, bool):
raise TypeError("Expected argument 'owned' to be a bool")
pulumi.set(__self__, "owned", owned)
if page and not isinstance(page, int):
raise TypeError("Expected argument 'page' to be a int")
pulumi.set(__self__, "page", page)
if per_page and not isinstance(per_page, int):
raise TypeError("Expected argument 'per_page' to be a int")
pulumi.set(__self__, "per_page", per_page)
if projects and not isinstance(projects, list):
raise TypeError("Expected argument 'projects' to be a list")
pulumi.set(__self__, "projects", projects)
if search and not isinstance(search, str):
raise TypeError("Expected argument 'search' to be a str")
pulumi.set(__self__, "search", search)
if simple and not isinstance(simple, bool):
raise TypeError("Expected argument 'simple' to be a bool")
pulumi.set(__self__, "simple", simple)
if sort and not isinstance(sort, str):
raise TypeError("Expected argument 'sort' to be a str")
pulumi.set(__self__, "sort", sort)
if starred and not isinstance(starred, bool):
raise TypeError("Expected argument 'starred' to be a bool")
pulumi.set(__self__, "starred", starred)
if statistics and not isinstance(statistics, bool):
raise TypeError("Expected argument 'statistics' to be a bool")
pulumi.set(__self__, "statistics", statistics)
if visibility and not isinstance(visibility, str):
raise TypeError("Expected argument 'visibility' to be a str")
pulumi.set(__self__, "visibility", visibility)
if with_custom_attributes and not isinstance(with_custom_attributes, bool):
raise TypeError("Expected argument 'with_custom_attributes' to be a bool")
pulumi.set(__self__, "with_custom_attributes", with_custom_attributes)
if with_issues_enabled and not isinstance(with_issues_enabled, bool):
raise TypeError("Expected argument 'with_issues_enabled' to be a bool")
pulumi.set(__self__, "with_issues_enabled", with_issues_enabled)
if with_merge_requests_enabled and not isinstance(with_merge_requests_enabled, bool):
raise TypeError("Expected argument 'with_merge_requests_enabled' to be a bool")
pulumi.set(__self__, "with_merge_requests_enabled", with_merge_requests_enabled)
if with_programming_language and not isinstance(with_programming_language, str):
raise TypeError("Expected argument 'with_programming_language' to be a str")
pulumi.set(__self__, "with_programming_language", with_programming_language)
if with_shared and not isinstance(with_shared, bool):
raise TypeError("Expected argument 'with_shared' to be a bool")
pulumi.set(__self__, "with_shared", with_shared)
@property
@pulumi.getter
def archived(self) -> Optional[bool]:
return pulumi.get(self, "archived")
@property
@pulumi.getter(name="groupId")
def group_id(self) -> Optional[int]:
return pulumi.get(self, "group_id")
@property
@pulumi.getter
def id(self) -> str:
"""
The provider-assigned unique ID for this managed resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="includeSubgroups")
def include_subgroups(self) -> Optional[bool]:
return pulumi.get(self, "include_subgroups")
@property
@pulumi.getter(name="maxQueryablePages")
def max_queryable_pages(self) -> Optional[int]:
return pulumi.get(self, "max_queryable_pages")
@property
@pulumi.getter
def membership(self) -> Optional[bool]:
return pulumi.get(self, "membership")
@property
@pulumi.getter(name="minAccessLevel")
def min_access_level(self) -> Optional[int]:
return pulumi.get(self, "min_access_level")
@property
@pulumi.getter(name="orderBy")
def order_by(self) -> Optional[str]:
return pulumi.get(self, "order_by")
@property
@pulumi.getter
def owned(self) -> Optional[bool]:
return pulumi.get(self, "owned")
@property
@pulumi.getter
def page(self) -> Optional[int]:
return pulumi.get(self, "page")
@property
@pulumi.getter(name="perPage")
def per_page(self) -> Optional[int]:
return pulumi.get(self, "per_page")
@property
@pulumi.getter
def projects(self) -> Sequence['outputs.GetProjectsProjectResult']:
"""
A list containing the projects matching the supplied arguments
"""
return pulumi.get(self, "projects")
@property
@pulumi.getter
def search(self) -> Optional[str]:
return pulumi.get(self, "search")
@property
@pulumi.getter
def simple(self) -> Optional[bool]:
return pulumi.get(self, "simple")
@property
@pulumi.getter
def sort(self) -> Optional[str]:
return pulumi.get(self, "sort")
@property
@pulumi.getter
def starred(self) -> Optional[bool]:
return pulumi.get(self, "starred")
@property
@pulumi.getter
def statistics(self) -> Optional[bool]:
return pulumi.get(self, "statistics")
@property
@pulumi.getter
def visibility(self) -> Optional[str]:
"""
The visibility of the project.
"""
return pulumi.get(self, "visibility")
@property
@pulumi.getter(name="withCustomAttributes")
def with_custom_attributes(self) -> Optional[bool]:
return pulumi.get(self, "with_custom_attributes")
@property
@pulumi.getter(name="withIssuesEnabled")
def with_issues_enabled(self) -> Optional[bool]:
return pulumi.get(self, "with_issues_enabled")
@property
@pulumi.getter(name="withMergeRequestsEnabled")
def with_merge_requests_enabled(self) -> Optional[bool]:
return pulumi.get(self, "with_merge_requests_enabled")
@property
@pulumi.getter(name="withProgrammingLanguage")
def with_programming_language(self) -> Optional[str]:
return pulumi.get(self, "with_programming_language")
@property
@pulumi.getter(name="withShared")
def with_shared(self) -> Optional[bool]:
return pulumi.get(self, "with_shared")
class AwaitableGetProjectsResult(GetProjectsResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetProjectsResult(
archived=self.archived,
group_id=self.group_id,
id=self.id,
include_subgroups=self.include_subgroups,
max_queryable_pages=self.max_queryable_pages,
membership=self.membership,
min_access_level=self.min_access_level,
order_by=self.order_by,
owned=self.owned,
page=self.page,
per_page=self.per_page,
projects=self.projects,
search=self.search,
simple=self.simple,
sort=self.sort,
starred=self.starred,
statistics=self.statistics,
visibility=self.visibility,
with_custom_attributes=self.with_custom_attributes,
with_issues_enabled=self.with_issues_enabled,
with_merge_requests_enabled=self.with_merge_requests_enabled,
with_programming_language=self.with_programming_language,
with_shared=self.with_shared)
def get_projects(archived: Optional[bool] = None,
                 group_id: Optional[int] = None,
                 include_subgroups: Optional[bool] = None,
                 max_queryable_pages: Optional[int] = None,
                 membership: Optional[bool] = None,
                 min_access_level: Optional[int] = None,
                 order_by: Optional[str] = None,
                 owned: Optional[bool] = None,
                 page: Optional[int] = None,
                 per_page: Optional[int] = None,
                 search: Optional[str] = None,
                 simple: Optional[bool] = None,
                 sort: Optional[str] = None,
                 starred: Optional[bool] = None,
                 statistics: Optional[bool] = None,
                 visibility: Optional[str] = None,
                 with_custom_attributes: Optional[bool] = None,
                 with_issues_enabled: Optional[bool] = None,
                 with_merge_requests_enabled: Optional[bool] = None,
                 with_programming_language: Optional[str] = None,
                 with_shared: Optional[bool] = None,
                 opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetProjectsResult:
    """
    ## # gitlab\_projects
    Provide details about a list of projects in the Gitlab provider. Listing all projects and group projects with [project filtering](https://docs.gitlab.com/ee/api/projects.html#list-user-projects) or [group project filtering](https://docs.gitlab.com/ee/api/groups.html#list-a-groups-projects) is supported.
    > **NOTE**: This data source supports all available filters exposed by the `xanzy/go-gitlab` package, which might not expose all available filters exposed by the Gitlab APIs.
    ## Example Usage
    ### List projects within a group tree
    ```python
    import pulumi
    import pulumi_gitlab as gitlab
    mygroup = gitlab.get_group(full_path="mygroup")
    group_projects = gitlab.get_projects(group_id=mygroup.id,
        order_by="name",
        include_subgroups=True,
        with_shared=False)
    ```
    ### List projects using the search syntax
    ```python
    import pulumi
    import pulumi_gitlab as gitlab
    projects = gitlab.get_projects(search="postgresql",
        visibility="private")
    ```
    :param bool archived: Limit by archived status.
    :param int group_id: The ID of the group owned by the authenticated user to look projects for within. Cannot be used with `min_access_level`, `with_programming_language` or `statistics`.
    :param bool include_subgroups: Include projects in subgroups of this group. Default is `false`. Needs `group_id`.
    :param int max_queryable_pages: Prevents overloading your Gitlab instance in case of a misconfiguration. Default is `10`.
    :param bool membership: Limit by projects that the current user is a member of.
    :param int min_access_level: Limit to projects where current user has at least this access level, refer to the [official documentation](https://docs.gitlab.com/ee/api/members.html) for values. Cannot be used with `group_id`.
    :param str order_by: Return projects ordered by `id`, `name`, `path`, `created_at`, `updated_at`, or `last_activity_at` fields. Default is `created_at`.
    :param bool owned: Limit by projects owned by the current user.
    :param int page: The page of results to retrieve (pagination).
    :param int per_page: The number of projects to return per page (pagination).
    :param str search: Return list of authorized projects matching the search criteria.
    :param bool simple: Return only the ID, URL, name, and path of each project.
    :param str sort: Return projects sorted in `asc` or `desc` order. Default is `desc`.
    :param bool starred: Limit by projects starred by the current user.
    :param bool statistics: Include project statistics. Cannot be used with `group_id`.
    :param str visibility: Limit by visibility `public`, `internal`, or `private`.
    :param bool with_custom_attributes: Include custom attributes in response _(admins only)_.
    :param bool with_issues_enabled: Limit by projects with issues feature enabled. Default is `false`.
    :param bool with_merge_requests_enabled: Limit by projects with merge requests feature enabled. Default is `false`.
    :param str with_programming_language: Limit by projects which use the given programming language. Cannot be used with `group_id`.
    :param bool with_shared: Include projects shared to this group. Default is `true`. Needs `group_id`.
    """
    # Map the snake_case Python arguments onto the camelCase keys expected by
    # the provider's invoke API.  `None` values are passed through unchanged.
    __args__ = dict()
    __args__['archived'] = archived
    __args__['groupId'] = group_id
    __args__['includeSubgroups'] = include_subgroups
    __args__['maxQueryablePages'] = max_queryable_pages
    __args__['membership'] = membership
    __args__['minAccessLevel'] = min_access_level
    __args__['orderBy'] = order_by
    __args__['owned'] = owned
    __args__['page'] = page
    __args__['perPage'] = per_page
    __args__['search'] = search
    __args__['simple'] = simple
    __args__['sort'] = sort
    __args__['starred'] = starred
    __args__['statistics'] = statistics
    __args__['visibility'] = visibility
    __args__['withCustomAttributes'] = with_custom_attributes
    __args__['withIssuesEnabled'] = with_issues_enabled
    __args__['withMergeRequestsEnabled'] = with_merge_requests_enabled
    __args__['withProgrammingLanguage'] = with_programming_language
    __args__['withShared'] = with_shared
    # Fall back to default invoke options and pin the SDK version when unset.
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronous provider invoke; `.value` unwraps the typed result object.
    __ret__ = pulumi.runtime.invoke('gitlab:index/getProjects:getProjects', __args__, opts=opts, typ=GetProjectsResult).value
    # Re-wrap in the awaitable result type so the data source can be used from
    # both synchronous and output-based (apply) contexts.
    return AwaitableGetProjectsResult(
        archived=__ret__.archived,
        group_id=__ret__.group_id,
        id=__ret__.id,
        include_subgroups=__ret__.include_subgroups,
        max_queryable_pages=__ret__.max_queryable_pages,
        membership=__ret__.membership,
        min_access_level=__ret__.min_access_level,
        order_by=__ret__.order_by,
        owned=__ret__.owned,
        page=__ret__.page,
        per_page=__ret__.per_page,
        projects=__ret__.projects,
        search=__ret__.search,
        simple=__ret__.simple,
        sort=__ret__.sort,
        starred=__ret__.starred,
        statistics=__ret__.statistics,
        visibility=__ret__.visibility,
        with_custom_attributes=__ret__.with_custom_attributes,
        with_issues_enabled=__ret__.with_issues_enabled,
        with_merge_requests_enabled=__ret__.with_merge_requests_enabled,
        with_programming_language=__ret__.with_programming_language,
        with_shared=__ret__.with_shared)
| 44.485333 | 448 | 0.679775 |
652cdf394a98184c0dc1b68da27fb1fd32326b30 | 1,640 | py | Python | api/app/resources/bookings/appointment/appointment_detail.py | pixelater/queue-management | 9881505d4af2b9860aeaf76b9572315dd016c7dc | [
"Apache-2.0"
] | null | null | null | api/app/resources/bookings/appointment/appointment_detail.py | pixelater/queue-management | 9881505d4af2b9860aeaf76b9572315dd016c7dc | [
"Apache-2.0"
] | 1 | 2019-02-26T00:27:31.000Z | 2019-02-26T00:27:31.000Z | api/app/resources/bookings/appointment/appointment_detail.py | pixelater/queue-management | 9881505d4af2b9860aeaf76b9572315dd016c7dc | [
"Apache-2.0"
] | null | null | null | '''Copyright 2018 Province of British Columbia
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.'''
import logging
from sqlalchemy import exc
from flask import abort, g
from flask_restplus import Resource
from app.models.bookings import Appointment
from app.models.theq import CSR
from app.schemas.bookings import AppointmentSchema
from qsystem import api, oidc
@api.route("/appointments/<int:id>", methods=["GET"])
class AppointmentDetail(Resource):
    """Read-only endpoint returning one appointment, scoped to the CSR's office."""

    appointment_schema = AppointmentSchema()

    @oidc.accept_token(require_token=True)
    def get(self, id):
        """Return the appointment with the given id, or 404 if it is not visible to this CSR."""
        csr = CSR.find_by_username(g.oidc_token_info['username'])
        try:
            query = (Appointment.query
                     .filter_by(appointment_id=id)
                     .filter_by(office_id=csr.office_id))
            appointment = query.first_or_404()
            dumped = self.appointment_schema.dump(appointment)
            return {"appointment": dumped.data,
                    "errors": dumped.errors}, 200
        except exc.SQLAlchemyError as error:
            logging.error(error, exc_info=True)
            return {"message": "API is down"}, 500
| 34.166667 | 79 | 0.695122 |
0974d3861ed2e306d10f2b1da01323cd6961cc84 | 9,078 | py | Python | controller.py | liwangGT/crazyflie-tools | ed3ca66d4efd77c7abb321e6f8deeb564da45862 | [
"MIT"
] | 73 | 2015-03-11T05:32:13.000Z | 2022-03-25T23:30:17.000Z | controller.py | liwangGT/crazyflie-tools | ed3ca66d4efd77c7abb321e6f8deeb564da45862 | [
"MIT"
] | 18 | 2015-01-19T20:23:36.000Z | 2017-02-14T21:03:07.000Z | controller.py | blandry/drake-crazyflie-tools | ed3ca66d4efd77c7abb321e6f8deeb564da45862 | [
"MIT"
] | 49 | 2015-06-19T03:16:10.000Z | 2021-01-13T08:56:02.000Z | import lcm
import math
import numpy as np
from crazyflie_t import crazyflie_input_t, crazyflie_controller_commands_t, crazyflie_hover_commands_t, crazyflie_positioninput_t
from threading import Thread
# --- Flight configuration ----------------------------------------------------
GO_TO_START = True  # first hover request targets XHAT_START instead of the current pose
XHAT_START = [0, -0.61, 0.01, 0, 0, 0, 0, 0, 0, 0, 0, 0]  # 12-element start setpoint
NOMINAL_W2 = 16.3683  # nominal offset added to thrust terms while hovering
XHAT_DESIRED = [0, -0.61, 0.50, 0, 0, 0, 0, 0, 0, 0, 0, 0]  # default desired 12-state
# PD gains used to build K32bits (scaled by 180/pi below).
ROLL_KP = 3.5*180/math.pi
PITCH_KP = 3.5*180/math.pi
YAW_KP = 0
ROLL_RATE_KP = 70*180/math.pi
PITCH_RATE_KP = 70*180/math.pi
YAW_RATE_KP = 50*180/math.pi
# Gain matrix for the '32bits' input mode (one row per motor, 12 state columns).
K32bits = np.array([[0,0,0,0,PITCH_KP,YAW_KP,0,0,0,0,PITCH_RATE_KP,YAW_RATE_KP],
                [0,0,0,ROLL_KP,0,-YAW_KP,0,0,0,ROLL_RATE_KP,0,-YAW_RATE_KP],
                [0,0,0,0,-PITCH_KP,YAW_KP,0,0,0,0,-PITCH_RATE_KP,YAW_RATE_KP],
                [0,0,0,-ROLL_KP,0,-YAW_KP,0,0,0,-ROLL_RATE_KP,0,-YAW_RATE_KP]])
# NOTE: the gain names below intentionally rebind (shadow) the values above;
# they are consumed immediately when building Komegasqu.
ROLL_KP = .7
PITCH_KP = .7
YAW_KP = 0
ROLL_RATE_KP = .8
PITCH_RATE_KP = .8
YAW_RATE_KP = .6
# Gain matrix for the 'omegasqu' (squared rotor speed) input mode.
Komegasqu = np.array([[0,0,0,0,PITCH_KP,YAW_KP,0,0,0,0,PITCH_RATE_KP,YAW_RATE_KP],
                  [0,0,0,ROLL_KP,0,-YAW_KP,0,0,0,ROLL_RATE_KP,0,-YAW_RATE_KP],
                  [0,0,0,0,-PITCH_KP,YAW_KP,0,0,0,0,-PITCH_RATE_KP,YAW_RATE_KP],
                  [0,0,0,-ROLL_KP,0,-YAW_KP,0,0,0,-ROLL_RATE_KP,0,-YAW_RATE_KP]])
# TILQR gain matrix used while hovering in the non-position-control branch.
Ktilqr = np.array([[ 7.1623, -0.0000, -15.8114, 0.0000, 6.3078, 11.1803, 2.0167, -0.0000, -7.2476, 0.0000, 1.0742, 3.3139],
                   [ 0.0000, -7.1623, -15.8114, 6.3078, 0.0000, -11.1803, 0.0000, -2.0167, -7.2476, 1.0742, -0.0000, -3.3139],
                   [-7.1623, -0.0000, -15.8114, 0.0000, -6.3078, 11.1803, -2.0167, -0.0000, -7.2476, 0.0000, -1.0742, 3.3139],
                   [-0.0000, 7.1623, -15.8114, -6.3078, -0.0000, -11.1803, 0.0000, 2.0167, -7.2476, -1.0742, 0.0000, -3.3139]])
# This is Ben's hand-tuned K matrix. TILQR from drake with position cost increased.
Kpostilqr = np.array([
    [-0.0000, 3.6690, -0.0000, -0.5318, -0.0000, 0.0000, -0.0000, 0.2581, -0.0000, -0.0079, -0.0000, 0.0000],
    [-3.6690, 0.0000, 0.0000, -0.0000, -0.5318, -0.0000, -0.2581, 0.0000, -0.0000, -0.0000, -0.0079, 0.0000],
    [0.0000, 0.0000, 0.0000, -0.0000, 0.0000, -1.2046, 0.0000, 0.0000, 0.0000, -0.0000, 0.0000, -0.1210],
    [-0.0000, 12.2322, -0.0000, -10.6368, -0.0000, 0.0000, -0.0000, 5.1625, -0.0000, -0.1579, -0.0000, 0.0000],
    [-12.2322, 0.0000, 0.0000, -0.0000, -10.6368, -0.0000, -5.1625, 0.0000, -0.0000, -0.0000, -0.1579, 0.0000],
    [0.0000, 0.0000, 0.0000, -0.0000, 0.0000, -17.2087, 0.0000, 0.0000, 0.0000, -0.0000, 0.0000, -1.7292],
    [-0.0000, 0.0000, -37.4166, -0.0000, -0.0000, 0.0000, -0.0000, 0.0000, -6.3931, -0.0000, -0.0000, 0.0000],
    ])
# Input mode in the Crazyflie
MODES = {
    '32bits': 1,
    'omegasqu': 2,
    'onboardpd': 3,
    }
class Controller():
    """Crazyflie flight controller.

    Computes motor-level control inputs from a state estimate `xhat`, either
    locally (gain-matrix feedback) or by relaying inputs received over LCM.
    Background threads watch LCM channels for run/stop, hover and extra-input
    commands and mutate this object's flags; `get_control_input` reads them.
    """

    def __init__(self,
                 control_input_type='32bits',
                 listen_to_lcm=False,
                 control_input_updated_flag=None,
                 listen_to_extra_input=False,
                 publish_to_lcm=False,
                 pos_control=False):
        """Start the watchdog threads and select the control-input source.

        control_input_type -- key into MODES / the gain-matrix table.
        listen_to_lcm      -- take inputs from the 'crazyflie_input' channel
                              instead of computing them (disables publishing).
        control_input_updated_flag -- optional threading.Event set whenever a
                              new LCM input arrives.
        listen_to_extra_input -- add inputs from 'crazyflie_extra_input'.
        publish_to_lcm     -- republish computed inputs on 'crazyflie_input'.
        pos_control        -- use the 7-element position-control input format.
        """
        self._pos_control = pos_control
        self._go_to_start = GO_TO_START
        self._is_running = True
        # Daemonless background threads; each runs an LCM handle() loop.
        Thread(target=self._controller_watchdog).start()
        self._hover = False
        self._reset_xhat_desired = False
        self._xhat_desired = np.array(XHAT_DESIRED).transpose()
        Thread(target=self._hover_watchdog).start()
        self._K = {'32bits': K32bits, 'omegasqu': Komegasqu, 'tilqr': Ktilqr, 'postilqr': Kpostilqr}
        # Position control uses a 7-element input; other modes use
        # [4 thrusts, offset, mode].
        if self._pos_control:
            self._latest_control_input = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        else:
            self._latest_control_input = [0.0, 0.0, 0.0, 0.0, 0.0, MODES.get(control_input_type,1)]
        self._control_input_type = control_input_type
        self._control_input_updated_flag = control_input_updated_flag
        self._listen_to_lcm = listen_to_lcm
        if listen_to_lcm:
            publish_to_lcm = False
            Thread(target=self._control_input_listener).start()
        self._publish_to_lcm = publish_to_lcm
        if publish_to_lcm:
            self._control_input_lc = lcm.LCM()
        self._listen_to_extra_input = listen_to_extra_input
        if listen_to_extra_input:
            self._extra_control_input = [0.0, 0.0, 0.0, 0.0, 0.0, MODES.get(self._control_input_type,1)]
            Thread(target=self._extra_input_thread).start()

    def get_control_input(self, xhat=None):
        """Return the control input for state estimate `xhat` (12-element).

        Two parallel code paths: position control (7-element result) and
        thrust control ([4 thrusts, offset, mode]).  Both handle the
        not-running, LCM-relay, hover and extra-input cases in the same order.
        """
        if self._pos_control:
            # --- position-control branch ---
            if not self._is_running:
                return [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
            if self._listen_to_lcm or not xhat:
                control_input = list(self._latest_control_input)
            else:
                # State feedback around the desired state, shifted in x/y by
                # the first two extra-input components.
                control_input = np.dot(self._K.get('postilqr'),np.array(xhat).transpose()-(self._xhat_desired+np.array([self._extra_control_input[0], self._extra_control_input[1], 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]))).tolist()
                control_input[6] += NOMINAL_W2 - 15.0
            if self._reset_xhat_desired:
                # Hover requested: latch either the start pose (first time
                # only) or the current x/y/z as the new desired state.
                if self._go_to_start:
                    self._xhat_desired = np.array(XHAT_START).transpose()
                    self._go_to_start = False
                else:
                    self._xhat_desired = np.array([xhat[0], xhat[1], xhat[2], 0, 0, 0, 0, 0, 0, 0, 0, 0]).transpose()
                self._reset_xhat_desired = False
            if self._hover:
                control_input = np.dot(self._K.get('postilqr'),np.array(xhat).transpose()-self._xhat_desired).tolist()
                control_input[6] += NOMINAL_W2 - 15.0
            if self._listen_to_extra_input:
                control_input[6] += self._extra_control_input[4]
            if self._publish_to_lcm:
                msg = crazyflie_positioninput_t()
                msg.input = control_input
                self._control_input_lc.publish('crazyflie_input', msg.encode())
            return control_input
        # --- thrust-control branch ---
        if not self._is_running:
            return [0.0, 0.0, 0.0, 0.0, 0.0, MODES.get(self._control_input_type,1)]
        if self._listen_to_lcm or not xhat:
            control_input = list(self._latest_control_input)
        else:
            thrust_input = np.dot(self._K.get(self._control_input_type),np.array(xhat).transpose()-self._xhat_desired).tolist()
            control_input = thrust_input + [0.0, MODES.get(self._control_input_type,1)]
        if self._reset_xhat_desired:
            if self._go_to_start:
                self._xhat_desired = np.array(XHAT_START).transpose()
                self._go_to_start = False
            else:
                self._xhat_desired = np.array([xhat[0], xhat[1], xhat[2], 0, 0, 0, 0, 0, 0, 0, 0, 0]).transpose()
            self._reset_xhat_desired = False
        if self._hover:
            # Hovering always uses the TILQR gains in 'omegasqu' mode, with
            # the nominal squared-speed offset added to each motor.
            xhat_error = np.array(xhat).transpose()-self._xhat_desired
            thrust_input = np.dot(self._K.get('tilqr'),xhat_error).tolist()
            thrust_input[0] += NOMINAL_W2 - 15
            thrust_input[1] += NOMINAL_W2 - 15
            thrust_input[2] += NOMINAL_W2 - 15
            thrust_input[3] += NOMINAL_W2 - 15
            control_input = thrust_input + [0.0, MODES.get('omegasqu',2)]
        if self._listen_to_extra_input:
            # Extra inputs are only additive when their mode matches ours.
            assert control_input[5] == self._extra_control_input[5], 'The extra input is not of the right type'
            control_input[0] += self._extra_control_input[0]
            control_input[1] += self._extra_control_input[1]
            control_input[2] += self._extra_control_input[2]
            control_input[3] += self._extra_control_input[3]
            control_input[4] += self._extra_control_input[4]
        if self._publish_to_lcm:
            msg = crazyflie_input_t()
            msg.input = control_input[0:4]
            msg.offset = control_input[4]
            msg.type = self._control_input_type
            self._control_input_lc.publish('crazyflie_input', msg.encode())
        return control_input

    def _control_input_listener(self):
        """Thread body: forward 'crazyflie_input' messages to the update handler."""
        _control_input_listener_lc = lcm.LCM()
        _control_input_listener_lc.subscribe('crazyflie_input',self._update_control_input)
        while True:
            _control_input_listener_lc.handle()

    def _update_control_input(self, channel, data):
        """LCM callback: store the newest control input (and signal the flag)."""
        if self._pos_control:
            msg = crazyflie_positioninput_t.decode(data)
            self._latest_control_input = list(msg.input)
        else:
            msg = crazyflie_input_t.decode(data)
            self._latest_control_input = list(msg.input) + [msg.offset, MODES.get(msg.type,1)]
            self._control_input_type = msg.type
        if self._control_input_updated_flag:
            self._control_input_updated_flag.set()

    def _extra_input_thread(self):
        """Thread body: forward 'crazyflie_extra_input' messages to the handler."""
        _extra_input_lc = lcm.LCM()
        _extra_input_lc.subscribe('crazyflie_extra_input',self._update_extra_input)
        while True:
            _extra_input_lc.handle()

    def _update_extra_input(self, channel, data):
        """LCM callback: store the newest additive extra control input."""
        msg = crazyflie_input_t.decode(data)
        self._extra_control_input = list(msg.input) + [msg.offset, MODES.get(msg.type,1)]

    def _controller_watchdog(self):
        """Thread body: watch the run/stop command channel."""
        _watchdog_lc = lcm.LCM()
        _watchdog_lc.subscribe('crazyflie_controller_commands',self._controller_watchdog_update)
        while True:
            _watchdog_lc.handle()

    def _controller_watchdog_update(self, channel, data):
        """LCM callback: enable/disable control output."""
        msg = crazyflie_controller_commands_t.decode(data)
        self._is_running = msg.is_running

    def _hover_watchdog(self):
        """Thread body: watch the hover command channel."""
        _hover_lc = lcm.LCM()
        _hover_lc.subscribe('crazyflie_hover_commands',self._hover_watchdog_update)
        while True:
            _hover_lc.handle()

    def _hover_watchdog_update(self, channel, data):
        """LCM callback: on a rising hover edge, request a new desired state."""
        msg = crazyflie_hover_commands_t.decode(data)
        if not(self._hover) and msg.hover:
            self._reset_xhat_desired = True
self._hover = msg.hover | 40.526786 | 209 | 0.673056 |
2ba9f1c22f3ab70db0ece0669de90e2fb97bb833 | 910 | py | Python | wsgi.py | kemball/gallertoid | 8f42ce2220e98034b18fec9f64575b7e0675a66a | [
"Apache-2.0"
] | null | null | null | wsgi.py | kemball/gallertoid | 8f42ce2220e98034b18fec9f64575b7e0675a66a | [
"Apache-2.0"
] | null | null | null | wsgi.py | kemball/gallertoid | 8f42ce2220e98034b18fec9f64575b7e0675a66a | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python
import os
# Legacy Python 2 OpenShift WSGI bootstrap (uses `execfile` / print statements).
#virtenv = os.environ['OPENSHIFT_PYTHON_DIR'] + '/virtenv/'
virtenv = os.path.join(os.environ.get('OPENSHIFT_PYTHON_DIR','.'),'virtenv')
virtualenv = os.path.join(virtenv, 'bin/activate_this.py')
# Activate the OpenShift virtualenv if present; silently skip when running
# outside OpenShift (the activation script does not exist there).
try:
    execfile(virtualenv, dict(__file__=virtualenv))
except IOError:
    pass
#
# IMPORTANT: Put any additional includes below this line. If placed above this
# line, it's possible required libraries won't be in your searchable path
#
from gallertoid import app as application
#
# Below for testing only
#
if __name__ == '__main__':
    # NOTE(review): hard-coded database credentials checked into source; move
    # them to configuration.  This also only binds a local variable here and
    # does not appear to configure the app — TODO confirm against gallertoid.
    SQL_ALCHEMY_DATABASE_URI= "mysql://adminsX4kPYt:lmX8jFYZpEVj@localhost/gallertoid"
    #application.run(debug=True)
    from wsgiref.simple_server import make_server
    httpd = make_server('localhost', 8051, application)
    # Serve requests until interrupted (development server only).
    print "Server started, request away"
    httpd.serve_forever()
| 31.37931 | 86 | 0.742857 |
a26deeedfb5962af8bf1c925e3f36d5fdd10468e | 298 | py | Python | yt_aspect/api.py | chrishavlin/yt_aspect | 01173da347fcef4abf201f2da3c8233b37f23892 | [
"MIT"
] | 1 | 2021-12-10T20:39:13.000Z | 2021-12-10T20:39:13.000Z | yt_aspect/api.py | data-exp-lab/yt_aspect | 01173da347fcef4abf201f2da3c8233b37f23892 | [
"MIT"
] | 6 | 2021-09-07T19:54:16.000Z | 2021-09-16T20:18:51.000Z | yt_aspect/api.py | chrishavlin/yt_aspect | 01173da347fcef4abf201f2da3c8233b37f23892 | [
"MIT"
] | 1 | 2021-09-15T21:55:57.000Z | 2021-09-15T21:55:57.000Z | from .data_structures import ASPECTDataset # noqa: F401; noqa: F401
from .data_structures import ASPECTUnstructuredIndex # noqa: F401
from .data_structures import ASPECTUnstructuredMesh # noqa: F401
from .fields import ASPECTFieldInfo # noqa: F401
from .io import IOHandlerASPECT # noqa: F401
| 49.666667 | 68 | 0.802013 |
8d9928517430bf12db93876f97f5bcb221cb1f7a | 723 | py | Python | imbo/__main__.py | imneonizer/bounding-box | 62e77fb5ce74dbb0dcb0c102e49083921de6050e | [
"BSD-3-Clause"
] | 6 | 2020-05-03T13:06:26.000Z | 2020-08-17T11:59:22.000Z | imbo/__main__.py | imneonizer/bounding-box | 62e77fb5ce74dbb0dcb0c102e49083921de6050e | [
"BSD-3-Clause"
] | null | null | null | imbo/__main__.py | imneonizer/bounding-box | 62e77fb5ce74dbb0dcb0c102e49083921de6050e | [
"BSD-3-Clause"
] | null | null | null | import os
import argparse
import shutil
def abspath(file_name):
    """Return the absolute path of *file_name* inside this module's fonts directory."""
    # `def` instead of a lambda assigned to a name (PEP 8 E731): clearer
    # tracebacks and a documentable function; behavior is unchanged.
    return os.path.join(
        os.path.dirname(os.path.realpath(__file__)), os.path.join("fonts", file_name))
# Create the fonts directory next to this module if it does not exist yet.
if not os.path.exists(abspath("")):
    os.makedirs(abspath(""))
# Construct the command line arguments (-u/--upload is mandatory).
ap = argparse.ArgumentParser()
ap.add_argument("-u", "--upload", required=True,
                help="path to .ttf file")
args = vars(ap.parse_args())
def upload_font(font_path):
    """Copy a .ttf font file into the module's fonts directory.

    Raises ValueError if *font_path* does not end in '.ttf'.
    """
    # Explicit check instead of `assert`: asserts are stripped under
    # `python -O`, which would silently let non-.ttf files through.
    if not font_path.endswith(".ttf"):
        raise ValueError("invalid file name, only .ttf files are accepted")
    shutil.copy(font_path, abspath(os.path.basename(font_path)))
    print(">> upload successful")
# Perform the upload using the path supplied via -u/--upload.
upload_font(args["upload"])
| 27.807692 | 82 | 0.699862 |
bd04418a7cf4cdac7b85aca7ad8e7295ca6f3beb | 196 | py | Python | rssant/allauth_providers/github/provider.py | zuzhi/rssant | 06d985845f6af3be7097e6d718afba7eeb195ec8 | [
"BSD-3-Clause"
] | 1,176 | 2019-12-24T01:51:22.000Z | 2022-03-29T06:00:25.000Z | rssant/allauth_providers/github/provider.py | zuzhi/rssant | 06d985845f6af3be7097e6d718afba7eeb195ec8 | [
"BSD-3-Clause"
] | 33 | 2020-03-06T03:29:46.000Z | 2022-03-11T06:24:26.000Z | rssant/allauth_providers/github/provider.py | zuzhi/rssant | 06d985845f6af3be7097e6d718afba7eeb195ec8 | [
"BSD-3-Clause"
] | 110 | 2019-12-29T05:49:24.000Z | 2022-03-28T06:44:21.000Z | from allauth.socialaccount.providers.github.provider import GitHubProvider
class RssantGitHubProvider(GitHubProvider):
    """GitHub social-account provider used by RSSAnt.

    Inherits all behavior from allauth's GitHubProvider unchanged; the
    subclass exists so RSSAnt can register its own provider entry.
    """


provider_classes = [RssantGitHubProvider]
| 21.777778 | 74 | 0.826531 |
275d2b61baa39313058c18250db16000b08ea70e | 10,803 | py | Python | Lenia-taichi.py | metachow/Taichi_Lenia | 32be8db10ac9fd3c24cb3732c39683af93fd287d | [
"MIT"
] | 5 | 2022-01-10T10:10:43.000Z | 2022-01-25T12:09:55.000Z | Lenia-taichi.py | metachow/Taichi_Lenia | 32be8db10ac9fd3c24cb3732c39683af93fd287d | [
"MIT"
] | null | null | null | Lenia-taichi.py | metachow/Taichi_Lenia | 32be8db10ac9fd3c24cb3732c39683af93fd287d | [
"MIT"
] | null | null | null | import taichi as ti
import time
from taichi.ui import canvas
# ti.init(arch=ti.cpu)  # swap with the line below to run on CPU instead
ti.init(arch=ti.cuda)  # initialise the Taichi runtime on a CUDA GPU
# color map is copy from: https://forum.taichi.graphics/t/vortex-method-demo/775
class ColorMap:
    """One colour channel of a jet-style map: a clamped tent function."""

    def __init__(self, h, wl, wr, c):
        # h: peak height; wl / wr: left / right half-widths; c: channel centre.
        self.h = h
        self.wl = wl
        self.wr = wr
        self.c = c

    @ti.func
    def clamp(self, x):
        # Restrict x to the unit interval [0, 1].
        return min(1.0, max(0.0, x))

    @ti.func
    def map(self, x):
        # Tent centred at self.c: pick the half-width for the side x lies on,
        # then scale the clamped distance into a [0, 1] intensity.
        width = self.wr
        if x < self.c:
            width = self.wl
        return self.clamp((width - abs(self.clamp(x) - self.c)) / width * self.h)
# One ColorMap per RGB channel; centres at 0.75/0.5/0.25 give a jet-like ramp.
jetR = ColorMap(1.5, .37, .37, .75)
jetG = ColorMap(1.5, .37, .37, .5)
jetB = ColorMap(1.5, .37, .37, .25)
# Taichi-scope helper: map a scalar c in [0, 1] to an RGB colour vector.
@ti.func
def color_map(c):
    return ti.Vector([jetR.map(c), jetG.map(c), jetB.map(c)])
@ti.data_oriented
class Taichi_Lenia:
    """Lenia continuous cellular automaton implemented with Taichi kernels.

    Holds the world state in `world_old` / `world_new` float fields, a
    precomputed convolution kernel, and the display pixel buffer.  One update
    step is: convolve -> growth mapping -> clipped Euler step with dt = 1/time.
    """

    def __init__(self,
                 res,
                 scatter,
                 conv_r,
                 time,
                 miu,
                 sig,
                 kr=1,
                 kb=ti.Vector([1])):
        """Allocate fields.

        res     -- world grid resolution (res x res cells).
        scatter -- display upscaling factor (pixels per cell side).
        conv_r  -- convolution kernel radius (cells, <= max_conv_r).
        time    -- steps per unit time; dt = 1/time.
        miu/sig -- growth function mean and width.
        kr      -- kernel rank (number of shell rings).
        kb      -- per-ring peak heights (length kr).
        """
        #Display
        self.res = res
        self.scatter = scatter
        self.step = 1
        self.paused = True
        self.time = time
        self.dt = 1 / self.time
        # Fixed storage radius; conv_r only changes the kernel's content.
        self.max_conv_r = 30
        self.conv_r = conv_r
        self.kernel_alpha = 4.0
        self.kernel_rank = kr
        self.grow_miu = ti.field(ti.f32, ())
        self.grow_sig = ti.field(ti.f32, ())
        self.grow_miu[None] = miu
        self.grow_sig[None] = sig
        self.total = ti.field(ti.f32, ())
        self.brush = ti.field(ti.f32, ())
        self.brush[None] = 0.03
        # NOTE: the field allocated here is immediately replaced by the
        # caller-supplied vector on the next line — TODO confirm intended.
        self.kernel_beta = ti.field(ti.f32, self.kernel_rank)
        # self.kernel_beta = ti.Vector([1/2, 1, 1/3])
        self.kernel_beta = kb
        self.world_old = ti.field(ti.f32, (self.res, self.res))
        self.world_save = ti.field(ti.f32, (self.res, self.res))
        self.world_new = ti.field(ti.f32, (self.res, self.res))
        self.kernel = ti.field(ti.f32,
                               (2 * self.max_conv_r, 2 * self.max_conv_r))
        self.pixels = ti.Vector.field(3, ti.f32,
                                      (res * scatter, res * scatter))
        # Normalised cursor position [x, y] in [0, 1] for draw/erase brushes.
        self.cursor = ti.field(dtype=float, shape=2)

    # Clamp x into [min, max] (Taichi scope).
    @ti.func
    def clip(self, x, min, max):
        return ti.min(ti.max(x, min), max)

    # Smooth bump on (0, 1): exp(alpha - alpha / (4 r (1 - r))).
    @ti.func
    def kernel_core(self, r):
        return ti.exp(self.kernel_alpha - self.kernel_alpha / (4.0 * r *
                                                               (1.0 - r)))

    # Kernel value at integer offset (x, y) from the kernel centre; the
    # normalised radius selects one of kernel_rank rings scaled by kernel_beta.
    @ti.func
    def kernel_shell(self, x, y):
        c = 0.0
        center = ti.Vector([float(self.max_conv_r), float(self.max_conv_r)])
        xy = ti.Vector([x, y])
        # r = (xy - center).norm() / self.conv_r
        r = self.clip((xy - center).norm() / self.conv_r, 0, 1)
        br = r * self.kernel_rank
        for i in ti.static(range(self.kernel_rank)):
            if ti.floor(br) == i:
                c = self.clip(
                    (r < 1) * self.kernel_beta[i] * self.kernel_core(br - i),
                    0.0, 1.0)
        return c

    # Fill the kernel field and accumulate its (pre-normalisation) sum.
    @ti.kernel
    def kernel_build(self):
        self.total[None] = 0.0
        for i, j in ti.ndrange(2 * self.max_conv_r, 2 * self.max_conv_r):
            self.kernel[i, j] += self.kernel_shell(float(i), float(j))
            self.total[None] += self.kernel_shell(float(i), float(j))
        # print("kernel building completed!,kernel total is {}".format(self.total[None]))
    # Divide `mat` by the stored total so the kernel sums to ~1, then
    # recompute the total as a sanity check.
    @ti.kernel
    def kernel_norm(self, mat: ti.template()):
        for i, j in ti.ndrange(2 * self.max_conv_r, 2 * self.max_conv_r):
            mat[i, j] /= self.total[None]
        self.total[None] = 0.0
        for i, j in ti.ndrange(2 * self.max_conv_r, 2 * self.max_conv_r):
            self.total[None] += mat[i, j]
        print("kernel normalization:{}".format(self.total[None]))

    # Gaussian growth mapping rescaled to [-1, 1].
    @ti.func
    def growth_mapping(self, u):
        return 2.0 * ti.exp(-(u - self.grow_miu[None])**2.0 /
                            (2.0 * (self.grow_sig[None]**2.0))) - 1.0

    # Convolve world_old with the kernel (toroidal wrap via %) and store the
    # growth-mapped result in world_new.
    @ti.kernel
    def world_conv(self):
        for i, j in ti.ndrange(self.res, self.res):
            tmp = 0.0
            for m, n in ti.ndrange(2 * self.max_conv_r, 2 * self.max_conv_r):
                tmp += self.kernel[m, n] * self.world_old[
                    (i - self.max_conv_r + m) % self.res,
                    (j - self.max_conv_r + n) % self.res]
            self.world_new[i, j] = self.growth_mapping(tmp)

    # Euler step: world_new <- clip(world_old + dt * growth, 0, 1).
    @ti.kernel
    def world_update(self):
        for i, j in ti.ndrange(self.res, self.res):
            t = 0.0
            t = self.clip(
                self.dt * self.world_new[i, j] + self.world_old[i, j], 0.0,
                1.0)
            self.world_new[i, j] = t

    # Reset the world to all zeros (the user seeds it with the brush).
    @ti.kernel
    def world_init(self):
        for i, j in ti.ndrange(self.res, self.res):
            # if i < 127 and j < 127 :
            #     self.world_old[i, j] = self.clip(ti.random(), 0.2, 0.9)
            # else:
            #     self.world_old[i, j] = 0
            self.world_old[i, j] = 0

    # Upscale world_old into the colour pixel buffer (scatter x scatter block
    # of pixels per cell).
    @ti.kernel
    def render(self):
        for i, j in ti.ndrange(self.res, self.res):
            for k, l in ti.ndrange(self.scatter, self.scatter):
                self.pixels[i * self.scatter + k, j * self.scatter +
                            l] = color_map(self.world_old[i, j])
                # self.pixels[i*self.scatter+k, j*self.scatter+l] = self.get_color(self.world_old[i, j], 0.0, 1.0)
                # self.pixels[i*self.scatter+k, j*self.scatter+l] = ti.Vector([self.world_old[i, j],self.world_old[i, j],self.world_old[i, j]])
    # Seed random cell values within brush radius of the cursor.
    @ti.kernel
    def draw(self):
        center = ti.Vector([self.cursor[0], self.cursor[1]])
        for i, j in ti.ndrange(self.res, self.res):
            dis = (ti.Vector([i, j]) / self.res - center).norm()
            if dis < self.brush[None]:
                self.world_old[i, j] = self.clip(ti.random(), 0.2, 0.8)

    # Zero cell values within brush radius of the cursor.
    @ti.kernel
    def erase(self):
        center = ti.Vector([self.cursor[0], self.cursor[1]])
        for i, j in ti.ndrange(self.res, self.res):
            dis = (ti.Vector([i, j]) / self.res - center).norm()
            if dis < self.brush[None]:
                self.world_old[i, j] = 0.0

    def save_world(self):
        """Snapshot the current world into the save buffer."""
        self.world_save.copy_from(self.world_old)

    def load_world(self):
        """Restore the world from the save buffer."""
        self.world_old.copy_from(self.world_save)

    def init(self):
        """Reset the world, (re)build and normalise the kernel, and render."""
        self.world_init()
        self.kernel_build()
        self.kernel_norm(self.kernel)
        self.render()
        print(
            "Current parameter: kernel radius:{}, dt:{}, miu:{}, sig:{}, kr:{}, kb:{}"
            .format(self.conv_r, self.dt, self.grow_miu[None],
                    self.grow_sig[None], self.kernel_rank, self.kernel_beta))

    def update(self):
        """Advance the simulation by `step` frames and re-render."""
        for i in range(self.step):
            self.world_conv()
            self.world_update()
            self.world_old.copy_from(self.world_new)
            # print(self.world_old)
        # self.paused = not self.paused
        self.render()
if __name__ == "__main__":
    # Interactive Lenia viewer: LMB draws, RMB erases, SPACE pauses,
    # r resets, s/l save and load a world snapshot, ESC quits.
    res = 256
    scatter = 4
    window = ti.ui.Window("Taichi-Lenia", (res * scatter, res * scatter))
    canvas = window.get_canvas()
    lenia = Taichi_Lenia(res=res,
                         scatter=scatter,
                         conv_r=20,
                         time=10,
                         miu=0.15,
                         sig=0.016,
                         kr=1,
                         kb=ti.Vector([1]))
    lenia.init()
    while window.running:
        # Keyboard events (key-press edge only).
        for e in window.get_events(ti.ui.PRESS):
            if e.key in [ti.ui.ESCAPE]:
                exit()
            elif e.key == ti.ui.SPACE:
                lenia.paused = not lenia.paused
                print('Pause state:{}'.format(lenia.paused))
            elif e.key == 'r':
                lenia.init()
                print("Reset world")
            elif e.key == 's':
                lenia.save_world()
                print("Saved current world")
                lenia.render()
            elif e.key == 'l':
                lenia.load_world()
                print("Loaded saved world")
                lenia.render()
        # Mouse brushes: update the cursor field, then draw or erase.
        if window.is_pressed(ti.ui.LMB):
            lenia.cursor[1] = window.get_cursor_pos()[1]
            lenia.cursor[0] = window.get_cursor_pos()[0]
            lenia.draw()
            lenia.render()
        elif window.is_pressed(ti.ui.RMB):
            lenia.cursor[1] = window.get_cursor_pos()[1]
            lenia.cursor[0] = window.get_cursor_pos()[0]
            lenia.erase()
            lenia.render()
        canvas.set_image(lenia.pixels)
        # In-window parameter panel (growth function and brush size).
        window.GUI.begin("Taichi Lenia", 0.01, 0.01, 0.6, 0.15)
        window.GUI.text("LB press: draw, RB press clear")
        window.GUI.text("r : Reset, SPACE : pause")
        window.GUI.text("S : save, L : load")
        # lenia.conv_r = window.GUI.slider(
        #     "Convolution kernel radius",
        #     lenia.conv_r, 5, 40
        # )
        # lenia.time = window.GUI.slider(
        #     "time step",
        #     lenia.time, 1, 20
        # )
        lenia.grow_miu[None] = window.GUI.slider_float("Growth function miu",
                                                       lenia.grow_miu[None],
                                                       0.01, 0.30)
        lenia.grow_sig[None] = window.GUI.slider_float("Growth function sigma",
                                                       lenia.grow_sig[None],
                                                       0.001, 0.0300)
        lenia.brush[None] = window.GUI.slider_float("Brush radius",
                                                    lenia.brush[None], 0.01,
                                                    0.06)
        window.GUI.end()
        if not lenia.paused:
            lenia.update()
        window.show()
# gui = ti.GUI("Lenia", (res*scatter, res*scatter))
# lenia = Taichi_Lenia(res=res, scatter=scatter, conv_r=13)
# lenia.init()
# while gui.running:
# # for i in range(1000):
# for e in gui.get_events(ti.GUI.PRESS):
# if e.key == ti.GUI.ESCAPE:
# exit()
# elif e.key == ti.GUI.SPACE:
# lenia.paused = not lenia.paused
# print('state:{}'.format(lenia.paused))
# elif e.key == 'r':
# print('Reset')
# lenia.init()
# gui.get_event()
# if(gui.is_pressed(ti.GUI.LMB)):
# lenia.cursor[1]=gui.get_cursor_pos()[1]
# lenia.cursor[0]=gui.get_cursor_pos()[0]
# lenia.draw()
# lenia.render()
# gui.set_image(lenia.pixels)
# if not lenia.paused:
# lenia.update()
# gui.show()
| 32.441441 | 143 | 0.493381 |
3e66c34b0f6eab0a6715188c860cee3816f01970 | 699 | py | Python | dash_docs/chapters/dash_core_components/Slider/examples/slider_drag.py | houtanb/dash-docs | daf5f555117ff5ba53d7d5161c5f08e8c270cad9 | [
"MIT"
] | 379 | 2017-06-21T14:35:52.000Z | 2022-03-20T01:47:14.000Z | dash_docs/chapters/dash_core_components/Slider/examples/slider_drag.py | houtanb/dash-docs | daf5f555117ff5ba53d7d5161c5f08e8c270cad9 | [
"MIT"
] | 746 | 2017-06-21T19:58:17.000Z | 2022-03-23T14:51:24.000Z | dash_docs/chapters/dash_core_components/Slider/examples/slider_drag.py | houtanb/dash-docs | daf5f555117ff5ba53d7d5161c5f08e8c270cad9 | [
"MIT"
] | 201 | 2017-06-21T21:53:19.000Z | 2022-03-17T13:23:55.000Z | import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output
import dash
# Shared sample-app stylesheet used throughout the Dash docs examples.
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
# A slider plus an output div that mirrors the slider's drag/commit values.
app.layout = html.Div([
    dcc.Slider(id='slider-drag'),
    html.Div(id='slider-drag-output', style={'margin-top': 20})
])
@app.callback(Output('slider-drag-output', 'children'),
              [Input('slider-drag', 'drag_value'), Input('slider-drag', 'value')])
def display_value(drag_value, value):
    """Render the slider's live drag_value and its committed value as text."""
    return f'drag_value: {drag_value} | value: {value}'
if __name__ == '__main__':
    # Run the Dash development server (debug enables hot reloading).
    app.run_server(debug=True)
| 29.125 | 82 | 0.715308 |
7b49805c83a685e9fb669f4b32a310c867420dd2 | 26,868 | py | Python | apply_kernels_MLPs.py | Jeffrey-Ede/Denoising-Kernels-MLPs-Autoencoders | 9fd2cba5245b1d984023332bc06b2d9da2c27959 | [
"MIT"
] | 1 | 2020-02-28T12:46:05.000Z | 2020-02-28T12:46:05.000Z | apply_kernels_MLPs.py | Jeffrey-Ede/Denoising-Kernels-MLPs-Autoencoders | 9fd2cba5245b1d984023332bc06b2d9da2c27959 | [
"MIT"
] | null | null | null | apply_kernels_MLPs.py | Jeffrey-Ede/Denoising-Kernels-MLPs-Autoencoders | 9fd2cba5245b1d984023332bc06b2d9da2c27959 | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "1"
import argparse
import numpy as np
import tensorflow as tf
import cv2
from scipy.misc import imread
import time
import os, random
from PIL import Image
import functools
import itertools
import collections
import six
from tensorflow.python.platform import tf_logging as logging
from tensorflow.core.framework import node_def_pb2
from tensorflow.python.framework import device as pydev
from tensorflow.python.training import basic_session_run_hooks
from tensorflow.python.training import session_run_hook
from tensorflow.python.training import training_util
from tensorflow.python.training import device_setter
from tensorflow.contrib.learn.python.learn import run_config
slim = tf.contrib.slim
tf.logging.set_verbosity(tf.logging.DEBUG)
"""
Inference script for the various STEM and TEM autoencoders
TEM and STEM autoencoders are trained for middle-convolution feature space sizes of 1, 4 and 16
Miscellaneous functions for parsing data are also included in this directory
The class-based autoencoder interface supports inference from 160x160 crops or can be
applied to an entire image. Functionality for whole-image applications is included in the class
"""
# --- Paths (Windows workstation layout) ---------------------------------------
data_dir = "G:/unaltered_TEM_crops-171x171/"  # 171x171 TEM crops used for inference
#data_dir = "G:/unaltered_STEM_crops-171x171/"
modelSavePeriod = 2 #Train timestep in hours
modelSavePeriod *= 3600 #Convert to s
model_dir = "G:/noise-removal-kernels-TEM/autoencoder/16/"  # checkpoint/log directory
#model_dir = "G:/noise-removal-kernels-STEM/depth1/"
# --- tf.data input pipeline ---------------------------------------------------
shuffle_buffer_size = 5000
num_parallel_calls = 4
num_parallel_readers = 4
prefetch_buffer_size = 5
batch_size = 1
num_gpus = 1
#batch_size = 8 #Batch size to use during training
num_epochs = 1 #Dataset repeats indefinitely
# --- Logging ------------------------------------------------------------------
logDir = "C:/dump/train/"
log_file = model_dir+"log.txt"
val_log_file = model_dir+"val_log.txt"
variables_file = model_dir+"variables.txt"
log_every = 1 #Log every _ examples
channels = 1 #Greyscale input image
#hparams = experiment_hparams(train_batch_size=batch_size, eval_batch_size=16)
num_workers = 1
increase_batch_size_by_factor = 1  # gradient-accumulation style multiplier
effective_batch_size = increase_batch_size_by_factor*batch_size
val_skip_n = 10
save_result_every_n_batches = 50000
def architectures(inputs, inputs_truth, depths, widths):
    """Build one symmetric-kernel MLP denoising graph per (depth, width) pair.

    Args:
        inputs: input image tensor (placeholder) of shape [batch, h, w, 1].
        inputs_truth: ground-truth image tensor of the same shape.
        depths: iterable of network depths to instantiate.
        widths: iterable of square kernel widths; widths[0] fixes `cropsize`.

    Returns:
        Tuple of (filter_fns, filter_scopes, filter_depths, filter_widths,
        outputs, losses, p, padded_truth) where `p`/`padded_truth` are those
        of the last (depth, width) iteration, matching the original contract.
    """
    cropsize = widths[0]

    def pad(tensor, size):
        """Reflection-pad the two spatial axes of a NHWC tensor by `size`."""
        d1_pad, d2_pad = size
        paddings = tf.constant([[0, 0], [d1_pad, d1_pad], [d2_pad, d2_pad], [0, 0]],
                               dtype=tf.int32)
        return tf.pad(tensor, paddings, mode="REFLECT")

    printij = False  # set True to trace kernel-variable creation/sharing

    def _dump(variables, size, i=None, j=None, x=None, y=None):
        """Debug helper: print the (partially filled) kernel variable grid.

        Replaces ~15 duplicated inline debug blocks from the original, which
        were also hard-coded to range(3) and so only valid for 3x3 kernels.
        """
        if not printij:
            return
        if i is not None:
            print(i, j, x, y, variables[i][j].name)
        for a in range(size):
            for b in range(size):
                if variables[a][b]:
                    print(a, b, variables[a][b].name)
                else:
                    print(a, b)
        print("\n")

    def make_layer(size, type):
        """Create a size x size kernel tensor with eightfold (dihedral) symmetry.

        Only the values in one octant are independent trainable variables;
        every mirror/rotation position reuses the same variable, so the
        learned kernel is symmetric by construction.
        """
        if type == 'biases':
            init = np.array([0.], dtype=np.float32)
        if type == 'weights':
            init = np.array([1./(size*size)], dtype=np.float32)
        if printij:
            print("\nStart:")
        variables = [[None for _ in range(size)] for _ in range(size)]
        _dump(variables, size)
        offset = size//2
        for x in range(size//2+1):
            for y in range(x+1):
                with tf.variable_scope("var_x-{}_y-{}".format(x, y), reuse=False) as scope:
                    # canonical octant position owns the trainable variable
                    variables[offset+x][offset+y] = tf.get_variable(
                        'v', dtype=tf.float32, initializer=init, trainable=True)
                    _dump(variables, size, offset+x, offset+y, x, y)
                    if x > 0:
                        # every symmetric image of (x, y) shares the variable
                        if y == 0:
                            mirrors = [(offset-x, offset),
                                       (offset, offset+x),
                                       (offset, offset-x)]
                        elif y == x:
                            mirrors = [(offset+x, offset-y),
                                       (offset-x, offset+y),
                                       (offset-x, offset-y)]
                        else:
                            mirrors = [(offset-x, offset+y),
                                       (offset+x, offset-y),
                                       (offset-x, offset-y),
                                       (offset+y, offset+x),
                                       (offset-y, offset+x),
                                       (offset+y, offset-x),
                                       (offset-y, offset-x)]
                        for i, j in mirrors:
                            scope.reuse_variables()
                            variables[i][j] = tf.get_variable(name='v')
                            _dump(variables, size, i, j, x, y)
        # assemble the grid of scalar variables into a [1, size, size, 1] kernel
        concats = []
        for i in range(size):
            concats.append(tf.concat(variables[i][:], axis=0))
        kernel = tf.stack(concats, axis=1)
        kernel = tf.expand_dims(kernel, axis=0)
        kernel = tf.expand_dims(kernel, axis=3)
        return kernel

    filters = []
    filter_scopes = []
    filter_depths = []
    filter_widths = []
    outputs = []
    losses = []
    for depth in depths:
        print("Depth: {}".format(depth))
        for width in widths:
            print("Width: {}".format(width))
            default_scope = "depth-{}_size-{}".format(depth, width)

            #Filter creation
            # width/depth are bound as defaults to avoid the late-binding
            # closure bug when the stored fns are called after the loop ends.
            def filter_fn(input, width=width, depth=depth):
                """Apply the symmetric-kernel MLP to one width x width patch;
                returns a scalar prediction per batch element."""
                with tf.variable_scope('w0'):
                    filter = make_layer(width, 'weights')*input
                for i in range(1, depth):
                    with tf.variable_scope('b'+str(i)):
                        filter += make_layer(width, 'biases')
                    filter = tf.sigmoid(filter)
                    filter = tf.contrib.layers.fully_connected(
                        inputs=filter,
                        num_outputs=1,
                        activation_fn=None,
                        weights_initializer=None,
                        biases_initializer=None)
                    with tf.variable_scope('w'+str(i)):
                        filter = make_layer(width, 'weights')*filter
                output = tf.reduce_sum(tf.reduce_sum(tf.reduce_sum(filter, axis=1), axis=1), axis=1)
                return output

            filters.append(filter_fn)
            filter_scopes.append(default_scope)
            filter_depths.append(depth)
            filter_widths.append(width)

            padded = pad(inputs, (0, 0))

            #Generate outputs: slide the filter over every valid patch position
            output = [[None for _ in range(cropsize-width+1)] for _ in range(cropsize-width+1)]
            for x in range(cropsize-width+1):
                for y in range(cropsize-width+1):
                    if not x+y:
                        # very first patch creates the variables
                        with tf.variable_scope(default_scope) as filter_scope:
                            output[x][y] = filter_fn(padded[:, x:(x+width), y:(y+width), :])
                    else:
                        # all later patches reuse them
                        with tf.variable_scope(filter_scope, reuse=True) as filter_scope:
                            output[x][y] = filter_fn(padded[:, x:(x+width), y:(y+width), :])

            # reassemble per-patch scalars into an image tensor
            concats = []
            for i in range(cropsize-width+1):
                concats.append(tf.stack(output[i][:], axis=1))
            output = tf.stack(concats, axis=2)
            output = tf.expand_dims(output, 3)

            padded_truth = pad(inputs_truth, (0, 0))
            # crop the truth to the region the sliding filter could predict
            p = padded_truth[:, (width//2):(cropsize-width//2), (width//2):(cropsize-width//2), :]
            loss = tf.losses.mean_squared_error(output, p)
            # damp very large losses for stability
            loss = tf.cond(loss > 1., lambda: tf.sqrt(loss), lambda: loss)
            outputs.append(output)
            losses.append(loss)
    return filters, filter_scopes, filter_depths, filter_widths, outputs, losses, p, padded_truth
def experiment(img, img_truth, depths, widths):
    """Build every (depth, width) architecture and bundle the results in a dict."""
    (filters, filter_scopes, filter_depths, filter_widths,
     outputs, losses, truth, padded_truth) = architectures(img, img_truth, depths, widths)
    return {
        'filters': filters,
        'filter_scopes': filter_scopes,
        'filter_depths': filter_depths,
        'filter_widths': filter_widths,
        'outputs': outputs,
        'losses': losses,
        'truth': [truth],
        'padded_truth': [padded_truth],
    }
def flip_rotate(img):
    """Applies a random flip || rotation to the image, possibly leaving it unchanged"""
    # The 8 elements of the dihedral group D4, indexed exactly as the
    # original if-chain (0 = identity, 1-3 = rotations, 4-7 = flips).
    transforms = (
        lambda m: m,
        lambda m: np.rot90(m, 1),
        lambda m: np.rot90(m, 2),
        lambda m: np.rot90(m, 3),
        lambda m: np.flip(m, 0),
        lambda m: np.flip(m, 1),
        lambda m: np.flip(np.rot90(m, 1), 0),
        lambda m: np.flip(np.rot90(m, 1), 1),
    )
    return transforms[np.random.randint(0, 8)](img)
def load_image(addr, resize_size=None, img_type=np.float32):
    """Read an image and make sure it is of the correct type. Optionally resize it"""
    # NOTE(review): relies on a module-level `cropsize` and a scipy.misc-style
    # `imread` (given mode='F'), neither of which is defined in this chunk --
    # confirm their origin. `resize_size` is currently unused.
    try:
        img = imread(addr, mode='F')
        # take a random cropsize x cropsize window from the image
        x = np.random.randint(0, img.shape[0]-cropsize)
        y = np.random.randint(0, img.shape[1]-cropsize)
        img = img[x:(x+cropsize),y:(y+cropsize)]
    except:
        # bare except: any read failure yields a flat mid-grey crop rather
        # than aborting the pipeline
        img = 0.5*np.ones((cropsize,cropsize))
        print("Image read failed")
    return img.astype(img_type)
def scale0to1(img):
    """Rescale image between 0 and 1; a constant image becomes all 0.5.

    Note: the constant-image branch fills the input array in place.
    """
    lo = np.min(img)
    hi = np.max(img)
    if lo == hi:
        img.fill(0.5)
    else:
        img = (img - lo) / (hi - lo)
    return img.astype(np.float32)
def norm_img(img):
    """Rescale image to span [-1, 1]; a constant image becomes all 0.

    Note: the constant-image branch fills the input array in place.
    """
    lo = np.min(img)
    hi = np.max(img)
    if lo == hi:
        img.fill(0.)
    else:
        centre = 0.5 * (lo + hi)
        half_range = 0.5 * (hi - lo)
        img = (img - centre) / half_range
    return img.astype(np.float32)
def record_parser(record, cropsize):
    """Load, augment and preprocess one image record for the input pipeline.

    Falls back to an all-ones crop if the result contains any non-finite
    values. NOTE(review): `preprocess` is not defined at module scope in this
    chunk (only as a method of Kernels_and_MLPS) -- confirm where it comes from.
    """
    img = preprocess(flip_rotate(load_image(record)))
    if np.sum(np.isfinite(img)) != cropsize**2:
        img = np.ones((cropsize, cropsize), dtype=np.float32)
    return img
def disp(img):
    """Show a flattened square image in an OpenCV window.

    Reshapes `img` to the nearest square, rescales to [0, 1] for display and
    blocks until a key is pressed. Debug utility; requires cv2.
    """
    d = int(round(np.sqrt(img.size)))
    cv2.namedWindow('CV_Window', cv2.WINDOW_NORMAL)
    cv2.imshow('CV_Window', scale0to1(img.reshape((d,d))))
    cv2.waitKey(0)
    return
class RunConfig(tf.contrib.learn.RunConfig):
    """tf.contrib.learn RunConfig whose uid() can exclude whitelisted fields,
    so whitelisted property changes do not invalidate a session."""
    def uid(self, whitelist=None):
        """
        Generates a 'Unique Identifier' based on all internal fields.
        Caller should use the uid string to check `RunConfig` instance integrity
        in one session use, but should not rely on the implementation details, which
        is subject to change.
        Args:
            whitelist: A list of the string names of the properties uid should not
                include. If `None`, defaults to `_DEFAULT_UID_WHITE_LIST`, which
                includes most properties user allowes to change.
        Returns:
            A uid string.
        """
        if whitelist is None:
            whitelist = run_config._DEFAULT_UID_WHITE_LIST
        # snapshot all non-dunder instance fields
        state = {k: v for k, v in self.__dict__.items() if not k.startswith('__')}
        # Pop out the keys in whitelist.
        for k in whitelist:
            state.pop('_' + k, None)
        ordered_state = collections.OrderedDict(
            sorted(state.items(), key=lambda t: t[0]))
        # For class instance without __repr__, some special cares are required.
        # Otherwise, the object address will be used.
        if '_cluster_spec' in ordered_state:
            ordered_state['_cluster_spec'] = collections.OrderedDict(
                sorted(ordered_state['_cluster_spec'].as_dict().items(), key=lambda t: t[0]))
        # NOTE(review): relies on `collections` and `six` being imported
        # elsewhere in the file -- they are not imported in this chunk.
        return ', '.join(
            '%s=%r' % (k, v) for (k, v) in six.iteritems(ordered_state))
class Kernels_and_MLPS(object):
    '''Class to access all the TEM and STEM autoencoders'''
    def __init__(self,
                 ckpt_loc="G:/noise-removal-kernels-TEM/results/1/model/",
                 visible_cuda=None,
                 depth=1,
                 width=3):
        # Build a single (depth, width) graph and restore its checkpoint.
        depths = [depth]
        widths = [width]
        cropsize = width  # crops fed to the network are width x width
        self.cropsize = cropsize
        if visible_cuda:
            # NOTE(review): `os` is not imported in this chunk -- confirm.
            os.environ["CUDA_VISIBLE_DEVICES"] = visible_cuda
        # Session configuration.
        log_device_placement = False #Once placement is correct, this fills up too much of the cmd window...
        sess_config = tf.ConfigProto(
            allow_soft_placement=True,
            log_device_placement=log_device_placement,
            intra_op_parallelism_threads=1,
            gpu_options=tf.GPUOptions(force_gpu_compatible=True))
        # single-crop placeholders wrapped in lists (experiment() expects lists)
        img_ph = [tf.placeholder(tf.float32, shape=(cropsize, cropsize, 1), name='img')]
        img_truth_ph = [tf.placeholder(tf.float32, shape=(cropsize, cropsize, 1), name='img_truth')]
        exp_dict = experiment(img_ph, img_truth_ph, depths, widths)
        outputs = exp_dict['outputs']
        sess = tf.Session(config=sess_config)
        sess.run(tf.initialize_variables(tf.all_variables()))
        #print(tf.all_variables())
        saver = tf.train.Saver()
        saver.restore(sess, tf.train.latest_checkpoint(ckpt_loc))
        self.sess = sess
        self.inputs = img_ph
        self.outputs = outputs

    def preprocess(self, img, pad_width=0):
        # Replace non-finite values, reflection-pad and add a channel axis.
        img[np.isnan(img)] = 0.
        img[np.isinf(img)] = 0.
        #img = scale0to1(img)
        #img /= np.mean(img)
        img = np.pad(img, pad_width=pad_width, mode='reflect').reshape(
            img.shape[0]+2*pad_width,img.shape[1]+2*pad_width,1)
        return img.astype(np.float32)

    def denoise_crop(self, crop):
        """
        It is assumed that the crop is correctly scaled using the entire image
        """
        cropsize = self.cropsize
        # Run the restored graph on one cropsize x cropsize patch.
        pred = self.sess.run(self.outputs,
                             feed_dict={self.inputs[0]:
                                        self.preprocess(crop)})
        return pred[0]

    def denoise(self, img, preprocess=True, postprocess=True, used_overlap=1):
        """
        img: Image to denoise
        preprocess: Remove nans and infs, make sure it has a (-1, 512, 512, 1) shape and
        reflection pad it
        postprocess: Clip output values to [0.0, 1.0] and reshape to (512, 512)
        overlap: Amount of crop overlap. Predictions for the overlapping region won't be used
        """
        cropsize = self.cropsize
        # NOTE(review): overlap is hard-forced to 0 here, so used_overlap and
        # the comparison below are effectively dead code -- confirm intent.
        overlap = used_overlap = 0
        if overlap < used_overlap:
            overlap = used_overlap
        denoised = np.zeros((img.shape[0]+2*overlap, img.shape[1]+2*overlap))
        # NOTE(review): `contributions` is never used after this point.
        contributions = np.zeros((img.shape[0]+2*overlap, img.shape[1]+2*overlap))
        if preprocess:
            img = self.preprocess(img, pad_width=cropsize//2)
        # Normalise by the mean deviation from the minimum.
        offset = np.min(img)
        if np.max(img) == offset:
            img.fill(1.)
        else:
            scale = (np.mean(img) - offset)
            img = (img-offset) / scale
        # NOTE(review): for a constant image `scale` is never bound, so the
        # postprocess branch below would raise NameError -- verify.
        l_ax1 = img.shape[0]
        l_ax2 = img.shape[1]
        len = 1  # stride; NOTE(review): shadows the builtin len()
        len0 = len1 = len  # NOTE(review): len0/len1 are unused
        # NOTE(review): indentation was lost in extraction; the duplicated
        # crop/denoise statements and the second y-loop below look like
        # copy-paste remnants -- confirm structure against the original file.
        for x in range(0, l_ax1, len):
            # clamp so the final crop stays inside the image
            if l_ax1 - x <= cropsize:
                x = l_ax1 - cropsize
            for y in range(0, l_ax2, len):
                if l_ax2 - y <= cropsize:
                    y = l_ax2 - cropsize
                crop = img[x:(x+cropsize), y:(y+cropsize), :]
                denoised[x, y] = self.denoise_crop(crop=crop)
                crop = img[x:(x+cropsize), y:(y+cropsize), :]
                denoised[x, y] = self.denoise_crop(crop=crop)
            for y in range(0, l_ax2, len):
                if l_ax2 - y <= cropsize:
                    y = l_ax2 - cropsize
                crop = img[x:(x+cropsize), y:(y+cropsize), :]
                denoised[x, y] = self.denoise_crop(crop=crop)
                crop = img[x:(x+cropsize), y:(y+cropsize), :]
                denoised[x, y] = self.denoise_crop(crop=crop)
        if postprocess:
            # NOTE(review): `pred` is undefined in this scope and `scale` may
            # be unbound -- this branch appears buggy; verify before relying on it.
            denoised = denoised*scale+offset if scale else denoised*offset/np.mean(pred)
        return denoised
if __name__ == '__main__':
    # Restore one trained (depth, width) model and denoise the example images.
    depth = 3
    width = 7
    num = 3  # results sub-directory index to load the checkpoint from
    locs = ['G:/noise-removal-kernels-TEM+STEM/examples/orig/'+str(i)+'.tif' for i in range(1, 6)]
    dst = 'G:/noise-removal-kernels-TEM+STEM/examples/kernels+mlps/'
    ckpt_loc = 'G:/noise-removal-kernels-TEM+STEM/results/'+str(num)+'/model/'
    nn = Kernels_and_MLPS(ckpt_loc=ckpt_loc,
                          visible_cuda='1',
                          depth=depth,
                          width=width)
    for i, loc in enumerate(locs, 1):
        # NOTE(review): `imread` is not imported in this chunk -- confirm source.
        img = imread(loc, mode='F')
        img = img[:160, :160]  # crop to the 160x160 region the models expect
        nn_img = nn.denoise(img)
        #Image.fromarray(nn_img).save( dst+'d'+str(depth)+'w'+str(width)+'/'+str(i)+'.tif' )
| 37.368567 | 132 | 0.452285 |
7d988ef6693bea56dcb405ec83dd7821a50b5cd7 | 36,135 | py | Python | MetaScreener/external_sw/mgltools/lib/python2.7/site-packages/SimPy/SimPlot.py | bio-hpc/metascreener | 6900497629f601c4b6c0c37da26de58ffa221988 | [
"Apache-2.0"
] | 8 | 2021-12-14T21:30:01.000Z | 2022-02-14T11:30:03.000Z | MetaScreener/external_sw/mgltools/lib/python2.7/site-packages/SimPy/SimPlot.py | bio-hpc/metascreener | 6900497629f601c4b6c0c37da26de58ffa221988 | [
"Apache-2.0"
] | null | null | null | MetaScreener/external_sw/mgltools/lib/python2.7/site-packages/SimPy/SimPlot.py | bio-hpc/metascreener | 6900497629f601c4b6c0c37da26de58ffa221988 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# $Revision: 1.1.1.14 $ $Date: 2007/01/08 14:47:12 $ kgm
""" SimPlot 1.8 Provides basic plotting services based on Tk/Tkinter.
LICENSE:
Copyright (C) 2002, 2005,2006,2007 Klaus G. Muller, Tony Vignaux
mailto: kgmuller@xs4all.nl and Tony.Vignaux@vuw.ac.nz
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
END OF LICENSE
Derived from plotting package in Grayson's Tkinter book.
The idea to use this package came from Prof. Simon Frost
of U of California, San Diego who also strongly contributed
to the design and implementation of SimPlot.
Change history:
Nov 2, 2003 : Combined utils.py (also from Grayson) with plotting package.
Nov 11, 2003: Made totally OO
Dec 16, 2003: Completion of SimPlot 1.4alpha
Feb 2004: Release with SimPy 1.4
Aug 27, 2005: Added tests for empty point sets to plotXXX functions.
"""
__version__= '1.8 $Revision: 1.1.1.14 $ $Date: 2007/01/08 14:47:12 $'
from Tkinter import *
from Canvas import Line, CanvasText, Rectangle
from tkMessageBox import *
from tkSimpleDialog import askinteger,askstring,askfloat
from tkFileDialog import *
import string, math
from math import pi
from SimPy.Simulation import Monitor
def minCoordinate(clist):
    """Scan clist for a 'minimal' coordinate pair.

    Keeps the last pair whose x OR y does not exceed the current best's;
    falls back to (0, 0) if the entries cannot be unpacked as pairs.
    """
    if len(clist) < 2:
        return clist[0]
    try:
        best_x, best_y = clist[0]
        for cand_x, cand_y in clist[1:]:
            if cand_x <= best_x or cand_y <= best_y:
                best_x, best_y = cand_x, cand_y
    except:
        best_x, best_y = 0, 0
    return best_x, best_y
def maxCoordinate(clist):
    """Scan clist for a 'maximal' coordinate pair.

    Keeps the last pair whose x OR y is at least the current best's;
    falls back to (0, 0) if the entries cannot be unpacked as pairs.
    """
    if len(clist) < 2:
        return clist[0]
    try:
        best_x, best_y = clist[0]
        for cand_x, cand_y in clist[1:]:
            if cand_x >= best_x or cand_y >= best_y:
                best_x, best_y = cand_x, cand_y
    except:
        best_x, best_y = 0, 0
    return best_x, best_y
def minBound(clist):
    """Componentwise minimum (x, y) over clist; (1e7, 1e7) sentinels if empty."""
    min_x = 10000000
    min_y = 10000000
    for px, py in clist:
        min_x = min(min_x, px)
        min_y = min(min_y, py)
    return min_x, min_y
def maxBound(clist):
    """Componentwise maximum (x, y) over clist; (-1e7, -1e7) sentinels if empty."""
    max_x = -10000000
    max_y = -10000000
    for px, py in clist:
        max_x = max(max_x, px)
        max_y = max(max_y, py)
    return max_x, max_y
class SimPlot(object):
    """Tk/Tkinter plotting front end: factory methods for graph primitives
    plus one-call plotLine/plotStep/plotHistogram/plotBars/plotScatter
    windows, each with a File->Postscript menu."""
    def __init__(self,root=Tk()):
        # NOTE(review): the default Tk() is evaluated once at class-definition
        # time, so all default-constructed SimPlots share one Tk root.
        self.root=root
        pass
    def mainloop(self):
        # NOTE(review): an identically named mainloop is redefined at the
        # bottom of this class; that later definition takes effect.
        self.root.mainloop()
    def makeLine(self,points,**attr):
        # Factory for a GraphLine primitive.
        return GraphLine(points, **attr)
    def makeStep(self,points, **attr):
        # Build a piecewise-constant (staircase) line from sampled points.
        #convert data list to steps
        step0=points[:]
        step1=[[0,0]]*2*len(step0)
        prev=[step0[0][0],0]
        for x in range(len(step0)):
            step1[2*x]=[step0[x][0],prev[1]]
            step1[2*x+1]=step0[x]
            prev=step0[x]
        #draw the line
        return self.makeLine(step1,smooth=False, **attr)
    def makeHistogram(self,points,**attr):
        """Makes a histogram graph. 'points' must be a Histogram-like
        object.
        """
        #convert data list to bars
        step0=points[:]
        step1=[[0,0]]*3*len(step0)
        prev=[step0[0][0],0]
        for x in range(len(step0)):
            step1[3*x]=[step0[x][0],prev[1]]
            step1[3*x+1]=[step0[x][0],0.0]
            step1[3*x+2]=step0[x]
            prev=step0[x]
        # close the final bar (assumes uniform bin width)
        deltax=step0[1][0]-step0[0][0]
        step1.append([prev[0]+deltax,prev[1]])
        step1.append([prev[0]+deltax,0])
        #make the line
        return self.makeLine(step1,smooth=False,
                             xaxis=(step1[0][0],step1[-1][0]),
                             **attr)
    def makeSymbols(self,points,**attr):
        # Factory for a GraphSymbols (scatter) primitive.
        return GraphSymbols(points,**attr)
    def makeBars(self,points,**attr):
        # Factory for a GraphBars primitive.
        return GraphBars(points,**attr)
    def makeGraphObjects(self,objects):
        # Bundle primitives into a composite for drawing.
        return GraphObjects(objects)
    def makeGraphBase(self, master, width, height,
                      background='white',title="",xtitle='',ytitle='', **kw):
        # Factory for the GraphBase canvas widget.
        return GraphBase(master, width, height,
                         background,title,xtitle,ytitle,**kw)
    def graphMenu(self,root,graph):
        """To provide a File menu (postscript output, more to come)
        to the plotxxxx plots"""
        mainMenu=Menu(root)
        root.config(menu=mainMenu)
        def postscriptout():
            # NOTE(review): graph.postscr() is not defined in the visible part
            # of GraphBase -- confirm it exists further down the file.
            graph.postscr()
        file=Menu(mainMenu)
        file.add_command(label="Postscript",command=postscriptout)
        mainMenu.add_cascade(label='File',menu=file,underline=0)
    def plotLine(self,points,windowsize=(500,300),title='',width=1,color='black',
                 smooth=0,background='white',xlab='x',ylab='y',
                 xaxis='automatic',yaxis='automatic'):
        """Generates a line chart, with menu to save as Postscript file.
        'points' can be a Monitor instance.
        """
        if points!=[]:
            root=Toplevel()
            f=Frame(root)
            try: #if it is like a Monitor, take xlab,ylab from it
                ylab=points.ylab
                xlab=points.tlab
                if not title: title=points.name
            except:
                pass
            line=self.makeLine(points, width=width,color=color,smooth=smooth)
            gr=self.makeGraphObjects([line])
            graph = self.makeGraphBase(f, windowsize[0], windowsize[1],
                                       title=title,xtitle=xlab,
                                       ytitle=ylab,background=background)
            graph.pack(side=LEFT, fill=BOTH, expand=YES)
            graph.draw(gr,xaxis=xaxis, yaxis=yaxis)
            #File menu
            self.graphMenu(root,graph)
            f.pack()
            return graph
        else:
            print "SimPlot.plotline: dataset empty, no plot."
            return None
    def plotStep(self,points,windowsize=(500,300),title='',width=1,color='black',
                 background='white',xlab='x',ylab='y',
                 xaxis='automatic',yaxis='automatic'):
        """Generates a step chart, with menu to save as Postscript file.
        'points' can be a Monitor instance.
        """
        if points!=[]:
            #convert data list to steps
            step0=points[:]
            step1=[[0,0]]*2*len(step0)
            prev=[step0[0][0],0]
            for x in range(len(step0)):
                step1[2*x]=[step0[x][0],prev[1]]
                step1[2*x+1]=step0[x]
                prev=step0[x]
            #treat monitor case
            try: #if it is like a Monitor, take xlab,ylab from it
                ylab=points.ylab
                xlab=points.tlab
                if not title: title=points.name
            except:
                pass
            #draw the line
            smooth=False
            return self.plotLine(step1,windowsize,title,width,color,
                                 smooth,background,xlab,ylab,
                                 xaxis,yaxis)
        else:
            print "SimPlot.plotStep: dataset empty, no plot."
            return None
    def plotHistogram(self,points,windowsize=(500,300),title='',width=1,color='black',
                      background='white',xlab='x',ylab='y',
                      xaxis='automatic',yaxis='automatic'):
        """Makes a histogram plot. 'points' can be a Monitor instance.
        """
        if points!=[]:
            #convert data list to bars
            step0=points[:]
            step1=[[0,0]]*3*len(step0)
            prev=[step0[0][0],0]
            for x in range(len(step0)):
                step1[3*x]=[step0[x][0],prev[1]]
                step1[3*x+1]=[step0[x][0],0.0]
                step1[3*x+2]=step0[x]
                prev=step0[x]
            # close the final bar (assumes uniform bin width)
            deltax=step0[1][0]-step0[0][0]
            step1.append([prev[0]+deltax,prev[1]])
            step1.append([prev[0]+deltax,0])
            #treat monitor case
            try: #if it is like a Monitor, take xlab,ylab from it
                ylab=points.ylab
                xlab=points.tlab
                if not title: title=points.name
            except:
                pass
            #draw the line
            smooth=False
            return self.plotLine(step1,windowsize=windowsize,title=title,width=width,
                                 color=color,smooth=smooth,background=background,
                                 xlab=xlab,ylab=ylab,xaxis=(step1[0][0],step1[-1][0]),
                                 yaxis=yaxis)
        else:
            print "SimPlot.plotHistogram: dataset empty, no plot."
            return None
    def plotBars(self,points,windowsize=(500,300),title='',color='black',
                 width=1,size=3,fillcolor='black',fillstyle='',
                 outline='black',background='white',xlab='x',ylab='y',
                 xaxis='automatic',yaxis='automatic',anchor=0.0):
        """Generates a bar chart, with menu to save as Postscript file.
        'points' can be a Monitor instance.
        """
        if points!=[]:
            root=Toplevel()
            f=Frame(root)
            try: #if it is like a Monitor, take xlab,ylab from it
                ylab=points.ylab
                xlab=points.tlab
                if not title: title=points.name
            except:
                pass
            bars=self.makeBars(points, width=width,size=size,color=color,
                               fillcolor=fillcolor,fillstyle=fillstyle,
                               outline=outline,anchor=anchor)
            gr=self.makeGraphObjects([bars])
            graph = self.makeGraphBase(f, windowsize[0],windowsize[1],
                                       title=title,xtitle=xlab,
                                       ytitle=ylab,background=background)
            graph.pack(side=LEFT, fill=BOTH, expand=YES)
            graph.draw(gr,xaxis=xaxis, yaxis=yaxis)
            #File menu
            self.graphMenu(root,graph)
            f.pack()
            return graph
        else:
            print "SimPlot.plotBars dataset empty, no plot."
            return None
    def plotScatter(self,points,windowsize=(500,300),title='',width=1,color='black',
                    fillcolor='black',size=2,fillstyle='',
                    outline='black',marker='circle',
                    background='white',xlab='x',ylab='y',
                    xaxis='automatic',yaxis='automatic'):
        # Scatter plot of points with the chosen marker shape.
        if points!=[]:
            root=Toplevel()
            f=Frame(root)
            try: #if it is like a Monitor, take xlab,ylab from it
                ylab=points.ylab
                xlab=points.tlab
                if not title: title=points.name
            except:
                pass
            scat=self.makeSymbols(points, width=width,color=color,size=size,
                                  marker=marker,fillcolor=fillcolor,
                                  fillstyle=fillstyle,outline=outline)
            gr=self.makeGraphObjects([scat])
            graph = self.makeGraphBase(f, windowsize[0],windowsize[1],
                                       title=title,xtitle=xlab,
                                       ytitle=ylab,background=background)
            graph.pack(side=LEFT, fill=BOTH, expand=YES)
            graph.draw(gr,xaxis=xaxis, yaxis=yaxis)
            #File menu
            self.graphMenu(root,graph)
            f.pack()
            return graph
        else:
            print "SimPlot.plotScatter: dataset empty, no plot."
            return None
    def mainloop(self):
        # Duplicate definition (see NOTE on the first mainloop above).
        self.root.mainloop()
class GraphPoints:
    """Base class for drawable point sets.

    Subclasses must define a class-level _attributes dict of drawing
    defaults; caller-supplied keyword attributes override them.
    """
    def __init__(self, points, attr):
        self.points = points
        self.scaled = self.points  # canvas-space copy, set by fitToScale()
        self.attributes = {}
        # merge caller-supplied attributes over the subclass defaults
        for name, value in self._attributes.items():
            try:
                value = attr[name]
            except KeyError: pass
            self.attributes[name] = value
    def boundingBox(self):
        # componentwise min/max over the raw (unscaled) points
        return minBound(self.points), maxBound(self.points)
    def fitToScale(self, scale=(1,1), shift=(0,0)):
        # map raw points into canvas coordinates
        self.scaled = []
        for x,y in self.points:
            self.scaled.append(((scale[0]*x)+shift[0],\
                                (scale[1]*y)+shift[1]))
        # NOTE(review): result of this get() is discarded -- looks vestigial.
        self.attributes.get('anchor', 0.0)
        # canvas-space y of the bar anchor line
        self.anchor = scale[1]*self.attributes.get('anchor', 0.0)+\
                      shift[1]
class GraphLine(GraphPoints):
    """Polyline primitive, optionally spline-smoothed."""
    def __init__(self, points, **attr):
        GraphPoints.__init__(self, points, attr)
    # default drawing attributes, overridable per instance via **attr
    _attributes = {'color': 'black',
                   'width': 1,
                   'smooth': 0,
                   'splinesteps': 12}
    def draw(self, canvas):
        # Draw either one smoothed polyline or a chain of straight segments.
        color = self.attributes['color']
        width = self.attributes['width']
        smooth = self.attributes['smooth']
        steps = self.attributes['splinesteps']
        arguments = (canvas,)
        if smooth:
            # one smoothed line through all scaled points
            for i in range(len(self.points)):
                x1, y1 = self.scaled[i]
                arguments = arguments + (x1, y1)
        else:
            # consecutive point pairs as straight segments
            for i in range(len(self.points)-1):
                x1, y1 = self.scaled[i]
                x2, y2 = self.scaled[i+1]
                arguments = arguments + (x1, y1, x2, y2)
        # Python 2 apply(); Line comes from the Tkinter Canvas module
        apply(Line, arguments, {'fill': color, 'width': width,
                                'smooth': smooth, 'splinesteps':steps})
class GraphSymbols(GraphPoints):
    """Scatter primitive: draws one marker per point."""
    def __init__(self, points, **attr):
        GraphPoints.__init__(self, points, attr)
    # default drawing attributes, overridable per instance via **attr
    _attributes = {'color': 'black',
                   'width': 1,
                   'fillcolor': 'black',
                   'size': 2,
                   'fillstyle': '',
                   'outline': 'black',
                   'marker': 'circle'}
    def draw(self, canvas):
        # render every scaled point with the configured marker shape
        color = self.attributes['color']
        size = self.attributes['size']
        fillcolor = self.attributes['fillcolor']
        marker = self.attributes['marker']
        fillstyle = self.attributes['fillstyle']
        self._drawmarkers(canvas, self.scaled, marker, color,
                          fillstyle, fillcolor, size)
    def _drawmarkers(self, c, coords, marker='circle', color='black',
                     fillstyle='', fillcolor='', size=2):
        # Dispatch to the _<marker> method by name.
        # NOTE(review): eval() on the marker string -- raises on unknown names;
        # a dict or getattr lookup would be safer.
        l = []
        f = eval('self._' +marker)
        for xc, yc in coords:
            id = f(c, xc, yc, outline=color, size=size,
                   fill=fillcolor, fillstyle=fillstyle)
            # cross/plus markers return a tuple of two canvas item ids
            if type(id) is type(()):
                for item in id: l.append(item)
            else:
                l.append(id)
        return l
    def _circle(self, c, xc, yc, size=1, fill='', outline='black',
                fillstyle=''):
        # unit oval scaled up by 5*size around (xc, yc)
        id = c.create_oval(xc-0.5, yc-0.5, xc+0.5, yc+0.5,
                           fill=fill, outline=outline,
                           stipple=fillstyle)
        c.scale(id, xc, yc, size*5, size*5)
        return id
    def _dot(self, c, xc, yc, size=1, fill='', outline='black',
             fillstyle=''):
        # like _circle but half the diameter
        id = c.create_oval(xc-0.5, yc-0.5, xc+0.5, yc+0.5,
                           fill=fill, outline=outline,
                           stipple=fillstyle)
        c.scale(id, xc, yc, size*2.5, size*2.5)
        return id
    def _square(self, c, xc, yc, size=1, fill='', outline='black',
                fillstyle=''):
        id = c.create_rectangle(xc-0.5, yc-0.5, xc+0.5, yc+0.5,
                                fill=fill, outline=outline,
                                stipple=fillstyle)
        c.scale(id, xc, yc, size*5, size*5)
        return id
    def _triangle(self, c, xc, yc, size=1, fill='', outline='black',
                  fillstyle=''):
        # upward equilateral triangle centred on (xc, yc)
        id = c.create_polygon(-0.5, 0.288675134595,
                              0.5, 0.288675134595,
                              0.0, -0.577350269189, fill=fill,
                              outline=outline, stipple=fillstyle)
        c.move(id, xc, yc)
        c.scale(id, xc, yc, size*5, size*5)
        return id
    def _triangle_down(self, c, xc, yc, size=1, fill='',
                       outline='black', fillstyle=''):
        # downward equilateral triangle centred on (xc, yc)
        id = c.create_polygon(-0.5, -0.288675134595,
                              0.5, -0.288675134595,
                              0.0, 0.577350269189, fill=fill,
                              outline=outline, stipple=fillstyle)
        c.move(id, xc, yc)
        c.scale(id, xc, yc, size*5, size*5)
        return id
    def _cross(self, c, xc, yc, size=1, fill='black', outline=None,
               fillstyle=''):
        # X marker from two diagonal lines; returns both item ids
        if outline: fill=outline
        id1 = c.create_line(xc-0.5, yc-0.5, xc+0.5, yc+0.5,
                            fill=fill)
        id2 = c.create_line(xc-0.5, yc+0.5, xc+0.5, yc-0.5,
                            fill=fill)
        c.scale(id1, xc, yc, size*5, size*5)
        c.scale(id2, xc, yc, size*5, size*5)
        return id1, id2
    def _plus(self, c, xc, yc, size=1, fill='black', outline=None,
              fillstyle=''):
        # + marker from two axis-aligned lines; returns both item ids
        if outline: fill=outline
        id1 = c.create_line(xc-0.5, yc, xc+0.5, yc, fill=fill)
        id2 = c.create_line(xc, yc+0.5, xc, yc-0.5, fill=fill)
        c.scale(id1, xc, yc, size*5, size*5)
        c.scale(id2, xc, yc, size*5, size*5)
        return id1, id2
class GraphBars(GraphPoints):
    """Vertical bar primitive; bars extend from each point to the anchor line."""
    def __init__(self, points, **attr):
        GraphPoints.__init__(self, points, attr)
    # default drawing attributes, overridable per instance via **attr
    _attributes = {'color': 'black',
                   'width': 1,
                   'fillcolor': 'black',
                   'size': 3,
                   'fillstyle': '',
                   'outline': 'black'}
    def draw(self, canvas):
        color = self.attributes['color']
        width = self.attributes['width']
        fillstyle = self.attributes['fillstyle']
        outline = self.attributes['outline']
        spread = self.attributes['size']  # half-width of each bar, in pixels
        arguments = (canvas,)  # NOTE(review): unused
        p1, p2 = self.boundingBox()  # NOTE(review): unused
        for i in range(len(self.points)):
            x1, y1 = self.scaled[i]
            # bar from the data point down/up to the anchor y-value
            canvas.create_rectangle(x1-spread, y1, x1+spread,
                                    self.anchor, fill=color,
                                    width=width, outline=outline,
                                    stipple=fillstyle)
class GraphObjects:
    """Composite of graph primitives; delegates bounds, scaling and drawing."""
    def __init__(self, objects):
        # objects: list of GraphLine/GraphSymbols/GraphBars-like primitives
        self.objects = objects
    def boundingBox(self):
        """Union of the children's bounding boxes."""
        lower, upper = self.objects[0].boundingBox()
        for obj in self.objects[1:]:
            lo, hi = obj.boundingBox()
            lower = minBound([lower, lo])
            upper = maxBound([upper, hi])
        return lower, upper
    def fitToScale(self, scale=(1,1), shift=(0,0)):
        """Rescale every child in place."""
        for obj in self.objects:
            obj.fitToScale(scale, shift)
    def draw(self, canvas):
        """Draw every child onto the canvas."""
        for obj in self.objects:
            obj.draw(canvas)
class GraphBase(Frame):
def __init__(self, master, width, height,
background='white',title="",xtitle='',ytitle='', **kw):
apply(Frame.__init__, (self, master), kw)
self.title=title
self.xtitle=xtitle
self.ytitle=ytitle
self.canvas = Canvas(self, width=width, height=height,
background=background)
self.canvas.pack(fill=BOTH, expand=YES)
border_w = self.canvas.winfo_reqwidth() - \
string.atoi(self.canvas.cget('width'))
border_h = self.canvas.winfo_reqheight() - \
string.atoi(self.canvas.cget('height'))
self.border = (border_w, border_h)
self.canvas.bind('<Configure>', self.configure)
self.plotarea_size = [None, None]
self._setsize()
self.last_drawn = None
self.font = ('Verdana', 10)
def configure(self, event):
new_width = event.width-self.border[0]
new_height = event.height-self.border[1]
width = string.atoi(self.canvas.cget('width'))
height = string.atoi(self.canvas.cget('height'))
if new_width == width and new_height == height:
return
self.canvas.configure(width=new_width, height=new_height)
self._setsize()
self.clear()
self.replot()
def bind(self, *args):
apply(self.canvas.bind, args)
def _setsize(self):
self.width = string.atoi(self.canvas.cget('width'))
self.height = string.atoi(self.canvas.cget('height'))
#self.plotarea_size[0] = 0.90 * self.width
#self.plotarea_size[1] = 0.90 * -self.height
self.plotarea_size[0] = 0.90 * self.width
self.plotarea_size[1] = 0.90 * -self.height
xo = 0.5*(self.width-self.plotarea_size[0])
yo = self.height-0.5*(self.height+self.plotarea_size[1])
self.plotarea_origin = (xo, yo)
def draw(self, graphics, xaxis = 'automatic', yaxis = 'automatic'):
self.last_drawn = (graphics, xaxis, yaxis)
p1, p2 = graphics.boundingBox()
xaxis = self._axisInterval(xaxis, p1[0], p2[0])
yaxis = self._axisInterval(yaxis, p1[1], p2[1])
text_width = [0., 0.]
text_height = [0., 0.]
if xaxis is not None:
p1 = xaxis[0], p1[1]
p2 = xaxis[1], p2[1]
xticks = self._ticks(xaxis[0], xaxis[1])
bb = self._textBoundingBox(xticks[0][1])
text_height[1] = bb[3]-bb[1]
text_width[0] = 0.5*(bb[2]-bb[0])
bb = self._textBoundingBox(xticks[-1][1])
text_width[1] = 0.5*(bb[2]-bb[0])
else:
xticks = None
if yaxis is not None:
p1 = p1[0], yaxis[0]
p2 = p2[0], yaxis[1]
yticks = self._ticks(yaxis[0], yaxis[1])
for y in yticks:
bb = self._textBoundingBox(y[1])
w = bb[2]-bb[0]
text_width[0] = max(text_width[0], w)
h = 0.5*(bb[3]-bb[1])
text_height[0] = h
text_height[1] = max(text_height[1], h)
else:
yticks = None
text1 = [text_width[0], -text_height[1]]
text2 = [text_width[1], -text_height[0]]
scale = ((self.plotarea_size[0]-text1[0]-text2[0]) / \
(p2[0]-p1[0]),
(self.plotarea_size[1]-text1[1]-text2[1]) / \
(p2[1]-p1[1]))
shift = ((-p1[0]*scale[0]) + self.plotarea_origin[0] + \
text1[0],
(-p1[1]*scale[1]) + self.plotarea_origin[1] + \
text1[1])
self._drawAxes(self.canvas, xaxis, yaxis, p1, p2,
scale, shift, xticks, yticks)
graphics.fitToScale(scale, shift)
graphics.draw(self.canvas)
def _axisInterval(self, spec, lower, upper):
if spec is None:
return None
if spec == 'minimal':
if lower == upper:
return lower-0.5, upper+0.5
else:
return lower, upper
if spec == 'automatic':
range = upper-lower
if range == 0.:
return lower-0.5, upper+0.5
log = math.log10(range)
power = math.floor(log)
fraction = log-power
if fraction <= 0.05:
power = power-1
grid = 10.**power
lower = lower - lower % grid
mod = upper % grid
if mod != 0:
upper = upper - mod + grid
return lower, upper
if type(spec) == type(()):
lower, upper = spec
if lower <= upper:
return lower, upper
else:
return upper, lower
raise ValueError, str(spec) + ': illegal axis specification'
def _drawAxes(self, canvas, xaxis, yaxis,
              bb1, bb2, scale, shift, xticks, yticks):
    """Draw the axis lines, tick marks, tick labels and titles.

    bb1/bb2 are the data-space corners of the plot; scale/shift map
    data coordinates to canvas pixels (pixel = data*scale + shift).
    Each axis is drawn twice (bottom/top, left/right); labels are only
    written on the first pass.
    """
    dict = {'anchor': N, 'fill': 'black'}
    if self.font is not None:
        dict['font'] = self.font
    if xaxis is not None:
        #draw x-axis
        lower, upper = xaxis
        text = 1
        once=1
        for y, d in [(bb1[1], -3), (bb2[1], 3)]:
            #d=.5 of tick-length
            p1 = (scale[0]*lower)+shift[0], (scale[1]*y)+shift[1]
            # Keep the first pass's endpoints for title placement below.
            if once: pp1=p1
            p2 = (scale[0]*upper)+shift[0], (scale[1]*y)+shift[1]
            if once: pp2=p2
            once = 0
            Line(self.canvas, p1[0], p1[1], p2[0], p2[1],
                 fill = 'black', width = 1)
            if xticks:
                for x, label in xticks:
                    p = (scale[0]*x)+shift[0], \
                        (scale[1]*y)+shift[1]
                    Line(self.canvas, p[0], p[1], p[0], p[1]+d,
                         fill = 'black', width = 1)
                    if text:
                        dict['text'] = label
                        apply(CanvasText, (self.canvas, p[0],
                                           p[1]+2), dict) ##KGM 14 Aug 03
            # Labels only on the first (bottom) pass.
            text = 0
        #write x-axis title
        CanvasText(self.canvas,(pp2[0]-pp1[0])/2.+pp1[0],pp1[1]+22,text=self.xtitle)
        #write graph title
        CanvasText(self.canvas,(pp2[0]-pp1[0])/2.+pp1[0],7,text=self.title)
    dict['anchor'] = E
    if yaxis is not None:
        #draw y-axis
        lower, upper = yaxis
        text = 1
        once = 1
        for x, d in [(bb1[0], -3), (bb2[0], 3)]:
            p1 = (scale[0]*x)+shift[0], (scale[1]*lower)+shift[1]
            p2 = (scale[0]*x)+shift[0], (scale[1]*upper)+shift[1]
            if once: pp1=p1 ;pp2=p2
            once = 0
            Line(self.canvas, p1[0], p1[1], p2[0], p2[1],
                 fill = 'black', width = 1)
            if yticks:
                for y, label in yticks:
                    p = (scale[0]*x)+shift[0], \
                        (scale[1]*y)+shift[1]
                    Line(self.canvas, p[0], p[1], p[0]-d, p[1],
                         fill = 'black', width = 1)
                    if text:
                        dict['text'] = label
                        apply(CanvasText,(self.canvas,
                                          p[0]-4,p[1]+2), dict)
            # Labels only on the first (left) pass.
            text = 0
        #write y-axis title
        CanvasText(self.canvas,pp2[0],pp2[1] - 10,text=self.ytitle)
def _ticks(self, lower, upper):
ideal = (upper-lower)/7.
log = math.log10(ideal)
power = math.floor(log)
fraction = log-power
factor = 1.
error = fraction
for f, lf in self._multiples:
e = math.fabs(fraction-lf)
if e < error:
error = e
factor = f
grid = factor * 10.**power
if power > 3 or power < -3:
format = '%+7.0e'
elif power >= 0:
digits = max(1, int(power))
format = '%' + `digits`+'.0f'
else:
digits = -int(power)
format = '%'+`digits+2`+'.'+`digits`+'f'
ticks = []
t = -grid*math.floor(-lower/grid)
while t <= upper and len(ticks) < 200:
ticks.append((t, format % (t,)))
t = t + grid
return ticks
_multiples = [(2., math.log10(2.)), (5., math.log10(5.))]
def _textBoundingBox(self, text):
    """Return the canvas bounding box (x1, y1, x2, y2) of *text*.

    The text is drawn temporarily in the background colour (so it is
    invisible), measured with bbox(), then deleted.
    """
    bg = self.canvas.cget('background')
    # 'opts' replaces the original name 'dict', which shadowed the builtin.
    opts = {'anchor': NW, 'text': text, 'fill': bg}
    if self.font is not None:
        opts['font'] = self.font
    # Keyword expansion replaces the deprecated apply() builtin.
    item = CanvasText(self.canvas, 0., 0., **opts)
    bb = self.canvas.bbox(item)
    self.canvas.delete(item)
    return bb
def replot(self):
    """Redraw the most recent draw() call (used after resize/clear)."""
    if self.last_drawn is not None:
        # Argument unpacking replaces the deprecated apply() builtin.
        self.draw(*self.last_drawn)
def clear(self):
    """Remove every item currently drawn on the canvas."""
    self.canvas.delete('all')
def postscr(self, filename=None):
    """Write to Postscript file given by 'filename'. If none provided,
    ask user.
    """
    from tkFileDialog import asksaveasfilename
    if not filename:
        filename = asksaveasfilename()
    # filename is '' when the user cancels the dialog -- do nothing then.
    if filename:
        # endswith() replaces the original slice comparison
        # (filename[-3:] == '.ps'); behavior is identical.
        if not filename.endswith('.ps'):
            filename += ".ps"
        self.canvas.postscript(width=self.width, height=self.height,
                               file=filename)
class TextBox(Frame):
    """A Frame holding a canvas intended for free-form text display.

    NOTE(review): the 'boxtext' argument is currently unused -- the
    original CanvasText call that would have drawn it was disabled.
    """
    def __init__(self, master, width, height,
                 background='white', boxtext='', **kw):
        # Direct call with keyword expansion replaces the deprecated
        # apply() builtin; behavior is identical.
        Frame.__init__(self, master, **kw)
        self.width = width
        self.height = height
        self.canvas = Canvas(self, width=width, height=height,
                             background=background)
        self.canvas.pack(fill=BOTH, expand=YES)

    def postscr(self):
        """Dump the canvas to a user-selected Postscript file."""
        # Bug fix: asksaveasfilename was used below but its import had
        # been commented out, so this method raised NameError.
        from tkFileDialog import asksaveasfilename
        filename = asksaveasfilename()
        if filename:
            if not filename.endswith('.ps'):
                filename += ".ps"
            self.canvas.postscript(width=self.width, height=self.height,
                                   file=filename)
if __name__ == '__main__':
    # Demo: build four example plots spread across two Tk toplevels.
    print "SimPlot.py %s"%__version__
    root=Tk()
    plt=SimPlot()
    root.title('SimPlot example - First frame')
    root1 = Tk()
    root1.title('SimPlot example - Second frame')
    """PARAMETER DEFAULTS:
    GraphBase
    ---------
    background='white',
    title="",
    xtitle='',
    ytitle=''
    GraphBase.draw
    --------------
    xaxis = 'automatic',
    yaxis = 'automatic')
    GraphLine
    ---------
    color: 'black',
    width: 1,
    smooth: 0,
    splinesteps: 12
    GraphSymbols:
    -------------
    color: 'black',
    width: 1,
    fillcolor: 'black',
    size: 2,
    fillstyle: '',
    outline: 'black',
    marker: 'circle'}
    GraphBars
    ---------
    color: 'black',
    width: 1,
    fillcolor: 'black',
    size: 3,
    fillstyle: '',
    outline: 'black'
    """
    # Plot 1 -- smooth line + filled bars
    # Sample sin(x)-cos(x) at 40 points over [0, 5*pi).
    di = 5.0*pi/40.
    data = []
    for i in range(40):
        data.append((float(i)*di,
                     (math.sin(float(i)*di)-math.cos(float(i)*di))))
    line1 = plt.makeLine(data, color='black', width=1,
                         smooth=1)
    line1a = plt.makeBars(data[1:], color='blue', fillstyle='gray25',
                          anchor=0.0)
    graphObject=plt.makeGraphObjects([line1a,line1])
    #Second panel -- Narrow bars
    line2 = plt.makeBars([(0,0),(1,145),(2,-90),(3,147),(4,22),(5,31),
                          (6,77),(7,125),(8,220),(9,550),(10,560),(11,0)],
                         outline='green',color='red', size=7)
    graphObject2=plt.makeGraphObjects([line2])
    # Third plot -- Smooth line and unsmoothed line
    line3 = plt.makeLine([(1,145+100),(2,151+100),(3,147+100),(4,22+100),(5,31+100),
                          (6,77+100),(7,125+100),(8,220+100),(9,550+100),(10,560+100)],
                         color='blue', width=2, smooth=1)
    line3a = plt.makeLine([(1,145),(2,151),(3,147),(4,22),(5,31),
                           (6,77),(7,125),(8,220),(9,550),(10,560)],
                          color='green', width=2, smooth=0)
    line3b = plt.makeStep([(1,145+100),(2,151+100),(3,147+100),(4,22+100),(5,31+100),
                           (6,77+100),(7,125+100),(8,220+100),(9,550+100),(10,560+100)],
                          color='red', width=2)
    graphObject3 = plt.makeGraphObjects([line3, line3a, line3b])
    # Fourth plot -- lines with all available symbols with different
    # outline colors/fill colors/sizes
    line4 = plt.makeSymbols([(1,100),(2,100),(3,100),(4,100),(5,100),
                             (6,100),(7,100),(8,100),(9,100),(10,100)],
                            color='black',fillcolor='red', width=2,marker='triangle')
    line5 = plt.makeSymbols([(1,200),(2,200),(3,200),(4,200),(5,200),
                             (6,200),(7,200),(8,200),(9,200),(10,200)],
                            color='red', width=2,marker='circle')
    line6 = plt.makeSymbols([(1,300),(2,300),(3,300),(4,300),(5,300),
                             (6,300),(7,300),(8,300),(9,300),(10,300)],
                            color='green', width=2,marker='dot')
    line7 = plt.makeSymbols([(1,400),(2,400),(3,400),(4,400),(5,400),
                             (6,400),(7,400),(8,400),(9,400),(10,400)],
                            color='blue', fillcolor='white',
                            size=2, width=2,marker='square')
    line8 = plt.makeSymbols([(1,500),(2,500),(3,500),(4,500),(5,500),
                             (6,500),(7,500),(8,500),(9,500),(10,500)],
                            color='yellow', width=2,marker='triangle')
    line9 = plt.makeSymbols([(1,600),(2,600),(3,600),(4,600),(5,600),
                             (6,600),(7,600),(8,600),(9,600),(10,600)],
                            color='magenta', width=2,marker='cross')
    line10 = plt.makeSymbols([(1,700),(2,700),(3,700),(4,700),(5,700),
                              (6,700),(7,700),(8,700),(9,700),(10,700)],
                             color='brown', width=2,marker='plus')
    line11 = plt.makeSymbols([(1,800),(2,800),(3,800),(4,800),(5,800),
                              (6,800),(7,800),(8,800),(9,800),(10,800)],
                             color='black', fillcolor='orange',
                             width=2,marker='triangle_down')
    graphObject4 = GraphObjects([line4, line5, line6, line7, line8,
                                 line9, line10, line11])
    # Two panels
    f1 = Frame(root)
    f2 = Frame(root1)
    graph={}
    # Plots 1 and 2 in panel f1, side by side
    graph[1]= plt.makeGraphBase(f1, 500, 300, title="Plot 1: 1 makeLine call, 1 makeBars call",
                                xtitle='the x-axis',ytitle='the y-axis')
    graph[1].pack(side=LEFT, fill=BOTH, expand=YES)
    graph[1].draw(graphObject,xaxis='minimal', yaxis='minimal')
    graph[2] = plt.makeGraphBase(f1, 500, 300,title="Plot 2: 1 makeBars call",
                                 xtitle='time',ytitle='pulse [volt]')
    # Set side-by-side plots
    graph[2].pack(side=LEFT, fill=BOTH, expand=YES)
    graph[2].draw(graphObject2, 'minimal', 'automatic')
    # Pack panel 1 to make it visible
    f1.pack()
    # Plots 2 and 3 in panel f2, one under the other
    graph[3] = plt.makeGraphBase(f2, 500, 300,
                                 title="Plot 3: 2 makeLine call (smooth, not smooth); 1 makeStep call")
    graph[3].pack(side=TOP, fill=BOTH, expand=YES)
    graph[3].draw(graphObject3)
    graph[4] = plt.makeGraphBase(f2, 500, 300, border=3,title="Plot 4: 8 makeSymbols calls")
    # Set one-over-other configuration of plots
    graph[4].pack(side=TOP, fill=BOTH, expand=YES)
    graph[4].draw(graphObject4)
    # Pack panel 2 to make it visible
    f2.pack()
    # Save graph[1] to Postscript file (user selects filename)
    graph[1].postscr()
    # end plotting stuff
    #### Very Important -- get Tk going by starting event loop
    plt.mainloop()
| 38.647059 | 96 | 0.49697 |
e2ae90cc58fe9ca7752520a4daedce7ae2561b04 | 5,365 | py | Python | build/lib.linux-x86_64-2.7/pysmt/walkers/dag.py | aman-goel/pysmt | fc92e716b4b1973d978df73b4cea42131b3bd7cd | [
"Apache-2.0"
] | null | null | null | build/lib.linux-x86_64-2.7/pysmt/walkers/dag.py | aman-goel/pysmt | fc92e716b4b1973d978df73b4cea42131b3bd7cd | [
"Apache-2.0"
] | null | null | null | build/lib.linux-x86_64-2.7/pysmt/walkers/dag.py | aman-goel/pysmt | fc92e716b4b1973d978df73b4cea42131b3bd7cd | [
"Apache-2.0"
] | null | null | null | #
# This file is part of pySMT.
#
# Copyright 2014 Andrea Micheli and Marco Gario
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from pysmt.walkers.tree import Walker
class DagWalker(Walker):
    """DagWalker treats the formula as a DAG and performs memoization of the
    intermediate results.

    This should be used when the result of applying the function to a
    formula is always the same, independently of where the formula has
    been found; examples include substitution and solving.

    Due to memoization, a few more things need to be taken into
    account when using the DagWalker.

    :func _get_key needs to be defined if additional arguments via
    keywords need to be shared. This function should return the key to
    be used in memoization. See substituter for an example.
    """

    def __init__(self, env=None, invalidate_memoization=False):
        """The flag ``invalidate_memoization`` can be used to clear the cache
        after the walk has been completed: the cache is one-time use.
        """
        Walker.__init__(self, env)
        # Maps memoization key (see _get_key) -> computed result.
        self.memoization = {}
        self.invalidate_memoization = invalidate_memoization
        # Explicit DFS stack of (was_expanded, formula) pairs; avoids
        # hitting the interpreter recursion limit on deep formulae.
        self.stack = []
        return

    def _get_children(self, formula):
        # Subclasses may override to walk a different notion of children.
        return formula.args()

    def _push_with_children_to_stack(self, formula, **kwargs):
        """Add children to the stack."""
        self.stack.append((True, formula))
        for s in self._get_children(formula):
            # Add only if not memoized already
            key = self._get_key(s, **kwargs)
            if key not in self.memoization:
                self.stack.append((False, s))

    def _compute_node_result(self, formula, **kwargs):
        """Apply function to the node and memoize the result.

        Note: This function assumes that the results for the children
        are already available.
        """
        key = self._get_key(formula, **kwargs)
        if key not in self.memoization:
            # Dispatch on the node type; fall back to walk_error when no
            # handler is registered for it.
            try:
                f = self.functions[formula.node_type()]
            except KeyError:
                f = self.walk_error
            args = [self.memoization[self._get_key(s, **kwargs)] \
                    for s in self._get_children(formula)]
            self.memoization[key] = f(formula, args=args, **kwargs)
        else:
            pass

    def _process_stack(self, **kwargs):
        """Empties the stack by processing every node in it.

        Processing is performed in two steps.
        1- A node is expanded and all its children are push to the stack
        2- Once all children have been processed, the result for the node
           is computed and memoized.
        """
        while self.stack:
            (was_expanded, formula) = self.stack.pop()
            if was_expanded:
                self._compute_node_result(formula, **kwargs)
            else:
                self._push_with_children_to_stack(formula, **kwargs)

    def iter_walk(self, formula, **kwargs):
        """Performs an iterative walk of the DAG"""
        self.stack.append((False, formula))
        self._process_stack(**kwargs)
        res_key = self._get_key(formula, **kwargs)
        return self.memoization[res_key]

    def walk(self, formula, **kwargs):
        # Fast path: uses the formula itself as the cache key, which is
        # only the keying scheme of the default _get_key (no kwargs).
        if formula in self.memoization:
            return self.memoization[formula]

        res = self.iter_walk(formula, **kwargs)

        if self.invalidate_memoization:
            self.memoization.clear()
        return res

    def _get_key(self, formula, **kwargs):
        # Default keying: the formula alone identifies the computation.
        if not kwargs:
            return formula
        raise NotImplementedError("DagWalker should redefine '_get_key'" +
                                  " when using keywords arguments")

    def walk_true(self, formula, args, **kwargs):
        #pylint: disable=unused-argument
        """ Returns True, independently from the children's value."""
        return True

    def walk_false(self, formula, args, **kwargs):
        #pylint: disable=unused-argument
        """ Returns False, independently from the children's value."""
        return False

    def walk_none(self, formula, args, **kwargs):
        #pylint: disable=unused-argument
        """ Returns None, independently from the children's value."""
        return None

    def walk_identity(self, formula, **kwargs):
        #pylint: disable=unused-argument
        """ Returns formula, independently from the childrens's value."""
        return formula

    def walk_any(self, formula, args, **kwargs):
        #pylint: disable=unused-argument
        """ Returns True if any of the children returned True. """
        return any(args)

    def walk_all(self, formula, args, **kwargs):
        #pylint: disable=unused-argument
        """ Returns True if all the children returned True. """
        return all(args)

# EOC DagWalker
| 35.766667 | 77 | 0.637465 |
3381436449d7859a57172b0cc57d7a61fa7e8a91 | 10,545 | py | Python | contextvars/tests/test_basics.py | smurfix/micropython-lib | 70d90f753633c0cef6ad0adb4c97e3af79cb2c59 | [
"PSF-2.0"
] | null | null | null | contextvars/tests/test_basics.py | smurfix/micropython-lib | 70d90f753633c0cef6ad0adb4c97e3af79cb2c59 | [
"PSF-2.0"
] | null | null | null | contextvars/tests/test_basics.py | smurfix/micropython-lib | 70d90f753633c0cef6ad0adb4c97e3af79cb2c59 | [
"PSF-2.0"
] | null | null | null | # Tests are copied from cpython/Lib/test/test_context.py
# License: PSFL
# Copyright: 2018 Python Software Foundation
import concurrent.futures
import functools
import random
import time
import unittest
import contextvars
def isolated_context(func):
    """Decorator running *func* inside a fresh, empty Context.

    Needed to make the reftracking test mode work.
    """
    @functools.wraps(func)
    def run_isolated(*args, **kwargs):
        return contextvars.Context().run(func, *args, **kwargs)
    return run_isolated
class ContextTest(unittest.TestCase):
    """Port of CPython's Lib/test/test_context.py for this contextvars
    backport.  Methods renamed with a _skip_ prefix are intentionally not
    collected by unittest (they rely on CPython-only behavior such as
    non-subclassable C types or real threading).
    """

    def test_context_var_new_1(self):
        with self.assertRaises(TypeError):
            contextvars.ContextVar()

        with self.assertRaisesRegex(TypeError, 'must be a str'):
            contextvars.ContextVar(1)

        c = contextvars.ContextVar('a')
        self.assertNotEqual(hash(c), hash('a'))

    @isolated_context
    def test_context_var_repr_1(self):
        c = contextvars.ContextVar('a')
        self.assertIn('a', repr(c))

        c = contextvars.ContextVar('a', default=123)
        self.assertIn('123', repr(c))

        # Disabled: requires recursion-safe repr(), which this backport
        # does not provide.
        if False: # requires recursion-safe repr()
            lst = []
            c = contextvars.ContextVar('a', default=lst)
            lst.append(c)
            self.assertIs(c.default,lst)
            self.assertIn('...', repr(c))
            self.assertIn('...', repr(lst))

        t = c.set(1)
        self.assertIn(repr(c), repr(t))
        self.assertNotIn(' used ', repr(t))
        c.reset(t)
        self.assertIn(' used ', repr(t))

    def _skip_test_context_subclassing_1(self):
        with self.assertRaisesRegex(TypeError, 'not an acceptable base type'):
            class MyContextVar(contextvars.ContextVar):
                # Potentially we might want ContextVars to be subclassable.
                pass

        with self.assertRaisesRegex(TypeError, 'not an acceptable base type'):
            class MyContext(contextvars.Context):
                pass

        with self.assertRaisesRegex(TypeError, 'not an acceptable base type'):
            class MyToken(contextvars.Token):
                pass

    def test_context_new_1(self):
        with self.assertRaises(TypeError):
            contextvars.Context(1)
        with self.assertRaises(TypeError):
            contextvars.Context(1, a=1)
        with self.assertRaises(TypeError):
            contextvars.Context(a=1)
        contextvars.Context(**{})

    def test_context_typerrors_1(self):
        ctx = contextvars.Context()

        with self.assertRaisesRegex(TypeError, 'ContextVar key was expected'):
            ctx[1]

        with self.assertRaisesRegex(TypeError, 'ContextVar key was expected'):
            1 in ctx

        with self.assertRaisesRegex(TypeError, 'ContextVar key was expected'):
            ctx.get(1)

    def test_context_get_context_1(self):
        ctx = contextvars.copy_context()
        self.assertIsInstance(ctx, contextvars.Context)

    def test_context_run_1(self):
        ctx = contextvars.Context()

        with self.assertRaisesRegex(TypeError, 'missing 1 required'):
            ctx.run()

    def test_context_run_2(self):
        ctx = contextvars.Context()

        def func(*args, **kwargs):
            kwargs['spam'] = 'foo'
            args += ('bar',)
            return args, kwargs

        for f in (func, functools.partial(func)):
            # partial doesn't support FASTCALL

            self.assertEqual(ctx.run(f), (('bar',), {'spam': 'foo'}))
            self.assertEqual(ctx.run(f, 1), ((1, 'bar'), {'spam': 'foo'}))

            self.assertEqual(
                ctx.run(f, a=2),
                (('bar',), {'a': 2, 'spam': 'foo'}))

            self.assertEqual(
                ctx.run(f, 11, a=2),
                ((11, 'bar'), {'a': 2, 'spam': 'foo'}))

            a = {}
            self.assertEqual(
                ctx.run(f, 11, **a),
                ((11, 'bar'), {'spam': 'foo'}))
            # run() must not have mutated the caller's kwargs dict.
            self.assertEqual(a, {})

    def test_context_run_3(self):
        ctx = contextvars.Context()

        def func(*args, **kwargs):
            1 / 0

        with self.assertRaises(ZeroDivisionError):
            ctx.run(func)
        with self.assertRaises(ZeroDivisionError):
            ctx.run(func, 1, 2)
        with self.assertRaises(ZeroDivisionError):
            ctx.run(func, 1, 2, a=123)

    @isolated_context
    def test_context_run_4(self):
        ctx1 = contextvars.Context()
        ctx2 = contextvars.Context()
        var = contextvars.ContextVar('var')

        def func2():
            self.assertIsNone(var.get(None))

        def func1():
            self.assertIsNone(var.get(None))
            var.set('spam')
            # Nested run() in a different context must not see the value.
            ctx2.run(func2)
            self.assertEqual(var.get(None), 'spam')

            cur = contextvars.copy_context()
            self.assertEqual(len(cur), 1)
            self.assertEqual(cur[var], 'spam')
            return cur

        returned_ctx = ctx1.run(func1)
        self.assertEqual(ctx1, returned_ctx)
        self.assertEqual(returned_ctx[var], 'spam')
        self.assertIn(var, returned_ctx)

    def test_context_run_5(self):
        ctx = contextvars.Context()
        var = contextvars.ContextVar('var')

        def func():
            self.assertIsNone(var.get(None))
            var.set('spam')
            1 / 0

        with self.assertRaises(ZeroDivisionError):
            ctx.run(func)

        # The value set inside the context must not leak out even when
        # the callable raised.
        self.assertIsNone(var.get(None))

    def test_context_run_6(self):
        ctx = contextvars.Context()
        c = contextvars.ContextVar('a', default=0)

        def fun():
            self.assertEqual(c.get(), 0)
            self.assertIsNone(ctx.get(c))

            c.set(42)
            self.assertEqual(c.get(), 42)
            self.assertEqual(ctx.get(c), 42)

        ctx.run(fun)

    def test_context_run_7(self):
        ctx = contextvars.Context()

        def fun():
            with self.assertRaisesRegex(RuntimeError, 'is already entered'):
                ctx.run(fun)

        ctx.run(fun)

    @isolated_context
    def test_context_getset_1(self):
        c = contextvars.ContextVar('c')
        with self.assertRaises(LookupError):
            c.get()

        self.assertIsNone(c.get(None))

        t0 = c.set(42)
        self.assertEqual(c.get(), 42)
        self.assertEqual(c.get(None), 42)
        self.assertIs(t0.old_value, t0.MISSING)
        self.assertIs(t0.old_value, contextvars.Token.MISSING)
        self.assertIs(t0.var, c)

        t = c.set('spam')
        self.assertEqual(c.get(), 'spam')
        self.assertEqual(c.get(None), 'spam')
        self.assertEqual(t.old_value, 42)
        c.reset(t)
        self.assertEqual(c.get(), 42)
        self.assertEqual(c.get(None), 42)

        c.set('spam2')
        # Tokens are single-use.
        with self.assertRaisesRegex(RuntimeError, 'has already been used'):
            c.reset(t)
        self.assertEqual(c.get(), 'spam2')

        ctx1 = contextvars.copy_context()
        self.assertIn(c, ctx1)

        c.reset(t0)
        with self.assertRaisesRegex(RuntimeError, 'has already been used'):
            c.reset(t0)
        self.assertIsNone(c.get(None))

        # The snapshot taken before the reset keeps the old value.
        self.assertIn(c, ctx1)
        self.assertEqual(ctx1[c], 'spam2')
        self.assertEqual(ctx1.get(c, 'aa'), 'spam2')
        self.assertEqual(len(ctx1), 1)
        self.assertEqual(list(ctx1.items()), [(c, 'spam2')])
        self.assertEqual(list(ctx1.values()), ['spam2'])
        self.assertEqual(list(ctx1.keys()), [c])
        self.assertEqual(list(ctx1), [c])

        ctx2 = contextvars.copy_context()
        self.assertNotIn(c, ctx2)
        with self.assertRaises(KeyError):
            ctx2[c]
        self.assertEqual(ctx2.get(c, 'aa'), 'aa')
        self.assertEqual(len(ctx2), 0)
        self.assertEqual(list(ctx2), [])

    @isolated_context
    def test_context_getset_2(self):
        v1 = contextvars.ContextVar('v1')
        v2 = contextvars.ContextVar('v2')

        t1 = v1.set(42)
        # A token can only reset the variable it was produced by.
        with self.assertRaisesRegex(ValueError, 'by a different'):
            v2.reset(t1)

    @isolated_context
    def test_context_getset_3(self):
        c = contextvars.ContextVar('c', default=42)
        ctx = contextvars.Context()

        def fun():
            self.assertEqual(c.get(), 42)
            with self.assertRaises(KeyError):
                ctx[c]
            self.assertIsNone(ctx.get(c))
            self.assertEqual(ctx.get(c, 'spam'), 'spam')
            self.assertNotIn(c, ctx)
            self.assertEqual(list(ctx.keys()), [])

            t = c.set(1)
            self.assertEqual(list(ctx.keys()), [c])
            self.assertEqual(ctx[c], 1)

            c.reset(t)
            self.assertEqual(list(ctx.keys()), [])
            with self.assertRaises(KeyError):
                ctx[c]

        ctx.run(fun)

    @isolated_context
    def test_context_getset_4(self):
        c = contextvars.ContextVar('c', default=42)
        ctx = contextvars.Context()

        tok = ctx.run(c.set, 1)

        # A token created inside ctx.run() cannot be used outside it.
        with self.assertRaisesRegex(ValueError, 'different Context'):
            c.reset(tok)

    @isolated_context
    def test_context_getset_5(self):
        c = contextvars.ContextVar('c', default=42)
        c.set([])

        def fun():
            c.set([])
            c.get().append(42)
            self.assertEqual(c.get(), [42])

        contextvars.copy_context().run(fun)
        self.assertEqual(c.get(), [])

    def test_context_copy_1(self):
        ctx1 = contextvars.Context()
        c = contextvars.ContextVar('c', default=42)

        def ctx1_fun():
            c.set(10)

            ctx2 = ctx1.copy()
            self.assertEqual(ctx2[c], 10)

            c.set(20)
            self.assertEqual(ctx1[c], 20)
            self.assertEqual(ctx2[c], 10)

            ctx2.run(ctx2_fun)
            self.assertEqual(ctx1[c], 20)
            self.assertEqual(ctx2[c], 30)

        def ctx2_fun():
            self.assertEqual(c.get(), 10)
            c.set(30)
            self.assertEqual(c.get(), 30)

        ctx1.run(ctx1_fun)

    @isolated_context
    def _skip_test_context_threads_1(self):
        cvar = contextvars.ContextVar('cvar')

        def sub(num):
            for i in range(10):
                cvar.set(num + i)
                time.sleep(random.uniform(0.001, 0.05))
                self.assertEqual(cvar.get(), num + i)
            return num

        tp = concurrent.futures.ThreadPoolExecutor(max_workers=10)
        try:
            results = list(tp.map(sub, range(10)))
        finally:
            tp.shutdown()
        self.assertEqual(results, list(range(10)))
| 29.620787 | 78 | 0.563774 |
9f01aa24687d49a23d2664bafdc73b3413f96da3 | 2,774 | py | Python | lib_bgp_simulator_engine/tests/utils/graph_writer.py | jfuruness/lib_bgp_simulator_engine | cd45a653d7fa26b7387e7450a5891b62efe5bf01 | [
"BSD-3-Clause"
] | 1 | 2021-09-27T14:13:10.000Z | 2021-09-27T14:13:10.000Z | lib_bgp_simulator_engine/tests/utils/graph_writer.py | jfuruness/lib_bgp_simulator_engine | cd45a653d7fa26b7387e7450a5891b62efe5bf01 | [
"BSD-3-Clause"
] | null | null | null | lib_bgp_simulator_engine/tests/utils/graph_writer.py | jfuruness/lib_bgp_simulator_engine | cd45a653d7fa26b7387e7450a5891b62efe5bf01 | [
"BSD-3-Clause"
] | null | null | null | import csv
from tqdm import tqdm
class _AS:
def __init__(self, asn):
self.asn = asn
self.as_type = None
self.rank = None
self.peer_asns = list()
self.customer_asns = list()
self.provider_asns = list()
def write_graph(peers, customer_providers, as_types, path_obj):
    """Writes DAG to a TSV to be read in later"""
    # Dict of asn: as_obj
    as_dict = _generate_as_dict(peers, customer_providers, as_types)
    # Later change to logging
    if len(peers) < 20:
        print("asn, peers, customers, providers")
        for node in as_dict.values():
            print(node.asn, node.peer_asns, node.customer_asns, node.provider_asns)
    _assign_ranks(as_dict)
    # Column names come from the attributes of any one AS object.
    first_node = list(as_dict.values())[0]
    with path_obj.open("w") as out_file:
        writer = csv.DictWriter(out_file,
                                fieldnames=vars(first_node).keys(),
                                delimiter="\t")
        writer.writeheader()
        # https://stackoverflow.com/a/62680/8903959
        writer.writerows(vars(node) for node in as_dict.values())
def _generate_as_dict(peers, customer_providers, as_types):
    """Build {asn: _AS} adjacency information from the link lists.

    peers: iterable of links exposing .ases (a pair of peer ASNs)
    customer_providers: iterable of links exposing .customer/.provider
    as_types: mapping asn -> AS type, copied onto each _AS object
    Adjacency lists are deduplicated and sorted before returning.
    """
    as_dict = dict()
    # Record both directions of every peering link.  setdefault keeps
    # the insert-or-get to a single dict operation (the original did a
    # get() followed by an assignment).
    for peer_link in peers:
        p1, p2 = peer_link.ases
        as_dict.setdefault(p1, _AS(p1)).peer_asns.append(p2)
        as_dict.setdefault(p2, _AS(p2)).peer_asns.append(p1)
    # Record each customer->provider edge on both endpoints.
    for cp_link in customer_providers:
        c, p = cp_link.customer, cp_link.provider
        as_dict.setdefault(c, _AS(c)).provider_asns.append(p)
        as_dict.setdefault(p, _AS(p)).customer_asns.append(c)
    for asn, as_obj in as_dict.items():
        as_obj.as_type = as_types[asn]
        for attr in ("peer_asns", "customer_asns", "provider_asns"):
            # Deduplicate and sort; sorted() already returns a list, so
            # the original's extra list() wrapper was redundant.
            setattr(as_obj, attr, sorted(set(getattr(as_obj, attr))))
    return as_dict
def _assign_ranks(as_dict):
    """Assign a rank to every AS, propagating from customers upwards."""
    # Deliberately simple; speed is not a concern here.
    for node in tqdm(as_dict.values(), total=len(as_dict), desc="Assigning ranks"):
        _assign_ranks_helper(node, 0, as_dict)
def _assign_ranks_helper(as_obj, rank, as_dict):
"""Assigns ranks to all ases in customer/provider chain recursively"""
if as_obj.rank is None or as_obj.rank < rank:
as_obj.rank = rank
# Only update it's providers if it's rank becomes higher
# This will avoid unnessecary rewrites hopefully
for provider_asn in as_obj.provider_asns:
_assign_ranks_helper(as_dict[provider_asn], rank + 1, as_dict)
| 35.113924 | 95 | 0.647801 |
522a4dab4cf18945bec05557aa9c3f72862676ae | 159,237 | py | Python | benchmarks/ltl_maxplus/f3/maxplus_40_71.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 3 | 2021-04-23T23:29:26.000Z | 2022-03-23T10:00:30.000Z | benchmarks/ltl_maxplus/f3/maxplus_40_71.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | null | null | null | benchmarks/ltl_maxplus/f3/maxplus_40_71.py | EnricoMagnago/F3 | c863215c318d7d5f258eb9be38c6962cf6863b52 | [
"MIT"
] | 1 | 2021-11-17T22:02:56.000Z | 2021-11-17T22:02:56.000Z |
from collections import Iterable
from mathsat import msat_term, msat_env
from mathsat import msat_make_true, msat_make_false
from mathsat import msat_make_constant, msat_declare_function
from mathsat import msat_get_rational_type
from mathsat import msat_make_and as _msat_make_and
from mathsat import msat_make_or as _msat_make_or
from mathsat import msat_make_not
from mathsat import msat_make_leq, msat_make_equal
from mathsat import msat_make_number, msat_make_plus, msat_make_times
from ltl.ltl import TermMap, LTLEncoder
from utils import name_next
def msat_make_and(menv: msat_env, *args):
    """Left-fold the conjunction of *args*; no arguments yields `true`."""
    if not args:
        return msat_make_true(menv)
    conj = args[0]
    for term in args[1:]:
        conj = _msat_make_and(menv, conj, term)
    return conj
def msat_make_or(menv: msat_env, *args):
    """Left-fold the disjunction of *args*; no arguments yields `false`."""
    if not args:
        return msat_make_false(menv)
    disj = args[0]
    for term in args[1:]:
        disj = _msat_make_or(menv, disj, term)
    return disj
def msat_make_minus(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode arg0 - arg1 as arg0 + (-1 * arg1)."""
    neg_arg1 = msat_make_times(menv, arg1, msat_make_number(menv, "-1"))
    return msat_make_plus(menv, arg0, neg_arg1)
def msat_make_lt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode arg0 < arg1 as not(arg0 >= arg1)."""
    return msat_make_not(menv, msat_make_geq(menv, arg0, arg1))
def msat_make_geq(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode arg0 >= arg1 by flipping the operands of msat_make_leq."""
    return msat_make_leq(menv, arg1, arg0)
def msat_make_gt(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode arg0 > arg1 as not(arg0 <= arg1)."""
    return msat_make_not(menv, msat_make_leq(menv, arg0, arg1))
def msat_make_impl(menv: msat_env, arg0: msat_term, arg1: msat_term):
    """Encode arg0 -> arg1 as (not arg0) or arg1."""
    return msat_make_or(menv, msat_make_not(menv, arg0), arg1)
def check_ltl(menv: msat_env, enc: LTLEncoder) -> (Iterable, msat_term,
msat_term, msat_term):
assert menv
assert isinstance(menv, msat_env)
assert enc
assert isinstance(enc, LTLEncoder)
real_type = msat_get_rational_type(menv)
names = ["x_0", "x_1", "x_2", "x_3", "x_4", "x_5", "x_6", "x_7", "x_8", "x_9", "x_10", "x_11", "x_12", "x_13", "x_14", "x_15", "x_16", "x_17", "x_18", "x_19", "x_20", "x_21", "x_22", "x_23", "x_24", "x_25", "x_26", "x_27", "x_28", "x_29", "x_30", "x_31", "x_32", "x_33", "x_34", "x_35", "x_36", "x_37", "x_38", "x_39"]
xs = [msat_declare_function(menv, name, real_type)
for name in names]
xs = [msat_make_constant(menv, x) for x in xs]
x_xs = [msat_declare_function(menv, name_next(name), real_type)
for name in names]
x_xs = [msat_make_constant(menv, x_x) for x_x in x_xs]
curr2next = {x: x_x for x, x_x in zip(xs, x_xs)}
n_10_0 = msat_make_number(menv, "10.0")
n_11_0 = msat_make_number(menv, "11.0")
n_12_0 = msat_make_number(menv, "12.0")
n_13_0 = msat_make_number(menv, "13.0")
n_14_0 = msat_make_number(menv, "14.0")
n_15_0 = msat_make_number(menv, "15.0")
n_16_0 = msat_make_number(menv, "16.0")
n_17_0 = msat_make_number(menv, "17.0")
n_18_0 = msat_make_number(menv, "18.0")
n_19_0 = msat_make_number(menv, "19.0")
n_1_0 = msat_make_number(menv, "1.0")
n_20_0 = msat_make_number(menv, "20.0")
n_2_0 = msat_make_number(menv, "2.0")
n_3_0 = msat_make_number(menv, "3.0")
n_4_0 = msat_make_number(menv, "4.0")
n_5_0 = msat_make_number(menv, "5.0")
n_6_0 = msat_make_number(menv, "6.0")
n_7_0 = msat_make_number(menv, "7.0")
n_8_0 = msat_make_number(menv, "8.0")
n_9_0 = msat_make_number(menv, "9.0")
init = msat_make_true(menv)
trans = msat_make_true(menv)
# transitions
expr0 = msat_make_plus(menv, xs[0], n_9_0)
expr1 = msat_make_plus(menv, xs[2], n_1_0)
expr2 = msat_make_plus(menv, xs[3], n_12_0)
expr3 = msat_make_plus(menv, xs[6], n_3_0)
expr4 = msat_make_plus(menv, xs[7], n_11_0)
expr5 = msat_make_plus(menv, xs[9], n_10_0)
expr6 = msat_make_plus(menv, xs[10], n_7_0)
expr7 = msat_make_plus(menv, xs[11], n_18_0)
expr8 = msat_make_plus(menv, xs[12], n_5_0)
expr9 = msat_make_plus(menv, xs[13], n_1_0)
expr10 = msat_make_plus(menv, xs[14], n_1_0)
expr11 = msat_make_plus(menv, xs[16], n_14_0)
expr12 = msat_make_plus(menv, xs[19], n_4_0)
expr13 = msat_make_plus(menv, xs[23], n_12_0)
expr14 = msat_make_plus(menv, xs[26], n_3_0)
expr15 = msat_make_plus(menv, xs[31], n_7_0)
expr16 = msat_make_plus(menv, xs[34], n_12_0)
expr17 = msat_make_plus(menv, xs[36], n_20_0)
expr18 = msat_make_plus(menv, xs[37], n_19_0)
expr19 = msat_make_plus(menv, xs[39], n_4_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[0], expr0),
msat_make_geq(menv, x_xs[0], expr1),
msat_make_geq(menv, x_xs[0], expr2),
msat_make_geq(menv, x_xs[0], expr3),
msat_make_geq(menv, x_xs[0], expr4),
msat_make_geq(menv, x_xs[0], expr5),
msat_make_geq(menv, x_xs[0], expr6),
msat_make_geq(menv, x_xs[0], expr7),
msat_make_geq(menv, x_xs[0], expr8),
msat_make_geq(menv, x_xs[0], expr9),
msat_make_geq(menv, x_xs[0], expr10),
msat_make_geq(menv, x_xs[0], expr11),
msat_make_geq(menv, x_xs[0], expr12),
msat_make_geq(menv, x_xs[0], expr13),
msat_make_geq(menv, x_xs[0], expr14),
msat_make_geq(menv, x_xs[0], expr15),
msat_make_geq(menv, x_xs[0], expr16),
msat_make_geq(menv, x_xs[0], expr17),
msat_make_geq(menv, x_xs[0], expr18),
msat_make_geq(menv, x_xs[0], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[0], expr0),
msat_make_equal(menv, x_xs[0], expr1),
msat_make_equal(menv, x_xs[0], expr2),
msat_make_equal(menv, x_xs[0], expr3),
msat_make_equal(menv, x_xs[0], expr4),
msat_make_equal(menv, x_xs[0], expr5),
msat_make_equal(menv, x_xs[0], expr6),
msat_make_equal(menv, x_xs[0], expr7),
msat_make_equal(menv, x_xs[0], expr8),
msat_make_equal(menv, x_xs[0], expr9),
msat_make_equal(menv, x_xs[0], expr10),
msat_make_equal(menv, x_xs[0], expr11),
msat_make_equal(menv, x_xs[0], expr12),
msat_make_equal(menv, x_xs[0], expr13),
msat_make_equal(menv, x_xs[0], expr14),
msat_make_equal(menv, x_xs[0], expr15),
msat_make_equal(menv, x_xs[0], expr16),
msat_make_equal(menv, x_xs[0], expr17),
msat_make_equal(menv, x_xs[0], expr18),
msat_make_equal(menv, x_xs[0], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_9_0)
expr1 = msat_make_plus(menv, xs[1], n_19_0)
expr2 = msat_make_plus(menv, xs[3], n_11_0)
expr3 = msat_make_plus(menv, xs[4], n_19_0)
expr4 = msat_make_plus(menv, xs[6], n_4_0)
expr5 = msat_make_plus(menv, xs[7], n_5_0)
expr6 = msat_make_plus(menv, xs[10], n_4_0)
expr7 = msat_make_plus(menv, xs[11], n_12_0)
expr8 = msat_make_plus(menv, xs[14], n_11_0)
expr9 = msat_make_plus(menv, xs[18], n_9_0)
expr10 = msat_make_plus(menv, xs[20], n_9_0)
expr11 = msat_make_plus(menv, xs[22], n_19_0)
expr12 = msat_make_plus(menv, xs[24], n_15_0)
expr13 = msat_make_plus(menv, xs[26], n_6_0)
expr14 = msat_make_plus(menv, xs[27], n_20_0)
expr15 = msat_make_plus(menv, xs[30], n_14_0)
expr16 = msat_make_plus(menv, xs[33], n_2_0)
expr17 = msat_make_plus(menv, xs[35], n_17_0)
expr18 = msat_make_plus(menv, xs[37], n_13_0)
expr19 = msat_make_plus(menv, xs[38], n_2_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[1], expr0),
msat_make_geq(menv, x_xs[1], expr1),
msat_make_geq(menv, x_xs[1], expr2),
msat_make_geq(menv, x_xs[1], expr3),
msat_make_geq(menv, x_xs[1], expr4),
msat_make_geq(menv, x_xs[1], expr5),
msat_make_geq(menv, x_xs[1], expr6),
msat_make_geq(menv, x_xs[1], expr7),
msat_make_geq(menv, x_xs[1], expr8),
msat_make_geq(menv, x_xs[1], expr9),
msat_make_geq(menv, x_xs[1], expr10),
msat_make_geq(menv, x_xs[1], expr11),
msat_make_geq(menv, x_xs[1], expr12),
msat_make_geq(menv, x_xs[1], expr13),
msat_make_geq(menv, x_xs[1], expr14),
msat_make_geq(menv, x_xs[1], expr15),
msat_make_geq(menv, x_xs[1], expr16),
msat_make_geq(menv, x_xs[1], expr17),
msat_make_geq(menv, x_xs[1], expr18),
msat_make_geq(menv, x_xs[1], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[1], expr0),
msat_make_equal(menv, x_xs[1], expr1),
msat_make_equal(menv, x_xs[1], expr2),
msat_make_equal(menv, x_xs[1], expr3),
msat_make_equal(menv, x_xs[1], expr4),
msat_make_equal(menv, x_xs[1], expr5),
msat_make_equal(menv, x_xs[1], expr6),
msat_make_equal(menv, x_xs[1], expr7),
msat_make_equal(menv, x_xs[1], expr8),
msat_make_equal(menv, x_xs[1], expr9),
msat_make_equal(menv, x_xs[1], expr10),
msat_make_equal(menv, x_xs[1], expr11),
msat_make_equal(menv, x_xs[1], expr12),
msat_make_equal(menv, x_xs[1], expr13),
msat_make_equal(menv, x_xs[1], expr14),
msat_make_equal(menv, x_xs[1], expr15),
msat_make_equal(menv, x_xs[1], expr16),
msat_make_equal(menv, x_xs[1], expr17),
msat_make_equal(menv, x_xs[1], expr18),
msat_make_equal(menv, x_xs[1], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[7], n_15_0)
expr1 = msat_make_plus(menv, xs[8], n_9_0)
expr2 = msat_make_plus(menv, xs[10], n_7_0)
expr3 = msat_make_plus(menv, xs[12], n_9_0)
expr4 = msat_make_plus(menv, xs[13], n_14_0)
expr5 = msat_make_plus(menv, xs[17], n_15_0)
expr6 = msat_make_plus(menv, xs[18], n_9_0)
expr7 = msat_make_plus(menv, xs[19], n_13_0)
expr8 = msat_make_plus(menv, xs[20], n_13_0)
expr9 = msat_make_plus(menv, xs[21], n_9_0)
expr10 = msat_make_plus(menv, xs[25], n_15_0)
expr11 = msat_make_plus(menv, xs[27], n_16_0)
expr12 = msat_make_plus(menv, xs[28], n_6_0)
expr13 = msat_make_plus(menv, xs[29], n_20_0)
expr14 = msat_make_plus(menv, xs[30], n_16_0)
expr15 = msat_make_plus(menv, xs[31], n_19_0)
expr16 = msat_make_plus(menv, xs[32], n_3_0)
expr17 = msat_make_plus(menv, xs[34], n_10_0)
expr18 = msat_make_plus(menv, xs[38], n_1_0)
expr19 = msat_make_plus(menv, xs[39], n_19_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[2], expr0),
msat_make_geq(menv, x_xs[2], expr1),
msat_make_geq(menv, x_xs[2], expr2),
msat_make_geq(menv, x_xs[2], expr3),
msat_make_geq(menv, x_xs[2], expr4),
msat_make_geq(menv, x_xs[2], expr5),
msat_make_geq(menv, x_xs[2], expr6),
msat_make_geq(menv, x_xs[2], expr7),
msat_make_geq(menv, x_xs[2], expr8),
msat_make_geq(menv, x_xs[2], expr9),
msat_make_geq(menv, x_xs[2], expr10),
msat_make_geq(menv, x_xs[2], expr11),
msat_make_geq(menv, x_xs[2], expr12),
msat_make_geq(menv, x_xs[2], expr13),
msat_make_geq(menv, x_xs[2], expr14),
msat_make_geq(menv, x_xs[2], expr15),
msat_make_geq(menv, x_xs[2], expr16),
msat_make_geq(menv, x_xs[2], expr17),
msat_make_geq(menv, x_xs[2], expr18),
msat_make_geq(menv, x_xs[2], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[2], expr0),
msat_make_equal(menv, x_xs[2], expr1),
msat_make_equal(menv, x_xs[2], expr2),
msat_make_equal(menv, x_xs[2], expr3),
msat_make_equal(menv, x_xs[2], expr4),
msat_make_equal(menv, x_xs[2], expr5),
msat_make_equal(menv, x_xs[2], expr6),
msat_make_equal(menv, x_xs[2], expr7),
msat_make_equal(menv, x_xs[2], expr8),
msat_make_equal(menv, x_xs[2], expr9),
msat_make_equal(menv, x_xs[2], expr10),
msat_make_equal(menv, x_xs[2], expr11),
msat_make_equal(menv, x_xs[2], expr12),
msat_make_equal(menv, x_xs[2], expr13),
msat_make_equal(menv, x_xs[2], expr14),
msat_make_equal(menv, x_xs[2], expr15),
msat_make_equal(menv, x_xs[2], expr16),
msat_make_equal(menv, x_xs[2], expr17),
msat_make_equal(menv, x_xs[2], expr18),
msat_make_equal(menv, x_xs[2], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_2_0)
expr1 = msat_make_plus(menv, xs[2], n_3_0)
expr2 = msat_make_plus(menv, xs[6], n_2_0)
expr3 = msat_make_plus(menv, xs[7], n_6_0)
expr4 = msat_make_plus(menv, xs[10], n_19_0)
expr5 = msat_make_plus(menv, xs[11], n_19_0)
expr6 = msat_make_plus(menv, xs[14], n_12_0)
expr7 = msat_make_plus(menv, xs[16], n_16_0)
expr8 = msat_make_plus(menv, xs[19], n_2_0)
expr9 = msat_make_plus(menv, xs[21], n_19_0)
expr10 = msat_make_plus(menv, xs[22], n_10_0)
expr11 = msat_make_plus(menv, xs[26], n_2_0)
expr12 = msat_make_plus(menv, xs[27], n_15_0)
expr13 = msat_make_plus(menv, xs[28], n_6_0)
expr14 = msat_make_plus(menv, xs[29], n_7_0)
expr15 = msat_make_plus(menv, xs[30], n_8_0)
expr16 = msat_make_plus(menv, xs[33], n_11_0)
expr17 = msat_make_plus(menv, xs[35], n_12_0)
expr18 = msat_make_plus(menv, xs[37], n_1_0)
expr19 = msat_make_plus(menv, xs[38], n_1_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[3], expr0),
msat_make_geq(menv, x_xs[3], expr1),
msat_make_geq(menv, x_xs[3], expr2),
msat_make_geq(menv, x_xs[3], expr3),
msat_make_geq(menv, x_xs[3], expr4),
msat_make_geq(menv, x_xs[3], expr5),
msat_make_geq(menv, x_xs[3], expr6),
msat_make_geq(menv, x_xs[3], expr7),
msat_make_geq(menv, x_xs[3], expr8),
msat_make_geq(menv, x_xs[3], expr9),
msat_make_geq(menv, x_xs[3], expr10),
msat_make_geq(menv, x_xs[3], expr11),
msat_make_geq(menv, x_xs[3], expr12),
msat_make_geq(menv, x_xs[3], expr13),
msat_make_geq(menv, x_xs[3], expr14),
msat_make_geq(menv, x_xs[3], expr15),
msat_make_geq(menv, x_xs[3], expr16),
msat_make_geq(menv, x_xs[3], expr17),
msat_make_geq(menv, x_xs[3], expr18),
msat_make_geq(menv, x_xs[3], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[3], expr0),
msat_make_equal(menv, x_xs[3], expr1),
msat_make_equal(menv, x_xs[3], expr2),
msat_make_equal(menv, x_xs[3], expr3),
msat_make_equal(menv, x_xs[3], expr4),
msat_make_equal(menv, x_xs[3], expr5),
msat_make_equal(menv, x_xs[3], expr6),
msat_make_equal(menv, x_xs[3], expr7),
msat_make_equal(menv, x_xs[3], expr8),
msat_make_equal(menv, x_xs[3], expr9),
msat_make_equal(menv, x_xs[3], expr10),
msat_make_equal(menv, x_xs[3], expr11),
msat_make_equal(menv, x_xs[3], expr12),
msat_make_equal(menv, x_xs[3], expr13),
msat_make_equal(menv, x_xs[3], expr14),
msat_make_equal(menv, x_xs[3], expr15),
msat_make_equal(menv, x_xs[3], expr16),
msat_make_equal(menv, x_xs[3], expr17),
msat_make_equal(menv, x_xs[3], expr18),
msat_make_equal(menv, x_xs[3], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_10_0)
expr1 = msat_make_plus(menv, xs[2], n_9_0)
expr2 = msat_make_plus(menv, xs[4], n_16_0)
expr3 = msat_make_plus(menv, xs[6], n_12_0)
expr4 = msat_make_plus(menv, xs[7], n_16_0)
expr5 = msat_make_plus(menv, xs[8], n_5_0)
expr6 = msat_make_plus(menv, xs[10], n_17_0)
expr7 = msat_make_plus(menv, xs[11], n_5_0)
expr8 = msat_make_plus(menv, xs[13], n_1_0)
expr9 = msat_make_plus(menv, xs[16], n_15_0)
expr10 = msat_make_plus(menv, xs[20], n_13_0)
expr11 = msat_make_plus(menv, xs[22], n_14_0)
expr12 = msat_make_plus(menv, xs[26], n_4_0)
expr13 = msat_make_plus(menv, xs[30], n_17_0)
expr14 = msat_make_plus(menv, xs[31], n_4_0)
expr15 = msat_make_plus(menv, xs[32], n_5_0)
expr16 = msat_make_plus(menv, xs[34], n_7_0)
expr17 = msat_make_plus(menv, xs[35], n_9_0)
expr18 = msat_make_plus(menv, xs[38], n_13_0)
expr19 = msat_make_plus(menv, xs[39], n_4_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[4], expr0),
msat_make_geq(menv, x_xs[4], expr1),
msat_make_geq(menv, x_xs[4], expr2),
msat_make_geq(menv, x_xs[4], expr3),
msat_make_geq(menv, x_xs[4], expr4),
msat_make_geq(menv, x_xs[4], expr5),
msat_make_geq(menv, x_xs[4], expr6),
msat_make_geq(menv, x_xs[4], expr7),
msat_make_geq(menv, x_xs[4], expr8),
msat_make_geq(menv, x_xs[4], expr9),
msat_make_geq(menv, x_xs[4], expr10),
msat_make_geq(menv, x_xs[4], expr11),
msat_make_geq(menv, x_xs[4], expr12),
msat_make_geq(menv, x_xs[4], expr13),
msat_make_geq(menv, x_xs[4], expr14),
msat_make_geq(menv, x_xs[4], expr15),
msat_make_geq(menv, x_xs[4], expr16),
msat_make_geq(menv, x_xs[4], expr17),
msat_make_geq(menv, x_xs[4], expr18),
msat_make_geq(menv, x_xs[4], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[4], expr0),
msat_make_equal(menv, x_xs[4], expr1),
msat_make_equal(menv, x_xs[4], expr2),
msat_make_equal(menv, x_xs[4], expr3),
msat_make_equal(menv, x_xs[4], expr4),
msat_make_equal(menv, x_xs[4], expr5),
msat_make_equal(menv, x_xs[4], expr6),
msat_make_equal(menv, x_xs[4], expr7),
msat_make_equal(menv, x_xs[4], expr8),
msat_make_equal(menv, x_xs[4], expr9),
msat_make_equal(menv, x_xs[4], expr10),
msat_make_equal(menv, x_xs[4], expr11),
msat_make_equal(menv, x_xs[4], expr12),
msat_make_equal(menv, x_xs[4], expr13),
msat_make_equal(menv, x_xs[4], expr14),
msat_make_equal(menv, x_xs[4], expr15),
msat_make_equal(menv, x_xs[4], expr16),
msat_make_equal(menv, x_xs[4], expr17),
msat_make_equal(menv, x_xs[4], expr18),
msat_make_equal(menv, x_xs[4], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[2], n_14_0)
expr1 = msat_make_plus(menv, xs[3], n_3_0)
expr2 = msat_make_plus(menv, xs[4], n_6_0)
expr3 = msat_make_plus(menv, xs[5], n_17_0)
expr4 = msat_make_plus(menv, xs[7], n_20_0)
expr5 = msat_make_plus(menv, xs[8], n_3_0)
expr6 = msat_make_plus(menv, xs[9], n_16_0)
expr7 = msat_make_plus(menv, xs[10], n_8_0)
expr8 = msat_make_plus(menv, xs[11], n_14_0)
expr9 = msat_make_plus(menv, xs[12], n_20_0)
expr10 = msat_make_plus(menv, xs[13], n_1_0)
expr11 = msat_make_plus(menv, xs[15], n_12_0)
expr12 = msat_make_plus(menv, xs[16], n_20_0)
expr13 = msat_make_plus(menv, xs[22], n_3_0)
expr14 = msat_make_plus(menv, xs[28], n_6_0)
expr15 = msat_make_plus(menv, xs[29], n_20_0)
expr16 = msat_make_plus(menv, xs[32], n_5_0)
expr17 = msat_make_plus(menv, xs[33], n_13_0)
expr18 = msat_make_plus(menv, xs[38], n_2_0)
expr19 = msat_make_plus(menv, xs[39], n_15_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[5], expr0),
msat_make_geq(menv, x_xs[5], expr1),
msat_make_geq(menv, x_xs[5], expr2),
msat_make_geq(menv, x_xs[5], expr3),
msat_make_geq(menv, x_xs[5], expr4),
msat_make_geq(menv, x_xs[5], expr5),
msat_make_geq(menv, x_xs[5], expr6),
msat_make_geq(menv, x_xs[5], expr7),
msat_make_geq(menv, x_xs[5], expr8),
msat_make_geq(menv, x_xs[5], expr9),
msat_make_geq(menv, x_xs[5], expr10),
msat_make_geq(menv, x_xs[5], expr11),
msat_make_geq(menv, x_xs[5], expr12),
msat_make_geq(menv, x_xs[5], expr13),
msat_make_geq(menv, x_xs[5], expr14),
msat_make_geq(menv, x_xs[5], expr15),
msat_make_geq(menv, x_xs[5], expr16),
msat_make_geq(menv, x_xs[5], expr17),
msat_make_geq(menv, x_xs[5], expr18),
msat_make_geq(menv, x_xs[5], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[5], expr0),
msat_make_equal(menv, x_xs[5], expr1),
msat_make_equal(menv, x_xs[5], expr2),
msat_make_equal(menv, x_xs[5], expr3),
msat_make_equal(menv, x_xs[5], expr4),
msat_make_equal(menv, x_xs[5], expr5),
msat_make_equal(menv, x_xs[5], expr6),
msat_make_equal(menv, x_xs[5], expr7),
msat_make_equal(menv, x_xs[5], expr8),
msat_make_equal(menv, x_xs[5], expr9),
msat_make_equal(menv, x_xs[5], expr10),
msat_make_equal(menv, x_xs[5], expr11),
msat_make_equal(menv, x_xs[5], expr12),
msat_make_equal(menv, x_xs[5], expr13),
msat_make_equal(menv, x_xs[5], expr14),
msat_make_equal(menv, x_xs[5], expr15),
msat_make_equal(menv, x_xs[5], expr16),
msat_make_equal(menv, x_xs[5], expr17),
msat_make_equal(menv, x_xs[5], expr18),
msat_make_equal(menv, x_xs[5], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[2], n_14_0)
expr1 = msat_make_plus(menv, xs[5], n_15_0)
expr2 = msat_make_plus(menv, xs[6], n_3_0)
expr3 = msat_make_plus(menv, xs[7], n_5_0)
expr4 = msat_make_plus(menv, xs[8], n_1_0)
expr5 = msat_make_plus(menv, xs[9], n_3_0)
expr6 = msat_make_plus(menv, xs[10], n_6_0)
expr7 = msat_make_plus(menv, xs[14], n_11_0)
expr8 = msat_make_plus(menv, xs[15], n_15_0)
expr9 = msat_make_plus(menv, xs[17], n_11_0)
expr10 = msat_make_plus(menv, xs[18], n_10_0)
expr11 = msat_make_plus(menv, xs[20], n_18_0)
expr12 = msat_make_plus(menv, xs[26], n_17_0)
expr13 = msat_make_plus(menv, xs[27], n_20_0)
expr14 = msat_make_plus(menv, xs[28], n_7_0)
expr15 = msat_make_plus(menv, xs[29], n_6_0)
expr16 = msat_make_plus(menv, xs[31], n_15_0)
expr17 = msat_make_plus(menv, xs[32], n_9_0)
expr18 = msat_make_plus(menv, xs[36], n_18_0)
expr19 = msat_make_plus(menv, xs[37], n_5_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[6], expr0),
msat_make_geq(menv, x_xs[6], expr1),
msat_make_geq(menv, x_xs[6], expr2),
msat_make_geq(menv, x_xs[6], expr3),
msat_make_geq(menv, x_xs[6], expr4),
msat_make_geq(menv, x_xs[6], expr5),
msat_make_geq(menv, x_xs[6], expr6),
msat_make_geq(menv, x_xs[6], expr7),
msat_make_geq(menv, x_xs[6], expr8),
msat_make_geq(menv, x_xs[6], expr9),
msat_make_geq(menv, x_xs[6], expr10),
msat_make_geq(menv, x_xs[6], expr11),
msat_make_geq(menv, x_xs[6], expr12),
msat_make_geq(menv, x_xs[6], expr13),
msat_make_geq(menv, x_xs[6], expr14),
msat_make_geq(menv, x_xs[6], expr15),
msat_make_geq(menv, x_xs[6], expr16),
msat_make_geq(menv, x_xs[6], expr17),
msat_make_geq(menv, x_xs[6], expr18),
msat_make_geq(menv, x_xs[6], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[6], expr0),
msat_make_equal(menv, x_xs[6], expr1),
msat_make_equal(menv, x_xs[6], expr2),
msat_make_equal(menv, x_xs[6], expr3),
msat_make_equal(menv, x_xs[6], expr4),
msat_make_equal(menv, x_xs[6], expr5),
msat_make_equal(menv, x_xs[6], expr6),
msat_make_equal(menv, x_xs[6], expr7),
msat_make_equal(menv, x_xs[6], expr8),
msat_make_equal(menv, x_xs[6], expr9),
msat_make_equal(menv, x_xs[6], expr10),
msat_make_equal(menv, x_xs[6], expr11),
msat_make_equal(menv, x_xs[6], expr12),
msat_make_equal(menv, x_xs[6], expr13),
msat_make_equal(menv, x_xs[6], expr14),
msat_make_equal(menv, x_xs[6], expr15),
msat_make_equal(menv, x_xs[6], expr16),
msat_make_equal(menv, x_xs[6], expr17),
msat_make_equal(menv, x_xs[6], expr18),
msat_make_equal(menv, x_xs[6], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_13_0)
expr1 = msat_make_plus(menv, xs[2], n_14_0)
expr2 = msat_make_plus(menv, xs[4], n_7_0)
expr3 = msat_make_plus(menv, xs[7], n_15_0)
expr4 = msat_make_plus(menv, xs[8], n_9_0)
expr5 = msat_make_plus(menv, xs[10], n_15_0)
expr6 = msat_make_plus(menv, xs[11], n_5_0)
expr7 = msat_make_plus(menv, xs[15], n_14_0)
expr8 = msat_make_plus(menv, xs[18], n_14_0)
expr9 = msat_make_plus(menv, xs[19], n_20_0)
expr10 = msat_make_plus(menv, xs[20], n_7_0)
expr11 = msat_make_plus(menv, xs[26], n_6_0)
expr12 = msat_make_plus(menv, xs[28], n_19_0)
expr13 = msat_make_plus(menv, xs[29], n_2_0)
expr14 = msat_make_plus(menv, xs[32], n_16_0)
expr15 = msat_make_plus(menv, xs[33], n_12_0)
expr16 = msat_make_plus(menv, xs[34], n_17_0)
expr17 = msat_make_plus(menv, xs[36], n_14_0)
expr18 = msat_make_plus(menv, xs[37], n_12_0)
expr19 = msat_make_plus(menv, xs[38], n_2_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[7], expr0),
msat_make_geq(menv, x_xs[7], expr1),
msat_make_geq(menv, x_xs[7], expr2),
msat_make_geq(menv, x_xs[7], expr3),
msat_make_geq(menv, x_xs[7], expr4),
msat_make_geq(menv, x_xs[7], expr5),
msat_make_geq(menv, x_xs[7], expr6),
msat_make_geq(menv, x_xs[7], expr7),
msat_make_geq(menv, x_xs[7], expr8),
msat_make_geq(menv, x_xs[7], expr9),
msat_make_geq(menv, x_xs[7], expr10),
msat_make_geq(menv, x_xs[7], expr11),
msat_make_geq(menv, x_xs[7], expr12),
msat_make_geq(menv, x_xs[7], expr13),
msat_make_geq(menv, x_xs[7], expr14),
msat_make_geq(menv, x_xs[7], expr15),
msat_make_geq(menv, x_xs[7], expr16),
msat_make_geq(menv, x_xs[7], expr17),
msat_make_geq(menv, x_xs[7], expr18),
msat_make_geq(menv, x_xs[7], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[7], expr0),
msat_make_equal(menv, x_xs[7], expr1),
msat_make_equal(menv, x_xs[7], expr2),
msat_make_equal(menv, x_xs[7], expr3),
msat_make_equal(menv, x_xs[7], expr4),
msat_make_equal(menv, x_xs[7], expr5),
msat_make_equal(menv, x_xs[7], expr6),
msat_make_equal(menv, x_xs[7], expr7),
msat_make_equal(menv, x_xs[7], expr8),
msat_make_equal(menv, x_xs[7], expr9),
msat_make_equal(menv, x_xs[7], expr10),
msat_make_equal(menv, x_xs[7], expr11),
msat_make_equal(menv, x_xs[7], expr12),
msat_make_equal(menv, x_xs[7], expr13),
msat_make_equal(menv, x_xs[7], expr14),
msat_make_equal(menv, x_xs[7], expr15),
msat_make_equal(menv, x_xs[7], expr16),
msat_make_equal(menv, x_xs[7], expr17),
msat_make_equal(menv, x_xs[7], expr18),
msat_make_equal(menv, x_xs[7], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_6_0)
expr1 = msat_make_plus(menv, xs[2], n_3_0)
expr2 = msat_make_plus(menv, xs[4], n_15_0)
expr3 = msat_make_plus(menv, xs[7], n_17_0)
expr4 = msat_make_plus(menv, xs[14], n_11_0)
expr5 = msat_make_plus(menv, xs[15], n_17_0)
expr6 = msat_make_plus(menv, xs[17], n_7_0)
expr7 = msat_make_plus(menv, xs[18], n_13_0)
expr8 = msat_make_plus(menv, xs[19], n_5_0)
expr9 = msat_make_plus(menv, xs[20], n_19_0)
expr10 = msat_make_plus(menv, xs[22], n_13_0)
expr11 = msat_make_plus(menv, xs[24], n_16_0)
expr12 = msat_make_plus(menv, xs[26], n_7_0)
expr13 = msat_make_plus(menv, xs[28], n_4_0)
expr14 = msat_make_plus(menv, xs[29], n_15_0)
expr15 = msat_make_plus(menv, xs[31], n_15_0)
expr16 = msat_make_plus(menv, xs[32], n_2_0)
expr17 = msat_make_plus(menv, xs[36], n_5_0)
expr18 = msat_make_plus(menv, xs[37], n_2_0)
expr19 = msat_make_plus(menv, xs[39], n_20_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[8], expr0),
msat_make_geq(menv, x_xs[8], expr1),
msat_make_geq(menv, x_xs[8], expr2),
msat_make_geq(menv, x_xs[8], expr3),
msat_make_geq(menv, x_xs[8], expr4),
msat_make_geq(menv, x_xs[8], expr5),
msat_make_geq(menv, x_xs[8], expr6),
msat_make_geq(menv, x_xs[8], expr7),
msat_make_geq(menv, x_xs[8], expr8),
msat_make_geq(menv, x_xs[8], expr9),
msat_make_geq(menv, x_xs[8], expr10),
msat_make_geq(menv, x_xs[8], expr11),
msat_make_geq(menv, x_xs[8], expr12),
msat_make_geq(menv, x_xs[8], expr13),
msat_make_geq(menv, x_xs[8], expr14),
msat_make_geq(menv, x_xs[8], expr15),
msat_make_geq(menv, x_xs[8], expr16),
msat_make_geq(menv, x_xs[8], expr17),
msat_make_geq(menv, x_xs[8], expr18),
msat_make_geq(menv, x_xs[8], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[8], expr0),
msat_make_equal(menv, x_xs[8], expr1),
msat_make_equal(menv, x_xs[8], expr2),
msat_make_equal(menv, x_xs[8], expr3),
msat_make_equal(menv, x_xs[8], expr4),
msat_make_equal(menv, x_xs[8], expr5),
msat_make_equal(menv, x_xs[8], expr6),
msat_make_equal(menv, x_xs[8], expr7),
msat_make_equal(menv, x_xs[8], expr8),
msat_make_equal(menv, x_xs[8], expr9),
msat_make_equal(menv, x_xs[8], expr10),
msat_make_equal(menv, x_xs[8], expr11),
msat_make_equal(menv, x_xs[8], expr12),
msat_make_equal(menv, x_xs[8], expr13),
msat_make_equal(menv, x_xs[8], expr14),
msat_make_equal(menv, x_xs[8], expr15),
msat_make_equal(menv, x_xs[8], expr16),
msat_make_equal(menv, x_xs[8], expr17),
msat_make_equal(menv, x_xs[8], expr18),
msat_make_equal(menv, x_xs[8], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[2], n_6_0)
expr1 = msat_make_plus(menv, xs[3], n_2_0)
expr2 = msat_make_plus(menv, xs[6], n_17_0)
expr3 = msat_make_plus(menv, xs[7], n_16_0)
expr4 = msat_make_plus(menv, xs[8], n_10_0)
expr5 = msat_make_plus(menv, xs[9], n_7_0)
expr6 = msat_make_plus(menv, xs[10], n_1_0)
expr7 = msat_make_plus(menv, xs[11], n_2_0)
expr8 = msat_make_plus(menv, xs[13], n_17_0)
expr9 = msat_make_plus(menv, xs[14], n_14_0)
expr10 = msat_make_plus(menv, xs[15], n_1_0)
expr11 = msat_make_plus(menv, xs[17], n_2_0)
expr12 = msat_make_plus(menv, xs[18], n_16_0)
expr13 = msat_make_plus(menv, xs[22], n_5_0)
expr14 = msat_make_plus(menv, xs[24], n_19_0)
expr15 = msat_make_plus(menv, xs[27], n_4_0)
expr16 = msat_make_plus(menv, xs[31], n_16_0)
expr17 = msat_make_plus(menv, xs[32], n_2_0)
expr18 = msat_make_plus(menv, xs[33], n_2_0)
expr19 = msat_make_plus(menv, xs[37], n_2_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[9], expr0),
msat_make_geq(menv, x_xs[9], expr1),
msat_make_geq(menv, x_xs[9], expr2),
msat_make_geq(menv, x_xs[9], expr3),
msat_make_geq(menv, x_xs[9], expr4),
msat_make_geq(menv, x_xs[9], expr5),
msat_make_geq(menv, x_xs[9], expr6),
msat_make_geq(menv, x_xs[9], expr7),
msat_make_geq(menv, x_xs[9], expr8),
msat_make_geq(menv, x_xs[9], expr9),
msat_make_geq(menv, x_xs[9], expr10),
msat_make_geq(menv, x_xs[9], expr11),
msat_make_geq(menv, x_xs[9], expr12),
msat_make_geq(menv, x_xs[9], expr13),
msat_make_geq(menv, x_xs[9], expr14),
msat_make_geq(menv, x_xs[9], expr15),
msat_make_geq(menv, x_xs[9], expr16),
msat_make_geq(menv, x_xs[9], expr17),
msat_make_geq(menv, x_xs[9], expr18),
msat_make_geq(menv, x_xs[9], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[9], expr0),
msat_make_equal(menv, x_xs[9], expr1),
msat_make_equal(menv, x_xs[9], expr2),
msat_make_equal(menv, x_xs[9], expr3),
msat_make_equal(menv, x_xs[9], expr4),
msat_make_equal(menv, x_xs[9], expr5),
msat_make_equal(menv, x_xs[9], expr6),
msat_make_equal(menv, x_xs[9], expr7),
msat_make_equal(menv, x_xs[9], expr8),
msat_make_equal(menv, x_xs[9], expr9),
msat_make_equal(menv, x_xs[9], expr10),
msat_make_equal(menv, x_xs[9], expr11),
msat_make_equal(menv, x_xs[9], expr12),
msat_make_equal(menv, x_xs[9], expr13),
msat_make_equal(menv, x_xs[9], expr14),
msat_make_equal(menv, x_xs[9], expr15),
msat_make_equal(menv, x_xs[9], expr16),
msat_make_equal(menv, x_xs[9], expr17),
msat_make_equal(menv, x_xs[9], expr18),
msat_make_equal(menv, x_xs[9], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[2], n_14_0)
expr1 = msat_make_plus(menv, xs[3], n_17_0)
expr2 = msat_make_plus(menv, xs[4], n_9_0)
expr3 = msat_make_plus(menv, xs[7], n_7_0)
expr4 = msat_make_plus(menv, xs[8], n_6_0)
expr5 = msat_make_plus(menv, xs[12], n_16_0)
expr6 = msat_make_plus(menv, xs[14], n_16_0)
expr7 = msat_make_plus(menv, xs[15], n_12_0)
expr8 = msat_make_plus(menv, xs[18], n_18_0)
expr9 = msat_make_plus(menv, xs[21], n_4_0)
expr10 = msat_make_plus(menv, xs[25], n_13_0)
expr11 = msat_make_plus(menv, xs[26], n_16_0)
expr12 = msat_make_plus(menv, xs[27], n_5_0)
expr13 = msat_make_plus(menv, xs[30], n_12_0)
expr14 = msat_make_plus(menv, xs[31], n_6_0)
expr15 = msat_make_plus(menv, xs[34], n_18_0)
expr16 = msat_make_plus(menv, xs[35], n_3_0)
expr17 = msat_make_plus(menv, xs[36], n_18_0)
expr18 = msat_make_plus(menv, xs[37], n_11_0)
expr19 = msat_make_plus(menv, xs[39], n_14_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[10], expr0),
msat_make_geq(menv, x_xs[10], expr1),
msat_make_geq(menv, x_xs[10], expr2),
msat_make_geq(menv, x_xs[10], expr3),
msat_make_geq(menv, x_xs[10], expr4),
msat_make_geq(menv, x_xs[10], expr5),
msat_make_geq(menv, x_xs[10], expr6),
msat_make_geq(menv, x_xs[10], expr7),
msat_make_geq(menv, x_xs[10], expr8),
msat_make_geq(menv, x_xs[10], expr9),
msat_make_geq(menv, x_xs[10], expr10),
msat_make_geq(menv, x_xs[10], expr11),
msat_make_geq(menv, x_xs[10], expr12),
msat_make_geq(menv, x_xs[10], expr13),
msat_make_geq(menv, x_xs[10], expr14),
msat_make_geq(menv, x_xs[10], expr15),
msat_make_geq(menv, x_xs[10], expr16),
msat_make_geq(menv, x_xs[10], expr17),
msat_make_geq(menv, x_xs[10], expr18),
msat_make_geq(menv, x_xs[10], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[10], expr0),
msat_make_equal(menv, x_xs[10], expr1),
msat_make_equal(menv, x_xs[10], expr2),
msat_make_equal(menv, x_xs[10], expr3),
msat_make_equal(menv, x_xs[10], expr4),
msat_make_equal(menv, x_xs[10], expr5),
msat_make_equal(menv, x_xs[10], expr6),
msat_make_equal(menv, x_xs[10], expr7),
msat_make_equal(menv, x_xs[10], expr8),
msat_make_equal(menv, x_xs[10], expr9),
msat_make_equal(menv, x_xs[10], expr10),
msat_make_equal(menv, x_xs[10], expr11),
msat_make_equal(menv, x_xs[10], expr12),
msat_make_equal(menv, x_xs[10], expr13),
msat_make_equal(menv, x_xs[10], expr14),
msat_make_equal(menv, x_xs[10], expr15),
msat_make_equal(menv, x_xs[10], expr16),
msat_make_equal(menv, x_xs[10], expr17),
msat_make_equal(menv, x_xs[10], expr18),
msat_make_equal(menv, x_xs[10], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_4_0)
expr1 = msat_make_plus(menv, xs[3], n_13_0)
expr2 = msat_make_plus(menv, xs[4], n_9_0)
expr3 = msat_make_plus(menv, xs[9], n_16_0)
expr4 = msat_make_plus(menv, xs[13], n_3_0)
expr5 = msat_make_plus(menv, xs[15], n_12_0)
expr6 = msat_make_plus(menv, xs[17], n_6_0)
expr7 = msat_make_plus(menv, xs[18], n_2_0)
expr8 = msat_make_plus(menv, xs[19], n_3_0)
expr9 = msat_make_plus(menv, xs[20], n_14_0)
expr10 = msat_make_plus(menv, xs[23], n_20_0)
expr11 = msat_make_plus(menv, xs[24], n_15_0)
expr12 = msat_make_plus(menv, xs[25], n_3_0)
expr13 = msat_make_plus(menv, xs[30], n_1_0)
expr14 = msat_make_plus(menv, xs[31], n_15_0)
expr15 = msat_make_plus(menv, xs[32], n_19_0)
expr16 = msat_make_plus(menv, xs[33], n_13_0)
expr17 = msat_make_plus(menv, xs[34], n_11_0)
expr18 = msat_make_plus(menv, xs[37], n_3_0)
expr19 = msat_make_plus(menv, xs[39], n_4_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[11], expr0),
msat_make_geq(menv, x_xs[11], expr1),
msat_make_geq(menv, x_xs[11], expr2),
msat_make_geq(menv, x_xs[11], expr3),
msat_make_geq(menv, x_xs[11], expr4),
msat_make_geq(menv, x_xs[11], expr5),
msat_make_geq(menv, x_xs[11], expr6),
msat_make_geq(menv, x_xs[11], expr7),
msat_make_geq(menv, x_xs[11], expr8),
msat_make_geq(menv, x_xs[11], expr9),
msat_make_geq(menv, x_xs[11], expr10),
msat_make_geq(menv, x_xs[11], expr11),
msat_make_geq(menv, x_xs[11], expr12),
msat_make_geq(menv, x_xs[11], expr13),
msat_make_geq(menv, x_xs[11], expr14),
msat_make_geq(menv, x_xs[11], expr15),
msat_make_geq(menv, x_xs[11], expr16),
msat_make_geq(menv, x_xs[11], expr17),
msat_make_geq(menv, x_xs[11], expr18),
msat_make_geq(menv, x_xs[11], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[11], expr0),
msat_make_equal(menv, x_xs[11], expr1),
msat_make_equal(menv, x_xs[11], expr2),
msat_make_equal(menv, x_xs[11], expr3),
msat_make_equal(menv, x_xs[11], expr4),
msat_make_equal(menv, x_xs[11], expr5),
msat_make_equal(menv, x_xs[11], expr6),
msat_make_equal(menv, x_xs[11], expr7),
msat_make_equal(menv, x_xs[11], expr8),
msat_make_equal(menv, x_xs[11], expr9),
msat_make_equal(menv, x_xs[11], expr10),
msat_make_equal(menv, x_xs[11], expr11),
msat_make_equal(menv, x_xs[11], expr12),
msat_make_equal(menv, x_xs[11], expr13),
msat_make_equal(menv, x_xs[11], expr14),
msat_make_equal(menv, x_xs[11], expr15),
msat_make_equal(menv, x_xs[11], expr16),
msat_make_equal(menv, x_xs[11], expr17),
msat_make_equal(menv, x_xs[11], expr18),
msat_make_equal(menv, x_xs[11], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_2_0)
expr1 = msat_make_plus(menv, xs[1], n_12_0)
expr2 = msat_make_plus(menv, xs[2], n_20_0)
expr3 = msat_make_plus(menv, xs[3], n_5_0)
expr4 = msat_make_plus(menv, xs[6], n_6_0)
expr5 = msat_make_plus(menv, xs[11], n_17_0)
expr6 = msat_make_plus(menv, xs[14], n_1_0)
expr7 = msat_make_plus(menv, xs[16], n_7_0)
expr8 = msat_make_plus(menv, xs[17], n_17_0)
expr9 = msat_make_plus(menv, xs[19], n_14_0)
expr10 = msat_make_plus(menv, xs[21], n_4_0)
expr11 = msat_make_plus(menv, xs[23], n_2_0)
expr12 = msat_make_plus(menv, xs[24], n_13_0)
expr13 = msat_make_plus(menv, xs[25], n_8_0)
expr14 = msat_make_plus(menv, xs[27], n_5_0)
expr15 = msat_make_plus(menv, xs[29], n_15_0)
expr16 = msat_make_plus(menv, xs[31], n_6_0)
expr17 = msat_make_plus(menv, xs[33], n_2_0)
expr18 = msat_make_plus(menv, xs[34], n_11_0)
expr19 = msat_make_plus(menv, xs[38], n_2_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[12], expr0),
msat_make_geq(menv, x_xs[12], expr1),
msat_make_geq(menv, x_xs[12], expr2),
msat_make_geq(menv, x_xs[12], expr3),
msat_make_geq(menv, x_xs[12], expr4),
msat_make_geq(menv, x_xs[12], expr5),
msat_make_geq(menv, x_xs[12], expr6),
msat_make_geq(menv, x_xs[12], expr7),
msat_make_geq(menv, x_xs[12], expr8),
msat_make_geq(menv, x_xs[12], expr9),
msat_make_geq(menv, x_xs[12], expr10),
msat_make_geq(menv, x_xs[12], expr11),
msat_make_geq(menv, x_xs[12], expr12),
msat_make_geq(menv, x_xs[12], expr13),
msat_make_geq(menv, x_xs[12], expr14),
msat_make_geq(menv, x_xs[12], expr15),
msat_make_geq(menv, x_xs[12], expr16),
msat_make_geq(menv, x_xs[12], expr17),
msat_make_geq(menv, x_xs[12], expr18),
msat_make_geq(menv, x_xs[12], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[12], expr0),
msat_make_equal(menv, x_xs[12], expr1),
msat_make_equal(menv, x_xs[12], expr2),
msat_make_equal(menv, x_xs[12], expr3),
msat_make_equal(menv, x_xs[12], expr4),
msat_make_equal(menv, x_xs[12], expr5),
msat_make_equal(menv, x_xs[12], expr6),
msat_make_equal(menv, x_xs[12], expr7),
msat_make_equal(menv, x_xs[12], expr8),
msat_make_equal(menv, x_xs[12], expr9),
msat_make_equal(menv, x_xs[12], expr10),
msat_make_equal(menv, x_xs[12], expr11),
msat_make_equal(menv, x_xs[12], expr12),
msat_make_equal(menv, x_xs[12], expr13),
msat_make_equal(menv, x_xs[12], expr14),
msat_make_equal(menv, x_xs[12], expr15),
msat_make_equal(menv, x_xs[12], expr16),
msat_make_equal(menv, x_xs[12], expr17),
msat_make_equal(menv, x_xs[12], expr18),
msat_make_equal(menv, x_xs[12], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_1_0)
expr1 = msat_make_plus(menv, xs[3], n_2_0)
expr2 = msat_make_plus(menv, xs[4], n_1_0)
expr3 = msat_make_plus(menv, xs[6], n_5_0)
expr4 = msat_make_plus(menv, xs[7], n_12_0)
expr5 = msat_make_plus(menv, xs[8], n_20_0)
expr6 = msat_make_plus(menv, xs[9], n_19_0)
expr7 = msat_make_plus(menv, xs[10], n_3_0)
expr8 = msat_make_plus(menv, xs[11], n_20_0)
expr9 = msat_make_plus(menv, xs[14], n_8_0)
expr10 = msat_make_plus(menv, xs[16], n_15_0)
expr11 = msat_make_plus(menv, xs[22], n_16_0)
expr12 = msat_make_plus(menv, xs[24], n_15_0)
expr13 = msat_make_plus(menv, xs[26], n_2_0)
expr14 = msat_make_plus(menv, xs[27], n_13_0)
expr15 = msat_make_plus(menv, xs[30], n_6_0)
expr16 = msat_make_plus(menv, xs[31], n_6_0)
expr17 = msat_make_plus(menv, xs[33], n_16_0)
expr18 = msat_make_plus(menv, xs[35], n_11_0)
expr19 = msat_make_plus(menv, xs[37], n_1_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[13], expr0),
msat_make_geq(menv, x_xs[13], expr1),
msat_make_geq(menv, x_xs[13], expr2),
msat_make_geq(menv, x_xs[13], expr3),
msat_make_geq(menv, x_xs[13], expr4),
msat_make_geq(menv, x_xs[13], expr5),
msat_make_geq(menv, x_xs[13], expr6),
msat_make_geq(menv, x_xs[13], expr7),
msat_make_geq(menv, x_xs[13], expr8),
msat_make_geq(menv, x_xs[13], expr9),
msat_make_geq(menv, x_xs[13], expr10),
msat_make_geq(menv, x_xs[13], expr11),
msat_make_geq(menv, x_xs[13], expr12),
msat_make_geq(menv, x_xs[13], expr13),
msat_make_geq(menv, x_xs[13], expr14),
msat_make_geq(menv, x_xs[13], expr15),
msat_make_geq(menv, x_xs[13], expr16),
msat_make_geq(menv, x_xs[13], expr17),
msat_make_geq(menv, x_xs[13], expr18),
msat_make_geq(menv, x_xs[13], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[13], expr0),
msat_make_equal(menv, x_xs[13], expr1),
msat_make_equal(menv, x_xs[13], expr2),
msat_make_equal(menv, x_xs[13], expr3),
msat_make_equal(menv, x_xs[13], expr4),
msat_make_equal(menv, x_xs[13], expr5),
msat_make_equal(menv, x_xs[13], expr6),
msat_make_equal(menv, x_xs[13], expr7),
msat_make_equal(menv, x_xs[13], expr8),
msat_make_equal(menv, x_xs[13], expr9),
msat_make_equal(menv, x_xs[13], expr10),
msat_make_equal(menv, x_xs[13], expr11),
msat_make_equal(menv, x_xs[13], expr12),
msat_make_equal(menv, x_xs[13], expr13),
msat_make_equal(menv, x_xs[13], expr14),
msat_make_equal(menv, x_xs[13], expr15),
msat_make_equal(menv, x_xs[13], expr16),
msat_make_equal(menv, x_xs[13], expr17),
msat_make_equal(menv, x_xs[13], expr18),
msat_make_equal(menv, x_xs[13], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_8_0)
expr1 = msat_make_plus(menv, xs[1], n_5_0)
expr2 = msat_make_plus(menv, xs[2], n_6_0)
expr3 = msat_make_plus(menv, xs[4], n_5_0)
expr4 = msat_make_plus(menv, xs[7], n_20_0)
expr5 = msat_make_plus(menv, xs[8], n_20_0)
expr6 = msat_make_plus(menv, xs[9], n_15_0)
expr7 = msat_make_plus(menv, xs[14], n_4_0)
expr8 = msat_make_plus(menv, xs[17], n_10_0)
expr9 = msat_make_plus(menv, xs[18], n_19_0)
expr10 = msat_make_plus(menv, xs[19], n_10_0)
expr11 = msat_make_plus(menv, xs[20], n_5_0)
expr12 = msat_make_plus(menv, xs[21], n_11_0)
expr13 = msat_make_plus(menv, xs[22], n_12_0)
expr14 = msat_make_plus(menv, xs[23], n_7_0)
expr15 = msat_make_plus(menv, xs[24], n_12_0)
expr16 = msat_make_plus(menv, xs[25], n_4_0)
expr17 = msat_make_plus(menv, xs[27], n_2_0)
expr18 = msat_make_plus(menv, xs[30], n_6_0)
expr19 = msat_make_plus(menv, xs[37], n_8_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[14], expr0),
msat_make_geq(menv, x_xs[14], expr1),
msat_make_geq(menv, x_xs[14], expr2),
msat_make_geq(menv, x_xs[14], expr3),
msat_make_geq(menv, x_xs[14], expr4),
msat_make_geq(menv, x_xs[14], expr5),
msat_make_geq(menv, x_xs[14], expr6),
msat_make_geq(menv, x_xs[14], expr7),
msat_make_geq(menv, x_xs[14], expr8),
msat_make_geq(menv, x_xs[14], expr9),
msat_make_geq(menv, x_xs[14], expr10),
msat_make_geq(menv, x_xs[14], expr11),
msat_make_geq(menv, x_xs[14], expr12),
msat_make_geq(menv, x_xs[14], expr13),
msat_make_geq(menv, x_xs[14], expr14),
msat_make_geq(menv, x_xs[14], expr15),
msat_make_geq(menv, x_xs[14], expr16),
msat_make_geq(menv, x_xs[14], expr17),
msat_make_geq(menv, x_xs[14], expr18),
msat_make_geq(menv, x_xs[14], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[14], expr0),
msat_make_equal(menv, x_xs[14], expr1),
msat_make_equal(menv, x_xs[14], expr2),
msat_make_equal(menv, x_xs[14], expr3),
msat_make_equal(menv, x_xs[14], expr4),
msat_make_equal(menv, x_xs[14], expr5),
msat_make_equal(menv, x_xs[14], expr6),
msat_make_equal(menv, x_xs[14], expr7),
msat_make_equal(menv, x_xs[14], expr8),
msat_make_equal(menv, x_xs[14], expr9),
msat_make_equal(menv, x_xs[14], expr10),
msat_make_equal(menv, x_xs[14], expr11),
msat_make_equal(menv, x_xs[14], expr12),
msat_make_equal(menv, x_xs[14], expr13),
msat_make_equal(menv, x_xs[14], expr14),
msat_make_equal(menv, x_xs[14], expr15),
msat_make_equal(menv, x_xs[14], expr16),
msat_make_equal(menv, x_xs[14], expr17),
msat_make_equal(menv, x_xs[14], expr18),
msat_make_equal(menv, x_xs[14], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[4], n_16_0)
expr1 = msat_make_plus(menv, xs[5], n_20_0)
expr2 = msat_make_plus(menv, xs[7], n_19_0)
expr3 = msat_make_plus(menv, xs[11], n_10_0)
expr4 = msat_make_plus(menv, xs[14], n_17_0)
expr5 = msat_make_plus(menv, xs[15], n_12_0)
expr6 = msat_make_plus(menv, xs[19], n_8_0)
expr7 = msat_make_plus(menv, xs[20], n_7_0)
expr8 = msat_make_plus(menv, xs[21], n_16_0)
expr9 = msat_make_plus(menv, xs[24], n_3_0)
expr10 = msat_make_plus(menv, xs[25], n_3_0)
expr11 = msat_make_plus(menv, xs[26], n_11_0)
expr12 = msat_make_plus(menv, xs[27], n_15_0)
expr13 = msat_make_plus(menv, xs[28], n_1_0)
expr14 = msat_make_plus(menv, xs[31], n_12_0)
expr15 = msat_make_plus(menv, xs[33], n_2_0)
expr16 = msat_make_plus(menv, xs[34], n_5_0)
expr17 = msat_make_plus(menv, xs[36], n_7_0)
expr18 = msat_make_plus(menv, xs[37], n_19_0)
expr19 = msat_make_plus(menv, xs[39], n_8_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[15], expr0),
msat_make_geq(menv, x_xs[15], expr1),
msat_make_geq(menv, x_xs[15], expr2),
msat_make_geq(menv, x_xs[15], expr3),
msat_make_geq(menv, x_xs[15], expr4),
msat_make_geq(menv, x_xs[15], expr5),
msat_make_geq(menv, x_xs[15], expr6),
msat_make_geq(menv, x_xs[15], expr7),
msat_make_geq(menv, x_xs[15], expr8),
msat_make_geq(menv, x_xs[15], expr9),
msat_make_geq(menv, x_xs[15], expr10),
msat_make_geq(menv, x_xs[15], expr11),
msat_make_geq(menv, x_xs[15], expr12),
msat_make_geq(menv, x_xs[15], expr13),
msat_make_geq(menv, x_xs[15], expr14),
msat_make_geq(menv, x_xs[15], expr15),
msat_make_geq(menv, x_xs[15], expr16),
msat_make_geq(menv, x_xs[15], expr17),
msat_make_geq(menv, x_xs[15], expr18),
msat_make_geq(menv, x_xs[15], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[15], expr0),
msat_make_equal(menv, x_xs[15], expr1),
msat_make_equal(menv, x_xs[15], expr2),
msat_make_equal(menv, x_xs[15], expr3),
msat_make_equal(menv, x_xs[15], expr4),
msat_make_equal(menv, x_xs[15], expr5),
msat_make_equal(menv, x_xs[15], expr6),
msat_make_equal(menv, x_xs[15], expr7),
msat_make_equal(menv, x_xs[15], expr8),
msat_make_equal(menv, x_xs[15], expr9),
msat_make_equal(menv, x_xs[15], expr10),
msat_make_equal(menv, x_xs[15], expr11),
msat_make_equal(menv, x_xs[15], expr12),
msat_make_equal(menv, x_xs[15], expr13),
msat_make_equal(menv, x_xs[15], expr14),
msat_make_equal(menv, x_xs[15], expr15),
msat_make_equal(menv, x_xs[15], expr16),
msat_make_equal(menv, x_xs[15], expr17),
msat_make_equal(menv, x_xs[15], expr18),
msat_make_equal(menv, x_xs[15], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_16_0)
expr1 = msat_make_plus(menv, xs[2], n_5_0)
expr2 = msat_make_plus(menv, xs[4], n_9_0)
expr3 = msat_make_plus(menv, xs[9], n_9_0)
expr4 = msat_make_plus(menv, xs[14], n_3_0)
expr5 = msat_make_plus(menv, xs[15], n_10_0)
expr6 = msat_make_plus(menv, xs[16], n_17_0)
expr7 = msat_make_plus(menv, xs[20], n_7_0)
expr8 = msat_make_plus(menv, xs[21], n_9_0)
expr9 = msat_make_plus(menv, xs[22], n_4_0)
expr10 = msat_make_plus(menv, xs[23], n_1_0)
expr11 = msat_make_plus(menv, xs[24], n_1_0)
expr12 = msat_make_plus(menv, xs[25], n_15_0)
expr13 = msat_make_plus(menv, xs[28], n_2_0)
expr14 = msat_make_plus(menv, xs[30], n_9_0)
expr15 = msat_make_plus(menv, xs[31], n_7_0)
expr16 = msat_make_plus(menv, xs[32], n_5_0)
expr17 = msat_make_plus(menv, xs[34], n_13_0)
expr18 = msat_make_plus(menv, xs[35], n_12_0)
expr19 = msat_make_plus(menv, xs[36], n_18_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[16], expr0),
msat_make_geq(menv, x_xs[16], expr1),
msat_make_geq(menv, x_xs[16], expr2),
msat_make_geq(menv, x_xs[16], expr3),
msat_make_geq(menv, x_xs[16], expr4),
msat_make_geq(menv, x_xs[16], expr5),
msat_make_geq(menv, x_xs[16], expr6),
msat_make_geq(menv, x_xs[16], expr7),
msat_make_geq(menv, x_xs[16], expr8),
msat_make_geq(menv, x_xs[16], expr9),
msat_make_geq(menv, x_xs[16], expr10),
msat_make_geq(menv, x_xs[16], expr11),
msat_make_geq(menv, x_xs[16], expr12),
msat_make_geq(menv, x_xs[16], expr13),
msat_make_geq(menv, x_xs[16], expr14),
msat_make_geq(menv, x_xs[16], expr15),
msat_make_geq(menv, x_xs[16], expr16),
msat_make_geq(menv, x_xs[16], expr17),
msat_make_geq(menv, x_xs[16], expr18),
msat_make_geq(menv, x_xs[16], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[16], expr0),
msat_make_equal(menv, x_xs[16], expr1),
msat_make_equal(menv, x_xs[16], expr2),
msat_make_equal(menv, x_xs[16], expr3),
msat_make_equal(menv, x_xs[16], expr4),
msat_make_equal(menv, x_xs[16], expr5),
msat_make_equal(menv, x_xs[16], expr6),
msat_make_equal(menv, x_xs[16], expr7),
msat_make_equal(menv, x_xs[16], expr8),
msat_make_equal(menv, x_xs[16], expr9),
msat_make_equal(menv, x_xs[16], expr10),
msat_make_equal(menv, x_xs[16], expr11),
msat_make_equal(menv, x_xs[16], expr12),
msat_make_equal(menv, x_xs[16], expr13),
msat_make_equal(menv, x_xs[16], expr14),
msat_make_equal(menv, x_xs[16], expr15),
msat_make_equal(menv, x_xs[16], expr16),
msat_make_equal(menv, x_xs[16], expr17),
msat_make_equal(menv, x_xs[16], expr18),
msat_make_equal(menv, x_xs[16], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_4_0)
expr1 = msat_make_plus(menv, xs[3], n_10_0)
expr2 = msat_make_plus(menv, xs[5], n_11_0)
expr3 = msat_make_plus(menv, xs[6], n_8_0)
expr4 = msat_make_plus(menv, xs[9], n_18_0)
expr5 = msat_make_plus(menv, xs[11], n_2_0)
expr6 = msat_make_plus(menv, xs[13], n_1_0)
expr7 = msat_make_plus(menv, xs[14], n_14_0)
expr8 = msat_make_plus(menv, xs[15], n_3_0)
expr9 = msat_make_plus(menv, xs[17], n_19_0)
expr10 = msat_make_plus(menv, xs[18], n_3_0)
expr11 = msat_make_plus(menv, xs[19], n_8_0)
expr12 = msat_make_plus(menv, xs[24], n_7_0)
expr13 = msat_make_plus(menv, xs[26], n_10_0)
expr14 = msat_make_plus(menv, xs[27], n_16_0)
expr15 = msat_make_plus(menv, xs[34], n_18_0)
expr16 = msat_make_plus(menv, xs[35], n_6_0)
expr17 = msat_make_plus(menv, xs[36], n_13_0)
expr18 = msat_make_plus(menv, xs[37], n_2_0)
expr19 = msat_make_plus(menv, xs[39], n_15_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[17], expr0),
msat_make_geq(menv, x_xs[17], expr1),
msat_make_geq(menv, x_xs[17], expr2),
msat_make_geq(menv, x_xs[17], expr3),
msat_make_geq(menv, x_xs[17], expr4),
msat_make_geq(menv, x_xs[17], expr5),
msat_make_geq(menv, x_xs[17], expr6),
msat_make_geq(menv, x_xs[17], expr7),
msat_make_geq(menv, x_xs[17], expr8),
msat_make_geq(menv, x_xs[17], expr9),
msat_make_geq(menv, x_xs[17], expr10),
msat_make_geq(menv, x_xs[17], expr11),
msat_make_geq(menv, x_xs[17], expr12),
msat_make_geq(menv, x_xs[17], expr13),
msat_make_geq(menv, x_xs[17], expr14),
msat_make_geq(menv, x_xs[17], expr15),
msat_make_geq(menv, x_xs[17], expr16),
msat_make_geq(menv, x_xs[17], expr17),
msat_make_geq(menv, x_xs[17], expr18),
msat_make_geq(menv, x_xs[17], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[17], expr0),
msat_make_equal(menv, x_xs[17], expr1),
msat_make_equal(menv, x_xs[17], expr2),
msat_make_equal(menv, x_xs[17], expr3),
msat_make_equal(menv, x_xs[17], expr4),
msat_make_equal(menv, x_xs[17], expr5),
msat_make_equal(menv, x_xs[17], expr6),
msat_make_equal(menv, x_xs[17], expr7),
msat_make_equal(menv, x_xs[17], expr8),
msat_make_equal(menv, x_xs[17], expr9),
msat_make_equal(menv, x_xs[17], expr10),
msat_make_equal(menv, x_xs[17], expr11),
msat_make_equal(menv, x_xs[17], expr12),
msat_make_equal(menv, x_xs[17], expr13),
msat_make_equal(menv, x_xs[17], expr14),
msat_make_equal(menv, x_xs[17], expr15),
msat_make_equal(menv, x_xs[17], expr16),
msat_make_equal(menv, x_xs[17], expr17),
msat_make_equal(menv, x_xs[17], expr18),
msat_make_equal(menv, x_xs[17], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_3_0)
expr1 = msat_make_plus(menv, xs[2], n_17_0)
expr2 = msat_make_plus(menv, xs[3], n_18_0)
expr3 = msat_make_plus(menv, xs[4], n_4_0)
expr4 = msat_make_plus(menv, xs[8], n_2_0)
expr5 = msat_make_plus(menv, xs[10], n_3_0)
expr6 = msat_make_plus(menv, xs[11], n_6_0)
expr7 = msat_make_plus(menv, xs[14], n_11_0)
expr8 = msat_make_plus(menv, xs[16], n_4_0)
expr9 = msat_make_plus(menv, xs[17], n_10_0)
expr10 = msat_make_plus(menv, xs[19], n_16_0)
expr11 = msat_make_plus(menv, xs[25], n_19_0)
expr12 = msat_make_plus(menv, xs[26], n_6_0)
expr13 = msat_make_plus(menv, xs[28], n_11_0)
expr14 = msat_make_plus(menv, xs[29], n_12_0)
expr15 = msat_make_plus(menv, xs[30], n_4_0)
expr16 = msat_make_plus(menv, xs[32], n_19_0)
expr17 = msat_make_plus(menv, xs[35], n_3_0)
expr18 = msat_make_plus(menv, xs[37], n_16_0)
expr19 = msat_make_plus(menv, xs[38], n_14_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[18], expr0),
msat_make_geq(menv, x_xs[18], expr1),
msat_make_geq(menv, x_xs[18], expr2),
msat_make_geq(menv, x_xs[18], expr3),
msat_make_geq(menv, x_xs[18], expr4),
msat_make_geq(menv, x_xs[18], expr5),
msat_make_geq(menv, x_xs[18], expr6),
msat_make_geq(menv, x_xs[18], expr7),
msat_make_geq(menv, x_xs[18], expr8),
msat_make_geq(menv, x_xs[18], expr9),
msat_make_geq(menv, x_xs[18], expr10),
msat_make_geq(menv, x_xs[18], expr11),
msat_make_geq(menv, x_xs[18], expr12),
msat_make_geq(menv, x_xs[18], expr13),
msat_make_geq(menv, x_xs[18], expr14),
msat_make_geq(menv, x_xs[18], expr15),
msat_make_geq(menv, x_xs[18], expr16),
msat_make_geq(menv, x_xs[18], expr17),
msat_make_geq(menv, x_xs[18], expr18),
msat_make_geq(menv, x_xs[18], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[18], expr0),
msat_make_equal(menv, x_xs[18], expr1),
msat_make_equal(menv, x_xs[18], expr2),
msat_make_equal(menv, x_xs[18], expr3),
msat_make_equal(menv, x_xs[18], expr4),
msat_make_equal(menv, x_xs[18], expr5),
msat_make_equal(menv, x_xs[18], expr6),
msat_make_equal(menv, x_xs[18], expr7),
msat_make_equal(menv, x_xs[18], expr8),
msat_make_equal(menv, x_xs[18], expr9),
msat_make_equal(menv, x_xs[18], expr10),
msat_make_equal(menv, x_xs[18], expr11),
msat_make_equal(menv, x_xs[18], expr12),
msat_make_equal(menv, x_xs[18], expr13),
msat_make_equal(menv, x_xs[18], expr14),
msat_make_equal(menv, x_xs[18], expr15),
msat_make_equal(menv, x_xs[18], expr16),
msat_make_equal(menv, x_xs[18], expr17),
msat_make_equal(menv, x_xs[18], expr18),
msat_make_equal(menv, x_xs[18], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_13_0)
expr1 = msat_make_plus(menv, xs[2], n_19_0)
expr2 = msat_make_plus(menv, xs[9], n_8_0)
expr3 = msat_make_plus(menv, xs[10], n_15_0)
expr4 = msat_make_plus(menv, xs[11], n_18_0)
expr5 = msat_make_plus(menv, xs[15], n_10_0)
expr6 = msat_make_plus(menv, xs[16], n_11_0)
expr7 = msat_make_plus(menv, xs[19], n_18_0)
expr8 = msat_make_plus(menv, xs[22], n_7_0)
expr9 = msat_make_plus(menv, xs[25], n_14_0)
expr10 = msat_make_plus(menv, xs[26], n_7_0)
expr11 = msat_make_plus(menv, xs[28], n_14_0)
expr12 = msat_make_plus(menv, xs[30], n_17_0)
expr13 = msat_make_plus(menv, xs[31], n_12_0)
expr14 = msat_make_plus(menv, xs[32], n_2_0)
expr15 = msat_make_plus(menv, xs[34], n_1_0)
expr16 = msat_make_plus(menv, xs[36], n_18_0)
expr17 = msat_make_plus(menv, xs[37], n_2_0)
expr18 = msat_make_plus(menv, xs[38], n_15_0)
expr19 = msat_make_plus(menv, xs[39], n_3_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[19], expr0),
msat_make_geq(menv, x_xs[19], expr1),
msat_make_geq(menv, x_xs[19], expr2),
msat_make_geq(menv, x_xs[19], expr3),
msat_make_geq(menv, x_xs[19], expr4),
msat_make_geq(menv, x_xs[19], expr5),
msat_make_geq(menv, x_xs[19], expr6),
msat_make_geq(menv, x_xs[19], expr7),
msat_make_geq(menv, x_xs[19], expr8),
msat_make_geq(menv, x_xs[19], expr9),
msat_make_geq(menv, x_xs[19], expr10),
msat_make_geq(menv, x_xs[19], expr11),
msat_make_geq(menv, x_xs[19], expr12),
msat_make_geq(menv, x_xs[19], expr13),
msat_make_geq(menv, x_xs[19], expr14),
msat_make_geq(menv, x_xs[19], expr15),
msat_make_geq(menv, x_xs[19], expr16),
msat_make_geq(menv, x_xs[19], expr17),
msat_make_geq(menv, x_xs[19], expr18),
msat_make_geq(menv, x_xs[19], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[19], expr0),
msat_make_equal(menv, x_xs[19], expr1),
msat_make_equal(menv, x_xs[19], expr2),
msat_make_equal(menv, x_xs[19], expr3),
msat_make_equal(menv, x_xs[19], expr4),
msat_make_equal(menv, x_xs[19], expr5),
msat_make_equal(menv, x_xs[19], expr6),
msat_make_equal(menv, x_xs[19], expr7),
msat_make_equal(menv, x_xs[19], expr8),
msat_make_equal(menv, x_xs[19], expr9),
msat_make_equal(menv, x_xs[19], expr10),
msat_make_equal(menv, x_xs[19], expr11),
msat_make_equal(menv, x_xs[19], expr12),
msat_make_equal(menv, x_xs[19], expr13),
msat_make_equal(menv, x_xs[19], expr14),
msat_make_equal(menv, x_xs[19], expr15),
msat_make_equal(menv, x_xs[19], expr16),
msat_make_equal(menv, x_xs[19], expr17),
msat_make_equal(menv, x_xs[19], expr18),
msat_make_equal(menv, x_xs[19], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[4], n_15_0)
expr1 = msat_make_plus(menv, xs[5], n_7_0)
expr2 = msat_make_plus(menv, xs[8], n_1_0)
expr3 = msat_make_plus(menv, xs[11], n_13_0)
expr4 = msat_make_plus(menv, xs[13], n_7_0)
expr5 = msat_make_plus(menv, xs[15], n_15_0)
expr6 = msat_make_plus(menv, xs[16], n_4_0)
expr7 = msat_make_plus(menv, xs[17], n_2_0)
expr8 = msat_make_plus(menv, xs[18], n_4_0)
expr9 = msat_make_plus(menv, xs[20], n_12_0)
expr10 = msat_make_plus(menv, xs[21], n_11_0)
expr11 = msat_make_plus(menv, xs[23], n_18_0)
expr12 = msat_make_plus(menv, xs[24], n_15_0)
expr13 = msat_make_plus(menv, xs[25], n_11_0)
expr14 = msat_make_plus(menv, xs[27], n_8_0)
expr15 = msat_make_plus(menv, xs[28], n_1_0)
expr16 = msat_make_plus(menv, xs[30], n_10_0)
expr17 = msat_make_plus(menv, xs[32], n_14_0)
expr18 = msat_make_plus(menv, xs[34], n_20_0)
expr19 = msat_make_plus(menv, xs[37], n_9_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[20], expr0),
msat_make_geq(menv, x_xs[20], expr1),
msat_make_geq(menv, x_xs[20], expr2),
msat_make_geq(menv, x_xs[20], expr3),
msat_make_geq(menv, x_xs[20], expr4),
msat_make_geq(menv, x_xs[20], expr5),
msat_make_geq(menv, x_xs[20], expr6),
msat_make_geq(menv, x_xs[20], expr7),
msat_make_geq(menv, x_xs[20], expr8),
msat_make_geq(menv, x_xs[20], expr9),
msat_make_geq(menv, x_xs[20], expr10),
msat_make_geq(menv, x_xs[20], expr11),
msat_make_geq(menv, x_xs[20], expr12),
msat_make_geq(menv, x_xs[20], expr13),
msat_make_geq(menv, x_xs[20], expr14),
msat_make_geq(menv, x_xs[20], expr15),
msat_make_geq(menv, x_xs[20], expr16),
msat_make_geq(menv, x_xs[20], expr17),
msat_make_geq(menv, x_xs[20], expr18),
msat_make_geq(menv, x_xs[20], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[20], expr0),
msat_make_equal(menv, x_xs[20], expr1),
msat_make_equal(menv, x_xs[20], expr2),
msat_make_equal(menv, x_xs[20], expr3),
msat_make_equal(menv, x_xs[20], expr4),
msat_make_equal(menv, x_xs[20], expr5),
msat_make_equal(menv, x_xs[20], expr6),
msat_make_equal(menv, x_xs[20], expr7),
msat_make_equal(menv, x_xs[20], expr8),
msat_make_equal(menv, x_xs[20], expr9),
msat_make_equal(menv, x_xs[20], expr10),
msat_make_equal(menv, x_xs[20], expr11),
msat_make_equal(menv, x_xs[20], expr12),
msat_make_equal(menv, x_xs[20], expr13),
msat_make_equal(menv, x_xs[20], expr14),
msat_make_equal(menv, x_xs[20], expr15),
msat_make_equal(menv, x_xs[20], expr16),
msat_make_equal(menv, x_xs[20], expr17),
msat_make_equal(menv, x_xs[20], expr18),
msat_make_equal(menv, x_xs[20], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_20_0)
expr1 = msat_make_plus(menv, xs[1], n_9_0)
expr2 = msat_make_plus(menv, xs[2], n_1_0)
expr3 = msat_make_plus(menv, xs[5], n_13_0)
expr4 = msat_make_plus(menv, xs[7], n_9_0)
expr5 = msat_make_plus(menv, xs[8], n_2_0)
expr6 = msat_make_plus(menv, xs[9], n_9_0)
expr7 = msat_make_plus(menv, xs[13], n_15_0)
expr8 = msat_make_plus(menv, xs[17], n_13_0)
expr9 = msat_make_plus(menv, xs[18], n_6_0)
expr10 = msat_make_plus(menv, xs[19], n_7_0)
expr11 = msat_make_plus(menv, xs[21], n_13_0)
expr12 = msat_make_plus(menv, xs[22], n_6_0)
expr13 = msat_make_plus(menv, xs[23], n_16_0)
expr14 = msat_make_plus(menv, xs[26], n_9_0)
expr15 = msat_make_plus(menv, xs[27], n_14_0)
expr16 = msat_make_plus(menv, xs[29], n_15_0)
expr17 = msat_make_plus(menv, xs[31], n_7_0)
expr18 = msat_make_plus(menv, xs[35], n_9_0)
expr19 = msat_make_plus(menv, xs[37], n_5_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[21], expr0),
msat_make_geq(menv, x_xs[21], expr1),
msat_make_geq(menv, x_xs[21], expr2),
msat_make_geq(menv, x_xs[21], expr3),
msat_make_geq(menv, x_xs[21], expr4),
msat_make_geq(menv, x_xs[21], expr5),
msat_make_geq(menv, x_xs[21], expr6),
msat_make_geq(menv, x_xs[21], expr7),
msat_make_geq(menv, x_xs[21], expr8),
msat_make_geq(menv, x_xs[21], expr9),
msat_make_geq(menv, x_xs[21], expr10),
msat_make_geq(menv, x_xs[21], expr11),
msat_make_geq(menv, x_xs[21], expr12),
msat_make_geq(menv, x_xs[21], expr13),
msat_make_geq(menv, x_xs[21], expr14),
msat_make_geq(menv, x_xs[21], expr15),
msat_make_geq(menv, x_xs[21], expr16),
msat_make_geq(menv, x_xs[21], expr17),
msat_make_geq(menv, x_xs[21], expr18),
msat_make_geq(menv, x_xs[21], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[21], expr0),
msat_make_equal(menv, x_xs[21], expr1),
msat_make_equal(menv, x_xs[21], expr2),
msat_make_equal(menv, x_xs[21], expr3),
msat_make_equal(menv, x_xs[21], expr4),
msat_make_equal(menv, x_xs[21], expr5),
msat_make_equal(menv, x_xs[21], expr6),
msat_make_equal(menv, x_xs[21], expr7),
msat_make_equal(menv, x_xs[21], expr8),
msat_make_equal(menv, x_xs[21], expr9),
msat_make_equal(menv, x_xs[21], expr10),
msat_make_equal(menv, x_xs[21], expr11),
msat_make_equal(menv, x_xs[21], expr12),
msat_make_equal(menv, x_xs[21], expr13),
msat_make_equal(menv, x_xs[21], expr14),
msat_make_equal(menv, x_xs[21], expr15),
msat_make_equal(menv, x_xs[21], expr16),
msat_make_equal(menv, x_xs[21], expr17),
msat_make_equal(menv, x_xs[21], expr18),
msat_make_equal(menv, x_xs[21], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[3], n_2_0)
expr1 = msat_make_plus(menv, xs[5], n_12_0)
expr2 = msat_make_plus(menv, xs[7], n_6_0)
expr3 = msat_make_plus(menv, xs[8], n_6_0)
expr4 = msat_make_plus(menv, xs[10], n_6_0)
expr5 = msat_make_plus(menv, xs[14], n_15_0)
expr6 = msat_make_plus(menv, xs[15], n_16_0)
expr7 = msat_make_plus(menv, xs[17], n_1_0)
expr8 = msat_make_plus(menv, xs[20], n_2_0)
expr9 = msat_make_plus(menv, xs[21], n_1_0)
expr10 = msat_make_plus(menv, xs[22], n_11_0)
expr11 = msat_make_plus(menv, xs[23], n_14_0)
expr12 = msat_make_plus(menv, xs[24], n_4_0)
expr13 = msat_make_plus(menv, xs[29], n_20_0)
expr14 = msat_make_plus(menv, xs[31], n_13_0)
expr15 = msat_make_plus(menv, xs[35], n_20_0)
expr16 = msat_make_plus(menv, xs[36], n_18_0)
expr17 = msat_make_plus(menv, xs[37], n_3_0)
expr18 = msat_make_plus(menv, xs[38], n_4_0)
expr19 = msat_make_plus(menv, xs[39], n_2_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[22], expr0),
msat_make_geq(menv, x_xs[22], expr1),
msat_make_geq(menv, x_xs[22], expr2),
msat_make_geq(menv, x_xs[22], expr3),
msat_make_geq(menv, x_xs[22], expr4),
msat_make_geq(menv, x_xs[22], expr5),
msat_make_geq(menv, x_xs[22], expr6),
msat_make_geq(menv, x_xs[22], expr7),
msat_make_geq(menv, x_xs[22], expr8),
msat_make_geq(menv, x_xs[22], expr9),
msat_make_geq(menv, x_xs[22], expr10),
msat_make_geq(menv, x_xs[22], expr11),
msat_make_geq(menv, x_xs[22], expr12),
msat_make_geq(menv, x_xs[22], expr13),
msat_make_geq(menv, x_xs[22], expr14),
msat_make_geq(menv, x_xs[22], expr15),
msat_make_geq(menv, x_xs[22], expr16),
msat_make_geq(menv, x_xs[22], expr17),
msat_make_geq(menv, x_xs[22], expr18),
msat_make_geq(menv, x_xs[22], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[22], expr0),
msat_make_equal(menv, x_xs[22], expr1),
msat_make_equal(menv, x_xs[22], expr2),
msat_make_equal(menv, x_xs[22], expr3),
msat_make_equal(menv, x_xs[22], expr4),
msat_make_equal(menv, x_xs[22], expr5),
msat_make_equal(menv, x_xs[22], expr6),
msat_make_equal(menv, x_xs[22], expr7),
msat_make_equal(menv, x_xs[22], expr8),
msat_make_equal(menv, x_xs[22], expr9),
msat_make_equal(menv, x_xs[22], expr10),
msat_make_equal(menv, x_xs[22], expr11),
msat_make_equal(menv, x_xs[22], expr12),
msat_make_equal(menv, x_xs[22], expr13),
msat_make_equal(menv, x_xs[22], expr14),
msat_make_equal(menv, x_xs[22], expr15),
msat_make_equal(menv, x_xs[22], expr16),
msat_make_equal(menv, x_xs[22], expr17),
msat_make_equal(menv, x_xs[22], expr18),
msat_make_equal(menv, x_xs[22], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_16_0)
expr1 = msat_make_plus(menv, xs[2], n_15_0)
expr2 = msat_make_plus(menv, xs[3], n_5_0)
expr3 = msat_make_plus(menv, xs[4], n_16_0)
expr4 = msat_make_plus(menv, xs[8], n_19_0)
expr5 = msat_make_plus(menv, xs[11], n_19_0)
expr6 = msat_make_plus(menv, xs[13], n_1_0)
expr7 = msat_make_plus(menv, xs[14], n_18_0)
expr8 = msat_make_plus(menv, xs[15], n_9_0)
expr9 = msat_make_plus(menv, xs[16], n_2_0)
expr10 = msat_make_plus(menv, xs[20], n_19_0)
expr11 = msat_make_plus(menv, xs[26], n_4_0)
expr12 = msat_make_plus(menv, xs[27], n_13_0)
expr13 = msat_make_plus(menv, xs[31], n_14_0)
expr14 = msat_make_plus(menv, xs[32], n_17_0)
expr15 = msat_make_plus(menv, xs[33], n_7_0)
expr16 = msat_make_plus(menv, xs[34], n_15_0)
expr17 = msat_make_plus(menv, xs[35], n_18_0)
expr18 = msat_make_plus(menv, xs[36], n_16_0)
expr19 = msat_make_plus(menv, xs[38], n_20_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[23], expr0),
msat_make_geq(menv, x_xs[23], expr1),
msat_make_geq(menv, x_xs[23], expr2),
msat_make_geq(menv, x_xs[23], expr3),
msat_make_geq(menv, x_xs[23], expr4),
msat_make_geq(menv, x_xs[23], expr5),
msat_make_geq(menv, x_xs[23], expr6),
msat_make_geq(menv, x_xs[23], expr7),
msat_make_geq(menv, x_xs[23], expr8),
msat_make_geq(menv, x_xs[23], expr9),
msat_make_geq(menv, x_xs[23], expr10),
msat_make_geq(menv, x_xs[23], expr11),
msat_make_geq(menv, x_xs[23], expr12),
msat_make_geq(menv, x_xs[23], expr13),
msat_make_geq(menv, x_xs[23], expr14),
msat_make_geq(menv, x_xs[23], expr15),
msat_make_geq(menv, x_xs[23], expr16),
msat_make_geq(menv, x_xs[23], expr17),
msat_make_geq(menv, x_xs[23], expr18),
msat_make_geq(menv, x_xs[23], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[23], expr0),
msat_make_equal(menv, x_xs[23], expr1),
msat_make_equal(menv, x_xs[23], expr2),
msat_make_equal(menv, x_xs[23], expr3),
msat_make_equal(menv, x_xs[23], expr4),
msat_make_equal(menv, x_xs[23], expr5),
msat_make_equal(menv, x_xs[23], expr6),
msat_make_equal(menv, x_xs[23], expr7),
msat_make_equal(menv, x_xs[23], expr8),
msat_make_equal(menv, x_xs[23], expr9),
msat_make_equal(menv, x_xs[23], expr10),
msat_make_equal(menv, x_xs[23], expr11),
msat_make_equal(menv, x_xs[23], expr12),
msat_make_equal(menv, x_xs[23], expr13),
msat_make_equal(menv, x_xs[23], expr14),
msat_make_equal(menv, x_xs[23], expr15),
msat_make_equal(menv, x_xs[23], expr16),
msat_make_equal(menv, x_xs[23], expr17),
msat_make_equal(menv, x_xs[23], expr18),
msat_make_equal(menv, x_xs[23], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_5_0)
expr1 = msat_make_plus(menv, xs[2], n_7_0)
expr2 = msat_make_plus(menv, xs[3], n_12_0)
expr3 = msat_make_plus(menv, xs[5], n_19_0)
expr4 = msat_make_plus(menv, xs[6], n_17_0)
expr5 = msat_make_plus(menv, xs[8], n_5_0)
expr6 = msat_make_plus(menv, xs[9], n_7_0)
expr7 = msat_make_plus(menv, xs[10], n_1_0)
expr8 = msat_make_plus(menv, xs[12], n_6_0)
expr9 = msat_make_plus(menv, xs[15], n_9_0)
expr10 = msat_make_plus(menv, xs[17], n_8_0)
expr11 = msat_make_plus(menv, xs[20], n_17_0)
expr12 = msat_make_plus(menv, xs[22], n_8_0)
expr13 = msat_make_plus(menv, xs[23], n_6_0)
expr14 = msat_make_plus(menv, xs[28], n_9_0)
expr15 = msat_make_plus(menv, xs[29], n_19_0)
expr16 = msat_make_plus(menv, xs[35], n_12_0)
expr17 = msat_make_plus(menv, xs[37], n_5_0)
expr18 = msat_make_plus(menv, xs[38], n_20_0)
expr19 = msat_make_plus(menv, xs[39], n_7_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[24], expr0),
msat_make_geq(menv, x_xs[24], expr1),
msat_make_geq(menv, x_xs[24], expr2),
msat_make_geq(menv, x_xs[24], expr3),
msat_make_geq(menv, x_xs[24], expr4),
msat_make_geq(menv, x_xs[24], expr5),
msat_make_geq(menv, x_xs[24], expr6),
msat_make_geq(menv, x_xs[24], expr7),
msat_make_geq(menv, x_xs[24], expr8),
msat_make_geq(menv, x_xs[24], expr9),
msat_make_geq(menv, x_xs[24], expr10),
msat_make_geq(menv, x_xs[24], expr11),
msat_make_geq(menv, x_xs[24], expr12),
msat_make_geq(menv, x_xs[24], expr13),
msat_make_geq(menv, x_xs[24], expr14),
msat_make_geq(menv, x_xs[24], expr15),
msat_make_geq(menv, x_xs[24], expr16),
msat_make_geq(menv, x_xs[24], expr17),
msat_make_geq(menv, x_xs[24], expr18),
msat_make_geq(menv, x_xs[24], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[24], expr0),
msat_make_equal(menv, x_xs[24], expr1),
msat_make_equal(menv, x_xs[24], expr2),
msat_make_equal(menv, x_xs[24], expr3),
msat_make_equal(menv, x_xs[24], expr4),
msat_make_equal(menv, x_xs[24], expr5),
msat_make_equal(menv, x_xs[24], expr6),
msat_make_equal(menv, x_xs[24], expr7),
msat_make_equal(menv, x_xs[24], expr8),
msat_make_equal(menv, x_xs[24], expr9),
msat_make_equal(menv, x_xs[24], expr10),
msat_make_equal(menv, x_xs[24], expr11),
msat_make_equal(menv, x_xs[24], expr12),
msat_make_equal(menv, x_xs[24], expr13),
msat_make_equal(menv, x_xs[24], expr14),
msat_make_equal(menv, x_xs[24], expr15),
msat_make_equal(menv, x_xs[24], expr16),
msat_make_equal(menv, x_xs[24], expr17),
msat_make_equal(menv, x_xs[24], expr18),
msat_make_equal(menv, x_xs[24], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_8_0)
expr1 = msat_make_plus(menv, xs[2], n_8_0)
expr2 = msat_make_plus(menv, xs[4], n_19_0)
expr3 = msat_make_plus(menv, xs[5], n_14_0)
expr4 = msat_make_plus(menv, xs[7], n_2_0)
expr5 = msat_make_plus(menv, xs[9], n_5_0)
expr6 = msat_make_plus(menv, xs[11], n_20_0)
expr7 = msat_make_plus(menv, xs[12], n_13_0)
expr8 = msat_make_plus(menv, xs[14], n_16_0)
expr9 = msat_make_plus(menv, xs[19], n_14_0)
expr10 = msat_make_plus(menv, xs[21], n_1_0)
expr11 = msat_make_plus(menv, xs[23], n_17_0)
expr12 = msat_make_plus(menv, xs[24], n_10_0)
expr13 = msat_make_plus(menv, xs[25], n_15_0)
expr14 = msat_make_plus(menv, xs[26], n_2_0)
expr15 = msat_make_plus(menv, xs[29], n_9_0)
expr16 = msat_make_plus(menv, xs[30], n_4_0)
expr17 = msat_make_plus(menv, xs[34], n_2_0)
expr18 = msat_make_plus(menv, xs[36], n_5_0)
expr19 = msat_make_plus(menv, xs[39], n_4_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[25], expr0),
msat_make_geq(menv, x_xs[25], expr1),
msat_make_geq(menv, x_xs[25], expr2),
msat_make_geq(menv, x_xs[25], expr3),
msat_make_geq(menv, x_xs[25], expr4),
msat_make_geq(menv, x_xs[25], expr5),
msat_make_geq(menv, x_xs[25], expr6),
msat_make_geq(menv, x_xs[25], expr7),
msat_make_geq(menv, x_xs[25], expr8),
msat_make_geq(menv, x_xs[25], expr9),
msat_make_geq(menv, x_xs[25], expr10),
msat_make_geq(menv, x_xs[25], expr11),
msat_make_geq(menv, x_xs[25], expr12),
msat_make_geq(menv, x_xs[25], expr13),
msat_make_geq(menv, x_xs[25], expr14),
msat_make_geq(menv, x_xs[25], expr15),
msat_make_geq(menv, x_xs[25], expr16),
msat_make_geq(menv, x_xs[25], expr17),
msat_make_geq(menv, x_xs[25], expr18),
msat_make_geq(menv, x_xs[25], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[25], expr0),
msat_make_equal(menv, x_xs[25], expr1),
msat_make_equal(menv, x_xs[25], expr2),
msat_make_equal(menv, x_xs[25], expr3),
msat_make_equal(menv, x_xs[25], expr4),
msat_make_equal(menv, x_xs[25], expr5),
msat_make_equal(menv, x_xs[25], expr6),
msat_make_equal(menv, x_xs[25], expr7),
msat_make_equal(menv, x_xs[25], expr8),
msat_make_equal(menv, x_xs[25], expr9),
msat_make_equal(menv, x_xs[25], expr10),
msat_make_equal(menv, x_xs[25], expr11),
msat_make_equal(menv, x_xs[25], expr12),
msat_make_equal(menv, x_xs[25], expr13),
msat_make_equal(menv, x_xs[25], expr14),
msat_make_equal(menv, x_xs[25], expr15),
msat_make_equal(menv, x_xs[25], expr16),
msat_make_equal(menv, x_xs[25], expr17),
msat_make_equal(menv, x_xs[25], expr18),
msat_make_equal(menv, x_xs[25], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_17_0)
expr1 = msat_make_plus(menv, xs[1], n_16_0)
expr2 = msat_make_plus(menv, xs[7], n_20_0)
expr3 = msat_make_plus(menv, xs[8], n_13_0)
expr4 = msat_make_plus(menv, xs[9], n_17_0)
expr5 = msat_make_plus(menv, xs[11], n_16_0)
expr6 = msat_make_plus(menv, xs[16], n_14_0)
expr7 = msat_make_plus(menv, xs[17], n_14_0)
expr8 = msat_make_plus(menv, xs[20], n_20_0)
expr9 = msat_make_plus(menv, xs[21], n_10_0)
expr10 = msat_make_plus(menv, xs[22], n_13_0)
expr11 = msat_make_plus(menv, xs[23], n_2_0)
expr12 = msat_make_plus(menv, xs[24], n_11_0)
expr13 = msat_make_plus(menv, xs[26], n_15_0)
expr14 = msat_make_plus(menv, xs[27], n_9_0)
expr15 = msat_make_plus(menv, xs[29], n_15_0)
expr16 = msat_make_plus(menv, xs[33], n_8_0)
expr17 = msat_make_plus(menv, xs[34], n_3_0)
expr18 = msat_make_plus(menv, xs[36], n_7_0)
expr19 = msat_make_plus(menv, xs[38], n_9_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[26], expr0),
msat_make_geq(menv, x_xs[26], expr1),
msat_make_geq(menv, x_xs[26], expr2),
msat_make_geq(menv, x_xs[26], expr3),
msat_make_geq(menv, x_xs[26], expr4),
msat_make_geq(menv, x_xs[26], expr5),
msat_make_geq(menv, x_xs[26], expr6),
msat_make_geq(menv, x_xs[26], expr7),
msat_make_geq(menv, x_xs[26], expr8),
msat_make_geq(menv, x_xs[26], expr9),
msat_make_geq(menv, x_xs[26], expr10),
msat_make_geq(menv, x_xs[26], expr11),
msat_make_geq(menv, x_xs[26], expr12),
msat_make_geq(menv, x_xs[26], expr13),
msat_make_geq(menv, x_xs[26], expr14),
msat_make_geq(menv, x_xs[26], expr15),
msat_make_geq(menv, x_xs[26], expr16),
msat_make_geq(menv, x_xs[26], expr17),
msat_make_geq(menv, x_xs[26], expr18),
msat_make_geq(menv, x_xs[26], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[26], expr0),
msat_make_equal(menv, x_xs[26], expr1),
msat_make_equal(menv, x_xs[26], expr2),
msat_make_equal(menv, x_xs[26], expr3),
msat_make_equal(menv, x_xs[26], expr4),
msat_make_equal(menv, x_xs[26], expr5),
msat_make_equal(menv, x_xs[26], expr6),
msat_make_equal(menv, x_xs[26], expr7),
msat_make_equal(menv, x_xs[26], expr8),
msat_make_equal(menv, x_xs[26], expr9),
msat_make_equal(menv, x_xs[26], expr10),
msat_make_equal(menv, x_xs[26], expr11),
msat_make_equal(menv, x_xs[26], expr12),
msat_make_equal(menv, x_xs[26], expr13),
msat_make_equal(menv, x_xs[26], expr14),
msat_make_equal(menv, x_xs[26], expr15),
msat_make_equal(menv, x_xs[26], expr16),
msat_make_equal(menv, x_xs[26], expr17),
msat_make_equal(menv, x_xs[26], expr18),
msat_make_equal(menv, x_xs[26], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_3_0)
expr1 = msat_make_plus(menv, xs[2], n_6_0)
expr2 = msat_make_plus(menv, xs[4], n_15_0)
expr3 = msat_make_plus(menv, xs[5], n_19_0)
expr4 = msat_make_plus(menv, xs[6], n_2_0)
expr5 = msat_make_plus(menv, xs[7], n_2_0)
expr6 = msat_make_plus(menv, xs[14], n_13_0)
expr7 = msat_make_plus(menv, xs[15], n_14_0)
expr8 = msat_make_plus(menv, xs[18], n_5_0)
expr9 = msat_make_plus(menv, xs[19], n_10_0)
expr10 = msat_make_plus(menv, xs[21], n_18_0)
expr11 = msat_make_plus(menv, xs[23], n_16_0)
expr12 = msat_make_plus(menv, xs[24], n_6_0)
expr13 = msat_make_plus(menv, xs[26], n_10_0)
expr14 = msat_make_plus(menv, xs[27], n_13_0)
expr15 = msat_make_plus(menv, xs[29], n_7_0)
expr16 = msat_make_plus(menv, xs[30], n_16_0)
expr17 = msat_make_plus(menv, xs[32], n_16_0)
expr18 = msat_make_plus(menv, xs[38], n_18_0)
expr19 = msat_make_plus(menv, xs[39], n_19_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[27], expr0),
msat_make_geq(menv, x_xs[27], expr1),
msat_make_geq(menv, x_xs[27], expr2),
msat_make_geq(menv, x_xs[27], expr3),
msat_make_geq(menv, x_xs[27], expr4),
msat_make_geq(menv, x_xs[27], expr5),
msat_make_geq(menv, x_xs[27], expr6),
msat_make_geq(menv, x_xs[27], expr7),
msat_make_geq(menv, x_xs[27], expr8),
msat_make_geq(menv, x_xs[27], expr9),
msat_make_geq(menv, x_xs[27], expr10),
msat_make_geq(menv, x_xs[27], expr11),
msat_make_geq(menv, x_xs[27], expr12),
msat_make_geq(menv, x_xs[27], expr13),
msat_make_geq(menv, x_xs[27], expr14),
msat_make_geq(menv, x_xs[27], expr15),
msat_make_geq(menv, x_xs[27], expr16),
msat_make_geq(menv, x_xs[27], expr17),
msat_make_geq(menv, x_xs[27], expr18),
msat_make_geq(menv, x_xs[27], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[27], expr0),
msat_make_equal(menv, x_xs[27], expr1),
msat_make_equal(menv, x_xs[27], expr2),
msat_make_equal(menv, x_xs[27], expr3),
msat_make_equal(menv, x_xs[27], expr4),
msat_make_equal(menv, x_xs[27], expr5),
msat_make_equal(menv, x_xs[27], expr6),
msat_make_equal(menv, x_xs[27], expr7),
msat_make_equal(menv, x_xs[27], expr8),
msat_make_equal(menv, x_xs[27], expr9),
msat_make_equal(menv, x_xs[27], expr10),
msat_make_equal(menv, x_xs[27], expr11),
msat_make_equal(menv, x_xs[27], expr12),
msat_make_equal(menv, x_xs[27], expr13),
msat_make_equal(menv, x_xs[27], expr14),
msat_make_equal(menv, x_xs[27], expr15),
msat_make_equal(menv, x_xs[27], expr16),
msat_make_equal(menv, x_xs[27], expr17),
msat_make_equal(menv, x_xs[27], expr18),
msat_make_equal(menv, x_xs[27], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_7_0)
expr1 = msat_make_plus(menv, xs[1], n_15_0)
expr2 = msat_make_plus(menv, xs[3], n_9_0)
expr3 = msat_make_plus(menv, xs[4], n_2_0)
expr4 = msat_make_plus(menv, xs[5], n_10_0)
expr5 = msat_make_plus(menv, xs[7], n_2_0)
expr6 = msat_make_plus(menv, xs[8], n_11_0)
expr7 = msat_make_plus(menv, xs[10], n_1_0)
expr8 = msat_make_plus(menv, xs[11], n_17_0)
expr9 = msat_make_plus(menv, xs[12], n_12_0)
expr10 = msat_make_plus(menv, xs[14], n_13_0)
expr11 = msat_make_plus(menv, xs[16], n_7_0)
expr12 = msat_make_plus(menv, xs[20], n_12_0)
expr13 = msat_make_plus(menv, xs[21], n_15_0)
expr14 = msat_make_plus(menv, xs[22], n_1_0)
expr15 = msat_make_plus(menv, xs[25], n_14_0)
expr16 = msat_make_plus(menv, xs[28], n_2_0)
expr17 = msat_make_plus(menv, xs[30], n_12_0)
expr18 = msat_make_plus(menv, xs[37], n_4_0)
expr19 = msat_make_plus(menv, xs[38], n_3_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[28], expr0),
msat_make_geq(menv, x_xs[28], expr1),
msat_make_geq(menv, x_xs[28], expr2),
msat_make_geq(menv, x_xs[28], expr3),
msat_make_geq(menv, x_xs[28], expr4),
msat_make_geq(menv, x_xs[28], expr5),
msat_make_geq(menv, x_xs[28], expr6),
msat_make_geq(menv, x_xs[28], expr7),
msat_make_geq(menv, x_xs[28], expr8),
msat_make_geq(menv, x_xs[28], expr9),
msat_make_geq(menv, x_xs[28], expr10),
msat_make_geq(menv, x_xs[28], expr11),
msat_make_geq(menv, x_xs[28], expr12),
msat_make_geq(menv, x_xs[28], expr13),
msat_make_geq(menv, x_xs[28], expr14),
msat_make_geq(menv, x_xs[28], expr15),
msat_make_geq(menv, x_xs[28], expr16),
msat_make_geq(menv, x_xs[28], expr17),
msat_make_geq(menv, x_xs[28], expr18),
msat_make_geq(menv, x_xs[28], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[28], expr0),
msat_make_equal(menv, x_xs[28], expr1),
msat_make_equal(menv, x_xs[28], expr2),
msat_make_equal(menv, x_xs[28], expr3),
msat_make_equal(menv, x_xs[28], expr4),
msat_make_equal(menv, x_xs[28], expr5),
msat_make_equal(menv, x_xs[28], expr6),
msat_make_equal(menv, x_xs[28], expr7),
msat_make_equal(menv, x_xs[28], expr8),
msat_make_equal(menv, x_xs[28], expr9),
msat_make_equal(menv, x_xs[28], expr10),
msat_make_equal(menv, x_xs[28], expr11),
msat_make_equal(menv, x_xs[28], expr12),
msat_make_equal(menv, x_xs[28], expr13),
msat_make_equal(menv, x_xs[28], expr14),
msat_make_equal(menv, x_xs[28], expr15),
msat_make_equal(menv, x_xs[28], expr16),
msat_make_equal(menv, x_xs[28], expr17),
msat_make_equal(menv, x_xs[28], expr18),
msat_make_equal(menv, x_xs[28], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[3], n_4_0)
expr1 = msat_make_plus(menv, xs[7], n_19_0)
expr2 = msat_make_plus(menv, xs[9], n_12_0)
expr3 = msat_make_plus(menv, xs[10], n_2_0)
expr4 = msat_make_plus(menv, xs[11], n_3_0)
expr5 = msat_make_plus(menv, xs[13], n_12_0)
expr6 = msat_make_plus(menv, xs[14], n_17_0)
expr7 = msat_make_plus(menv, xs[17], n_7_0)
expr8 = msat_make_plus(menv, xs[19], n_14_0)
expr9 = msat_make_plus(menv, xs[20], n_11_0)
expr10 = msat_make_plus(menv, xs[22], n_9_0)
expr11 = msat_make_plus(menv, xs[23], n_4_0)
expr12 = msat_make_plus(menv, xs[25], n_7_0)
expr13 = msat_make_plus(menv, xs[27], n_11_0)
expr14 = msat_make_plus(menv, xs[29], n_6_0)
expr15 = msat_make_plus(menv, xs[31], n_4_0)
expr16 = msat_make_plus(menv, xs[33], n_13_0)
expr17 = msat_make_plus(menv, xs[34], n_5_0)
expr18 = msat_make_plus(menv, xs[37], n_15_0)
expr19 = msat_make_plus(menv, xs[39], n_11_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[29], expr0),
msat_make_geq(menv, x_xs[29], expr1),
msat_make_geq(menv, x_xs[29], expr2),
msat_make_geq(menv, x_xs[29], expr3),
msat_make_geq(menv, x_xs[29], expr4),
msat_make_geq(menv, x_xs[29], expr5),
msat_make_geq(menv, x_xs[29], expr6),
msat_make_geq(menv, x_xs[29], expr7),
msat_make_geq(menv, x_xs[29], expr8),
msat_make_geq(menv, x_xs[29], expr9),
msat_make_geq(menv, x_xs[29], expr10),
msat_make_geq(menv, x_xs[29], expr11),
msat_make_geq(menv, x_xs[29], expr12),
msat_make_geq(menv, x_xs[29], expr13),
msat_make_geq(menv, x_xs[29], expr14),
msat_make_geq(menv, x_xs[29], expr15),
msat_make_geq(menv, x_xs[29], expr16),
msat_make_geq(menv, x_xs[29], expr17),
msat_make_geq(menv, x_xs[29], expr18),
msat_make_geq(menv, x_xs[29], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[29], expr0),
msat_make_equal(menv, x_xs[29], expr1),
msat_make_equal(menv, x_xs[29], expr2),
msat_make_equal(menv, x_xs[29], expr3),
msat_make_equal(menv, x_xs[29], expr4),
msat_make_equal(menv, x_xs[29], expr5),
msat_make_equal(menv, x_xs[29], expr6),
msat_make_equal(menv, x_xs[29], expr7),
msat_make_equal(menv, x_xs[29], expr8),
msat_make_equal(menv, x_xs[29], expr9),
msat_make_equal(menv, x_xs[29], expr10),
msat_make_equal(menv, x_xs[29], expr11),
msat_make_equal(menv, x_xs[29], expr12),
msat_make_equal(menv, x_xs[29], expr13),
msat_make_equal(menv, x_xs[29], expr14),
msat_make_equal(menv, x_xs[29], expr15),
msat_make_equal(menv, x_xs[29], expr16),
msat_make_equal(menv, x_xs[29], expr17),
msat_make_equal(menv, x_xs[29], expr18),
msat_make_equal(menv, x_xs[29], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_8_0)
expr1 = msat_make_plus(menv, xs[2], n_13_0)
expr2 = msat_make_plus(menv, xs[4], n_3_0)
expr3 = msat_make_plus(menv, xs[13], n_6_0)
expr4 = msat_make_plus(menv, xs[14], n_16_0)
expr5 = msat_make_plus(menv, xs[15], n_5_0)
expr6 = msat_make_plus(menv, xs[18], n_12_0)
expr7 = msat_make_plus(menv, xs[20], n_15_0)
expr8 = msat_make_plus(menv, xs[21], n_3_0)
expr9 = msat_make_plus(menv, xs[23], n_17_0)
expr10 = msat_make_plus(menv, xs[25], n_15_0)
expr11 = msat_make_plus(menv, xs[26], n_6_0)
expr12 = msat_make_plus(menv, xs[27], n_8_0)
expr13 = msat_make_plus(menv, xs[29], n_8_0)
expr14 = msat_make_plus(menv, xs[31], n_2_0)
expr15 = msat_make_plus(menv, xs[32], n_18_0)
expr16 = msat_make_plus(menv, xs[33], n_16_0)
expr17 = msat_make_plus(menv, xs[35], n_20_0)
expr18 = msat_make_plus(menv, xs[36], n_7_0)
expr19 = msat_make_plus(menv, xs[38], n_5_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[30], expr0),
msat_make_geq(menv, x_xs[30], expr1),
msat_make_geq(menv, x_xs[30], expr2),
msat_make_geq(menv, x_xs[30], expr3),
msat_make_geq(menv, x_xs[30], expr4),
msat_make_geq(menv, x_xs[30], expr5),
msat_make_geq(menv, x_xs[30], expr6),
msat_make_geq(menv, x_xs[30], expr7),
msat_make_geq(menv, x_xs[30], expr8),
msat_make_geq(menv, x_xs[30], expr9),
msat_make_geq(menv, x_xs[30], expr10),
msat_make_geq(menv, x_xs[30], expr11),
msat_make_geq(menv, x_xs[30], expr12),
msat_make_geq(menv, x_xs[30], expr13),
msat_make_geq(menv, x_xs[30], expr14),
msat_make_geq(menv, x_xs[30], expr15),
msat_make_geq(menv, x_xs[30], expr16),
msat_make_geq(menv, x_xs[30], expr17),
msat_make_geq(menv, x_xs[30], expr18),
msat_make_geq(menv, x_xs[30], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[30], expr0),
msat_make_equal(menv, x_xs[30], expr1),
msat_make_equal(menv, x_xs[30], expr2),
msat_make_equal(menv, x_xs[30], expr3),
msat_make_equal(menv, x_xs[30], expr4),
msat_make_equal(menv, x_xs[30], expr5),
msat_make_equal(menv, x_xs[30], expr6),
msat_make_equal(menv, x_xs[30], expr7),
msat_make_equal(menv, x_xs[30], expr8),
msat_make_equal(menv, x_xs[30], expr9),
msat_make_equal(menv, x_xs[30], expr10),
msat_make_equal(menv, x_xs[30], expr11),
msat_make_equal(menv, x_xs[30], expr12),
msat_make_equal(menv, x_xs[30], expr13),
msat_make_equal(menv, x_xs[30], expr14),
msat_make_equal(menv, x_xs[30], expr15),
msat_make_equal(menv, x_xs[30], expr16),
msat_make_equal(menv, x_xs[30], expr17),
msat_make_equal(menv, x_xs[30], expr18),
msat_make_equal(menv, x_xs[30], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_12_0)
expr1 = msat_make_plus(menv, xs[2], n_9_0)
expr2 = msat_make_plus(menv, xs[3], n_3_0)
expr3 = msat_make_plus(menv, xs[5], n_19_0)
expr4 = msat_make_plus(menv, xs[6], n_3_0)
expr5 = msat_make_plus(menv, xs[9], n_4_0)
expr6 = msat_make_plus(menv, xs[11], n_13_0)
expr7 = msat_make_plus(menv, xs[13], n_15_0)
expr8 = msat_make_plus(menv, xs[16], n_11_0)
expr9 = msat_make_plus(menv, xs[18], n_12_0)
expr10 = msat_make_plus(menv, xs[20], n_7_0)
expr11 = msat_make_plus(menv, xs[23], n_16_0)
expr12 = msat_make_plus(menv, xs[25], n_13_0)
expr13 = msat_make_plus(menv, xs[27], n_7_0)
expr14 = msat_make_plus(menv, xs[30], n_5_0)
expr15 = msat_make_plus(menv, xs[32], n_18_0)
expr16 = msat_make_plus(menv, xs[34], n_19_0)
expr17 = msat_make_plus(menv, xs[37], n_14_0)
expr18 = msat_make_plus(menv, xs[38], n_5_0)
expr19 = msat_make_plus(menv, xs[39], n_16_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[31], expr0),
msat_make_geq(menv, x_xs[31], expr1),
msat_make_geq(menv, x_xs[31], expr2),
msat_make_geq(menv, x_xs[31], expr3),
msat_make_geq(menv, x_xs[31], expr4),
msat_make_geq(menv, x_xs[31], expr5),
msat_make_geq(menv, x_xs[31], expr6),
msat_make_geq(menv, x_xs[31], expr7),
msat_make_geq(menv, x_xs[31], expr8),
msat_make_geq(menv, x_xs[31], expr9),
msat_make_geq(menv, x_xs[31], expr10),
msat_make_geq(menv, x_xs[31], expr11),
msat_make_geq(menv, x_xs[31], expr12),
msat_make_geq(menv, x_xs[31], expr13),
msat_make_geq(menv, x_xs[31], expr14),
msat_make_geq(menv, x_xs[31], expr15),
msat_make_geq(menv, x_xs[31], expr16),
msat_make_geq(menv, x_xs[31], expr17),
msat_make_geq(menv, x_xs[31], expr18),
msat_make_geq(menv, x_xs[31], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[31], expr0),
msat_make_equal(menv, x_xs[31], expr1),
msat_make_equal(menv, x_xs[31], expr2),
msat_make_equal(menv, x_xs[31], expr3),
msat_make_equal(menv, x_xs[31], expr4),
msat_make_equal(menv, x_xs[31], expr5),
msat_make_equal(menv, x_xs[31], expr6),
msat_make_equal(menv, x_xs[31], expr7),
msat_make_equal(menv, x_xs[31], expr8),
msat_make_equal(menv, x_xs[31], expr9),
msat_make_equal(menv, x_xs[31], expr10),
msat_make_equal(menv, x_xs[31], expr11),
msat_make_equal(menv, x_xs[31], expr12),
msat_make_equal(menv, x_xs[31], expr13),
msat_make_equal(menv, x_xs[31], expr14),
msat_make_equal(menv, x_xs[31], expr15),
msat_make_equal(menv, x_xs[31], expr16),
msat_make_equal(menv, x_xs[31], expr17),
msat_make_equal(menv, x_xs[31], expr18),
msat_make_equal(menv, x_xs[31], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_8_0)
expr1 = msat_make_plus(menv, xs[1], n_5_0)
expr2 = msat_make_plus(menv, xs[2], n_3_0)
expr3 = msat_make_plus(menv, xs[5], n_12_0)
expr4 = msat_make_plus(menv, xs[6], n_4_0)
expr5 = msat_make_plus(menv, xs[8], n_1_0)
expr6 = msat_make_plus(menv, xs[11], n_19_0)
expr7 = msat_make_plus(menv, xs[13], n_1_0)
expr8 = msat_make_plus(menv, xs[15], n_13_0)
expr9 = msat_make_plus(menv, xs[18], n_20_0)
expr10 = msat_make_plus(menv, xs[19], n_8_0)
expr11 = msat_make_plus(menv, xs[20], n_15_0)
expr12 = msat_make_plus(menv, xs[22], n_16_0)
expr13 = msat_make_plus(menv, xs[23], n_13_0)
expr14 = msat_make_plus(menv, xs[24], n_7_0)
expr15 = msat_make_plus(menv, xs[26], n_1_0)
expr16 = msat_make_plus(menv, xs[29], n_17_0)
expr17 = msat_make_plus(menv, xs[32], n_2_0)
expr18 = msat_make_plus(menv, xs[33], n_15_0)
expr19 = msat_make_plus(menv, xs[36], n_20_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[32], expr0),
msat_make_geq(menv, x_xs[32], expr1),
msat_make_geq(menv, x_xs[32], expr2),
msat_make_geq(menv, x_xs[32], expr3),
msat_make_geq(menv, x_xs[32], expr4),
msat_make_geq(menv, x_xs[32], expr5),
msat_make_geq(menv, x_xs[32], expr6),
msat_make_geq(menv, x_xs[32], expr7),
msat_make_geq(menv, x_xs[32], expr8),
msat_make_geq(menv, x_xs[32], expr9),
msat_make_geq(menv, x_xs[32], expr10),
msat_make_geq(menv, x_xs[32], expr11),
msat_make_geq(menv, x_xs[32], expr12),
msat_make_geq(menv, x_xs[32], expr13),
msat_make_geq(menv, x_xs[32], expr14),
msat_make_geq(menv, x_xs[32], expr15),
msat_make_geq(menv, x_xs[32], expr16),
msat_make_geq(menv, x_xs[32], expr17),
msat_make_geq(menv, x_xs[32], expr18),
msat_make_geq(menv, x_xs[32], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[32], expr0),
msat_make_equal(menv, x_xs[32], expr1),
msat_make_equal(menv, x_xs[32], expr2),
msat_make_equal(menv, x_xs[32], expr3),
msat_make_equal(menv, x_xs[32], expr4),
msat_make_equal(menv, x_xs[32], expr5),
msat_make_equal(menv, x_xs[32], expr6),
msat_make_equal(menv, x_xs[32], expr7),
msat_make_equal(menv, x_xs[32], expr8),
msat_make_equal(menv, x_xs[32], expr9),
msat_make_equal(menv, x_xs[32], expr10),
msat_make_equal(menv, x_xs[32], expr11),
msat_make_equal(menv, x_xs[32], expr12),
msat_make_equal(menv, x_xs[32], expr13),
msat_make_equal(menv, x_xs[32], expr14),
msat_make_equal(menv, x_xs[32], expr15),
msat_make_equal(menv, x_xs[32], expr16),
msat_make_equal(menv, x_xs[32], expr17),
msat_make_equal(menv, x_xs[32], expr18),
msat_make_equal(menv, x_xs[32], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_19_0)
expr1 = msat_make_plus(menv, xs[3], n_15_0)
expr2 = msat_make_plus(menv, xs[5], n_20_0)
expr3 = msat_make_plus(menv, xs[7], n_10_0)
expr4 = msat_make_plus(menv, xs[8], n_11_0)
expr5 = msat_make_plus(menv, xs[10], n_16_0)
expr6 = msat_make_plus(menv, xs[13], n_14_0)
expr7 = msat_make_plus(menv, xs[17], n_12_0)
expr8 = msat_make_plus(menv, xs[18], n_3_0)
expr9 = msat_make_plus(menv, xs[19], n_4_0)
expr10 = msat_make_plus(menv, xs[21], n_19_0)
expr11 = msat_make_plus(menv, xs[22], n_16_0)
expr12 = msat_make_plus(menv, xs[24], n_5_0)
expr13 = msat_make_plus(menv, xs[25], n_5_0)
expr14 = msat_make_plus(menv, xs[29], n_4_0)
expr15 = msat_make_plus(menv, xs[31], n_1_0)
expr16 = msat_make_plus(menv, xs[34], n_10_0)
expr17 = msat_make_plus(menv, xs[35], n_10_0)
expr18 = msat_make_plus(menv, xs[37], n_12_0)
expr19 = msat_make_plus(menv, xs[39], n_11_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[33], expr0),
msat_make_geq(menv, x_xs[33], expr1),
msat_make_geq(menv, x_xs[33], expr2),
msat_make_geq(menv, x_xs[33], expr3),
msat_make_geq(menv, x_xs[33], expr4),
msat_make_geq(menv, x_xs[33], expr5),
msat_make_geq(menv, x_xs[33], expr6),
msat_make_geq(menv, x_xs[33], expr7),
msat_make_geq(menv, x_xs[33], expr8),
msat_make_geq(menv, x_xs[33], expr9),
msat_make_geq(menv, x_xs[33], expr10),
msat_make_geq(menv, x_xs[33], expr11),
msat_make_geq(menv, x_xs[33], expr12),
msat_make_geq(menv, x_xs[33], expr13),
msat_make_geq(menv, x_xs[33], expr14),
msat_make_geq(menv, x_xs[33], expr15),
msat_make_geq(menv, x_xs[33], expr16),
msat_make_geq(menv, x_xs[33], expr17),
msat_make_geq(menv, x_xs[33], expr18),
msat_make_geq(menv, x_xs[33], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[33], expr0),
msat_make_equal(menv, x_xs[33], expr1),
msat_make_equal(menv, x_xs[33], expr2),
msat_make_equal(menv, x_xs[33], expr3),
msat_make_equal(menv, x_xs[33], expr4),
msat_make_equal(menv, x_xs[33], expr5),
msat_make_equal(menv, x_xs[33], expr6),
msat_make_equal(menv, x_xs[33], expr7),
msat_make_equal(menv, x_xs[33], expr8),
msat_make_equal(menv, x_xs[33], expr9),
msat_make_equal(menv, x_xs[33], expr10),
msat_make_equal(menv, x_xs[33], expr11),
msat_make_equal(menv, x_xs[33], expr12),
msat_make_equal(menv, x_xs[33], expr13),
msat_make_equal(menv, x_xs[33], expr14),
msat_make_equal(menv, x_xs[33], expr15),
msat_make_equal(menv, x_xs[33], expr16),
msat_make_equal(menv, x_xs[33], expr17),
msat_make_equal(menv, x_xs[33], expr18),
msat_make_equal(menv, x_xs[33], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[3], n_3_0)
expr1 = msat_make_plus(menv, xs[5], n_13_0)
expr2 = msat_make_plus(menv, xs[7], n_9_0)
expr3 = msat_make_plus(menv, xs[8], n_18_0)
expr4 = msat_make_plus(menv, xs[9], n_17_0)
expr5 = msat_make_plus(menv, xs[11], n_20_0)
expr6 = msat_make_plus(menv, xs[12], n_7_0)
expr7 = msat_make_plus(menv, xs[13], n_17_0)
expr8 = msat_make_plus(menv, xs[16], n_10_0)
expr9 = msat_make_plus(menv, xs[21], n_9_0)
expr10 = msat_make_plus(menv, xs[23], n_14_0)
expr11 = msat_make_plus(menv, xs[25], n_20_0)
expr12 = msat_make_plus(menv, xs[27], n_13_0)
expr13 = msat_make_plus(menv, xs[29], n_14_0)
expr14 = msat_make_plus(menv, xs[30], n_15_0)
expr15 = msat_make_plus(menv, xs[31], n_14_0)
expr16 = msat_make_plus(menv, xs[32], n_7_0)
expr17 = msat_make_plus(menv, xs[34], n_4_0)
expr18 = msat_make_plus(menv, xs[36], n_17_0)
expr19 = msat_make_plus(menv, xs[37], n_6_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[34], expr0),
msat_make_geq(menv, x_xs[34], expr1),
msat_make_geq(menv, x_xs[34], expr2),
msat_make_geq(menv, x_xs[34], expr3),
msat_make_geq(menv, x_xs[34], expr4),
msat_make_geq(menv, x_xs[34], expr5),
msat_make_geq(menv, x_xs[34], expr6),
msat_make_geq(menv, x_xs[34], expr7),
msat_make_geq(menv, x_xs[34], expr8),
msat_make_geq(menv, x_xs[34], expr9),
msat_make_geq(menv, x_xs[34], expr10),
msat_make_geq(menv, x_xs[34], expr11),
msat_make_geq(menv, x_xs[34], expr12),
msat_make_geq(menv, x_xs[34], expr13),
msat_make_geq(menv, x_xs[34], expr14),
msat_make_geq(menv, x_xs[34], expr15),
msat_make_geq(menv, x_xs[34], expr16),
msat_make_geq(menv, x_xs[34], expr17),
msat_make_geq(menv, x_xs[34], expr18),
msat_make_geq(menv, x_xs[34], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[34], expr0),
msat_make_equal(menv, x_xs[34], expr1),
msat_make_equal(menv, x_xs[34], expr2),
msat_make_equal(menv, x_xs[34], expr3),
msat_make_equal(menv, x_xs[34], expr4),
msat_make_equal(menv, x_xs[34], expr5),
msat_make_equal(menv, x_xs[34], expr6),
msat_make_equal(menv, x_xs[34], expr7),
msat_make_equal(menv, x_xs[34], expr8),
msat_make_equal(menv, x_xs[34], expr9),
msat_make_equal(menv, x_xs[34], expr10),
msat_make_equal(menv, x_xs[34], expr11),
msat_make_equal(menv, x_xs[34], expr12),
msat_make_equal(menv, x_xs[34], expr13),
msat_make_equal(menv, x_xs[34], expr14),
msat_make_equal(menv, x_xs[34], expr15),
msat_make_equal(menv, x_xs[34], expr16),
msat_make_equal(menv, x_xs[34], expr17),
msat_make_equal(menv, x_xs[34], expr18),
msat_make_equal(menv, x_xs[34], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[1], n_15_0)
expr1 = msat_make_plus(menv, xs[5], n_20_0)
expr2 = msat_make_plus(menv, xs[6], n_14_0)
expr3 = msat_make_plus(menv, xs[9], n_12_0)
expr4 = msat_make_plus(menv, xs[12], n_19_0)
expr5 = msat_make_plus(menv, xs[15], n_19_0)
expr6 = msat_make_plus(menv, xs[16], n_10_0)
expr7 = msat_make_plus(menv, xs[17], n_15_0)
expr8 = msat_make_plus(menv, xs[18], n_10_0)
expr9 = msat_make_plus(menv, xs[20], n_12_0)
expr10 = msat_make_plus(menv, xs[21], n_5_0)
expr11 = msat_make_plus(menv, xs[22], n_13_0)
expr12 = msat_make_plus(menv, xs[27], n_2_0)
expr13 = msat_make_plus(menv, xs[30], n_17_0)
expr14 = msat_make_plus(menv, xs[31], n_14_0)
expr15 = msat_make_plus(menv, xs[32], n_16_0)
expr16 = msat_make_plus(menv, xs[33], n_17_0)
expr17 = msat_make_plus(menv, xs[34], n_13_0)
expr18 = msat_make_plus(menv, xs[36], n_18_0)
expr19 = msat_make_plus(menv, xs[38], n_19_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[35], expr0),
msat_make_geq(menv, x_xs[35], expr1),
msat_make_geq(menv, x_xs[35], expr2),
msat_make_geq(menv, x_xs[35], expr3),
msat_make_geq(menv, x_xs[35], expr4),
msat_make_geq(menv, x_xs[35], expr5),
msat_make_geq(menv, x_xs[35], expr6),
msat_make_geq(menv, x_xs[35], expr7),
msat_make_geq(menv, x_xs[35], expr8),
msat_make_geq(menv, x_xs[35], expr9),
msat_make_geq(menv, x_xs[35], expr10),
msat_make_geq(menv, x_xs[35], expr11),
msat_make_geq(menv, x_xs[35], expr12),
msat_make_geq(menv, x_xs[35], expr13),
msat_make_geq(menv, x_xs[35], expr14),
msat_make_geq(menv, x_xs[35], expr15),
msat_make_geq(menv, x_xs[35], expr16),
msat_make_geq(menv, x_xs[35], expr17),
msat_make_geq(menv, x_xs[35], expr18),
msat_make_geq(menv, x_xs[35], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[35], expr0),
msat_make_equal(menv, x_xs[35], expr1),
msat_make_equal(menv, x_xs[35], expr2),
msat_make_equal(menv, x_xs[35], expr3),
msat_make_equal(menv, x_xs[35], expr4),
msat_make_equal(menv, x_xs[35], expr5),
msat_make_equal(menv, x_xs[35], expr6),
msat_make_equal(menv, x_xs[35], expr7),
msat_make_equal(menv, x_xs[35], expr8),
msat_make_equal(menv, x_xs[35], expr9),
msat_make_equal(menv, x_xs[35], expr10),
msat_make_equal(menv, x_xs[35], expr11),
msat_make_equal(menv, x_xs[35], expr12),
msat_make_equal(menv, x_xs[35], expr13),
msat_make_equal(menv, x_xs[35], expr14),
msat_make_equal(menv, x_xs[35], expr15),
msat_make_equal(menv, x_xs[35], expr16),
msat_make_equal(menv, x_xs[35], expr17),
msat_make_equal(menv, x_xs[35], expr18),
msat_make_equal(menv, x_xs[35], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_14_0)
expr1 = msat_make_plus(menv, xs[3], n_15_0)
expr2 = msat_make_plus(menv, xs[4], n_16_0)
expr3 = msat_make_plus(menv, xs[8], n_3_0)
expr4 = msat_make_plus(menv, xs[9], n_12_0)
expr5 = msat_make_plus(menv, xs[11], n_12_0)
expr6 = msat_make_plus(menv, xs[13], n_7_0)
expr7 = msat_make_plus(menv, xs[15], n_12_0)
expr8 = msat_make_plus(menv, xs[16], n_10_0)
expr9 = msat_make_plus(menv, xs[18], n_13_0)
expr10 = msat_make_plus(menv, xs[19], n_16_0)
expr11 = msat_make_plus(menv, xs[21], n_15_0)
expr12 = msat_make_plus(menv, xs[24], n_1_0)
expr13 = msat_make_plus(menv, xs[25], n_1_0)
expr14 = msat_make_plus(menv, xs[26], n_1_0)
expr15 = msat_make_plus(menv, xs[27], n_10_0)
expr16 = msat_make_plus(menv, xs[31], n_12_0)
expr17 = msat_make_plus(menv, xs[33], n_18_0)
expr18 = msat_make_plus(menv, xs[34], n_18_0)
expr19 = msat_make_plus(menv, xs[35], n_5_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[36], expr0),
msat_make_geq(menv, x_xs[36], expr1),
msat_make_geq(menv, x_xs[36], expr2),
msat_make_geq(menv, x_xs[36], expr3),
msat_make_geq(menv, x_xs[36], expr4),
msat_make_geq(menv, x_xs[36], expr5),
msat_make_geq(menv, x_xs[36], expr6),
msat_make_geq(menv, x_xs[36], expr7),
msat_make_geq(menv, x_xs[36], expr8),
msat_make_geq(menv, x_xs[36], expr9),
msat_make_geq(menv, x_xs[36], expr10),
msat_make_geq(menv, x_xs[36], expr11),
msat_make_geq(menv, x_xs[36], expr12),
msat_make_geq(menv, x_xs[36], expr13),
msat_make_geq(menv, x_xs[36], expr14),
msat_make_geq(menv, x_xs[36], expr15),
msat_make_geq(menv, x_xs[36], expr16),
msat_make_geq(menv, x_xs[36], expr17),
msat_make_geq(menv, x_xs[36], expr18),
msat_make_geq(menv, x_xs[36], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[36], expr0),
msat_make_equal(menv, x_xs[36], expr1),
msat_make_equal(menv, x_xs[36], expr2),
msat_make_equal(menv, x_xs[36], expr3),
msat_make_equal(menv, x_xs[36], expr4),
msat_make_equal(menv, x_xs[36], expr5),
msat_make_equal(menv, x_xs[36], expr6),
msat_make_equal(menv, x_xs[36], expr7),
msat_make_equal(menv, x_xs[36], expr8),
msat_make_equal(menv, x_xs[36], expr9),
msat_make_equal(menv, x_xs[36], expr10),
msat_make_equal(menv, x_xs[36], expr11),
msat_make_equal(menv, x_xs[36], expr12),
msat_make_equal(menv, x_xs[36], expr13),
msat_make_equal(menv, x_xs[36], expr14),
msat_make_equal(menv, x_xs[36], expr15),
msat_make_equal(menv, x_xs[36], expr16),
msat_make_equal(menv, x_xs[36], expr17),
msat_make_equal(menv, x_xs[36], expr18),
msat_make_equal(menv, x_xs[36], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[2], n_10_0)
expr1 = msat_make_plus(menv, xs[6], n_11_0)
expr2 = msat_make_plus(menv, xs[7], n_20_0)
expr3 = msat_make_plus(menv, xs[9], n_18_0)
expr4 = msat_make_plus(menv, xs[10], n_10_0)
expr5 = msat_make_plus(menv, xs[11], n_4_0)
expr6 = msat_make_plus(menv, xs[12], n_16_0)
expr7 = msat_make_plus(menv, xs[16], n_5_0)
expr8 = msat_make_plus(menv, xs[18], n_7_0)
expr9 = msat_make_plus(menv, xs[21], n_14_0)
expr10 = msat_make_plus(menv, xs[22], n_20_0)
expr11 = msat_make_plus(menv, xs[23], n_15_0)
expr12 = msat_make_plus(menv, xs[25], n_8_0)
expr13 = msat_make_plus(menv, xs[28], n_17_0)
expr14 = msat_make_plus(menv, xs[29], n_6_0)
expr15 = msat_make_plus(menv, xs[30], n_7_0)
expr16 = msat_make_plus(menv, xs[32], n_1_0)
expr17 = msat_make_plus(menv, xs[34], n_17_0)
expr18 = msat_make_plus(menv, xs[36], n_17_0)
expr19 = msat_make_plus(menv, xs[38], n_12_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[37], expr0),
msat_make_geq(menv, x_xs[37], expr1),
msat_make_geq(menv, x_xs[37], expr2),
msat_make_geq(menv, x_xs[37], expr3),
msat_make_geq(menv, x_xs[37], expr4),
msat_make_geq(menv, x_xs[37], expr5),
msat_make_geq(menv, x_xs[37], expr6),
msat_make_geq(menv, x_xs[37], expr7),
msat_make_geq(menv, x_xs[37], expr8),
msat_make_geq(menv, x_xs[37], expr9),
msat_make_geq(menv, x_xs[37], expr10),
msat_make_geq(menv, x_xs[37], expr11),
msat_make_geq(menv, x_xs[37], expr12),
msat_make_geq(menv, x_xs[37], expr13),
msat_make_geq(menv, x_xs[37], expr14),
msat_make_geq(menv, x_xs[37], expr15),
msat_make_geq(menv, x_xs[37], expr16),
msat_make_geq(menv, x_xs[37], expr17),
msat_make_geq(menv, x_xs[37], expr18),
msat_make_geq(menv, x_xs[37], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[37], expr0),
msat_make_equal(menv, x_xs[37], expr1),
msat_make_equal(menv, x_xs[37], expr2),
msat_make_equal(menv, x_xs[37], expr3),
msat_make_equal(menv, x_xs[37], expr4),
msat_make_equal(menv, x_xs[37], expr5),
msat_make_equal(menv, x_xs[37], expr6),
msat_make_equal(menv, x_xs[37], expr7),
msat_make_equal(menv, x_xs[37], expr8),
msat_make_equal(menv, x_xs[37], expr9),
msat_make_equal(menv, x_xs[37], expr10),
msat_make_equal(menv, x_xs[37], expr11),
msat_make_equal(menv, x_xs[37], expr12),
msat_make_equal(menv, x_xs[37], expr13),
msat_make_equal(menv, x_xs[37], expr14),
msat_make_equal(menv, x_xs[37], expr15),
msat_make_equal(menv, x_xs[37], expr16),
msat_make_equal(menv, x_xs[37], expr17),
msat_make_equal(menv, x_xs[37], expr18),
msat_make_equal(menv, x_xs[37], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[2], n_11_0)
expr1 = msat_make_plus(menv, xs[3], n_9_0)
expr2 = msat_make_plus(menv, xs[5], n_19_0)
expr3 = msat_make_plus(menv, xs[6], n_7_0)
expr4 = msat_make_plus(menv, xs[7], n_11_0)
expr5 = msat_make_plus(menv, xs[10], n_19_0)
expr6 = msat_make_plus(menv, xs[11], n_19_0)
expr7 = msat_make_plus(menv, xs[12], n_6_0)
expr8 = msat_make_plus(menv, xs[15], n_13_0)
expr9 = msat_make_plus(menv, xs[17], n_12_0)
expr10 = msat_make_plus(menv, xs[18], n_14_0)
expr11 = msat_make_plus(menv, xs[19], n_5_0)
expr12 = msat_make_plus(menv, xs[20], n_19_0)
expr13 = msat_make_plus(menv, xs[21], n_10_0)
expr14 = msat_make_plus(menv, xs[25], n_1_0)
expr15 = msat_make_plus(menv, xs[26], n_17_0)
expr16 = msat_make_plus(menv, xs[33], n_10_0)
expr17 = msat_make_plus(menv, xs[34], n_5_0)
expr18 = msat_make_plus(menv, xs[35], n_1_0)
expr19 = msat_make_plus(menv, xs[36], n_8_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[38], expr0),
msat_make_geq(menv, x_xs[38], expr1),
msat_make_geq(menv, x_xs[38], expr2),
msat_make_geq(menv, x_xs[38], expr3),
msat_make_geq(menv, x_xs[38], expr4),
msat_make_geq(menv, x_xs[38], expr5),
msat_make_geq(menv, x_xs[38], expr6),
msat_make_geq(menv, x_xs[38], expr7),
msat_make_geq(menv, x_xs[38], expr8),
msat_make_geq(menv, x_xs[38], expr9),
msat_make_geq(menv, x_xs[38], expr10),
msat_make_geq(menv, x_xs[38], expr11),
msat_make_geq(menv, x_xs[38], expr12),
msat_make_geq(menv, x_xs[38], expr13),
msat_make_geq(menv, x_xs[38], expr14),
msat_make_geq(menv, x_xs[38], expr15),
msat_make_geq(menv, x_xs[38], expr16),
msat_make_geq(menv, x_xs[38], expr17),
msat_make_geq(menv, x_xs[38], expr18),
msat_make_geq(menv, x_xs[38], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[38], expr0),
msat_make_equal(menv, x_xs[38], expr1),
msat_make_equal(menv, x_xs[38], expr2),
msat_make_equal(menv, x_xs[38], expr3),
msat_make_equal(menv, x_xs[38], expr4),
msat_make_equal(menv, x_xs[38], expr5),
msat_make_equal(menv, x_xs[38], expr6),
msat_make_equal(menv, x_xs[38], expr7),
msat_make_equal(menv, x_xs[38], expr8),
msat_make_equal(menv, x_xs[38], expr9),
msat_make_equal(menv, x_xs[38], expr10),
msat_make_equal(menv, x_xs[38], expr11),
msat_make_equal(menv, x_xs[38], expr12),
msat_make_equal(menv, x_xs[38], expr13),
msat_make_equal(menv, x_xs[38], expr14),
msat_make_equal(menv, x_xs[38], expr15),
msat_make_equal(menv, x_xs[38], expr16),
msat_make_equal(menv, x_xs[38], expr17),
msat_make_equal(menv, x_xs[38], expr18),
msat_make_equal(menv, x_xs[38], expr19),))
trans = msat_make_and(menv, trans, _t)
expr0 = msat_make_plus(menv, xs[0], n_6_0)
expr1 = msat_make_plus(menv, xs[3], n_18_0)
expr2 = msat_make_plus(menv, xs[4], n_1_0)
expr3 = msat_make_plus(menv, xs[7], n_4_0)
expr4 = msat_make_plus(menv, xs[10], n_13_0)
expr5 = msat_make_plus(menv, xs[12], n_3_0)
expr6 = msat_make_plus(menv, xs[13], n_7_0)
expr7 = msat_make_plus(menv, xs[14], n_2_0)
expr8 = msat_make_plus(menv, xs[15], n_16_0)
expr9 = msat_make_plus(menv, xs[19], n_12_0)
expr10 = msat_make_plus(menv, xs[21], n_18_0)
expr11 = msat_make_plus(menv, xs[22], n_14_0)
expr12 = msat_make_plus(menv, xs[25], n_3_0)
expr13 = msat_make_plus(menv, xs[28], n_11_0)
expr14 = msat_make_plus(menv, xs[29], n_18_0)
expr15 = msat_make_plus(menv, xs[31], n_17_0)
expr16 = msat_make_plus(menv, xs[32], n_8_0)
expr17 = msat_make_plus(menv, xs[34], n_12_0)
expr18 = msat_make_plus(menv, xs[36], n_13_0)
expr19 = msat_make_plus(menv, xs[39], n_3_0)
_t = msat_make_and(menv,
msat_make_geq(menv, x_xs[39], expr0),
msat_make_geq(menv, x_xs[39], expr1),
msat_make_geq(menv, x_xs[39], expr2),
msat_make_geq(menv, x_xs[39], expr3),
msat_make_geq(menv, x_xs[39], expr4),
msat_make_geq(menv, x_xs[39], expr5),
msat_make_geq(menv, x_xs[39], expr6),
msat_make_geq(menv, x_xs[39], expr7),
msat_make_geq(menv, x_xs[39], expr8),
msat_make_geq(menv, x_xs[39], expr9),
msat_make_geq(menv, x_xs[39], expr10),
msat_make_geq(menv, x_xs[39], expr11),
msat_make_geq(menv, x_xs[39], expr12),
msat_make_geq(menv, x_xs[39], expr13),
msat_make_geq(menv, x_xs[39], expr14),
msat_make_geq(menv, x_xs[39], expr15),
msat_make_geq(menv, x_xs[39], expr16),
msat_make_geq(menv, x_xs[39], expr17),
msat_make_geq(menv, x_xs[39], expr18),
msat_make_geq(menv, x_xs[39], expr19),)
_t = msat_make_and(menv, _t,
msat_make_or(menv,
msat_make_equal(menv, x_xs[39], expr0),
msat_make_equal(menv, x_xs[39], expr1),
msat_make_equal(menv, x_xs[39], expr2),
msat_make_equal(menv, x_xs[39], expr3),
msat_make_equal(menv, x_xs[39], expr4),
msat_make_equal(menv, x_xs[39], expr5),
msat_make_equal(menv, x_xs[39], expr6),
msat_make_equal(menv, x_xs[39], expr7),
msat_make_equal(menv, x_xs[39], expr8),
msat_make_equal(menv, x_xs[39], expr9),
msat_make_equal(menv, x_xs[39], expr10),
msat_make_equal(menv, x_xs[39], expr11),
msat_make_equal(menv, x_xs[39], expr12),
msat_make_equal(menv, x_xs[39], expr13),
msat_make_equal(menv, x_xs[39], expr14),
msat_make_equal(menv, x_xs[39], expr15),
msat_make_equal(menv, x_xs[39], expr16),
msat_make_equal(menv, x_xs[39], expr17),
msat_make_equal(menv, x_xs[39], expr18),
msat_make_equal(menv, x_xs[39], expr19),))
trans = msat_make_and(menv, trans, _t)
# ltl property: ((x_12 - x_39 > 3) & (G (F (x_11 - x_17 > -8))))
ltl = msat_make_and(menv, msat_make_gt(menv, msat_make_minus(menv, xs[12], xs[39]), msat_make_number(menv, "3")), enc.make_G(enc.make_F(msat_make_gt(menv, msat_make_minus(menv, xs[11], xs[17]), msat_make_number(menv, "-8")))))
return TermMap(curr2next), init, trans, ltl
| 58.694066 | 322 | 0.497535 |
005ca2969c9b4a6d0b6e77fa39ed8e6dd4425fb5 | 2,723 | py | Python | PyFlow/Packages/PyFlowBase/Nodes/consoleOutput.py | luzpaz/PyFlow | e00642794051b1d9b7b2665eee38567e9763558d | [
"Apache-2.0"
] | 1,463 | 2019-07-29T15:45:22.000Z | 2022-03-31T23:32:13.000Z | PyFlow/Packages/PyFlowBase/Nodes/consoleOutput.py | luzpaz/PyFlow | e00642794051b1d9b7b2665eee38567e9763558d | [
"Apache-2.0"
] | 58 | 2019-07-31T07:58:57.000Z | 2022-02-23T05:46:08.000Z | PyFlow/Packages/PyFlowBase/Nodes/consoleOutput.py | luzpaz/PyFlow | e00642794051b1d9b7b2665eee38567e9763558d | [
"Apache-2.0"
] | 169 | 2019-08-03T16:38:57.000Z | 2022-03-31T14:20:12.000Z | ## Copyright 2015-2019 Ilgar Lunin, Pedro Cabrera
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
## http://www.apache.org/licenses/LICENSE-2.0
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
from nine import *
import logging
from PyFlow.Core import NodeBase
from PyFlow.Core.NodeBase import NodePinsSuggestionsHelper
from PyFlow.ConfigManager import ConfigManager
from PyFlow.Core.Common import *
class consoleOutput(NodeBase):
    """PyFlow node that prints its 'entity' input to the console, or routes it
    to the application's log widget when output redirection is enabled."""
    def __init__(self, name):
        super(consoleOutput, self).__init__(name)
        # Exec input triggers compute(); 'entity' accepts any data type/structure.
        self.inExec = self.createInputPin(DEFAULT_IN_EXEC_NAME, 'ExecPin', None, self.compute)
        self.entity = self.createInputPin('entity', 'AnyPin', structure=StructureType.Multi)
        self.entity.enableOptions(PinOptions.AllowAny)
        self.outExec = self.createOutputPin(DEFAULT_OUT_EXEC_NAME, 'ExecPin')
    @staticmethod
    def pinTypeHints():
        """Declare the pin data types and structures this node suggests in the UI."""
        helper = NodePinsSuggestionsHelper()
        helper.addInputDataType('ExecPin')
        helper.addInputDataType('AnyPin')
        helper.addOutputDataType('ExecPin')
        helper.addInputStruct(StructureType.Multi)
        helper.addInputStruct(StructureType.Single)
        helper.addOutputStruct(StructureType.Single)
        return helper
    @staticmethod
    def category():
        """Node-library category shown in the editor."""
        return 'Common'
    @staticmethod
    def keywords():
        """Search keywords for the node palette."""
        return ['print']
    @staticmethod
    def description():
        """Short description shown in the editor."""
        return "Python's 'print' function wrapper"
    def compute(self, *args, **kwargs):
        """Emit the input value, then fire the output exec pin.

        With a UI wrapper present and redirection enabled, the value is
        HTML-escaped into a link and sent to the logger; otherwise it is
        printed to stdout.
        """
        redirectionEnabled = ConfigManager().shouldRedirectOutput()
        if self.getWrapper() is not None and redirectionEnabled:
            data = str(self.entity.getData())
            # Non-string data is escaped so control characters render in HTML.
            # NOTE(review): encode() yields bytes on py3; the isinstance check
            # below decodes it back — confirm round-trip is intended.
            if self.entity.dataType != "StringPin":
                data = data.encode('unicode-escape')
            if IS_PYTHON2:
                data = data.replace("\\n", "<br/>")
            else:
                if isinstance(data, bytes):
                    data = data.decode("utf-8")
                data = str(data).replace("\\n", "<br/>")
            errorLink = """<a href=%s><span style=" text-decoration: underline; color:green;">%s</span></a></p>""" % (self.name, "<br/>%s" % data)
            # 'consoleoutput' is a custom logging level/method — presumably
            # installed elsewhere in the app; verify before refactoring.
            logging.getLogger(None).consoleoutput(errorLink)
        else:
            print(self.entity.getData())
        self.outExec.call()
| 36.797297 | 146 | 0.662872 |
1177d86a3e2e14a41ae75e633cb521cd6b80113b | 482 | py | Python | test/tools_test.py | jjlawren/ha-ffmpeg | 898491671e8dc60b140a0353b0cddeb234e11906 | [
"BSD-3-Clause"
] | null | null | null | test/tools_test.py | jjlawren/ha-ffmpeg | 898491671e8dc60b140a0353b0cddeb234e11906 | [
"BSD-3-Clause"
] | null | null | null | test/tools_test.py | jjlawren/ha-ffmpeg | 898491671e8dc60b140a0353b0cddeb234e11906 | [
"BSD-3-Clause"
] | null | null | null | import logging
import sys
import click
sys.path.append("../")
from haffmpeg import Test
logging.basicConfig(level=logging.DEBUG)
@click.command()
@click.option("--ffmpeg", "-f", default="ffmpeg", help="FFmpeg binary")
@click.option("--source", "-s", help="Input file for ffmpeg")
def cli(ffmpeg, source):
    """Run an FFmpeg sanity test against the given input source.

    Previous docstring ("FFMPEG capture frame as image.") was copied from
    another tool and did not describe this command: it builds a haffmpeg
    `Test` probe and runs it on `source`.
    """
    stream = Test(ffmpeg_bin=ffmpeg)
    stream.run_test(
        input_source=source,
    )


if __name__ == "__main__":
    cli()
| 20.083333 | 71 | 0.672199 |
62f27bf5acbf3640dfdaf4fcdb53d67ab0f96f72 | 763 | py | Python | kattis/k_fizzbuzz.py | ivanlyon/exercises | 0792976ae2acb85187b26a52812f9ebdd119b5e8 | [
"MIT"
] | null | null | null | kattis/k_fizzbuzz.py | ivanlyon/exercises | 0792976ae2acb85187b26a52812f9ebdd119b5e8 | [
"MIT"
] | null | null | null | kattis/k_fizzbuzz.py | ivanlyon/exercises | 0792976ae2acb85187b26a52812f9ebdd119b5e8 | [
"MIT"
] | null | null | null | '''
The interview question
Status: Accepted
'''
###############################################################################
def fizzbuzz_word(number, fizz, buzz):
    """Return the FizzBuzz token for *number* given the two divisors.

    Multiples of both divisors yield "FizzBuzz"; multiples of exactly one
    yield "Fizz" or "Buzz"; anything else yields the number's decimal text.
    """
    word = ''
    if number % fizz == 0:
        word += 'Fizz'
    if number % buzz == 0:
        word += 'Buzz'
    return word or str(number)


def main():
    """Read "fizz buzz domain" from stdin and print FizzBuzz for 1..domain.

    Rewritten from a two-pass mutation of a pre-built list over the whole
    range into a single streaming pass through a pure, testable helper;
    printed output is identical.
    """
    fizz, buzz, domain = (int(i) for i in input().split())
    for number in range(1, domain + 1):
        print(fizzbuzz_word(number, fizz, buzz))

###############################################################################
if __name__ == '__main__':
    main()
| 24.612903 | 79 | 0.410223 |
e2ea64017dd2857f0ff92cd8d63bea08905f3df2 | 304 | py | Python | golly-4.0-win-64bit/Scripts/Python/glife/gun46.py | larayeung/gollywithlocusts | e7adbaaa691fe46f22e88fb4d13e42b3d702a871 | [
"Apache-2.0"
] | null | null | null | golly-4.0-win-64bit/Scripts/Python/glife/gun46.py | larayeung/gollywithlocusts | e7adbaaa691fe46f22e88fb4d13e42b3d702a871 | [
"Apache-2.0"
] | null | null | null | golly-4.0-win-64bit/Scripts/Python/glife/gun46.py | larayeung/gollywithlocusts | e7adbaaa691fe46f22e88fb4d13e42b3d702a871 | [
"Apache-2.0"
] | null | null | null | from glife.base import *
# Builds a period-46 glider gun from two b-heptominoes stabilized by blocks.
rule() # use Life rule to evolve phases
# One half of the gun: two mirrored b-heptominoes plus two still-life blocks.
__half = bheptomino (0, 2, flip_x) + bheptomino (0, -2, flip) + block (16, -4) + block (16, 3)
# Two facing half-guns form the double-barreled variant.
gun46_double = __half (7, -2) + __half (-8, 2, flip_x)
# NOTE(review): __half[1] presumably selects generation 1 of the evolved
# pattern (glife pattern indexing) — confirm against glife docs.
gun46 = __half[1] (1, -7) + __half (-13, -4, flip_x) # aligned version shooting SE
| 30.4 | 94 | 0.628289 |
af7ca299d2a55adc57c5fc68d5c32e2ed9a31af2 | 1,683 | py | Python | sknetwork/data/tests/test_toy_graphs.py | altana-tech/scikit-network | dedc9d3e694c7106e4709aae22dffb5142c15859 | [
"BSD-3-Clause"
] | 457 | 2018-07-24T12:42:14.000Z | 2022-03-31T08:30:39.000Z | sknetwork/data/tests/test_toy_graphs.py | altana-tech/scikit-network | dedc9d3e694c7106e4709aae22dffb5142c15859 | [
"BSD-3-Clause"
] | 281 | 2018-07-13T05:01:19.000Z | 2022-03-31T14:13:43.000Z | sknetwork/data/tests/test_toy_graphs.py | altana-tech/scikit-network | dedc9d3e694c7106e4709aae22dffb5142c15859 | [
"BSD-3-Clause"
] | 58 | 2019-04-22T09:04:32.000Z | 2022-03-30T12:43:08.000Z | # -*- coding: utf-8 -*-
# tests for toy_graphs.py
"""
@author: Quentin Lutz <qlutz@enst.fr>
@author: Nathan de Lara <ndelara@enst.fr>
@author: Thomas Bonald <tbonald@enst.fr>
"""
import unittest
from sknetwork.data.toy_graphs import *
class TestToys(unittest.TestCase):
    """Sanity checks on the shapes and metadata of the bundled toy graphs."""

    def test_undirected(self):
        """Undirected toys: square adjacency, 2-D positions, metadata sizes."""
        adj = house()
        self.assertEqual(adj.shape, (5, 5))
        bundle = house(metadata=True)
        self.assertEqual(bundle.position.shape, (5, 2))
        adj = bow_tie()
        self.assertEqual(adj.shape, (5, 5))
        bundle = bow_tie(metadata=True)
        self.assertEqual(bundle.position.shape, (5, 2))
        bundle = karate_club(True)
        self.assertEqual(bundle.adjacency.shape, (34, 34))
        self.assertEqual(len(bundle.labels), 34)
        bundle = miserables(True)
        self.assertEqual(bundle.adjacency.shape, (77, 77))
        self.assertEqual(len(bundle.names), 77)

    def test_directed(self):
        """Painters digraph: 14x14 adjacency and 14 node names."""
        adj = painters()
        self.assertEqual(adj.shape, (14, 14))
        bundle = painters(True)
        self.assertEqual(bundle.adjacency.shape, (14, 14))
        self.assertEqual(len(bundle.names), 14)

    def test_bipartite(self):
        """Bipartite toys: biadjacency shapes match row/column name counts."""
        bundle = star_wars(True)
        self.assertEqual(bundle.biadjacency.shape, (4, 3))
        self.assertEqual(len(bundle.names), 4)
        self.assertEqual(len(bundle.names_col), 3)
        bundle = movie_actor(True)
        self.assertEqual(bundle.biadjacency.shape, (15, 16))
        self.assertEqual(len(bundle.names), 15)
        self.assertEqual(len(bundle.names_col), 16)
        bundle = hourglass(True)
        self.assertEqual(bundle.biadjacency.shape, (2, 2))
| 29.526316 | 59 | 0.639335 |
22b845accd00de1e499708aad70679260e142683 | 3,915 | py | Python | counterfit/core/wrappers.py | pralab/counterfit | 4d0b6134afc951185505c4754209ebd896557e4b | [
"MIT"
] | null | null | null | counterfit/core/wrappers.py | pralab/counterfit | 4d0b6134afc951185505c4754209ebd896557e4b | [
"MIT"
] | null | null | null | counterfit/core/wrappers.py | pralab/counterfit | 4d0b6134afc951185505c4754209ebd896557e4b | [
"MIT"
] | null | null | null | import numpy as np
from art.estimators.estimator import BaseEstimator, NeuralNetworkMixin
from art.estimators.classification import ClassifierMixin
from secml.array import CArray
from secml_malware.attack.blackbox.c_wrapper_phi import CWrapperPhi
class BlackBoxClassifierWrapper(BaseEstimator, NeuralNetworkMixin, ClassifierMixin):
    """This counterfit class wraps the Adversarial Robustness blackbox classifier.

    Inherits the ART estimator mixins purely so ART attacks accept this
    object; gradients and training are not available (black-box access only).
    """
    def __init__(self, submit_sample, model_input_shape, nb_output_classes, clip_values, channels_first):
        # do not call parent constructor. We're inheriting only for the sake of ART type checking
        # ...and tending to the other variables manually
        # super(BlackBoxClassifierWrapper, self).__init__(model,clip_values=clip_values)
        # submit_sample: callable taking a batch of samples and returning
        # per-class scores, one row per sample.
        self._predictions = submit_sample
        self._input_shape = model_input_shape
        self._nb_classes = nb_output_classes
        # Tuple of the form `(min, max)` of floats or `np.ndarray` representing the minimum
        # and maximum values allowed for features. If arrays are provided, each value will
        # be considered the bound for a feature, thus
        # the shape of clip values needs to match the total number of features.
        self._clip_values = clip_values
        # Boolean to indicate index of the color channels in the sample `X`.
        self._channels_first = channels_first
    def fit(self, X, y):
        # Black-box model: training is not supported; deliberate no-op.
        pass
    def class_gradient(self, *args, **kwargs):
        """Compute per-class derivatives w.r.t. `X`."""
        # Abstract in ART's NeuralNetworkMixin; unavailable without white-box access.
        raise Exception("I didn't expect to be called!")
    def loss_gradient(self, *args, **kwargs):
        # Unavailable: no gradient access in a black-box setting.
        raise Exception("I didn't expect to be called!")
    def get_activations(self, *args, **kwargs):
        # Unavailable: internal activations are not exposed by a black-box model.
        raise Exception("I didn't expect to be called!")
    def set_learning_phase(self, *args, **kwargs):
        # Unavailable: no train/eval mode switch for a remote model.
        raise Exception("I didn't expect to be called!")
    # ART 1.61
    def compute_loss(self, x: np.ndarray, y: np.ndarray, **kwargs) -> np.ndarray:
        raise NotImplementedError
    # ART < 1.6
    def loss(self, x: np.ndarray, y: np.ndarray, **kwargs) -> np.ndarray:
        raise NotImplementedError
    @property
    def input_shape(self):
        # Shape of a single model input, as given at construction time.
        return self._input_shape
    def predict(self, X, batch_size=1):
        """
        Abstract method performs prediction of the estimator for input `X`.
        :param X: Samples of shape (nb_samples, nb_features) or (nb_samples, nb_pixels_1, nb_pixels_2,
                  nb_channels) or (nb_samples, nb_channels, nb_pixels_1, nb_pixels_2).
        :param batch_size: Batch size.
        :return: predictions.
        :rtype: format as expected by the `model`
        """
        predictions = np.zeros((X.shape[0], self._nb_classes), dtype=np.float32)
        # Submit X in batches; the final batch may be smaller than batch_size.
        for batch_index in range(int(np.ceil(X.shape[0] / float(batch_size)))):
            begin, end = (
                batch_index * batch_size,
                min((batch_index + 1) * batch_size, X.shape[0]),
            )
            predictions[begin:end] = self._predictions(X[begin:end])
        return predictions
class SecMLBlackBoxClassifierWrapper:
    """This counterfit class wraps the BlackBox SecML malware class.

    Adapts an arbitrary prediction callable to the interface expected by
    secml_malware black-box attacks (a `CWrapperPhi`-like object).
    """
    def __init__(self, model : CWrapperPhi, prediction_function):
        # prediction_function: callable returning (labels, scores) for a CArray input.
        self._wrapper = model
        self._prediction_function = prediction_function
    def predict(self, x : CArray, return_decision_function : bool = True):
        """Return (labels, scores) for sample `x`, each as a 2-D CArray.

        `return_decision_function` is accepted for interface compatibility
        but not consulted here; scores are always returned.
        """
        # NOTE(review): padding-trim/feature-extraction logic below was
        # deliberately disabled; confirm before removing for good.
        # padding_position = x.find(x == 256)
        # if padding_position:
        #     x = x[0, :padding_position[0]]
        # feature_vector = self.extract_features(x)
        labels, scores = self._prediction_function(x)
        labels, scores = CArray(labels).atleast_2d(), CArray(scores).atleast_2d()
        return labels, scores
    @property
    def classifier(self):
        # Expose the wrapped model's underlying classifier.
        return self._wrapper.classifier
    def extract_features(self, x : CArray):
        # Delegate feature extraction to the wrapped CWrapperPhi.
        return self._wrapper.extract_features(x)
4308b05d0540a65f61eed427ea1d96e359bda249 | 25,330 | py | Python | openstackclient/tests/unit/identity/v2_0/test_user.py | cloudification-io/python-openstackclient | e07324e30fbb24e89fd63d1c5a5fe485f693a45c | [
"Apache-2.0"
] | null | null | null | openstackclient/tests/unit/identity/v2_0/test_user.py | cloudification-io/python-openstackclient | e07324e30fbb24e89fd63d1c5a5fe485f693a45c | [
"Apache-2.0"
] | null | null | null | openstackclient/tests/unit/identity/v2_0/test_user.py | cloudification-io/python-openstackclient | e07324e30fbb24e89fd63d1c5a5fe485f693a45c | [
"Apache-2.0"
] | null | null | null | # Copyright 2013 Nebula Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from unittest import mock
from keystoneauth1 import exceptions as ks_exc
from osc_lib import exceptions
from osc_lib import utils
from openstackclient.identity.v2_0 import user
from openstackclient.tests.unit.identity.v2_0 import fakes as identity_fakes
class TestUser(identity_fakes.TestIdentityv2):
    """Base fixture for identity v2 user tests: shared fakes and client mocks."""
    # A fake project and a fake user assigned to it, shared by subclasses.
    fake_project = identity_fakes.FakeProject.create_one_project()
    attr = {
        'tenantId': fake_project.id,
    }
    fake_user = identity_fakes.FakeUser.create_one_user(attr)
    def setUp(self):
        super(TestUser, self).setUp()
        # Get a shortcut to the TenantManager Mock
        self.projects_mock = self.app.client_manager.identity.tenants
        self.projects_mock.reset_mock()
        # Get a shortcut to the UserManager Mock
        self.users_mock = self.app.client_manager.identity.users
        self.users_mock.reset_mock()
class TestUserCreate(TestUser):
    """Tests for the `openstack user create` command (identity v2)."""
    # Dedicated fake project/user for this class, plus the expected column
    # headers and row data the command should emit.
    fake_project_c = identity_fakes.FakeProject.create_one_project()
    attr = {
        'tenantId': fake_project_c.id,
    }
    fake_user_c = identity_fakes.FakeUser.create_one_user(attr)
    columns = (
        'email',
        'enabled',
        'id',
        'name',
        'project_id',
    )
    datalist = (
        fake_user_c.email,
        True,
        fake_user_c.id,
        fake_user_c.name,
        fake_project_c.id,
    )
    def setUp(self):
        super(TestUserCreate, self).setUp()
        self.projects_mock.get.return_value = self.fake_project_c
        self.users_mock.create.return_value = self.fake_user_c
        # Get the command object to test
        self.cmd = user.CreateUser(self.app, None)
    def test_user_create_no_options(self):
        """Defaults: user is created enabled, with no password/email/project."""
        arglist = [
            self.fake_user_c.name,
        ]
        verifylist = [
            ('enable', False),
            ('disable', False),
            ('name', self.fake_user_c.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class ShowOne in cliff, abstract method take_action()
        # returns a two-part tuple with a tuple of column names and a tuple of
        # data to be shown.
        columns, data = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'enabled': True,
            'tenant_id': None,
        }
        # UserManager.create(name, password, email, tenant_id=, enabled=)
        self.users_mock.create.assert_called_with(
            self.fake_user_c.name,
            None,
            None,
            **kwargs
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.datalist, data)
    def test_user_create_password(self):
        """--password passes the literal password through to the API call."""
        arglist = [
            '--password', 'secret',
            self.fake_user_c.name,
        ]
        verifylist = [
            ('name', self.fake_user_c.name),
            ('password_prompt', False),
            ('password', 'secret')
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class ShowOne in cliff, abstract method take_action()
        # returns a two-part tuple with a tuple of column names and a tuple of
        # data to be shown.
        columns, data = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'enabled': True,
            'tenant_id': None,
        }
        # UserManager.create(name, password, email, tenant_id=, enabled=)
        self.users_mock.create.assert_called_with(
            self.fake_user_c.name,
            'secret',
            None,
            **kwargs
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.datalist, data)
    def test_user_create_password_prompt(self):
        """--password-prompt obtains the password interactively (mocked here)."""
        arglist = [
            '--password-prompt',
            self.fake_user_c.name,
        ]
        verifylist = [
            ('name', self.fake_user_c.name),
            ('password_prompt', True)
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class ShowOne in cliff, abstract method take_action()
        # returns a two-part tuple with a tuple of column names and a tuple of
        # data to be shown.
        mocker = mock.Mock()
        mocker.return_value = 'abc123'
        with mock.patch("osc_lib.utils.get_password", mocker):
            columns, data = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'enabled': True,
            'tenant_id': None,
        }
        # UserManager.create(name, password, email, tenant_id=, enabled=)
        self.users_mock.create.assert_called_with(
            self.fake_user_c.name,
            'abc123',
            None,
            **kwargs
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.datalist, data)
    def test_user_create_email(self):
        """--email is forwarded as the third positional create() argument."""
        arglist = [
            '--email', 'barney@example.com',
            self.fake_user_c.name,
        ]
        verifylist = [
            ('name', self.fake_user_c.name),
            ('email', 'barney@example.com'),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class ShowOne in cliff, abstract method take_action()
        # returns a two-part tuple with a tuple of column names and a tuple of
        # data to be shown.
        columns, data = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'enabled': True,
            'tenant_id': None,
        }
        # UserManager.create(name, password, email, tenant_id=, enabled=)
        self.users_mock.create.assert_called_with(
            self.fake_user_c.name,
            None,
            'barney@example.com',
            **kwargs
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.datalist, data)
    def test_user_create_project(self):
        """--project resolves the project name and sets tenant_id on create."""
        # Return the new project
        self.projects_mock.get.return_value = self.fake_project_c
        # Set up to return an updated user
        attr = {
            'tenantId': self.fake_project_c.id,
        }
        user_2 = identity_fakes.FakeUser.create_one_user(attr)
        self.users_mock.create.return_value = user_2
        arglist = [
            '--project', self.fake_project_c.name,
            user_2.name,
        ]
        verifylist = [
            ('name', user_2.name),
            ('project', self.fake_project_c.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class ShowOne in cliff, abstract method take_action()
        # returns a two-part tuple with a tuple of column names and a tuple of
        # data to be shown.
        columns, data = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'enabled': True,
            'tenant_id': self.fake_project_c.id,
        }
        # UserManager.create(name, password, email, tenant_id=, enabled=)
        self.users_mock.create.assert_called_with(
            user_2.name,
            None,
            None,
            **kwargs
        )
        self.assertEqual(self.columns, columns)
        datalist = (
            user_2.email,
            True,
            user_2.id,
            user_2.name,
            self.fake_project_c.id,
        )
        self.assertEqual(datalist, data)
    def test_user_create_enable(self):
        """--enable sets enabled=True explicitly (same as the default)."""
        arglist = [
            '--enable',
            self.fake_user_c.name,
        ]
        verifylist = [
            ('name', self.fake_user_c.name),
            ('enable', True),
            ('disable', False),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class ShowOne in cliff, abstract method take_action()
        # returns a two-part tuple with a tuple of column names and a tuple of
        # data to be shown.
        columns, data = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'enabled': True,
            'tenant_id': None,
        }
        # UserManager.create(name, password, email, tenant_id=, enabled=)
        self.users_mock.create.assert_called_with(
            self.fake_user_c.name,
            None,
            None,
            **kwargs
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.datalist, data)
    def test_user_create_disable(self):
        """--disable creates the user with enabled=False."""
        arglist = [
            '--disable',
            self.fake_user_c.name,
        ]
        verifylist = [
            ('name', self.fake_user_c.name),
            ('enable', False),
            ('disable', True),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class ShowOne in cliff, abstract method take_action()
        # returns a two-part tuple with a tuple of column names and a tuple of
        # data to be shown.
        columns, data = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'enabled': False,
            'tenant_id': None,
        }
        # UserManager.create(name, password, email, tenant_id=, enabled=)
        self.users_mock.create.assert_called_with(
            self.fake_user_c.name,
            None,
            None,
            **kwargs
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.datalist, data)
    def test_user_create_or_show_exists(self):
        """--or-show on a Conflict falls back to fetching the existing user."""
        def _raise_conflict(*args, **kwargs):
            raise ks_exc.Conflict(None)
        # need to make this throw an exception...
        self.users_mock.create.side_effect = _raise_conflict
        self.users_mock.get.return_value = self.fake_user_c
        arglist = [
            '--or-show',
            self.fake_user_c.name,
        ]
        verifylist = [
            ('name', self.fake_user_c.name),
            ('or_show', True),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class ShowOne in cliff, abstract method take_action()
        # returns a two-part tuple with a tuple of column names and a tuple of
        # data to be shown.
        columns, data = self.cmd.take_action(parsed_args)
        # UserManager.create(name, password, email, tenant_id=, enabled=)
        self.users_mock.get.assert_called_with(self.fake_user_c.name)
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.datalist, data)
    def test_user_create_or_show_not_exists(self):
        """--or-show with no conflict behaves like a normal create."""
        arglist = [
            '--or-show',
            self.fake_user_c.name,
        ]
        verifylist = [
            ('name', self.fake_user_c.name),
            ('or_show', True),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class ShowOne in cliff, abstract method take_action()
        # returns a two-part tuple with a tuple of column names and a tuple of
        # data to be shown.
        columns, data = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'enabled': True,
            'tenant_id': None,
        }
        # UserManager.create(name, password, email, tenant_id=, enabled=)
        self.users_mock.create.assert_called_with(
            self.fake_user_c.name,
            None,
            None,
            **kwargs
        )
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.datalist, data)
class TestUserDelete(TestUser):
    """Tests for the `openstack user delete` command (identity v2)."""
    def setUp(self):
        super(TestUserDelete, self).setUp()
        # This is the return value for utils.find_resource()
        self.users_mock.get.return_value = self.fake_user
        self.users_mock.delete.return_value = None
        # Get the command object to test
        self.cmd = user.DeleteUser(self.app, None)
    def test_user_delete_no_options(self):
        """Deleting one existing user calls UserManager.delete once."""
        arglist = [
            self.fake_user.id,
        ]
        verifylist = [
            ('users', [self.fake_user.id]),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        self.users_mock.delete.assert_called_with(
            self.fake_user.id,
        )
        self.assertIsNone(result)
    @mock.patch.object(utils, 'find_resource')
    def test_delete_multi_users_with_exception(self, find_mock):
        """One of two users failing to resolve raises a summarizing error,
        and the successfully found user is still deleted."""
        find_mock.side_effect = [self.fake_user,
                                 exceptions.CommandError]
        arglist = [
            self.fake_user.id,
            'unexist_user',
        ]
        verifylist = [
            ('users', arglist),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        try:
            self.cmd.take_action(parsed_args)
            self.fail('CommandError should be raised.')
        except exceptions.CommandError as e:
            self.assertEqual('1 of 2 users failed to delete.',
                             str(e))
        find_mock.assert_any_call(self.users_mock, self.fake_user.id)
        find_mock.assert_any_call(self.users_mock, 'unexist_user')
        self.assertEqual(2, find_mock.call_count)
        self.users_mock.delete.assert_called_once_with(self.fake_user.id)
class TestUserList(TestUser):
    """Unit tests for the ``user list`` command (identity v2)."""
    # Class-level fixtures: one fake project and one fake user linked to it
    # (identity v2 stores the link under the 'tenantId' attribute).
    fake_project_l = identity_fakes.FakeProject.create_one_project()
    attr = {
        'tenantId': fake_project_l.id,
    }
    fake_user_l = identity_fakes.FakeUser.create_one_user(attr)
    # Expected columns/rows for the short (default) listing.
    columns = (
        'ID',
        'Name',
    )
    datalist = (
        (
            fake_user_l.id,
            fake_user_l.name,
        ),
    )
    def setUp(self):
        """Wire the project/user manager mocks and build the command."""
        super(TestUserList, self).setUp()
        self.projects_mock.get.return_value = self.fake_project_l
        self.projects_mock.list.return_value = [self.fake_project_l]
        self.users_mock.list.return_value = [self.fake_user_l]
        # Get the command object to test
        self.cmd = user.ListUser(self.app, None)
    def test_user_list_no_options(self):
        """Default listing queries all tenants and shows ID/Name only."""
        arglist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class Lister in cliff, abstract method take_action()
        # returns a tuple containing the column names and an iterable
        # containing the data to be listed.
        columns, data = self.cmd.take_action(parsed_args)
        self.users_mock.list.assert_called_with(tenant_id=None)
        self.assertEqual(self.columns, columns)
        self.assertListItemEqual(self.datalist, tuple(data))
    def test_user_list_project(self):
        """--project narrows the listing to a single tenant."""
        arglist = [
            '--project', self.fake_project_l.id,
        ]
        verifylist = [
            ('project', self.fake_project_l.id),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        project_id = self.fake_project_l.id
        # In base command class Lister in cliff, abstract method take_action()
        # returns a tuple containing the column names and an iterable
        # containing the data to be listed.
        columns, data = self.cmd.take_action(parsed_args)
        self.users_mock.list.assert_called_with(tenant_id=project_id)
        self.assertEqual(self.columns, columns)
        self.assertListItemEqual(self.datalist, tuple(data))
    def test_user_list_long(self):
        """--long adds Project/Email/Enabled columns to the output."""
        arglist = [
            '--long',
        ]
        verifylist = [
            ('long', True),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # In base command class Lister in cliff, abstract method take_action()
        # returns a tuple containing the column names and an iterable
        # containing the data to be listed.
        columns, data = self.cmd.take_action(parsed_args)
        self.users_mock.list.assert_called_with(tenant_id=None)
        collist = ('ID', 'Name', 'Project', 'Email', 'Enabled')
        self.assertEqual(collist, columns)
        # The Project column is rendered through user.ProjectColumn with a
        # project-id -> project lookup table.
        datalist = ((
            self.fake_user_l.id,
            self.fake_user_l.name,
            user.ProjectColumn(
                self.fake_project_l.id,
                {self.fake_project_l.id: self.fake_project_l}),
            self.fake_user_l.email,
            True,
        ), )
        self.assertListItemEqual(datalist, tuple(data))
class TestUserSet(TestUser):
    """Unit tests for the ``user set`` command (identity v2)."""
    def setUp(self):
        super(TestUserSet, self).setUp()
        self.projects_mock.get.return_value = self.fake_project
        self.users_mock.get.return_value = self.fake_user
        # Get the command object to test
        self.cmd = user.SetUser(self.app, None)
    def test_user_set_no_options(self):
        """With no modifier options the command is a no-op returning None."""
        arglist = [
            self.fake_user.name,
        ]
        verifylist = [
            ('name', None),
            ('password', None),
            ('email', None),
            ('project', None),
            ('enable', False),
            ('disable', False),
            ('user', self.fake_user.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        self.assertIsNone(result)
    def test_user_set_unexist_user(self):
        """Setting an unknown user raises CommandError."""
        arglist = [
            "unexist-user",
        ]
        verifylist = [
            ('name', None),
            ('password', None),
            ('email', None),
            ('project', None),
            ('enable', False),
            ('disable', False),
            ('user', "unexist-user"),
        ]
        # Both lookup paths fail, so utils.find_resource() cannot resolve.
        self.users_mock.get.side_effect = exceptions.NotFound(None)
        self.users_mock.find.side_effect = exceptions.NotFound(None)
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        self.assertRaises(
            exceptions.CommandError, self.cmd.take_action, parsed_args)
    def test_user_set_name(self):
        """--name is forwarded to UserManager.update() as 'name'."""
        arglist = [
            '--name', 'qwerty',
            self.fake_user.name,
        ]
        verifylist = [
            ('name', 'qwerty'),
            ('password', None),
            ('email', None),
            ('project', None),
            ('enable', False),
            ('disable', False),
            ('user', self.fake_user.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'enabled': True,
            'name': 'qwerty',
        }
        # UserManager.update(user, **kwargs)
        self.users_mock.update.assert_called_with(
            self.fake_user.id,
            **kwargs
        )
        self.assertIsNone(result)
    def test_user_set_password(self):
        """--password goes through UserManager.update_password()."""
        arglist = [
            '--password', 'secret',
            self.fake_user.name,
        ]
        verifylist = [
            ('name', None),
            ('password', 'secret'),
            ('password_prompt', False),
            ('email', None),
            ('project', None),
            ('enable', False),
            ('disable', False),
            ('user', self.fake_user.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        # UserManager.update_password(user, password)
        self.users_mock.update_password.assert_called_with(
            self.fake_user.id,
            'secret',
        )
        self.assertIsNone(result)
    def test_user_set_password_prompt(self):
        """--password-prompt reads the password interactively."""
        arglist = [
            '--password-prompt',
            self.fake_user.name,
        ]
        verifylist = [
            ('name', None),
            ('password', None),
            ('password_prompt', True),
            ('email', None),
            ('project', None),
            ('enable', False),
            ('disable', False),
            ('user', self.fake_user.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        # Stub the interactive prompt so the test never blocks on input.
        mocker = mock.Mock()
        mocker.return_value = 'abc123'
        with mock.patch("osc_lib.utils.get_password", mocker):
            result = self.cmd.take_action(parsed_args)
        # UserManager.update_password(user, password)
        self.users_mock.update_password.assert_called_with(
            self.fake_user.id,
            'abc123',
        )
        self.assertIsNone(result)
    def test_user_set_email(self):
        """--email is forwarded to UserManager.update() as 'email'."""
        arglist = [
            '--email', 'barney@example.com',
            self.fake_user.name,
        ]
        verifylist = [
            ('name', None),
            ('password', None),
            ('email', 'barney@example.com'),
            ('project', None),
            ('enable', False),
            ('disable', False),
            ('user', self.fake_user.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'email': 'barney@example.com',
            'enabled': True,
        }
        # UserManager.update(user, **kwargs)
        self.users_mock.update.assert_called_with(
            self.fake_user.id,
            **kwargs
        )
        self.assertIsNone(result)
    def test_user_set_project(self):
        """--project uses the dedicated update_tenant() API."""
        arglist = [
            '--project', self.fake_project.id,
            self.fake_user.name,
        ]
        verifylist = [
            ('name', None),
            ('password', None),
            ('email', None),
            ('project', self.fake_project.id),
            ('enable', False),
            ('disable', False),
            ('user', self.fake_user.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        # UserManager.update_tenant(user, tenant)
        self.users_mock.update_tenant.assert_called_with(
            self.fake_user.id,
            self.fake_project.id,
        )
        self.assertIsNone(result)
    def test_user_set_enable(self):
        """--enable maps to enabled=True in UserManager.update()."""
        arglist = [
            '--enable',
            self.fake_user.name,
        ]
        verifylist = [
            ('name', None),
            ('password', None),
            ('email', None),
            ('project', None),
            ('enable', True),
            ('disable', False),
            ('user', self.fake_user.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'enabled': True,
        }
        # UserManager.update(user, **kwargs)
        self.users_mock.update.assert_called_with(
            self.fake_user.id,
            **kwargs
        )
        self.assertIsNone(result)
    def test_user_set_disable(self):
        """--disable maps to enabled=False in UserManager.update()."""
        arglist = [
            '--disable',
            self.fake_user.name,
        ]
        verifylist = [
            ('name', None),
            ('password', None),
            ('email', None),
            ('project', None),
            ('enable', False),
            ('disable', True),
            ('user', self.fake_user.name),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        # Set expected values
        kwargs = {
            'enabled': False,
        }
        # UserManager.update(user, **kwargs)
        self.users_mock.update.assert_called_with(
            self.fake_user.id,
            **kwargs
        )
        self.assertIsNone(result)
class TestUserShow(TestUser):
    """Unit tests for the ``user show`` command (identity v2)."""
    def setUp(self):
        super(TestUserShow, self).setUp()
        self.users_mock.get.return_value = self.fake_user
        # Command under test
        self.cmd = user.ShowUser(self.app, None)
    def test_user_show(self):
        args = [self.fake_user.id]
        verify = [('user', self.fake_user.id)]
        parsed_args = self.check_parser(self.cmd, args, verify)
        # ShowOne.take_action() yields (column names, row data).
        columns, data = self.cmd.take_action(parsed_args)
        self.users_mock.get.assert_called_with(self.fake_user.id)
        expected_columns = ('email', 'enabled', 'id', 'name', 'project_id')
        expected_data = (
            self.fake_user.email,
            True,
            self.fake_user.id,
            self.fake_user.name,
            self.fake_project.id,
        )
        self.assertEqual(expected_columns, columns)
        self.assertItemEqual(expected_data, data)
| 30.777643 | 79 | 0.572444 |
0ef879902316025633e73c71e0fab3896270d086 | 21,744 | py | Python | cibuildwheel/options.py | bbayles/cibuildwheel | af2014c954cdda356d496732c4d5398c61526b8f | [
"BSD-2-Clause"
] | 371 | 2021-05-19T18:48:06.000Z | 2022-03-31T11:31:41.000Z | cibuildwheel/options.py | bbayles/cibuildwheel | af2014c954cdda356d496732c4d5398c61526b8f | [
"BSD-2-Clause"
] | 234 | 2021-05-19T19:25:42.000Z | 2022-03-29T19:03:00.000Z | cibuildwheel/options.py | bbayles/cibuildwheel | af2014c954cdda356d496732c4d5398c61526b8f | [
"BSD-2-Clause"
] | 56 | 2021-05-25T19:59:54.000Z | 2022-03-26T13:49:14.000Z | import os
import sys
import traceback
from configparser import ConfigParser
from contextlib import contextmanager
from pathlib import Path
from typing import (
Any,
Dict,
Iterator,
List,
Mapping,
NamedTuple,
Optional,
Set,
Tuple,
Union,
)
import tomli
from packaging.specifiers import SpecifierSet
from .architecture import Architecture
from .environment import EnvironmentParseError, ParsedEnvironment, parse_environment
from .projectfiles import get_requires_python_str
from .typing import PLATFORMS, Literal, PlatformName, TypedDict
from .util import (
MANYLINUX_ARCHS,
MUSLLINUX_ARCHS,
BuildFrontend,
BuildSelector,
DependencyConstraints,
TestSelector,
resources_dir,
selector_matches,
strtobool,
unwrap,
)
class CommandLineArguments:
    """Typed view of the parsed command-line namespace.

    Instances are expected to carry these attributes (presumably populated
    by argparse elsewhere); this class only declares the names and types the
    rest of this module relies on.
    """
    platform: Literal["auto", "linux", "macos", "windows"]
    archs: Optional[str]
    output_dir: Optional[str]
    config_file: str
    package_dir: str
    print_build_identifiers: bool
    allow_empty: bool
    prerelease_pythons: bool
class GlobalOptions(NamedTuple):
    """Options that apply to the whole run, independent of build identifier."""
    package_dir: Path
    output_dir: Path
    build_selector: BuildSelector
    test_selector: TestSelector
    architectures: Set[Architecture]
class BuildOptions(NamedTuple):
    """Per-build-identifier options, plus the shared GlobalOptions.

    The properties below are pass-throughs so callers can read global
    settings directly off a BuildOptions instance.
    """
    globals: GlobalOptions
    environment: ParsedEnvironment
    before_all: str
    before_build: Optional[str]
    repair_command: str
    # Only populated on Linux; None on the other platforms.
    manylinux_images: Optional[Dict[str, str]]
    musllinux_images: Optional[Dict[str, str]]
    # None means "latest" (unconstrained) dependency versions.
    dependency_constraints: Optional[DependencyConstraints]
    test_command: Optional[str]
    before_test: Optional[str]
    test_requires: List[str]
    test_extras: str
    build_verbosity: int
    build_frontend: BuildFrontend
    @property
    def package_dir(self) -> Path:
        return self.globals.package_dir
    @property
    def output_dir(self) -> Path:
        return self.globals.output_dir
    @property
    def build_selector(self) -> BuildSelector:
        return self.globals.build_selector
    @property
    def test_selector(self) -> TestSelector:
        return self.globals.test_selector
    @property
    def architectures(self) -> Set[Architecture]:
        return self.globals.architectures
# A configuration value as parsed from TOML: a table, an array, or a string.
Setting = Union[Dict[str, str], List[str], str]
class Override(NamedTuple):
    """One [[tool.cibuildwheel.overrides]] entry: options scoped by selector."""
    select_pattern: str
    options: Dict[str, Setting]
# Option names of the per-arch container-image settings; these are only
# meaningful on Linux and are disallowed elsewhere (see DISALLOWED_OPTIONS).
MANYLINUX_OPTIONS = {f"manylinux-{build_platform}-image" for build_platform in MANYLINUX_ARCHS}
MUSLLINUX_OPTIONS = {f"musllinux-{build_platform}-image" for build_platform in MUSLLINUX_ARCHS}
DISALLOWED_OPTIONS = {
    "linux": {"dependency-versions"},
    "macos": MANYLINUX_OPTIONS | MUSLLINUX_OPTIONS,
    "windows": MANYLINUX_OPTIONS | MUSLLINUX_OPTIONS,
}
class TableFmt(TypedDict):
    """Formatting recipe for rendering a TOML table as a single string."""
    # Per-entry template; formatted with {k} and {v}.
    item: str
    # Separator joined between formatted entries.
    sep: str
class ConfigOptionError(KeyError):
    """Raised when a config file contains an unknown or malformed option."""
    pass
def _dig_first(*pairs: Tuple[Mapping[str, Setting], str], ignore_empty: bool = False) -> Setting:
"""
Return the first dict item that matches from pairs of dicts and keys.
Will throw a KeyError if missing.
_dig_first((dict1, "key1"), (dict2, "key2"), ...)
"""
if not pairs:
raise ValueError("pairs cannot be empty")
for dict_like, key in pairs:
if key in dict_like:
value = dict_like[key]
if ignore_empty and value == "":
continue
return value
raise KeyError(key)
class OptionsReader:
    """
    Gets options from the environment, config or defaults, optionally scoped
    by the platform.
    Example:
      >>> options_reader = OptionsReader(config_file, platform='macos')
      >>> options_reader.get('cool-color')
    This will return the value of CIBW_COOL_COLOR_MACOS if it exists,
    otherwise the value of CIBW_COOL_COLOR, otherwise
    'tool.cibuildwheel.macos.cool-color' or 'tool.cibuildwheel.cool-color'
    from `config_file`, or from cibuildwheel/resources/defaults.toml. An
    error is thrown if there are any unexpected keys or sections in
    tool.cibuildwheel.
    """
    def __init__(
        self,
        config_file_path: Optional[Path] = None,
        *,
        platform: PlatformName,
        disallow: Optional[Dict[str, Set[str]]] = None,
    ) -> None:
        # disallow maps platform name -> option names rejected on it.
        self.platform = platform
        self.disallow = disallow or {}
        # Open defaults.toml, loading both global and platform sections
        defaults_path = resources_dir / "defaults.toml"
        self.default_options, self.default_platform_options = self._load_file(defaults_path)
        # Load the project config file
        config_options: Dict[str, Any] = {}
        config_platform_options: Dict[str, Any] = {}
        if config_file_path is not None:
            config_options, config_platform_options = self._load_file(config_file_path)
        # Validate project config: every key must also exist in defaults.toml.
        for option_name in config_options:
            if not self._is_valid_global_option(option_name):
                raise ConfigOptionError(f'Option "{option_name}" not supported in a config file')
        for option_name in config_platform_options:
            if not self._is_valid_platform_option(option_name):
                raise ConfigOptionError(
                    f'Option "{option_name}" not supported in the "{self.platform}" section'
                )
        self.config_options = config_options
        self.config_platform_options = config_platform_options
        self.overrides: List[Override] = []
        self.current_identifier: Optional[str] = None
        # Parse [[tool.cibuildwheel.overrides]]: each entry needs a 'select'
        # pattern (string or list of strings, joined with spaces).
        config_overrides = self.config_options.get("overrides")
        if config_overrides is not None:
            if not isinstance(config_overrides, list):
                raise ConfigOptionError('"tool.cibuildwheel.overrides" must be a list')
            for config_override in config_overrides:
                select = config_override.pop("select", None)
                if not select:
                    raise ConfigOptionError('"select" must be set in an override')
                if isinstance(select, list):
                    select = " ".join(select)
                self.overrides.append(Override(select, config_override))
    def _is_valid_global_option(self, name: str) -> bool:
        """
        Returns True if an option with this name is allowed in the
        [tool.cibuildwheel] section of a config file.
        """
        allowed_option_names = self.default_options.keys() | PLATFORMS | {"overrides"}
        return name in allowed_option_names
    def _is_valid_platform_option(self, name: str) -> bool:
        """
        Returns True if an option with this name is allowed in the
        [tool.cibuildwheel.<current-platform>] section of a config file.
        """
        disallowed_platform_options = self.disallow.get(self.platform, set())
        if name in disallowed_platform_options:
            return False
        allowed_option_names = self.default_options.keys() | self.default_platform_options.keys()
        return name in allowed_option_names
    def _load_file(self, filename: Path) -> Tuple[Dict[str, Any], Dict[str, Any]]:
        """
        Load a toml file, returns global and platform as separate dicts.
        """
        with filename.open("rb") as f:
            config = tomli.load(f)
        global_options = config.get("tool", {}).get("cibuildwheel", {})
        platform_options = global_options.get(self.platform, {})
        return global_options, platform_options
    @property
    def active_config_overrides(self) -> List[Override]:
        # Overrides whose select pattern matches the identifier currently set
        # via the identifier() context manager; empty outside that scope.
        if self.current_identifier is None:
            return []
        return [
            o for o in self.overrides if selector_matches(o.select_pattern, self.current_identifier)
        ]
    @contextmanager
    def identifier(self, identifier: Optional[str]) -> Iterator[None]:
        """Scope subsequent get() calls to the given build identifier."""
        self.current_identifier = identifier
        try:
            yield
        finally:
            self.current_identifier = None
    def get(
        self,
        name: str,
        *,
        env_plat: bool = True,
        sep: Optional[str] = None,
        table: Optional[TableFmt] = None,
        ignore_empty: bool = False,
    ) -> str:
        """
        Get and return the value for the named option from environment,
        configuration file, or the default. If env_plat is False, then don't
        accept platform versions of the environment variable. If this is an
        array it will be merged with "sep" before returning. If it is a table,
        it will be formatted with "table['item']" using {k} and {v} and merged
        with "table['sep']". Empty variables will not override if ignore_empty
        is True.
        """
        if name not in self.default_options and name not in self.default_platform_options:
            raise ConfigOptionError(f"{name} must be in cibuildwheel/resources/defaults.toml file")
        # Environment variable form
        envvar = f"CIBW_{name.upper().replace('-', '_')}"
        plat_envvar = f"{envvar}_{self.platform.upper()}"
        # later overrides take precedence over earlier ones, so reverse the list
        active_config_overrides = reversed(self.active_config_overrides)
        # get the option from the environment, then the config file, then finally the default.
        # platform-specific options are preferred, if they're allowed.
        # NOTE: the order of these pairs defines the whole precedence scheme.
        result = _dig_first(
            (os.environ if env_plat else {}, plat_envvar),  # type: ignore[arg-type]
            (os.environ, envvar),
            *[(o.options, name) for o in active_config_overrides],
            (self.config_platform_options, name),
            (self.config_options, name),
            (self.default_platform_options, name),
            (self.default_options, name),
            ignore_empty=ignore_empty,
        )
        # Normalise TOML tables/arrays/ints to a single string for callers.
        if isinstance(result, dict):
            if table is None:
                raise ConfigOptionError(f"{name} does not accept a table")
            return table["sep"].join(table["item"].format(k=k, v=v) for k, v in result.items())
        elif isinstance(result, list):
            if sep is None:
                raise ConfigOptionError(f"{name} does not accept a list")
            return sep.join(result)
        elif isinstance(result, int):
            return str(result)
        else:
            return result
class Options:
    """Facade combining command-line arguments, environment variables and
    config-file settings into GlobalOptions / BuildOptions values."""
    def __init__(self, platform: PlatformName, command_line_arguments: CommandLineArguments):
        self.platform = platform
        self.command_line_arguments = command_line_arguments
        self.reader = OptionsReader(
            self.config_file_path,
            platform=platform,
            disallow=DISALLOWED_OPTIONS,
        )
    @property
    def config_file_path(self) -> Optional[Path]:
        """Path of the config file: --config-file if given, else the
        package's pyproject.toml if present, else None."""
        args = self.command_line_arguments
        if args.config_file:
            return Path(args.config_file.format(package=args.package_dir))
        # return pyproject.toml, if it's available
        pyproject_toml_path = Path(args.package_dir) / "pyproject.toml"
        if pyproject_toml_path.exists():
            return pyproject_toml_path
        return None
    @property
    def package_requires_python_str(self) -> Optional[str]:
        """The package's requires-python string, computed once and cached."""
        if not hasattr(self, "_package_requires_python_str"):
            args = self.command_line_arguments
            self._package_requires_python_str = get_requires_python_str(Path(args.package_dir))
        return self._package_requires_python_str
    @property
    def globals(self) -> GlobalOptions:
        """Compute the run-wide options (selectors, archs, directories)."""
        args = self.command_line_arguments
        package_dir = Path(args.package_dir)
        output_dir = Path(
            args.output_dir
            if args.output_dir is not None
            else os.environ.get("CIBW_OUTPUT_DIR", "wheelhouse")
        )
        build_config = self.reader.get("build", env_plat=False, sep=" ") or "*"
        skip_config = self.reader.get("skip", env_plat=False, sep=" ")
        test_skip = self.reader.get("test-skip", env_plat=False, sep=" ")
        prerelease_pythons = args.prerelease_pythons or strtobool(
            os.environ.get("CIBW_PRERELEASE_PYTHONS", "0")
        )
        # This is not supported in tool.cibuildwheel, as it comes from a standard location.
        # Passing this in as an environment variable will override pyproject.toml, setup.cfg, or setup.py
        requires_python_str: Optional[str] = (
            os.environ.get("CIBW_PROJECT_REQUIRES_PYTHON") or self.package_requires_python_str
        )
        requires_python = None if requires_python_str is None else SpecifierSet(requires_python_str)
        build_selector = BuildSelector(
            build_config=build_config,
            skip_config=skip_config,
            requires_python=requires_python,
            prerelease_pythons=prerelease_pythons,
        )
        test_selector = TestSelector(skip_config=test_skip)
        archs_config_str = args.archs or self.reader.get("archs", sep=" ")
        architectures = Architecture.parse_config(archs_config_str, platform=self.platform)
        return GlobalOptions(
            package_dir=package_dir,
            output_dir=output_dir,
            build_selector=build_selector,
            test_selector=test_selector,
            architectures=architectures,
        )
    def build_options(self, identifier: Optional[str]) -> BuildOptions:
        """
        Compute BuildOptions for a single run configuration.
        """
        # Scope the reader so per-identifier overrides apply to every get().
        with self.reader.identifier(identifier):
            before_all = self.reader.get("before-all", sep=" && ")
            build_frontend_str = self.reader.get("build-frontend", env_plat=False)
            environment_config = self.reader.get(
                "environment", table={"item": '{k}="{v}"', "sep": " "}
            )
            environment_pass = self.reader.get("environment-pass", sep=" ").split()
            before_build = self.reader.get("before-build", sep=" && ")
            repair_command = self.reader.get("repair-wheel-command", sep=" && ")
            dependency_versions = self.reader.get("dependency-versions")
            test_command = self.reader.get("test-command", sep=" && ")
            before_test = self.reader.get("before-test", sep=" && ")
            test_requires = self.reader.get("test-requires", sep=" ").split()
            test_extras = self.reader.get("test-extras", sep=",")
            build_verbosity_str = self.reader.get("build-verbosity")
            build_frontend: BuildFrontend
            if build_frontend_str == "build":
                build_frontend = "build"
            elif build_frontend_str == "pip":
                build_frontend = "pip"
            else:
                msg = f"cibuildwheel: Unrecognised build frontend '{build_frontend_str}', only 'pip' and 'build' are supported"
                print(msg, file=sys.stderr)
                sys.exit(2)
            try:
                environment = parse_environment(environment_config)
            except (EnvironmentParseError, ValueError):
                print(
                    f'cibuildwheel: Malformed environment option "{environment_config}"',
                    file=sys.stderr,
                )
                traceback.print_exc(None, sys.stderr)
                sys.exit(2)
            # Pass through environment variables
            # (Linux-only: these are forwarded into the build container;
            # unset variables are silently skipped.)
            if self.platform == "linux":
                for env_var_name in environment_pass:
                    try:
                        environment.add(env_var_name, os.environ[env_var_name])
                    except KeyError:
                        pass
            # "pinned"/"latest" are keywords; anything else is a path to a
            # custom constraints file.
            if dependency_versions == "pinned":
                dependency_constraints: Optional[
                    DependencyConstraints
                ] = DependencyConstraints.with_defaults()
            elif dependency_versions == "latest":
                dependency_constraints = None
            else:
                dependency_versions_path = Path(dependency_versions)
                dependency_constraints = DependencyConstraints(dependency_versions_path)
            if test_extras:
                test_extras = f"[{test_extras}]"
            # Clamp verbosity to [-3, 3]; non-numeric input falls back to 0.
            try:
                build_verbosity = min(3, max(-3, int(build_verbosity_str)))
            except ValueError:
                build_verbosity = 0
            # Resolve the container image for each Linux arch: an empty or
            # missing value uses the pinned default, a known alias (e.g.
            # "manylinux2014") is expanded, anything else is taken verbatim.
            manylinux_images: Dict[str, str] = {}
            musllinux_images: Dict[str, str] = {}
            if self.platform == "linux":
                all_pinned_docker_images = _get_pinned_docker_images()
                for build_platform in MANYLINUX_ARCHS:
                    pinned_images = all_pinned_docker_images[build_platform]
                    config_value = self.reader.get(
                        f"manylinux-{build_platform}-image", ignore_empty=True
                    )
                    if not config_value:
                        # default to manylinux2014
                        image = pinned_images.get("manylinux2014")
                    elif config_value in pinned_images:
                        image = pinned_images[config_value]
                    else:
                        image = config_value
                    assert image is not None
                    manylinux_images[build_platform] = image
                for build_platform in MUSLLINUX_ARCHS:
                    pinned_images = all_pinned_docker_images[build_platform]
                    config_value = self.reader.get(f"musllinux-{build_platform}-image")
                    if config_value is None:
                        image = pinned_images["musllinux_1_1"]
                    elif config_value in pinned_images:
                        image = pinned_images[config_value]
                    else:
                        image = config_value
                    musllinux_images[build_platform] = image
            return BuildOptions(
                globals=self.globals,
                test_command=test_command,
                test_requires=test_requires,
                test_extras=test_extras,
                before_test=before_test,
                before_build=before_build,
                before_all=before_all,
                build_verbosity=build_verbosity,
                repair_command=repair_command,
                environment=environment,
                dependency_constraints=dependency_constraints,
                manylinux_images=manylinux_images or None,
                musllinux_images=musllinux_images or None,
                build_frontend=build_frontend,
            )
    def check_for_invalid_configuration(self, identifiers: List[str]) -> None:
        """Reject configs that vary before_all per identifier on platforms
        where before_all runs only once."""
        if self.platform in ["macos", "windows"]:
            before_all_values = {self.build_options(i).before_all for i in identifiers}
            if len(before_all_values) > 1:
                raise ValueError(
                    unwrap(
                        f"""
                        before_all cannot be set to multiple values. On macOS and Windows,
                        before_all is only run once, at the start of the build. before_all values
                        are: {before_all_values!r}
                        """
                    )
                )
    def check_for_deprecated_options(self) -> None:
        """Warn (or exit) when selectors reference Pythons dropped in 2.x."""
        build_selector = self.globals.build_selector
        test_selector = self.globals.test_selector
        deprecated_selectors("CIBW_BUILD", build_selector.build_config, error=True)
        deprecated_selectors("CIBW_SKIP", build_selector.skip_config)
        deprecated_selectors("CIBW_TEST_SKIP", test_selector.skip_config)
    def summary(self, identifiers: List[str]) -> str:
        """Render a human-readable dump of all options, listing identifier-
        specific values only where they differ from the defaults."""
        lines = [
            f"{option_name}: {option_value!r}"
            for option_name, option_value in sorted(self.globals._asdict().items())
        ]
        build_option_defaults = self.build_options(identifier=None)
        for option_name, default_value in sorted(build_option_defaults._asdict().items()):
            if option_name == "globals":
                continue
            lines.append(f"{option_name}: {default_value!r}")
            # if any identifiers have an overridden value, print that too
            for identifier in identifiers:
                option_value = self.build_options(identifier=identifier)._asdict()[option_name]
                if option_value != default_value:
                    lines.append(f"  {identifier}: {option_value!r}")
        return "\n".join(lines)
def compute_options(
    platform: PlatformName,
    command_line_arguments: CommandLineArguments,
) -> Options:
    """Build the Options facade and emit deprecation warnings once."""
    computed = Options(platform=platform, command_line_arguments=command_line_arguments)
    computed.check_for_deprecated_options()
    return computed
# Module-level cache: the pinned-images config file is parsed at most once.
_all_pinned_docker_images: Optional[ConfigParser] = None
def _get_pinned_docker_images() -> Mapping[str, Mapping[str, str]]:
    """
    This looks like a dict of dicts, e.g.
    { 'x86_64': {'manylinux1': '...', 'manylinux2010': '...', 'manylinux2014': '...'},
      'i686': {'manylinux1': '...', 'manylinux2010': '...', 'manylinux2014': '...'},
      'pypy_x86_64': {'manylinux2010': '...' }
      ... }
    Lazily reads cibuildwheel/resources/pinned_docker_images.cfg on first use.
    """
    global _all_pinned_docker_images
    if _all_pinned_docker_images is None:
        pinned_docker_images_file = resources_dir / "pinned_docker_images.cfg"
        _all_pinned_docker_images = ConfigParser()
        _all_pinned_docker_images.read(pinned_docker_images_file)
    return _all_pinned_docker_images
def deprecated_selectors(name: str, selector: str, *, error: bool = False) -> None:
    """Warn on stderr when *selector* mentions Python versions dropped in
    cibuildwheel 2.x ("p2"/"p35" substrings); exit with code 4 if *error*."""
    if not any(token in selector for token in ("p2", "p35")):
        return
    print(
        f"cibuildwheel 2.x no longer supports Python < 3.6. "
        f"Please use the 1.x series or update {name}",
        file=sys.stderr,
    )
    if error:
        sys.exit(4)
| 36.059701 | 127 | 0.625966 |
2e1d1bc8732bc133c891f0d1d04b503aca834b8c | 1,688 | py | Python | kakao 2020/hyeongyu/p6.py | mrkimkim/csi | 2b8dde9419646c64f1f37c6d03e46efe0a49fe75 | [
"Apache-2.0"
] | null | null | null | kakao 2020/hyeongyu/p6.py | mrkimkim/csi | 2b8dde9419646c64f1f37c6d03e46efe0a49fe75 | [
"Apache-2.0"
] | null | null | null | kakao 2020/hyeongyu/p6.py | mrkimkim/csi | 2b8dde9419646c64f1f37c6d03e46efe0a49fe75 | [
"Apache-2.0"
] | null | null | null | def count_people(number):
cnt = 0
while number > 0:
cnt += number % 2
number //= 2
return cnt
def process(weak, dist, dp):
K = []
for i in range(len(dist)):
K.append(2 ** i)
# initial position is always zero
dp[0][K[i]] = dist[i]
for i in range(1, len(weak)):
for j in range(len(dp[0])):
# move from previous state without adding people
if dp[i - 1][j] >= weak[i] - weak[i - 1]:
dp[i][j] = dp[i - 1][j] - (weak[i] - weak[i - 1])
# add people from possible previous state
for k in range(len(dist)):
if j & K[k] == K[k] and dp[i - 1][j ^ K[k]] >= 0:
dp[i][j] = max(dp[i][j], dist[k])
answer = len(dist) + 1
for j in range(len(dp[0])):
if dp[len(weak) - 1][j] >= 0:
new_answer = count_people(j)
answer = min(answer, new_answer)
return answer != len(dist) + 1 and answer or -1
def solution(n, weak, dist):
k = 2 ** len(dist)
answer = -1
for i in range(len(weak)):
# set this position as start position
new_weak = [0] * len(weak)
pivot = weak[i]
for j in range(i, len(weak)):
new_weak[j - i] = weak[j]- pivot
for j in range(i):
new_weak[j + len(weak) - i] = weak[j] + n - pivot
# DP
dp = [[-1] * k for j in range(len(new_weak))]
new_answer = process(new_weak, dist, dp)
if new_answer != -1:
answer = answer == -1 and new_answer or min(answer, new_answer)
return answer
| 31.849057 | 75 | 0.469787 |
9ce8d5fd4fe4b7369dbf10a884db28665a03e88b | 5,811 | py | Python | py/pnet_report.py | bcgov/diutils | caf510c81f7f43372d4a8e18f77eaa86cdede6a5 | [
"Apache-2.0"
] | null | null | null | py/pnet_report.py | bcgov/diutils | caf510c81f7f43372d4a8e18f77eaa86cdede6a5 | [
"Apache-2.0"
] | 1 | 2020-12-14T22:00:24.000Z | 2020-12-14T22:00:24.000Z | py/pnet_report.py | bcgov/diutils | caf510c81f7f43372d4a8e18f77eaa86cdede6a5 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from misc import *  # NOTE(review): appears to supply run() and err() - confirm
# Year range scanned for input files named pnet_<year>.csv (end exclusive).
start_year, end_year = 1980, 2025
# When True, each generated jobs.txt is executed via the "multicore" tool.
run_jobs = True
# year -> path of the de-duplicated records file produced by "unique".
unique_record_files = {}
# year -> path of the line-count ("_lc") file of the de-duplicated records.
unique_record_files_lc = {}
# year -> path of the distinct-studyid count file.
unique_studyid_count_files = {}
# year -> path of the "_status" summary emitted by pnet_check.
status_files = {}
# year -> path of the "_bad-data" records emitted by pnet_check.
bad_data_files = {}
# Per-year totals aggregated from the status files.
reversal_counts = {}
total_counts = {}
veterinary_counts = {}
#================================================================
# Stage 1: for every year's pnet_<year>.csv queue a "unique" job that writes
# a de-duplicated copy; jobs whose output already exists are skipped.
f = open("jobs.txt", "wb")  # produce nonduplicate records
for i in range(start_year, end_year):
    fn = "pnet_" + str(i) + ".csv"
    if os.path.exists(fn):
        cmd = "unique " + fn
        # Output naming convention of the external "unique" tool.
        of = fn + "_unique-_.csv"
        unique_record_files[i] = of
        if not os.path.exists(of):
            f.write(cmd + "\n")
            print cmd
f.close()
if run_jobs and open("jobs.txt").read().strip() != "":
    run("multicore jobs.txt 4")  # use only four processors, to avoid RAM use
#================================================================
# Stage 2: queue a line-count ("lc") job for every de-duplicated file; the
# count lands in "<file>_lc" and is read back when printing the report.
f = open("jobs.txt", "wb")
for fi in unique_record_files:
    fn = unique_record_files[fi]
    of = fn + "_lc"
    unique_record_files_lc[fi] = of
    if os.path.exists(fn):
        cmd = "lc " + fn + " > " + of
        if not os.path.exists(of):
            f.write(cmd + "\n")
            print cmd
f.close();
if run_jobs and open("jobs.txt").read().strip() != "":
    run("multicore jobs.txt")
#================================================================
# Stage 3: queue a distinct-studyid count job per original yearly CSV.
f = open("jobs.txt", "wb")
# count studyid
for i in range(start_year, end_year):
    fn = "pnet_" + str(i) + ".csv"
    if os.path.exists(fn):
        cmd = "csv_count_unique_studyid " + fn
        of = fn + "_count_unique_studyid"
        unique_studyid_count_files[i] = of
        if not os.path.exists(of):
            f.write(cmd + "\n")
            print cmd
f.close()
if run_jobs and open("jobs.txt").read().strip() != "":
    run("multicore jobs.txt")  # not same ram issue
#================================================================
# check for bad data
# Stage 4: queue a pnet_check job per yearly CSV; it writes a "_status"
# summary and a "_bad-data" file of rejected records.
f = open("jobs.txt", "wb")
# count studyid
for i in range(start_year, end_year):
    fn = "pnet_" + str(i) + ".csv"
    if os.path.exists(fn):
        cmd = "pnet_check " + fn
        of1 = fn + "_status"
        of2 = fn + "_bad-data"
        status_files[i] = of1
        bad_data_files[i] = of2
        # Re-run only when either output is missing.
        if not os.path.exists(of1) or not os.path.exists(of2):
            f.write(cmd + "\n")
            print cmd
f.close()
if run_jobs and open("jobs.txt").read().strip() != "":
    run("multicore jobs.txt")  # not same ram issue
# Scratch dicts exec'd straight out of each status file; keys look like the
# year as a (possibly quoted) string, values are record counts.
reversal_count, veterinary_count, total_count = {}, {}, {}
def exe(c):
    # SECURITY NOTE(review): this exec()s a line read from a *_status file -
    # safe only because pnet_check output is trusted; do not point it at
    # untrusted input.
    c = c.strip()
    exec("global total_count, veterinary_count, reversal_count; " + c)
for i in status_files:
    # The last three lines of a status file are expected to be assignments
    # of the form: <name> = {"<year>": <count>}  - TODO confirm format.
    lines = open(status_files[i]).read().strip().split("\n")
    rev, vet, tot = lines[len(lines) - 3], lines[len(lines) - 2], lines[len(lines) - 1]
    # Only exec a line if it assigns the expected name and has no ';'.
    if rev.split("=")[0].strip() == 'reversal_count' and len(rev.split(";")) == 1:
        exe(rev)
    if vet.split("=")[0].strip() == 'veterinary_count' and len(vet.split(";")) == 1:
        exe(vet)
    if tot.split("=")[0].strip() == 'total_count' and len(tot.split(";")) == 1:
        exe(tot)
    # Sanity-check: every key inside each dict must name this file's year.
    for j in reversal_count:
        if str(j.strip().strip('"')) != str(i):
            err("unexpected data")
    if i not in reversal_counts:
        reversal_counts[i] = 0
    reversal_counts[i] += reversal_count[str(i)]
    for j in veterinary_count:
        if str(j.strip().strip('"')) != str(i):
            err("unexpected data")
    if i not in veterinary_counts:
        veterinary_counts[i] = 0
    veterinary_counts[i] += veterinary_count[str(i)]
    for j in total_count:
        if str(j.strip().strip('"')) != str(i):
            err("unexpected data")
    if i not in total_counts:
        total_counts[i] = 0
    total_counts[i] += total_count[str(i)]
'''
print total_counts
print reversal_counts
print veterinary_counts
unique_record_files = {}
unique_record_files_lc = {}
unique_studyid_count_files = {}
'''
print "year,distinct studyid,total records,reversal records,reversals %,vet records, vet records %,distinct records,duplicate records,duplicates %"
for i in unique_record_files_lc:
if i > 1980:
n_unique_records = open(unique_record_files_lc[i]).read().strip()
n_unique_studyid = open(unique_studyid_count_files[i]).read().strip()
print ",".join([str(i),# year
n_unique_studyid, # distinct studyid
str(total_counts[i]) if i in total_counts else "", # total records
str(reversal_counts[i]) if i in reversal_counts else "", # reversal records
str(100. * float(reversal_counts[i]) / float(total_counts[i])) if i in reversal_counts else "",
str(veterinary_counts[i]) if i in veterinary_counts else "", # vet records
str(100. * float(veterinary_counts[i]) / float(total_counts[i])) if i in veterinary_counts else "",
n_unique_records,
str(total_counts[i] - int(n_unique_records)),
str( 100.* float(total_counts[i] - int(n_unique_records)) / float(total_counts[i]))
])
print "done" | 36.31875 | 147 | 0.577181 |
8c28a2bf66b845cd4fa91b3be4579ef0bec5e814 | 783 | py | Python | Renames_Files.py | stasvf2278/Rename_Files | ef52a098da4af48168943c9e712546fe3124082f | [
"MIT"
] | null | null | null | Renames_Files.py | stasvf2278/Rename_Files | ef52a098da4af48168943c9e712546fe3124082f | [
"MIT"
] | null | null | null | Renames_Files.py | stasvf2278/Rename_Files | ef52a098da4af48168943c9e712546fe3124082f | [
"MIT"
] | null | null | null | import os, shutil
def main(start_index=34):
    """Interactively copy every file under an input directory tree into an
    output directory, renaming each copy to <prefix>_<n><original extension>.

    start_index: number used for the first copied file; it increases by one
    per file (default 34 preserves the original hard-coded behavior).
    """
    print('Enter input directory path') ## Enter first folder file path here
    folder1 = input()
    print('Enter New File Prefix')
    prefix = input()
    print('Enter output directory path') ## Enter second folder file path here
    folder2 = input()
    os.chdir(folder1)
    # Create the destination once, up front, instead of re-checking per file.
    if not os.path.exists(folder2):
        os.makedirs(folder2)
    i = start_index
    for root, dirs, files in os.walk(folder1):
        for file in files:
            # BUG FIX: join with the walk root so files in subdirectories are
            # found; the bare filename was only valid for the top-level folder.
            src = os.path.join(root, file)
            dst = folder2 + '\\' + prefix + '_' + str(i) + os.path.splitext(file)[1]
            shutil.copy(src, dst)
            print(dst + ' - Success')
            i = i + 1


# BUG FIX: the original guard ("name = 'main'; if name == 'main':") was a
# mangled __name__ check and ran main() even when the module was imported.
if __name__ == "__main__":
    main()
5f04bb4c4c1f4b7d7a74c1967d5ac9e58e7e5964 | 7,482 | py | Python | SchoolApp/apps/public_api/views.py | Ceres445/Team-10-Python-Project | 8a97642f019548d137dff564f9fdcc8f92761cc8 | [
"MIT"
] | 4 | 2021-06-25T04:17:08.000Z | 2022-02-13T14:48:38.000Z | SchoolApp/apps/public_api/views.py | Ceres445/Team-10-Python-Project | 8a97642f019548d137dff564f9fdcc8f92761cc8 | [
"MIT"
] | 20 | 2021-07-05T08:59:41.000Z | 2022-02-07T02:06:33.000Z | SchoolApp/apps/public_api/views.py | Ceres445/Team-10-Python-Project | 8a97642f019548d137dff564f9fdcc8f92761cc8 | [
"MIT"
] | 1 | 2021-07-22T07:38:00.000Z | 2021-07-22T07:38:00.000Z | from django.core.exceptions import PermissionDenied, ObjectDoesNotExist
from rest_framework import generics, permissions
from apps.classes.models import Assignment, Upload
from apps.timetable.models import ClassTime
from .filters import filter_queryset, authenticated_home, anon_home, authenticated_classes, anon, parse_args
from .models import Post, Comment, Category
from .permissions import IsAuthorOrReadOnly, IsInClass
from .serializers import PostSerializer, CommentSerializer, CategorySerializer, \
AssignmentSerializer, UploadSerializer, \
TimeTableSerializer
class PostList(generics.ListCreateAPIView):
serializer_class = PostSerializer
def get_queryset(self):
category_param = self.request.query_params.get('category')
user_param = self.request.query_params.get('author')
queryset = filter_queryset(self.request.user, Post, {'authenticated': authenticated_home, 'anon': anon_home},
name='category__name',
class_in='category__key_class__in'
)
queryset = parse_args(queryset, self.request.user, Post, category_param, user_param, name='category__name',
class_in='category__key_class__in')
class_param = self.request.query_params.get('class')
if category_param == 'Class' and class_param is not None:
queryset = queryset.filter(category__key_class__class_name=class_param)
return queryset
def perform_create(self, serializer):
serializer.save(author=self.request.user) # TODO: block users from creating posts in wrong categories
class PostDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete one post.

    Writes are restricted to the author (IsAuthorOrReadOnly); class-category
    posts additionally require class membership (IsInClass).
    """
    queryset = Post.objects.all()
    serializer_class = PostSerializer
    permission_classes = [IsAuthorOrReadOnly, IsInClass]
class CommentList(generics.ListCreateAPIView):
    """List/create comments, optionally narrowed to one post via ?post=<id>."""
    queryset = Comment.objects.all()
    serializer_class = CommentSerializer
    permission_classes = [IsAuthorOrReadOnly, permissions.IsAuthenticatedOrReadOnly]

    def perform_create(self, serializer):
        # The author is always the requesting user.
        serializer.save(author=self.request.user)

    def get_queryset(self):
        user_param = self.request.query_params.get('author')
        # Base visibility filtering, then query-parameter narrowing.
        queryset = filter_queryset(self.request.user, Comment, {'authenticated': authenticated_home, 'anon': anon_home},
                                   class_in='post__category__key_class__in',
                                   name='post__category__name'
                                   )
        queryset = parse_args(queryset, self.request.user, Comment, None, user_param,
                              class_in='post__category__key_class__in',
                              name='post__category__name')
        post = self.request.query_params.get('post', '')
        if post != '':
            # Quotes around the id are tolerated (strip("'")).
            queryset = queryset.filter(post__id=post.strip("'"))
            # post = get_object_or_404(Post, id=post.strip("'"))
            try:
                post = Post.objects.get(id=post.strip("'"))
            except ObjectDoesNotExist:
                # Unknown post id: return the (now empty) filtered queryset.
                return queryset
            else:
                # For class-category posts, non-staff users only see comments
                # on posts of classes they are enrolled in; anonymous users
                # are rejected outright.
                if post.category.name == 'Class':
                    if self.request.user.is_authenticated:
                        if not self.request.user.is_staff:
                            queryset = queryset.filter(
                                post__category__key_class__in=self.request.user.profile.courses.all()
                            )
                    else:
                        raise PermissionDenied("Non authenticated users cannot see classes")
        return queryset
class CommentDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete one comment; author-only writes, class-gated."""
    serializer_class = CommentSerializer
    permission_classes = [IsAuthorOrReadOnly, permissions.IsAuthenticatedOrReadOnly, IsInClass]

    def get_queryset(self):
        # Restrict lookups to comments the requesting user may see.
        return filter_queryset(self.request.user, Comment, {'authenticated': authenticated_home, 'anon': anon_home},
                               class_in='post__category__key_class__in',
                               name='post__category__name'
                               )
class CategoryList(generics.ListAPIView):
    """List the categories visible to the requesting user."""
    serializer_class = CategorySerializer

    def get_queryset(self):
        # Base visibility filtering, then query-parameter narrowing.
        visible = filter_queryset(
            self.request.user, Category,
            {'authenticated': authenticated_home, 'anon': anon_home},
            class_in='key_class__in', name='name',
        )
        visible = parse_args(visible, self.request.user, Category,
                             class_in='key_class__in', name='name')
        requested_name = self.request.query_params.get('name')
        if requested_name is not None:
            visible = visible.filter(name=requested_name)
        requested_class = self.request.query_params.get('class')
        if requested_name == 'Class' and requested_class is not None:
            visible = visible.filter(key_class__class_name=requested_class)
        return visible
class CategoryDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete one category."""
    queryset = Category.objects.all()
    serializer_class = CategorySerializer
    permission_classes = [permissions.IsAdminUser] # only admin users can create new categories manually
class AssignmentList(generics.ListAPIView):
    """List the assignments visible to the requesting user."""
    queryset = Assignment.objects.all()
    serializer_class = AssignmentSerializer

    def get_queryset(self):
        # Students see their enrolled classes; teachers see classes they teach.
        role_filters = {'authenticated': authenticated_classes, 'anon': anon}
        return filter_queryset(self.request.user, Assignment, role_filters,
                               courses='key_class__in',
                               teacher='key_class__teacher_id')
class AssignmentDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete one assignment visible to the requesting user."""
    queryset = Assignment.objects.all()
    serializer_class = AssignmentSerializer

    def get_queryset(self):
        role_filters = {'authenticated': authenticated_classes, 'anon': anon}
        return filter_queryset(self.request.user, Assignment, role_filters,
                               courses='key_class__in',
                               teacher='key_class__teacher_id')
class UploadList(generics.ListAPIView):
    """List the assignment uploads visible to the requesting user."""
    queryset = Upload.objects.all()
    serializer_class = UploadSerializer

    def get_queryset(self):
        role_filters = {'authenticated': authenticated_classes, 'anon': anon}
        return filter_queryset(self.request.user, Upload, role_filters,
                               courses='assignment_key_class__in',
                               teacher='assignment_key_class__teacher_id')
class UploadDetail(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/delete one upload visible to the requesting user."""
    queryset = Upload.objects.all()
    serializer_class = UploadSerializer

    def get_queryset(self):
        role_filters = {'authenticated': authenticated_classes, 'anon': anon}
        return filter_queryset(self.request.user, Upload, role_filters,
                               courses='assignment_key_class__in',
                               teacher='assignment_key_class__teacher_id')
class TimeTableList(generics.ListAPIView):
    """List the timetable entries visible to the requesting user."""
    queryset = ClassTime.objects.all()
    serializer_class = TimeTableSerializer

    def get_queryset(self):
        role_filters = {'authenticated': authenticated_classes, 'anon': anon}
        return filter_queryset(self.request.user, ClassTime, role_filters,
                               courses='key_class__in',
                               teacher='key_class__teacher_id')
| 45.072289 | 120 | 0.631248 |
9ccaf8f43156e8728c346a57bde9f4b00d4e2c4b | 5,698 | py | Python | src/transfer_orientation.py | memento42429/metashape-scripts | 6537719eb0fd52f649d410eb7e7fe8b6b6028302 | [
"MIT"
] | 40 | 2017-10-17T16:18:21.000Z | 2018-12-28T02:37:35.000Z | src/transfer_orientation.py | memento42429/metashape-scripts | 6537719eb0fd52f649d410eb7e7fe8b6b6028302 | [
"MIT"
] | 1 | 2018-07-11T06:41:38.000Z | 2018-07-12T18:15:35.000Z | src/transfer_orientation.py | memento42429/metashape-scripts | 6537719eb0fd52f649d410eb7e7fe8b6b6028302 | [
"MIT"
] | 15 | 2017-11-22T11:16:16.000Z | 2018-12-30T13:50:41.000Z | # Transfers orientations from certain cameras to the corresponding ones.
#
# Usecase: RGB and thermal photos were taken simultaneously and in the same directions.
# Alignment can be computed only with RGB photos and then transferred to thermal.
#
# Important: Calibration for thermal cameras will not be adjusted automatically.
#
# Usage:
# 1. Chunk (right click) -> Add -> Add Folder... to add RGB photos
# 2. Chunk (right click) -> Add -> Add Photos... to add thermal photos (using "Multi-camera system" option)
# 3. Disable all thermal cameras (for example, in the Photos pane)
# 4. Workflow -> Align Photos... (only RGB photos will be aligned)
# 5. Enable all cameras and click Scripts -> Transfer orientations
#
import Metashape
from datetime import datetime, timedelta
# Checking compatibility
compatible_major_version = "1.8"
# Major.minor of the running Metashape, e.g. "1.8" from "1.8.4".
found_major_version = ".".join(Metashape.app.version.split('.')[:2])
if found_major_version != compatible_major_version:
    # Fail fast: the script relies on the 1.8 API surface.
    raise Exception("Incompatible Metashape version: {} != {}".format(found_major_version, compatible_major_version))
def check_camera_master(cam):
    """Return True when *cam* is its own master (i.e. a master camera)."""
    return cam == cam.master
def check_camera_transform(cam):
    """Return True when the camera has an estimated transform."""
    if cam.transform is None:
        return False
    return True
def get_number(name):
    """Extract the longest run of decimal digits in *name* as an int.

    Ties are broken in favor of the later run (the original loop used >=),
    and 0 is returned when the name contains no digits.
    """
    import re  # local import so the module's import block stays untouched
    number = ""
    # Replaces the hand-rolled character scanner with re.findall, which
    # yields the same maximal digit runs in the same order.
    for run in re.findall(r"[0-9]+", name):
        if len(run) >= len(number):
            number = run
    return (int(number) if number != "" else 0)
def parse_datetime(time):
try:
return datetime.strptime(time, "%Y:%m:%d %H:%M:%S")
except:
return datetime(datetime.MINYEAR, 1, 1)
def get_camera_meta(cam):
    """Return [camera, numeric part of its label, EXIF capture datetime]."""
    taken_at = parse_datetime(cam.photo.meta["Exif/DateTime"])
    return [cam, get_number(cam.label), taken_at]
def find_correspondence(cams_0, cams_1):
    """Match each entry of cams_1 to at most one entry of cams_0.

    Both lists hold [camera, label_number, datetime] triples sorted by
    datetime (callers sort on element [2]). Candidates must be within ±1s;
    among candidate pairings the label-number shift whose pairs are most
    frequent (ties broken by smallest mean time delta) is preferred.
    Returns a list parallel to cams_1 with the matched cams_0 index or None.
    """
    links_0 = [[] for c in cams_0]
    links_1 = [[] for c in cams_1]
    # shift -> (sum of |dt| over candidate pairs, pair count)
    shift_stats = {}
    pos_0 = 0
    # Phase 1: sliding ±1s time window collects candidate pairs and
    # statistics per label-number shift.
    for pos_1 in range(len(cams_1)):
        t = cams_1[pos_1][2]
        t_margin = timedelta(seconds=1)
        t_lower = t - t_margin
        t_upper = t + t_margin
        # Rewind to the first cams_0 entry inside the window, then scan
        # forward to its end (both lists are time-sorted).
        while (pos_0 > 0 and cams_0[pos_0 - 1][2] >= t_lower):
            pos_0 -= 1
        while (pos_0 < len(cams_0) and cams_0[pos_0][2] <= t_upper):
            if (cams_0[pos_0][2] >= t_lower):
                links_1[pos_1].append(pos_0)
                links_0[pos_0].append(pos_1)
                shift = cams_0[pos_0][1] - cams_1[pos_1][1]
                dt = (cams_0[pos_0][2] - cams_1[pos_1][2]).total_seconds()
                stat = shift_stats.get(shift, (0, 0))
                shift_stats[shift] = (stat[0] + abs(dt), stat[1] + 1)
            pos_0 += 1
    # Rank shifts: negated count so sorting ascending prefers the most
    # frequent shift, then the smallest mean time delta.
    shift_stats = {shift: (shift_stats[shift][0] / shift_stats[shift][1], -shift_stats[shift][1]) for shift in shift_stats}
    shifts = sorted(shift_stats.keys(), key=lambda shift: shift_stats[shift])
    res_0 = [None for c in cams_0]
    res_1 = [None for c in cams_1]
    unpaired = []
    unpaired_next = list(range(len(cams_1)))
    # Phase 2: greedily assign pairs, best-ranked shift first; entries left
    # unpaired at one shift are retried at the next one.
    for shift in shifts:
        unpaired = unpaired_next
        unpaired_next = []
        if (len(unpaired) == 0):
            break
        for pos_1 in unpaired:
            # Among still-free candidates with this shift, take the one
            # closest in time.
            best_pos_0 = None
            best_dt = 0
            for pos_0 in links_1[pos_1]:
                cur_shift = cams_0[pos_0][1] - cams_1[pos_1][1]
                if (cur_shift == shift and res_0[pos_0] is None):
                    cur_dt = abs((cams_0[pos_0][2] - cams_1[pos_1][2]).total_seconds())
                    if (best_pos_0 is None or cur_dt < best_dt):
                        best_pos_0 = pos_0
                        best_dt = cur_dt
            if (best_pos_0 is None):
                unpaired_next.append(pos_1)
            else:
                res_0[best_pos_0] = pos_1
                res_1[pos_1] = best_pos_0
    return res_1
def transfer_orientations():
    """Copy camera transforms from aligned cameras to their time-matched,
    unaligned counterparts in the active chunk (e.g. RGB -> thermal)."""
    chunk = Metashape.app.document.chunk
    # Only enabled master cameras participate.
    enabled_cameras = list(filter(lambda c: c.enabled, chunk.cameras))
    master_cameras = list(filter(check_camera_master, enabled_cameras))
    # Split into aligned (has transform) and not-yet-aligned cameras.
    cameras_estimated = list(filter(check_camera_transform, master_cameras))
    cameras_not_estimated = list(filter(lambda c: not check_camera_transform(c), master_cameras))
    # Convert to [camera, label number, capture time] triples, time-sorted
    # as required by find_correspondence.
    cameras_estimated = [get_camera_meta(c) for c in cameras_estimated]
    cameras_not_estimated = [get_camera_meta(c) for c in cameras_not_estimated]
    cameras_estimated.sort(key=lambda c: c[2])
    cameras_not_estimated.sort(key=lambda c: c[2])
    correspondence = find_correspondence(cameras_estimated, cameras_not_estimated)
    transferred_cnt = 0
    unmatched = []
    for pos_1, cam_1 in enumerate(cameras_not_estimated):
        pos_0 = correspondence[pos_1]
        if (pos_0 is None):
            unmatched.append(cam_1[0])
            continue
        cam_0 = cameras_estimated[pos_0]
        # Copy the matched camera's orientation (calibration is NOT adjusted).
        cam_1[0].transform = cam_0[0].transform
        transferred_cnt += 1
    # Summary report.
    print("------------------")
    if (transferred_cnt > 0):
        print("Successfully transferred {} orientations".format(transferred_cnt))
    else:
        print("Transferred {} orientations".format(transferred_cnt))
    if (len(unmatched) > 0):
        print("{} cameras remain without orientations:".format(len(unmatched)))
        for cam in unmatched:
            print(cam)
# Register the script as an entry in the application's Scripts menu.
label = "Scripts/Transfer orientations"
Metashape.app.addMenuItem(label, transfer_orientations)
print("To execute this script press {}".format(label))
| 33.916667 | 123 | 0.627589 |
cdcbee451f9652734863fa3c905b3f9cf4a23cac | 36,646 | py | Python | lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py | nanjekyejoannah/pypy | e80079fe13c29eda7b2a6b4cd4557051f975a2d9 | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py | nanjekyejoannah/pypy | e80079fe13c29eda7b2a6b4cd4557051f975a2d9 | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | lib_pypy/_cffi_ssl/_cffi_src/openssl/ssl.py | nanjekyejoannah/pypy | e80079fe13c29eda7b2a6b4cd4557051f975a2d9 | [
"Apache-2.0",
"OpenSSL"
] | null | null | null | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
# C prelude compiled into the cffi module: pulls in the OpenSSL SSL header
# and names the STACK_OF(SSL_CIPHER) macro type so cdef code can refer to it.
INCLUDES = """
#include <openssl/ssl.h>
typedef STACK_OF(SSL_CIPHER) Cryptography_STACK_OF_SSL_CIPHER;
"""
TYPES = """
static const long Cryptography_HAS_SSL_ST;
static const long Cryptography_HAS_TLS_ST;
static const long Cryptography_HAS_SSL2;
static const long Cryptography_HAS_SSL3_METHOD;
static const long Cryptography_HAS_TLSv1_1;
static const long Cryptography_HAS_TLSv1_2;
static const long Cryptography_HAS_TLSv1_3;
static const long Cryptography_HAS_SECURE_RENEGOTIATION;
static const long Cryptography_HAS_COMPRESSION;
static const long Cryptography_HAS_TLSEXT_STATUS_REQ_CB;
static const long Cryptography_HAS_STATUS_REQ_OCSP_RESP;
static const long Cryptography_HAS_TLSEXT_STATUS_REQ_TYPE;
static const long Cryptography_HAS_GET_SERVER_TMP_KEY;
static const long Cryptography_HAS_SSL_CTX_SET_CLIENT_CERT_ENGINE;
static const long Cryptography_HAS_SSL_CTX_CLEAR_OPTIONS;
static const long Cryptography_HAS_DTLS;
static const long Cryptography_HAS_GENERIC_DTLS_METHOD;
static const long Cryptography_HAS_SIGALGS;
static const long Cryptography_HAS_PSK;
static const long Cryptography_HAS_CIPHER_DETAILS;
static const long Cryptography_HAS_CTRL_GET_MAX_PROTO_VERSION;
static const long Crytpography_HAS_OP_IGNORE_UNEXPECTED_EOF;
/* Internally invented symbol to tell us if SNI is supported */
static const long Cryptography_HAS_TLSEXT_HOSTNAME;
/* Internally invented symbol to tell us if SSL_MODE_RELEASE_BUFFERS is
* supported
*/
static const long Cryptography_HAS_RELEASE_BUFFERS;
/* Internally invented symbol to tell us if SSL_OP_NO_COMPRESSION is
* supported
*/
static const long Cryptography_HAS_OP_NO_COMPRESSION;
static const long Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING;
static const long Cryptography_HAS_SSL_SET_SSL_CTX;
static const long Cryptography_HAS_SSL_OP_NO_TICKET;
static const long Cryptography_HAS_ALPN;
static const long Cryptography_HAS_NEXTPROTONEG;
static const long Cryptography_HAS_SET_CERT_CB;
static const long Cryptography_HAS_CUSTOM_EXT;
static const long SSL_FILETYPE_PEM;
static const long SSL_FILETYPE_ASN1;
static const long SSL_ERROR_NONE;
static const long SSL_ERROR_ZERO_RETURN;
static const long SSL_ERROR_WANT_READ;
static const long SSL_ERROR_WANT_WRITE;
static const long SSL_ERROR_WANT_X509_LOOKUP;
static const long SSL_ERROR_WANT_CONNECT;
static const long SSL_ERROR_SYSCALL;
static const long SSL_ERROR_SSL;
static const long SSL_SENT_SHUTDOWN;
static const long SSL_RECEIVED_SHUTDOWN;
static const long SSL_OP_NO_SSLv2;
static const long SSL_OP_NO_SSLv3;
static const long SSL_OP_NO_TLSv1;
static const long SSL_OP_NO_TLSv1_1;
static const long SSL_OP_NO_TLSv1_2;
static const long SSL_OP_NO_TLSv1_3;
static const long SSL_OP_NO_DTLSv1;
static const long SSL_OP_NO_DTLSv1_2;
static const long SSL_OP_NO_COMPRESSION;
static const long SSL_OP_SINGLE_DH_USE;
static const long SSL_OP_EPHEMERAL_RSA;
static const long SSL_OP_MICROSOFT_SESS_ID_BUG;
static const long SSL_OP_NETSCAPE_CHALLENGE_BUG;
static const long SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG;
static const long SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG;
static const long SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER;
static const long SSL_OP_MSIE_SSLV2_RSA_PADDING;
static const long SSL_OP_SSLEAY_080_CLIENT_DH_BUG;
static const long SSL_OP_TLS_D5_BUG;
static const long SSL_OP_TLS_BLOCK_PADDING_BUG;
static const long SSL_OP_IGNORE_UNEXPECTED_EOF;
static const long SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS;
static const long SSL_OP_CIPHER_SERVER_PREFERENCE;
static const long SSL_OP_TLS_ROLLBACK_BUG;
static const long SSL_OP_PKCS1_CHECK_1;
static const long SSL_OP_PKCS1_CHECK_2;
static const long SSL_OP_NETSCAPE_CA_DN_BUG;
static const long SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG;
static const long SSL_OP_NO_QUERY_MTU;
static const long SSL_OP_COOKIE_EXCHANGE;
static const long SSL_OP_NO_TICKET;
static const long SSL_OP_ALL;
static const long SSL_OP_SINGLE_ECDH_USE;
static const long SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION;
static const long SSL_OP_LEGACY_SERVER_CONNECT;
static const long SSL_VERIFY_PEER;
static const long SSL_VERIFY_FAIL_IF_NO_PEER_CERT;
static const long SSL_VERIFY_CLIENT_ONCE;
static const long SSL_VERIFY_NONE;
static const long SSL_VERIFY_POST_HANDSHAKE;
static const long SSL_SESS_CACHE_OFF;
static const long SSL_SESS_CACHE_CLIENT;
static const long SSL_SESS_CACHE_SERVER;
static const long SSL_SESS_CACHE_BOTH;
static const long SSL_SESS_CACHE_NO_AUTO_CLEAR;
static const long SSL_SESS_CACHE_NO_INTERNAL_LOOKUP;
static const long SSL_SESS_CACHE_NO_INTERNAL_STORE;
static const long SSL_SESS_CACHE_NO_INTERNAL;
static const long SSL_ST_CONNECT;
static const long SSL_ST_ACCEPT;
static const long SSL_ST_MASK;
static const long SSL_ST_INIT;
static const long SSL_ST_BEFORE;
static const long SSL_ST_OK;
static const long SSL_ST_RENEGOTIATE;
static const long SSL_CB_LOOP;
static const long SSL_CB_EXIT;
static const long SSL_CB_READ;
static const long SSL_CB_WRITE;
static const long SSL_CB_ALERT;
static const long SSL_CB_READ_ALERT;
static const long SSL_CB_WRITE_ALERT;
static const long SSL_CB_ACCEPT_LOOP;
static const long SSL_CB_ACCEPT_EXIT;
static const long SSL_CB_CONNECT_LOOP;
static const long SSL_CB_CONNECT_EXIT;
static const long SSL_CB_HANDSHAKE_START;
static const long SSL_CB_HANDSHAKE_DONE;
static const long SSL_MODE_RELEASE_BUFFERS;
static const long SSL_MODE_ENABLE_PARTIAL_WRITE;
static const long SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER;
static const long SSL_MODE_AUTO_RETRY;
static const long SSL3_RANDOM_SIZE;
static const long TLS_ST_BEFORE;
static const long TLS_ST_OK;
static const long OPENSSL_NPN_NEGOTIATED;
typedef ... SSL_METHOD;
typedef ... SSL_CTX;
typedef ... SSL_SESSION;
typedef ... SSL;
static const long TLSEXT_NAMETYPE_host_name;
static const long TLSEXT_STATUSTYPE_ocsp;
typedef ... SSL_CIPHER;
typedef ... Cryptography_STACK_OF_SSL_CIPHER;
typedef ... COMP_METHOD;
typedef struct {
const char *name;
unsigned long id;
} SRTP_PROTECTION_PROFILE;
"""
FUNCTIONS = """
/* SSL */
const char *SSL_state_string_long(const SSL *);
SSL_SESSION *SSL_get1_session(SSL *);
int SSL_set_session(SSL *, SSL_SESSION *);
int SSL_get_verify_mode(const SSL *);
void SSL_set_verify(SSL *, int, int (*)(int, X509_STORE_CTX *));
void SSL_set_verify_depth(SSL *, int);
int SSL_get_verify_depth(const SSL *);
int (*SSL_get_verify_callback(const SSL *))(int, X509_STORE_CTX *);
void SSL_set_info_callback(SSL *ssl, void (*)(const SSL *, int, int));
void (*SSL_get_info_callback(const SSL *))(const SSL *, int, int);
SSL *SSL_new(SSL_CTX *);
void SSL_free(SSL *);
int SSL_set_fd(SSL *, int);
SSL_CTX *SSL_get_SSL_CTX(const SSL *);
SSL_CTX *SSL_set_SSL_CTX(SSL *, SSL_CTX *);
BIO *SSL_get_rbio(const SSL *);
BIO *SSL_get_wbio(const SSL *);
void SSL_set_bio(SSL *, BIO *, BIO *);
void SSL_set_connect_state(SSL *);
void SSL_set_accept_state(SSL *);
void SSL_set_shutdown(SSL *, int);
int SSL_get_shutdown(const SSL *);
int SSL_pending(const SSL *);
int SSL_write(SSL *, const void *, int);
int SSL_read(SSL *, void *, int);
int SSL_peek(SSL *, void *, int);
X509 *SSL_get_certificate(const SSL *);
X509 *SSL_get_peer_certificate(const SSL *);
int SSL_get_ex_data_X509_STORE_CTX_idx(void);
int SSL_CTX_set1_param(SSL_CTX *ctx, X509_VERIFY_PARAM *vpm);
int SSL_set1_param(SSL *ssl, X509_VERIFY_PARAM *vpm);
/* Added in 1.0.2 */
X509_VERIFY_PARAM *SSL_get0_param(SSL *);
X509_VERIFY_PARAM *SSL_CTX_get0_param(SSL_CTX *ctx);
int SSL_use_certificate(SSL *, X509 *);
int SSL_use_certificate_ASN1(SSL *, const unsigned char *, int);
int SSL_use_certificate_file(SSL *, const char *, int);
int SSL_use_PrivateKey(SSL *, EVP_PKEY *);
int SSL_use_PrivateKey_ASN1(int, SSL *, const unsigned char *, long);
int SSL_use_PrivateKey_file(SSL *, const char *, int);
int SSL_check_private_key(const SSL *);
int SSL_get_sigalgs(SSL *, int, int *, int *, int *, unsigned char *,
unsigned char *);
Cryptography_STACK_OF_X509 *SSL_get_peer_cert_chain(const SSL *);
Cryptography_STACK_OF_X509_NAME *SSL_get_client_CA_list(const SSL *);
int SSL_get_error(const SSL *, int);
int SSL_do_handshake(SSL *);
int SSL_shutdown(SSL *);
int SSL_renegotiate(SSL *);
int SSL_renegotiate_pending(SSL *);
const char *SSL_get_cipher_list(const SSL *, int);
Cryptography_STACK_OF_SSL_CIPHER *SSL_get_ciphers(const SSL *);
/* context */
void SSL_CTX_free(SSL_CTX *);
long SSL_CTX_set_timeout(SSL_CTX *, long);
int SSL_CTX_set_default_verify_paths(SSL_CTX *);
void SSL_CTX_set_verify(SSL_CTX *, int, int (*)(int, X509_STORE_CTX *));
void SSL_CTX_set_verify_depth(SSL_CTX *, int);
int (*SSL_CTX_get_verify_callback(const SSL_CTX *))(int, X509_STORE_CTX *);
int SSL_CTX_get_verify_mode(const SSL_CTX *);
int SSL_CTX_get_verify_depth(const SSL_CTX *);
int SSL_CTX_set_cipher_list(SSL_CTX *, const char *);
int SSL_CTX_load_verify_locations(SSL_CTX *, const char *, const char *);
void SSL_CTX_set_default_passwd_cb(SSL_CTX *, pem_password_cb *);
void SSL_CTX_set_default_passwd_cb_userdata(SSL_CTX *, void *);
pem_password_cb *SSL_CTX_get_default_passwd_cb(SSL_CTX *ctx);
void *SSL_CTX_get_default_passwd_cb_userdata(SSL_CTX *ctx);
int SSL_CTX_use_certificate(SSL_CTX *, X509 *);
int SSL_CTX_use_certificate_ASN1(SSL_CTX *, int, const unsigned char *);
int SSL_CTX_use_certificate_file(SSL_CTX *, const char *, int);
int SSL_CTX_use_certificate_chain_file(SSL_CTX *, const char *);
int SSL_CTX_use_PrivateKey(SSL_CTX *, EVP_PKEY *);
int SSL_CTX_use_PrivateKey_ASN1(int, SSL_CTX *, const unsigned char *, long);
int SSL_CTX_use_PrivateKey_file(SSL_CTX *, const char *, int);
int SSL_CTX_check_private_key(const SSL_CTX *);
void SSL_CTX_set_cert_verify_callback(SSL_CTX *,
int (*)(X509_STORE_CTX *, void *),
void *);
void SSL_CTX_set_cookie_generate_cb(SSL_CTX *,
int (*)(
SSL *,
unsigned char *,
unsigned int *
));
long SSL_CTX_get_read_ahead(SSL_CTX *);
long SSL_CTX_set_read_ahead(SSL_CTX *, long);
int SSL_CTX_use_psk_identity_hint(SSL_CTX *, const char *);
void SSL_CTX_set_psk_server_callback(SSL_CTX *,
unsigned int (*)(
SSL *,
const char *,
unsigned char *,
unsigned int
));
void SSL_CTX_set_psk_client_callback(SSL_CTX *,
unsigned int (*)(
SSL *,
const char *,
char *,
unsigned int,
unsigned char *,
unsigned int
));
int SSL_CTX_set_session_id_context(SSL_CTX *, const unsigned char *,
unsigned int);
void SSL_CTX_set_cert_store(SSL_CTX *, X509_STORE *);
X509_STORE *SSL_CTX_get_cert_store(const SSL_CTX *);
int SSL_CTX_add_client_CA(SSL_CTX *, X509 *);
void SSL_CTX_set_client_CA_list(SSL_CTX *, Cryptography_STACK_OF_X509_NAME *);
void SSL_CTX_set_info_callback(SSL_CTX *, void (*)(const SSL *, int, int));
void (*SSL_CTX_get_info_callback(SSL_CTX *))(const SSL *, int, int);
long SSL_CTX_set1_sigalgs_list(SSL_CTX *, const char *);
/* SSL_SESSION */
void SSL_SESSION_free(SSL_SESSION *);
/* Information about actually used cipher */
const char *SSL_CIPHER_get_name(const SSL_CIPHER *);
int SSL_CIPHER_get_bits(const SSL_CIPHER *, int *);
/* the modern signature of this is uint32_t, but older openssl declared it
as unsigned long. To make our compiler flags happy we'll declare it as a
64-bit wide value, which should always be safe */
uint64_t SSL_CIPHER_get_id(const SSL_CIPHER *);
int SSL_CIPHER_is_aead(const SSL_CIPHER *);
int SSL_CIPHER_get_cipher_nid(const SSL_CIPHER *);
int SSL_CIPHER_get_digest_nid(const SSL_CIPHER *);
int SSL_CIPHER_get_kx_nid(const SSL_CIPHER *);
int SSL_CIPHER_get_auth_nid(const SSL_CIPHER *);
size_t SSL_get_finished(const SSL *, void *, size_t);
size_t SSL_get_peer_finished(const SSL *, void *, size_t);
Cryptography_STACK_OF_X509_NAME *SSL_load_client_CA_file(const char *);
const char *SSL_get_servername(const SSL *, const int);
/* Function signature changed to const char * in 1.1.0 */
const char *SSL_CIPHER_get_version(const SSL_CIPHER *);
/* These became macros in 1.1.0 */
int SSL_library_init(void);
void SSL_load_error_strings(void);
/* these CRYPTO_EX_DATA functions became macros in 1.1.0 */
int SSL_get_ex_new_index(long, void *, CRYPTO_EX_new *, CRYPTO_EX_dup *,
CRYPTO_EX_free *);
int SSL_set_ex_data(SSL *, int, void *);
int SSL_CTX_get_ex_new_index(long, void *, CRYPTO_EX_new *, CRYPTO_EX_dup *,
CRYPTO_EX_free *);
int SSL_CTX_set_ex_data(SSL_CTX *, int, void *);
SSL_SESSION *SSL_get_session(const SSL *);
const unsigned char *SSL_SESSION_get_id(const SSL_SESSION *, unsigned int *);
long SSL_SESSION_get_time(const SSL_SESSION *);
long SSL_SESSION_get_timeout(const SSL_SESSION *);
int SSL_SESSION_has_ticket(const SSL_SESSION *);
long SSL_SESSION_get_ticket_lifetime_hint(const SSL_SESSION *);
/* not a macro, but older OpenSSLs don't pass the args as const */
char *SSL_CIPHER_description(const SSL_CIPHER *, char *, int);
int SSL_SESSION_print(BIO *, const SSL_SESSION *);
/* not macros, but will be conditionally bound so can't live in functions */
const COMP_METHOD *SSL_get_current_compression(SSL *);
const COMP_METHOD *SSL_get_current_expansion(SSL *);
const char *SSL_COMP_get_name(const COMP_METHOD *);
unsigned long SSL_set_mode(SSL *, unsigned long);
unsigned long SSL_get_mode(SSL *);
unsigned long SSL_set_options(SSL *, unsigned long);
unsigned long SSL_get_options(SSL *);
void SSL_set_app_data(SSL *, char *);
char * SSL_get_app_data(SSL *);
void SSL_set_read_ahead(SSL *, int);
int SSL_want_read(const SSL *);
int SSL_want_write(const SSL *);
long SSL_total_renegotiations(SSL *);
long SSL_get_secure_renegotiation_support(SSL *);
/* Defined as unsigned long because SSL_OP_ALL is greater than signed 32-bit
and Windows defines long as 32-bit. */
unsigned long SSL_CTX_set_options(SSL_CTX *, unsigned long);
unsigned long SSL_CTX_clear_options(SSL_CTX *, unsigned long);
unsigned long SSL_CTX_get_options(SSL_CTX *);
unsigned long SSL_CTX_set_mode(SSL_CTX *, unsigned long);
unsigned long SSL_CTX_get_mode(SSL_CTX *);
unsigned long SSL_CTX_set_session_cache_mode(SSL_CTX *, unsigned long);
unsigned long SSL_CTX_get_session_cache_mode(SSL_CTX *);
unsigned long SSL_CTX_set_tmp_dh(SSL_CTX *, DH *);
unsigned long SSL_CTX_set_tmp_ecdh(SSL_CTX *, EC_KEY *);
unsigned long SSL_CTX_add_extra_chain_cert(SSL_CTX *, X509 *);
/*- These aren't macros these functions are all const X on openssl > 1.0.x -*/
/* methods */
/*
* TLSv1_1 and TLSv1_2 are recent additions. Only sufficiently new versions of
* OpenSSL support them.
*/
const SSL_METHOD *TLSv1_1_method(void);
const SSL_METHOD *TLSv1_1_server_method(void);
const SSL_METHOD *TLSv1_1_client_method(void);
const SSL_METHOD *TLSv1_2_method(void);
const SSL_METHOD *TLSv1_2_server_method(void);
const SSL_METHOD *TLSv1_2_client_method(void);
const SSL_METHOD *SSLv3_method(void);
const SSL_METHOD *SSLv3_server_method(void);
const SSL_METHOD *SSLv3_client_method(void);
const SSL_METHOD *TLSv1_method(void);
const SSL_METHOD *TLSv1_server_method(void);
const SSL_METHOD *TLSv1_client_method(void);
const SSL_METHOD *DTLSv1_method(void);
const SSL_METHOD *DTLSv1_server_method(void);
const SSL_METHOD *DTLSv1_client_method(void);
/* Added in 1.0.2 */
const SSL_METHOD *DTLS_method(void);
const SSL_METHOD *DTLS_server_method(void);
const SSL_METHOD *DTLS_client_method(void);
const SSL_METHOD *SSLv23_method(void);
const SSL_METHOD *SSLv23_server_method(void);
const SSL_METHOD *SSLv23_client_method(void);
const SSL_METHOD *TLS_method(void);
const SSL_METHOD *TLS_server_method(void);
const SSL_METHOD *TLS_client_method(void);
/*- These aren't macros these arguments are all const X on openssl > 1.0.x -*/
SSL_CTX *SSL_CTX_new(SSL_METHOD *);
long SSL_CTX_get_timeout(const SSL_CTX *);
const SSL_CIPHER *SSL_get_current_cipher(const SSL *);
const char *SSL_get_version(const SSL *);
int SSL_version(const SSL *);
void *SSL_CTX_get_ex_data(const SSL_CTX *, int);
void *SSL_get_ex_data(const SSL *, int);
int SSL_set_tlsext_host_name(SSL *, char *);
void SSL_CTX_set_tlsext_servername_callback(
SSL_CTX *,
int (*)(SSL *, int *, void *));
void SSL_CTX_set_tlsext_servername_arg(
SSL_CTX *, void *);
long SSL_set_tlsext_status_ocsp_resp(SSL *, unsigned char *, int);
long SSL_get_tlsext_status_ocsp_resp(SSL *, const unsigned char **);
long SSL_set_tlsext_status_type(SSL *, long);
long SSL_CTX_set_tlsext_status_cb(SSL_CTX *, int(*)(SSL *, void *));
long SSL_CTX_set_tlsext_status_arg(SSL_CTX *, void *);
int SSL_CTX_set_tlsext_use_srtp(SSL_CTX *, const char *);
int SSL_set_tlsext_use_srtp(SSL *, const char *);
SRTP_PROTECTION_PROFILE *SSL_get_selected_srtp_profile(SSL *);
long SSL_session_reused(SSL *);
void SSL_CTX_set_next_protos_advertised_cb(SSL_CTX *,
int (*)(SSL *,
const unsigned char **,
unsigned int *,
void *),
void *);
void SSL_CTX_set_next_proto_select_cb(SSL_CTX *,
int (*)(SSL *,
unsigned char **,
unsigned char *,
const unsigned char *,
unsigned int,
void *),
void *);
int SSL_select_next_proto(unsigned char **, unsigned char *,
const unsigned char *, unsigned int,
const unsigned char *, unsigned int);
void SSL_get0_next_proto_negotiated(const SSL *,
const unsigned char **, unsigned *);
int sk_SSL_CIPHER_num(Cryptography_STACK_OF_SSL_CIPHER *);
const SSL_CIPHER *sk_SSL_CIPHER_value(Cryptography_STACK_OF_SSL_CIPHER *, int);
/* ALPN APIs were introduced in OpenSSL 1.0.2. To continue to support earlier
* versions some special handling of these is necessary.
*/
int SSL_CTX_set_alpn_protos(SSL_CTX *, const unsigned char *, unsigned);
int SSL_set_alpn_protos(SSL *, const unsigned char *, unsigned);
void SSL_CTX_set_alpn_select_cb(SSL_CTX *,
int (*) (SSL *,
const unsigned char **,
unsigned char *,
const unsigned char *,
unsigned int,
void *),
void *);
void SSL_get0_alpn_selected(const SSL *, const unsigned char **, unsigned *);
long SSL_get_server_tmp_key(SSL *, EVP_PKEY **);
/* SSL_CTX_set_cert_cb is introduced in OpenSSL 1.0.2. To continue to support
* earlier versions some special handling of these is necessary.
*/
void SSL_CTX_set_cert_cb(SSL_CTX *, int (*)(SSL *, void *), void *);
void SSL_set_cert_cb(SSL *, int (*)(SSL *, void *), void *);
/* Added in 1.0.2 */
const SSL_METHOD *SSL_CTX_get_ssl_method(SSL_CTX *);
int SSL_SESSION_set1_id_context(SSL_SESSION *, const unsigned char *,
unsigned int);
/* Added in 1.1.0 for the great opaquing of structs */
size_t SSL_SESSION_get_master_key(const SSL_SESSION *, unsigned char *,
size_t);
size_t SSL_get_client_random(const SSL *, unsigned char *, size_t);
size_t SSL_get_server_random(const SSL *, unsigned char *, size_t);
int SSL_export_keying_material(SSL *, unsigned char *, size_t, const char *,
size_t, const unsigned char *, size_t, int);
long SSL_CTX_sess_number(SSL_CTX *);
long SSL_CTX_sess_connect(SSL_CTX *);
long SSL_CTX_sess_connect_good(SSL_CTX *);
long SSL_CTX_sess_connect_renegotiate(SSL_CTX *);
long SSL_CTX_sess_accept(SSL_CTX *);
long SSL_CTX_sess_accept_good(SSL_CTX *);
long SSL_CTX_sess_accept_renegotiate(SSL_CTX *);
long SSL_CTX_sess_hits(SSL_CTX *);
long SSL_CTX_sess_cb_hits(SSL_CTX *);
long SSL_CTX_sess_misses(SSL_CTX *);
long SSL_CTX_sess_timeouts(SSL_CTX *);
long SSL_CTX_sess_cache_full(SSL_CTX *);
/* DTLS support */
long Cryptography_DTLSv1_get_timeout(SSL *, time_t *, long *);
long DTLSv1_handle_timeout(SSL *);
long DTLS_set_link_mtu(SSL *, long);
long DTLS_get_link_min_mtu(SSL *);
/* Custom extensions. */
typedef int (*custom_ext_add_cb)(SSL *, unsigned int,
const unsigned char **,
size_t *, int *,
void *);
typedef void (*custom_ext_free_cb)(SSL *, unsigned int,
const unsigned char *,
void *);
typedef int (*custom_ext_parse_cb)(SSL *, unsigned int,
const unsigned char *,
size_t, int *,
void *);
int SSL_CTX_add_client_custom_ext(SSL_CTX *, unsigned int,
custom_ext_add_cb,
custom_ext_free_cb, void *,
custom_ext_parse_cb,
void *);
int SSL_CTX_add_server_custom_ext(SSL_CTX *, unsigned int,
custom_ext_add_cb,
custom_ext_free_cb, void *,
custom_ext_parse_cb,
void *);
int SSL_extension_supported(unsigned int);
int SSL_CTX_set_ciphersuites(SSL_CTX *, const char *);
int SSL_verify_client_post_handshake(SSL *);
void SSL_CTX_set_post_handshake_auth(SSL_CTX *, int);
void SSL_set_post_handshake_auth(SSL *, int);
uint32_t SSL_SESSION_get_max_early_data(const SSL_SESSION *);
int SSL_write_early_data(SSL *, const void *, size_t, size_t *);
int SSL_read_early_data(SSL *, void *, size_t, size_t *);
int SSL_CTX_set_max_early_data(SSL_CTX *, uint32_t);
long SSL_get_verify_result(const SSL *ssl);
int SSL_CTX_set_min_proto_version(SSL_CTX *ctx, int version);
int SSL_CTX_set_max_proto_version(SSL_CTX *ctx, int version);
int SSL_CTX_get_min_proto_version(SSL_CTX *ctx);
int SSL_CTX_get_max_proto_version(SSL_CTX *ctx);
int SSL_set_min_proto_version(SSL *ssl, int version);
int SSL_set_max_proto_version(SSL *ssl, int version);
int SSL_get_min_proto_version(SSL *ssl);
int SSL_get_max_proto_version(SSL *ssl);
ASN1_OCTET_STRING *a2i_IPADDRESS(const char *ipasc);
"""
CUSTOMIZATIONS = """
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_102
#error Python 3.7 requires OpenSSL >= 1.0.2
#endif
/* Added in 1.0.2 but we need it in all versions now due to the great
opaquing. */
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_102
/* from ssl/ssl_lib.c */
const SSL_METHOD *SSL_CTX_get_ssl_method(SSL_CTX *ctx) {
return ctx->method;
}
#endif
/* Added in 1.1.0 in the great opaquing, but we need to define it for older
OpenSSLs. Such is our burden. */
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110 && !CRYPTOGRAPHY_LIBRESSL_27_OR_GREATER
/* from ssl/ssl_lib.c */
size_t SSL_get_client_random(const SSL *ssl, unsigned char *out, size_t outlen)
{
if (outlen == 0)
return sizeof(ssl->s3->client_random);
if (outlen > sizeof(ssl->s3->client_random))
outlen = sizeof(ssl->s3->client_random);
memcpy(out, ssl->s3->client_random, outlen);
return outlen;
}
/* Added in 1.1.0 as well */
/* from ssl/ssl_lib.c */
size_t SSL_get_server_random(const SSL *ssl, unsigned char *out, size_t outlen)
{
if (outlen == 0)
return sizeof(ssl->s3->server_random);
if (outlen > sizeof(ssl->s3->server_random))
outlen = sizeof(ssl->s3->server_random);
memcpy(out, ssl->s3->server_random, outlen);
return outlen;
}
/* Added in 1.1.0 as well */
/* from ssl/ssl_lib.c */
size_t SSL_SESSION_get_master_key(const SSL_SESSION *session,
unsigned char *out, size_t outlen)
{
if (session->master_key_length < 0) {
/* Should never happen */
return 0;
}
if (outlen == 0)
return session->master_key_length;
if (outlen > (size_t)session->master_key_length)
outlen = session->master_key_length;
memcpy(out, session->master_key, outlen);
return outlen;
}
/* from ssl/ssl_sess.c */
int SSL_SESSION_has_ticket(const SSL_SESSION *s)
{
return (s->tlsext_ticklen > 0) ? 1 : 0;
}
/* from ssl/ssl_sess.c */
unsigned long SSL_SESSION_get_ticket_lifetime_hint(const SSL_SESSION *s)
{
return s->tlsext_tick_lifetime_hint;
}
#endif
static const long Cryptography_HAS_SECURE_RENEGOTIATION = 1;
/* Cryptography now compiles out all SSLv2 bindings. This exists to allow
* clients that use it to check for SSLv2 support to keep functioning as
* expected.
*/
static const long Cryptography_HAS_SSL2 = 0;
#ifdef OPENSSL_NO_SSL3_METHOD
static const long Cryptography_HAS_SSL3_METHOD = 0;
const SSL_METHOD* (*SSLv3_method)(void) = NULL;
const SSL_METHOD* (*SSLv3_client_method)(void) = NULL;
const SSL_METHOD* (*SSLv3_server_method)(void) = NULL;
#else
static const long Cryptography_HAS_SSL3_METHOD = 1;
#endif
static const long Cryptography_HAS_TLSEXT_HOSTNAME = 1;
static const long Cryptography_HAS_TLSEXT_STATUS_REQ_CB = 1;
static const long Cryptography_HAS_STATUS_REQ_OCSP_RESP = 1;
static const long Cryptography_HAS_TLSEXT_STATUS_REQ_TYPE = 1;
static const long Cryptography_HAS_RELEASE_BUFFERS = 1;
static const long Cryptography_HAS_OP_NO_COMPRESSION = 1;
static const long Cryptography_HAS_TLSv1_1 = 1;
static const long Cryptography_HAS_TLSv1_2 = 1;
static const long Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING = 1;
static const long Cryptography_HAS_SSL_OP_NO_TICKET = 1;
static const long Cryptography_HAS_SSL_SET_SSL_CTX = 1;
static const long Cryptography_HAS_NEXTPROTONEG = 1;
/* SSL_get0_param was added in OpenSSL 1.0.2. */
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 && !CRYPTOGRAPHY_LIBRESSL_27_OR_GREATER
X509_VERIFY_PARAM *(*SSL_get0_param)(SSL *) = NULL;
X509_VERIFY_PARAM *(*SSL_CTX_get0_param)(SSL_CTX *ctx) = NULL;
#else
#endif
/* ALPN was added in OpenSSL 1.0.2. */
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 && !CRYPTOGRAPHY_IS_LIBRESSL
int (*SSL_CTX_set_alpn_protos)(SSL_CTX *,
const unsigned char *,
unsigned) = NULL;
int (*SSL_set_alpn_protos)(SSL *, const unsigned char *, unsigned) = NULL;
void (*SSL_CTX_set_alpn_select_cb)(SSL_CTX *,
int (*) (SSL *,
const unsigned char **,
unsigned char *,
const unsigned char *,
unsigned int,
void *),
void *) = NULL;
void (*SSL_get0_alpn_selected)(const SSL *,
const unsigned char **,
unsigned *) = NULL;
static const long Cryptography_HAS_ALPN = 0;
#else
static const long Cryptography_HAS_ALPN = 1;
#endif
/* SSL_CTX_set_cert_cb was added in OpenSSL 1.0.2. */
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_102
void (*SSL_CTX_set_cert_cb)(SSL_CTX *, int (*)(SSL *, void *), void *) = NULL;
void (*SSL_set_cert_cb)(SSL *, int (*)(SSL *, void *), void *) = NULL;
static const long Cryptography_HAS_SET_CERT_CB = 0;
#else
static const long Cryptography_HAS_SET_CERT_CB = 1;
#endif
/* In OpenSSL 1.0.2i+ the handling of COMP_METHOD when OPENSSL_NO_COMP was
changed and we no longer need to typedef void */
#if (defined(OPENSSL_NO_COMP) && CRYPTOGRAPHY_OPENSSL_LESS_THAN_102I) || \
CRYPTOGRAPHY_IS_LIBRESSL
static const long Cryptography_HAS_COMPRESSION = 0;
typedef void COMP_METHOD;
#else
static const long Cryptography_HAS_COMPRESSION = 1;
#endif
#if defined(SSL_CTRL_GET_SERVER_TMP_KEY)
static const long Cryptography_HAS_GET_SERVER_TMP_KEY = 1;
#else
static const long Cryptography_HAS_GET_SERVER_TMP_KEY = 0;
long (*SSL_get_server_tmp_key)(SSL *, EVP_PKEY **) = NULL;
#endif
/* The setter functions were added in OpenSSL 1.1.0. The getter functions were
added in OpenSSL 1.1.1. */
#if defined(SSL_CTRL_GET_MAX_PROTO_VERSION)
static const long Cryptography_HAS_CTRL_GET_MAX_PROTO_VERSION = 1;
#else
static const long Cryptography_HAS_CTRL_GET_MAX_PROTO_VERSION = 0;
int (*SSL_CTX_get_min_proto_version)(SSL_CTX *ctx) = NULL;
int (*SSL_CTX_get_max_proto_version)(SSL_CTX *ctx) = NULL;
int (*SSL_get_min_proto_version)(SSL *ssl) = NULL;
int (*SSL_get_max_proto_version)(SSL *ssl) = NULL;
#endif
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110
int (*SSL_CTX_set_min_proto_version)(SSL_CTX *ctx, int version) = NULL;
int (*SSL_CTX_set_max_proto_version)(SSL_CTX *ctx, int version) = NULL;
int (*SSL_set_min_proto_version)(SSL *ssl, int version) = NULL;
int (*SSL_set_max_proto_version)(SSL *ssl, int version) = NULL;
#endif
static const long Cryptography_HAS_SSL_CTX_SET_CLIENT_CERT_ENGINE = 1;
static const long Cryptography_HAS_SSL_CTX_CLEAR_OPTIONS = 1;
/* in OpenSSL 1.1.0 the SSL_ST values were renamed to TLS_ST and several were
removed */
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110
static const long Cryptography_HAS_SSL_ST = 1;
#else
static const long Cryptography_HAS_SSL_ST = 0;
static const long SSL_ST_BEFORE = 0;
static const long SSL_ST_OK = 0;
static const long SSL_ST_INIT = 0;
static const long SSL_ST_RENEGOTIATE = 0;
#endif
#if CRYPTOGRAPHY_OPENSSL_110_OR_GREATER
static const long Cryptography_HAS_TLS_ST = 1;
#else
static const long Cryptography_HAS_TLS_ST = 0;
static const long TLS_ST_BEFORE = 0;
static const long TLS_ST_OK = 0;
#endif
/* SSLv23_method(), SSLv23_server_method() and SSLv23_client_method() were
deprecated and the preferred TLS_method(), TLS_server_method() and
TLS_client_method() functions were introduced in OpenSSL 1.1.0. */
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110
#define TLS_method SSLv23_method
#define TLS_server_method SSLv23_server_method
#define TLS_client_method SSLv23_client_method
#endif
/* LibreSSL 2.9.1 added only the DTLS_*_method functions */
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 && !CRYPTOGRAPHY_LIBRESSL_291_OR_GREATER
static const long Cryptography_HAS_GENERIC_DTLS_METHOD = 0;
const SSL_METHOD *(*DTLS_method)(void) = NULL;
const SSL_METHOD *(*DTLS_server_method)(void) = NULL;
const SSL_METHOD *(*DTLS_client_method)(void) = NULL;
#else
static const long Cryptography_HAS_GENERIC_DTLS_METHOD = 1;
#endif
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_102
static const long SSL_OP_NO_DTLSv1 = 0;
static const long SSL_OP_NO_DTLSv1_2 = 0;
long (*DTLS_set_link_mtu)(SSL *, long) = NULL;
long (*DTLS_get_link_min_mtu)(SSL *) = NULL;
#endif
static const long Cryptography_HAS_DTLS = 1;
/* Wrap DTLSv1_get_timeout to avoid cffi to handle a 'struct timeval'. */
long Cryptography_DTLSv1_get_timeout(SSL *ssl, time_t *ptv_sec,
long *ptv_usec) {
struct timeval tv = { 0 };
long r = DTLSv1_get_timeout(ssl, &tv);
if (r == 1) {
if (ptv_sec) {
*ptv_sec = tv.tv_sec;
}
if (ptv_usec) {
*ptv_usec = tv.tv_usec;
}
}
return r;
}
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_102
static const long Cryptography_HAS_SIGALGS = 0;
const int (*SSL_get_sigalgs)(SSL *, int, int *, int *, int *, unsigned char *,
unsigned char *) = NULL;
const long (*SSL_CTX_set1_sigalgs_list)(SSL_CTX *, const char *) = NULL;
#else
static const long Cryptography_HAS_SIGALGS = 1;
#endif
#if CRYPTOGRAPHY_IS_LIBRESSL || defined(OPENSSL_NO_PSK)
static const long Cryptography_HAS_PSK = 0;
int (*SSL_CTX_use_psk_identity_hint)(SSL_CTX *, const char *) = NULL;
void (*SSL_CTX_set_psk_server_callback)(SSL_CTX *,
unsigned int (*)(
SSL *,
const char *,
unsigned char *,
unsigned int
)) = NULL;
void (*SSL_CTX_set_psk_client_callback)(SSL_CTX *,
unsigned int (*)(
SSL *,
const char *,
char *,
unsigned int,
unsigned char *,
unsigned int
)) = NULL;
#else
static const long Cryptography_HAS_PSK = 1;
#endif
/*
* Custom extensions were added in 1.0.2. 1.1.1 is adding a more general
* SSL_CTX_add_custom_ext function, but we're not binding that yet.
*/
#if CRYPTOGRAPHY_OPENSSL_102_OR_GREATER
static const long Cryptography_HAS_CUSTOM_EXT = 1;
#else
static const long Cryptography_HAS_CUSTOM_EXT = 0;
typedef int (*custom_ext_add_cb)(SSL *, unsigned int,
const unsigned char **,
size_t *, int *,
void *);
typedef void (*custom_ext_free_cb)(SSL *, unsigned int,
const unsigned char *,
void *);
typedef int (*custom_ext_parse_cb)(SSL *, unsigned int,
const unsigned char *,
size_t, int *,
void *);
int (*SSL_CTX_add_client_custom_ext)(SSL_CTX *, unsigned int,
custom_ext_add_cb,
custom_ext_free_cb, void *,
custom_ext_parse_cb,
void *) = NULL;
int (*SSL_CTX_add_server_custom_ext)(SSL_CTX *, unsigned int,
custom_ext_add_cb,
custom_ext_free_cb, void *,
custom_ext_parse_cb,
void *) = NULL;
int (*SSL_extension_supported)(unsigned int) = NULL;
#endif
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_110 && !CRYPTOGRAPHY_LIBRESSL_27_OR_GREATER
int (*SSL_CIPHER_is_aead)(const SSL_CIPHER *) = NULL;
int (*SSL_CIPHER_get_cipher_nid)(const SSL_CIPHER *) = NULL;
int (*SSL_CIPHER_get_digest_nid)(const SSL_CIPHER *) = NULL;
int (*SSL_CIPHER_get_kx_nid)(const SSL_CIPHER *) = NULL;
int (*SSL_CIPHER_get_auth_nid)(const SSL_CIPHER *) = NULL;
static const long Cryptography_HAS_CIPHER_DETAILS = 0;
#else
static const long Cryptography_HAS_CIPHER_DETAILS = 1;
#endif
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_111
static const long Cryptography_HAS_TLSv1_3 = 0;
static const long SSL_OP_NO_TLSv1_3 = 0;
static const long SSL_VERIFY_POST_HANDSHAKE = 0;
int (*SSL_CTX_set_ciphersuites)(SSL_CTX *, const char *) = NULL;
int (*SSL_verify_client_post_handshake)(SSL *) = NULL;
void (*SSL_CTX_set_post_handshake_auth)(SSL_CTX *, int) = NULL;
void (*SSL_set_post_handshake_auth)(SSL *, int) = NULL;
uint32_t (*SSL_SESSION_get_max_early_data)(const SSL_SESSION *) = NULL;
int (*SSL_write_early_data)(SSL *, const void *, size_t, size_t *) = NULL;
int (*SSL_read_early_data)(SSL *, void *, size_t, size_t *) = NULL;
int (*SSL_CTX_set_max_early_data)(SSL_CTX *, uint32_t) = NULL;
#else
static const long Cryptography_HAS_TLSv1_3 = 1;
#endif
#if CRYPTOGRAPHY_OPENSSL_LESS_THAN_300
static const long SSL_OP_IGNORE_UNEXPECTED_EOF = 0;
static const long Crytpography_HAS_OP_IGNORE_UNEXPECTED_EOF = 0;
#else
static const long Crytpography_HAS_OP_IGNORE_UNEXPECTED_EOF = 1;
#endif
"""
| 39.789359 | 79 | 0.697184 |
beef288d8a26c1c03e8bf447767b782bde627158 | 3,710 | py | Python | tests/test_add_sinks.py | joshgordon/loguru | 9777f4bec8b03ef074635269224baa3fd263fd09 | [
"MIT"
] | 1 | 2019-10-21T01:59:35.000Z | 2019-10-21T01:59:35.000Z | tests/test_add_sinks.py | joshgordon/loguru | 9777f4bec8b03ef074635269224baa3fd263fd09 | [
"MIT"
] | null | null | null | tests/test_add_sinks.py | joshgordon/loguru | 9777f4bec8b03ef074635269224baa3fd263fd09 | [
"MIT"
] | null | null | null | import pathlib
import sys
import os
import logging
from loguru import logger
import pytest
message = "test message"
expected = message + "\n"
repetitions = pytest.mark.parametrize("rep", [0, 1, 2])
def log(sink, rep=1):
logger.debug("This shouldn't be printed.")
i = logger.add(sink, format="{message}")
for _ in range(rep):
logger.debug(message)
logger.remove(i)
logger.debug("This shouldn't be printed neither.")
@repetitions
def test_stdout_sink(rep, capsys):
log(sys.stdout, rep)
out, err = capsys.readouterr()
assert out == expected * rep
assert err == ""
@repetitions
def test_stderr_sink(rep, capsys):
log(sys.stderr, rep)
out, err = capsys.readouterr()
assert out == ""
assert err == expected * rep
@repetitions
def test_devnull(rep):
log(os.devnull, rep)
@repetitions
@pytest.mark.parametrize(
"sink_from_path",
[str, pathlib.Path, lambda path: open(path, "a"), lambda path: pathlib.Path(path).open("a")],
)
def test_file_sink(rep, sink_from_path, tmpdir):
file = tmpdir.join("test.log")
sink = sink_from_path(str(file))
log(sink, rep)
assert file.read() == expected * rep
@repetitions
def test_file_sink_folder_creation(rep, tmpdir):
file = tmpdir.join("some", "sub", "folder", "not", "existing", "test.log")
log(str(file), rep)
assert file.read() == expected * rep
@repetitions
def test_function_sink(rep):
a = []
func = lambda log_message: a.append(log_message)
log(func, rep)
assert a == [expected] * rep
@repetitions
def test_class_sink(rep):
out = []
class A:
def write(self, m):
out.append(m)
log(A, rep)
assert out == [expected] * rep
@repetitions
def test_file_object_sink(rep):
class A:
def __init__(self):
self.out = ""
def write(self, m):
self.out += m
a = A()
log(a, rep)
assert a.out == expected * rep
@repetitions
def test_standard_handler_sink(rep):
out = []
class H(logging.Handler):
def emit(self, record):
out.append(record.getMessage() + "\n")
h = H()
log(h, rep)
assert out == [expected] * rep
@repetitions
def test_flush(rep):
flushed = []
out = []
class A:
def write(self, m):
out.append(m)
def flush(self):
flushed.append(out[-1])
log(A, rep)
assert flushed == [expected] * rep
def test_file_sink_ascii_encoding(tmpdir):
file = tmpdir.join("test.log")
logger.add(
str(file), encoding="ascii", format="{message}", errors="backslashreplace", catch=False
)
logger.info("天")
logger.remove()
assert file.read() == "\\u5929\n"
def test_file_sink_utf8_encoding(tmpdir):
file = tmpdir.join("test.log")
logger.add(str(file), encoding="utf8", format="{message}", errors="strict", catch=False)
logger.info("天")
logger.remove()
assert file.read() == "天\n"
def test_disabled_logger_in_sink(sink_with_logger):
sink = sink_with_logger(logger)
logger.disable("tests.conftest")
logger.add(sink, format="{message}")
logger.info("Disabled test")
assert sink.out == "Disabled test\n"
@pytest.mark.parametrize("sink", [123, sys, object(), int])
def test_invalid_sink(sink):
with pytest.raises(ValueError):
log(sink, "")
def test_deprecated_start_and_stop(writer):
with pytest.warns(DeprecationWarning):
i = logger.start(writer, format="{message}")
logger.debug("Test")
assert writer.read() == "Test\n"
writer.clear()
with pytest.warns(DeprecationWarning):
logger.stop(i)
logger.debug("Test")
assert writer.read() == ""
| 22.083333 | 97 | 0.628571 |
516f7c20736d09f491482b51a63e563037b5e7b8 | 8,787 | py | Python | tests/st/ops/cpu/test_arithmetic_op.py | Fengjuzhuwhw/mindspore | 553bb6e9d1156d8170f7ce9b9dd376e545f8bf97 | [
"Apache-2.0"
] | null | null | null | tests/st/ops/cpu/test_arithmetic_op.py | Fengjuzhuwhw/mindspore | 553bb6e9d1156d8170f7ce9b9dd376e545f8bf97 | [
"Apache-2.0"
] | null | null | null | tests/st/ops/cpu/test_arithmetic_op.py | Fengjuzhuwhw/mindspore | 553bb6e9d1156d8170f7ce9b9dd376e545f8bf97 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
import mindspore
from mindspore import Tensor
from mindspore.ops import operations as P
# Default every test in this file to graph mode on CPU (individual tests
# re-apply the same configuration defensively before building their nets).
context.set_context(mode=context.GRAPH_MODE, device_target='CPU')
class SubNet(nn.Cell):
    """Minimal Cell wrapping the element-wise ``Sub`` primitive."""

    def __init__(self):
        super().__init__()
        self.sub = P.Sub()

    def construct(self, x, y):
        # Element-wise x - y; broadcasting is handled by the primitive.
        return self.sub(x, y)
class DivNet(nn.Cell):
    """Minimal Cell wrapping the element-wise ``Div`` primitive."""

    def __init__(self):
        super().__init__()
        self.div = P.Div()

    def construct(self, x, y):
        # Element-wise x / y; broadcasting is handled by the primitive.
        return self.div(x, y)
class ModNet(nn.Cell):
    """Minimal Cell wrapping the element-wise ``Mod`` primitive."""

    def __init__(self):
        super().__init__()
        self.mod = P.Mod()

    def construct(self, x, y):
        # Element-wise x mod y; broadcasting is handled by the primitive.
        return self.mod(x, y)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
def test_sub():
    """Sub with broadcastable shapes matches the NumPy result exactly."""
    lhs = np.random.rand(2, 3, 4, 4).astype(np.float32)
    rhs = np.random.rand(4, 1).astype(np.float32)
    result = SubNet()(Tensor(lhs), Tensor(rhs, mindspore.float32))
    assert np.all(result.asnumpy() == (lhs - rhs))
@pytest.mark.level0
@pytest.mark.platform_x86_cpu_training
@pytest.mark.env_onecard
def test_div():
    """Div agrees with NumPy division across dtypes, shapes and signs.

    Both operands of every case share one random sign flip, so negative
    inputs are exercised half of the time without changing magnitudes.
    """
    sign = 1 if np.random.random() < 0.5 else -1

    def _rand(shape, dtype):
        # Random integers in [1, 100), cast, then sign-flipped as a pair.
        return np.random.randint(1, 100, shape).astype(dtype) * sign

    context.set_context(mode=context.GRAPH_MODE, device_target='CPU')
    div = DivNet()

    # (x, y, dtype to cast the np.divide reference to, or None) — all of
    # these are compared within an absolute tolerance of 1e-5.
    approx_cases = [
        (_rand((2, 3, 4, 4), np.float32), _rand((2, 3, 4, 4), np.float32), None),
        (_rand((2, 3, 4, 4), np.float32), _rand((2, 1, 4, 4), np.float32), None),
        (_rand((2, 1, 1, 4), np.float16), _rand((2, 3, 4, 4), np.float16), np.float16),
        (_rand(1, np.float32), _rand(1, np.float32), None),
        (np.array(768).astype(np.float32) * sign,
         np.array(3072.5).astype(np.float32) * sign, None),
        (_rand((2, 3, 4, 4), np.int32), _rand((2, 3, 4, 4), np.float32), None),
    ]
    for x_np, y_np, cast in approx_cases:
        expect = np.divide(x_np, y_np)
        if cast is not None:
            expect = expect.astype(cast)
        output = div(Tensor(x_np), Tensor(y_np))
        diff = output.asnumpy() - expect
        assert np.all(diff < np.ones(shape=expect.shape) * 1.0e-5)
        assert output.shape == expect.shape

    # int32 inputs: result must equal NumPy's floor division exactly.
    x_i32, y_i32 = _rand((2, 1, 1, 4), np.int32), _rand((2, 3, 4, 4), np.int32)
    assert np.all(div(Tensor(x_i32), Tensor(y_i32)).asnumpy() == x_i32 // y_i32)

    # int64 inputs: true division cast back to int64, compared exactly.
    x_i64, y_i64 = _rand((2, 1, 1, 4), np.int64), _rand((2, 3, 4, 4), np.int64)
    output = div(Tensor(x_i64), Tensor(y_i64))
    expect = np.divide(x_i64, y_i64).astype(np.int64)
    assert np.all(output.asnumpy() == expect)
    assert output.shape == expect.shape
@pytest.mark.level0
@pytest.mark.platform_x86_cpu_training
@pytest.mark.env_onecard
def test_mod():
    """Mod agrees with np.mod across dtypes, shapes (broadcasting) and signs.

    Bug fix: the final assertion previously re-checked the shape of case 6's
    output (``output6``/``expect6``) instead of case 7's; the table-driven
    rewrite checks every case's shape uniformly.
    """
    prop = 1 if np.random.random() < 0.5 else -1

    def _rand(shape, dtype):
        # Random integers in [1, 100), cast, then sign-flipped as a pair.
        return np.random.randint(1, 100, shape).astype(dtype) * prop

    context.set_context(mode=context.GRAPH_MODE, device_target='CPU')
    mod = ModNet()

    def _check(x_np, y_np, expect, exact):
        output = mod(Tensor(x_np), Tensor(y_np))
        if exact:
            assert np.all(output.asnumpy() == expect)
        else:
            diff = output.asnumpy() - expect
            assert np.all(diff < np.ones(shape=expect.shape) * 1.0e-5)
        assert output.shape == expect.shape

    # (x, y, dtype to cast the np.mod reference to, exact-comparison flag)
    cases = [
        (_rand((2, 3, 4, 4), np.float32), _rand((2, 3, 4, 4), np.float32), None, False),
        (_rand((2, 3, 4, 4), np.float32), _rand((2, 1, 4, 4), np.float32), None, False),
        (_rand((2, 1, 1, 4), np.float16), _rand((2, 3, 4, 4), np.float16), np.float16, False),
        (_rand(1, np.float32), _rand(1, np.float32), None, False),
        (np.array(768).astype(np.float32) * prop,
         np.array(3072.5).astype(np.float32) * prop, None, False),
        (_rand((2, 1, 1, 4), np.int32), _rand((2, 3, 4, 4), np.int32), None, True),
        (_rand((2, 3, 4, 4), np.int32), _rand((2, 3, 4, 4), np.float32), None, False),
        (_rand((2, 1, 1, 4), np.int64), _rand((2, 3, 4, 4), np.int64), np.int64, True),
    ]
    for x_np, y_np, cast, exact in cases:
        expect = np.mod(x_np, y_np)
        if cast is not None:
            expect = expect.astype(cast)
        _check(x_np, y_np, expect, exact)
if __name__ == "__main__":
    # Allow running this file directly as a script.  The guard keeps pytest
    # (which imports this module during collection) from also executing all
    # three tests as an import-time side effect.
    test_sub()
    test_div()
    test_mod()
| 34.458824 | 78 | 0.637988 |
7811179b1ba8bf1dda101ada07a6f2d221875d9d | 4,673 | py | Python | captionGen.py | 4rude/WebVTTCaptionGen | ceb9e763adcbd7b0e0bb3263cbad1b09f09ac1c2 | [
"MIT"
] | null | null | null | captionGen.py | 4rude/WebVTTCaptionGen | ceb9e763adcbd7b0e0bb3263cbad1b09f09ac1c2 | [
"MIT"
] | null | null | null | captionGen.py | 4rude/WebVTTCaptionGen | ceb9e763adcbd7b0e0bb3263cbad1b09f09ac1c2 | [
"MIT"
] | null | null | null | # Example input variables:
# Ex. endSecond = 6000
# Ex. filePath = "images/thumbs/thumb"
def webvttCaptionGenerator(endSecond, filePath):
# This application could have been written to be a lot shorter (more dynamic), however it still works.
# This webvtt caption generator generates captions for videos roughly 1 hour and 39 minutes in length.
# The output of this application is printed to the console. You then will want to copy the console text and
# paste it into a file named: thumbs.vtt
# This algorithm would optimally output the data into a .vtt file. (TODO)
# Default start second. This usually wouldn't change.
startSecond = 1
# The endSecond variable is determined by the user
# First initialization of num1 - 8 & counterA & B. These are all mainly counters.
num1 = 0
num2 = 1
num3 = 0
num4 = 0
num5 = 0
num6 = 0
num7 = 0
num8 = 0
counterA = 1
counterB = 1
# Number of first thumbnail (created with )
thumbNum = 1
# Used to start the webvtt file, which is used to display thumbnails created by the ffmpeg (command line/terminal) tool. This is easy to use tool on the MacOS, Window, and Linux platforms.
# If you need help creating thumbnails for your video this link is a good start: https://stackoverflow.com/questions/8679390/ffmpeg-extracting-20-images-from-a-video-of-variable-length
print("WEBVTT\n")
# For loop to run through each thumbnail (var i is each thumbnail)
for i in range(startSecond, endSecond):
#
if num1 == 60:
num1 = 0
elif num2 == 60:
num2 = 0
if i == 61:
num3 += 1
num1 = 0
if i == 60:
num4 += 1
num2 = 0
#experiment
if i > 121:
counterA += 1
if i > 122:
counterB += 1
if ((counterA % 60) == 0):
num4 += 1
num2 = 0
if ((counterB % 60) == 0):
num3 += 1
num1 = 0
if i == 1:
print("00:0" + str(num1) + ".000 --> 00:0" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i > 1 and i < 10:
print("0" + str(num3) + ":0" + str(num1) + ".100 --> 0" + str(num4) + ":0" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i == 10:
print("0" + str(num3) + ":0" + str(num1) + ".100 --> 0" + str(num4) + ":" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i > 10 and i < 60:
print("0" + str(num3) + ":" + str(num1) + ".100 --> 0" + str(num4) + ":" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i == 60:
print("0" + str(num3) + ":" + str(num1) + ".100 --> 0" + str(num4) + ":0" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i == 61:
print("0" + str(num3) + ":0" + str(num1) + ".100 --> 0" + str(num4) + ":0" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i > 61 and i < 70:
#if num1 > 9 and num2 > 9:
print("0" + str(num3) + ":0" + str(num1) + ".100 --> 0" + str(num4) + ":0" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i == 70:
print("0" + str(num3) + ":0" + str(num1) + ".100 --> 0" + str(num4) + ":" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i == 71:
print("0" + str(num3) + ":" + str(num1) + ".100 --> 0" + str(num4) + ":" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i > 71 and i < 120:
print("0" + str(num3) + ":" + str(num1) + ".100 --> 0" + str(num4) + ":" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
#edit
if i == 120:
print("Hello")
counterA = 1
num4 += 1
num2 = 0
print("0" + str(num3) + ":" + str(num1) + ".100 --> 0" + str(num4) + ":0" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i == 121:
counterB = 1
num3 += 1
num1 = 0
print("0" + str(num3) + ":0" + str(num1) + ".100 --> 0" + str(num4) + ":0" + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i > 121 and i < 600:
if num2 == 10:
num6 = ""
if num1 == 10:
num5 = ""
if num2 == 0:
num6 = 0
if num1 == 0:
num5 = 0
print("0" + str(num3) + ":" + str(num5) + str(num1) + ".100 --> 0" + str(num4) + ":" + str(num6) + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
if i >= 600:
if num2 == 10:
num6 = ""
if num1 == 10:
num5 = ""
if num2 == 0:
num6 = 0
if num1 == 0:
num5 = 0
num8 = ""
print(str(num7) + str(num3) + ":" + str(num5) + str(num1) + ".100 --> " + str(num8) + str(num4) + ":" + str(num6) + str(num2) + ".100" + "\n" + filePath + "" + str(thumbNum) + ".jpg\n")
num7 = ""
# This should happen every iteration
num1 += 1
num2 += 1
thumbNum += 1
| 32.678322 | 189 | 0.536272 |
1d90cb92de95e1daa3cb6006d949026e5680df85 | 828 | py | Python | tests/common/helpers.py | xiao1228/nncf | 307262119ee3f50eec2fa4022b2ef96693fd8448 | [
"Apache-2.0"
] | null | null | null | tests/common/helpers.py | xiao1228/nncf | 307262119ee3f50eec2fa4022b2ef96693fd8448 | [
"Apache-2.0"
] | null | null | null | tests/common/helpers.py | xiao1228/nncf | 307262119ee3f50eec2fa4022b2ef96693fd8448 | [
"Apache-2.0"
] | null | null | null | """
Copyright (c) 2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
def get_cli_dict_args(args):
    """Convert a ``{name: value}`` mapping into CLI-style keyword arguments.

    Each key ``name`` becomes ``--name``.  A value of ``None`` stays ``None``
    (a bare flag with no value); every other value is stringified.
    """
    return {
        '--{}'.format(str(name)): None if value is None else str(value)
        for name, value in args.items()
    }
| 37.636364 | 73 | 0.711353 |
fc0df5e92609b5c9e8f37c8e0d34a627dfa719e4 | 934 | py | Python | spiritdashboard/urls.py | rashidx/SMUS-Go | 481df8c3d9d59da1e2753d4ac6ce087fd3abb161 | [
"MIT"
] | 1 | 2019-02-14T05:01:53.000Z | 2019-02-14T05:01:53.000Z | spiritdashboard/urls.py | rashidx/SMUS-Go | 481df8c3d9d59da1e2753d4ac6ce087fd3abb161 | [
"MIT"
] | null | null | null | spiritdashboard/urls.py | rashidx/SMUS-Go | 481df8c3d9d59da1e2753d4ac6ce087fd3abb161 | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
from django.contrib.auth.views import LoginView, LogoutView
app_name = 'spiritdashboard'
urlpatterns = [
path('', views.dashboard, name='dashboard'),
path('', views.dashboard, name='index'),
path('dashboard/', views.dashboard),
path('register/', views.register, name='register'),
path('login/', LoginView.as_view(template_name='spiritdashboard/login.html'), name='login'),
path('logout/', LogoutView.as_view(), name='logout'),
path('claim/<str:key>/', views.claim_key, name='claim_key'),
path('claim/', views.claim_key, name='claim_key_post'),
path('user_leaderboard/', views.UserLeaderboard.as_view(), name='user_leaderboard'),
path('grade_leaderboard/', views.GradeLeaderboard.as_view(), name='grade_leaderboard'),
path('completed/', views.completed, name='completed'),
path('privacy/', views.privacy_policy, name='privacy_policy')
]
| 46.7 | 96 | 0.708779 |
6d6fb26722851b5ac0c6b91ee32521ee8d68d6f6 | 1,073 | py | Python | Otree-Base/inicio/__init__.py | abnercasallo/source | 94c14c6a277d1b783a1ceddbc0e0852443ea5847 | [
"MIT"
] | null | null | null | Otree-Base/inicio/__init__.py | abnercasallo/source | 94c14c6a277d1b783a1ceddbc0e0852443ea5847 | [
"MIT"
] | null | null | null | Otree-Base/inicio/__init__.py | abnercasallo/source | 94c14c6a277d1b783a1ceddbc0e0852443ea5847 | [
"MIT"
] | null | null | null | from otree.api import *
c = Currency  # conventional oTree shorthand for constructing currency amounts
# App description shown in the oTree admin interface.
doc = """
Your app description
"""
class Constants(BaseConstants):
    """App-level constants required by oTree."""
    name_in_url = 'inicio'  # URL segment identifying this app
    players_per_group = None  # None: players are not grouped
    num_rounds = 1
class Subsession(BaseSubsession):
    """No subsession-level state is needed for this app."""
    pass
class Group(BaseGroup):
    """No group-level state is needed for this app."""
    pass
class Player(BasePlayer):
    # Informed-consent answer ('No' / 'Sí'), rendered as horizontal radios.
    acepta = models.StringField(choices=['No', 'Sí'], widget=widgets.RadioSelectHorizontal, )
    pass
# PAGES
# Page classes below; display order is set by page_sequence at the module end.
class instrucciones(Page):
    """Instructions page; template-only, no custom behaviour defined here."""
    pass
class ResultsWaitPage(WaitPage):
    """oTree WaitPage placeholder; no custom behaviour defined here."""
    pass
class ConsentimientoInformado(Page):
    """Informed-consent page; collects the player's 'acepta' answer."""
    form_model = 'player'
    form_fields = ['acepta']
    pass
class agradecimiento(Page):
    """Thank-you page, shown only to players who declined consent."""
    def is_displayed(player: Player):
        return player.acepta == "No" #### Unlike oTree v3 (which used self), newer oTree passes the player directly.
class instrucciones_time(Page):
    """Timed-instructions page; template-only, no custom behaviour here."""
    pass
class info_general(Page):
    """General-information page; template-only, no custom behaviour here."""
    pass
class calculo(Page):
    """Calculation page; template-only, no custom behaviour here."""
    pass
class test(Page):
    """Test page; template-only, no custom behaviour here."""
    pass
# Order in which pages are shown to each participant.
# NOTE(review): info_general and ResultsWaitPage are defined above but absent
# from the sequence — confirm that is intentional.
page_sequence = [ ConsentimientoInformado, agradecimiento, instrucciones, calculo, instrucciones_time,test,]
| 17.883333 | 110 | 0.664492 |
d0c65748cefbf60b1150c16bbd4ae8ffe3f3a541 | 6,210 | py | Python | ipproxytool/spiders/validator/httpbin.py | xlj44400/IPProxyTool | 416dc2bffeced8344f8156b5606376e93a7be5cf | [
"MIT"
] | 1 | 2017-11-10T15:29:35.000Z | 2017-11-10T15:29:35.000Z | ipproxytool/spiders/validator/httpbin.py | xlj44400/IPProxyTool | 416dc2bffeced8344f8156b5606376e93a7be5cf | [
"MIT"
] | null | null | null | ipproxytool/spiders/validator/httpbin.py | xlj44400/IPProxyTool | 416dc2bffeced8344f8156b5606376e93a7be5cf | [
"MIT"
] | 2 | 2018-09-27T06:36:00.000Z | 2019-04-16T06:06:26.000Z | #-*- coding: utf-8 -*-
import json
import time
import datetime
import logging
import requests
import config
import utils
from scrapy import Request
from validator import Validator
class HttpBinSpider(Validator):
    """Validate stored proxies against httpbin.org.

    For every proxy in this spider's own result table and in the shared
    free-proxy table, fetch httpbin's /get?show_env=1 over both HTTP and
    HTTPS through the proxy.  The echoed request headers reveal whether the
    proxy leaks our real IP (anonymity level); the round-trip time gives the
    proxy's speed.  Results are written back to the spider's table.
    """
    name = 'httpbin'
    concurrent_requests = 16

    def __init__(self, name = None, **kwargs):
        super(HttpBinSpider, self).__init__(name, **kwargs)
        self.timeout = 20
        # Same endpoint over plain HTTP and HTTPS; the HTTPS probe tells us
        # whether the proxy supports tunnelled (CONNECT) traffic.
        self.urls = [
            'http://httpbin.org/get?show_env=1',
            'https://httpbin.org/get?show_env=1',
        ]
        self.headers = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
            "Accept-Encoding": "gzip, deflate, br",
            "Accept-Language": "en-US,en;q=0.5",
            "Host": "httpbin.org",
            "Upgrade-Insecure-Requests": "1",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:51.0) Gecko/20100101 Firefox/51.0"
        }
        # Our real public IP, filled in by init(); used to detect transparent
        # proxies that forward the client address.
        self.origin_ip = ''
        self.init()

    def init(self):
        """Fetch our own public IP (no proxy) for the anonymity comparison."""
        super(HttpBinSpider, self).init()
        r = requests.get(url = self.urls[0])
        data = json.loads(r.text)
        self.origin_ip = data.get('origin', '')
        utils.log('origin ip:%s' % self.origin_ip)

    def start_requests(self):
        """Yield one validation Request per (proxy, url) pair.

        Proxies come from two tables — this spider's own table first, then the
        shared free-proxy table — walked as one concatenated id list.
        """
        count = utils.get_table_length(self.sql, self.name)
        count_free = utils.get_table_length(self.sql, config.free_ipproxy_table)
        ids = utils.get_table_ids(self.sql, self.name)
        ids_free = utils.get_table_ids(self.sql, config.free_ipproxy_table)
        for i in range(0, count + count_free):
            table = self.name if (i < count) else config.free_ipproxy_table
            id = ids[i] if i < count else ids_free[i - len(ids)]
            proxy = utils.get_proxy_info(self.sql, table, id)
            # BUGFIX: compare to None with `is`, not `==`.
            if proxy is None:
                continue
            for url in self.urls:
                https = 'yes' if 'https' in url else 'no'
                yield Request(
                    url = url,
                    headers = self.headers,
                    dont_filter = True,
                    # Probe HTTPS capability first (lower number = higher priority).
                    priority = 0 if https == 'yes' else 10,
                    meta = {
                        'cur_time': time.time(),
                        'download_timeout': self.timeout,
                        'proxy_info': proxy,
                        'table': table,
                        'id': proxy.get('id'),
                        'https': https,
                        'proxy': 'http://%s:%s' % (proxy.get('ip'), proxy.get('port')),
                        'vali_count': proxy.get('vali_count'),
                    },
                    callback = self.success_parse,
                    errback = self.error_parse,
                )

    def success_parse(self, response):
        """Record speed/anonymity for a proxy that answered the probe."""
        utils.log('success_parse proxy:%s meta:%s' % (str(response.meta.get('proxy_info')), response.meta))
        proxy = response.meta.get('proxy_info')
        table = response.meta.get('table')
        id = response.meta.get('id')
        ip = proxy.get('ip')
        https = response.meta.get('https')
        self.save_page(ip, response.body)
        # BUGFIX: the old check was `response.body.find(mark) or mark is ''`,
        # which is wrong twice over: find() returns -1 (truthy) when the mark
        # is ABSENT and 0 (falsy) when it is at the very start, and `is ''`
        # compares string identity.  Use emptiness + membership instead.
        if not self.success_mark or self.success_mark in response.body:
            speed = time.time() - response.meta.get('cur_time')
            utils.log('speed:%s table:%s id:%s https:%s' % (speed, table, id, https))
            if https == 'no':
                data = json.loads(response.body)
                origin = data.get('origin')
                headers = data.get('headers')
                x_forwarded_for = headers.get('X-Forwarded-For', None)
                x_real_ip = headers.get('X-Real-Ip', None)
                via = headers.get('Via', None)
                # Anonymity: 3 = transparent (our IP visible), 2 = proxy
                # announced via a Via header, 1 = forwarding headers present
                # without our IP.  NOTE(review): ordering kept as-is from the
                # original; confirm the levels against the project schema.
                anonymity = 3
                if self.origin_ip in origin:
                    anonymity = 3
                elif via is not None:
                    anonymity = 2
                elif x_forwarded_for is not None and x_real_ip is not None:
                    anonymity = 1
                if table == self.name:
                    if speed > self.timeout:
                        # Too slow for a validated entry: drop it.
                        command = utils.get_delete_data_command(table, id)
                        self.sql.execute(command)
                    else:
                        # BUGFIX: meta['vali_count'] can be present but None
                        # (proxy.get('vali_count') above), which made `+ 1`
                        # raise; coalesce to 0 first.
                        vali_count = (response.meta.get('vali_count') or 0) + 1
                        # NOTE(review): SQL is built by string formatting; the
                        # values come from our own DB, but parameterized
                        # queries would be safer.
                        command = "UPDATE {name} SET speed={speed}, https='{https}', vali_count={vali_count}, " \
                                  "anonymity={anonymity}, save_time={save_time} WHERE id={id}". \
                            format(name = self.name, speed = speed, https = https, vali_count = vali_count,
                                   anonymity = anonymity, id = id, save_time = 'NOW()')
                        self.sql.execute(command)
                else:
                    # Free-table proxy that validated in time: promote it into
                    # this spider's own table.
                    if speed < self.timeout:
                        command = utils.get_insert_data_command(self.name)
                        msg = (None, proxy.get('ip'), proxy.get('port'), proxy.get('country'), anonymity,
                               https, speed, proxy.get('source'), None, 1)
                        self.sql.insert_data(command, msg, commit = True)
            elif https == 'yes':
                # HTTPS probe succeeded: flag the (already stored) proxy.
                command = "UPDATE {name} SET https=\'{https}\' WHERE ip=\'{ip}\'". \
                    format(name = self.name, https = https, ip = ip)
                self.sql.execute(command)
        else:
            # Response did not contain the success mark: leave the row as-is.
            pass

    def error_parse(self, failure):
        """Delete a validated proxy whose plain-HTTP probe failed."""
        request = failure.request
        utils.log('error_parse value:%s url:%s meta:%s' % (failure.value, request.url, request.meta))
        https = request.meta.get('https')
        if https == 'no':
            table = request.meta.get('table')
            id = request.meta.get('id')
            if table == self.name:
                command = utils.get_delete_data_command(table, id)
                self.sql.execute(command)
        else:
            # TODO: on HTTPS validation failure, handle specific error types
            # instead of silently ignoring them.
            pass
| 39.55414 | 113 | 0.501932 |
13eac5cc71a87eafcd0e6e29147adc6dbb52c602 | 1,675 | py | Python | database_setup.py | rubinsaifi/udacity-catalog-project | 2680ddad1aa2e79d2932f996e2f657cf2c98a5e1 | [
"Unlicense"
] | null | null | null | database_setup.py | rubinsaifi/udacity-catalog-project | 2680ddad1aa2e79d2932f996e2f657cf2c98a5e1 | [
"Unlicense"
] | null | null | null | database_setup.py | rubinsaifi/udacity-catalog-project | 2680ddad1aa2e79d2932f996e2f657cf2c98a5e1 | [
"Unlicense"
] | null | null | null | from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class User(Base):
    """An authenticated user; referenced as owner by Sport and MenuItem."""
    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    name = Column(String(250), nullable=False)
    email = Column(String(250), nullable=False)
    picture = Column(String(250))  # picture reference (presumably a URL) — optional
class Sport(Base):
    """A sport category; owned by the User who created it."""
    __tablename__ = 'sport'
    id = Column(Integer, primary_key=True)
    name = Column(String(250), nullable=False)
    user_id = Column(Integer, ForeignKey('user.id'))  # owner of this category
    user = relationship(User)
    @property
    def serialize(self):
        """Return object data in easily serializeable format"""
        return {
            'name': self.name,
            'id': self.id,
        }
class MenuItem(Base):
    """A catalog item belonging to a Sport category and owned by a User."""
    __tablename__ = 'menu_item'
    name = Column(String(80), nullable=False)
    id = Column(Integer, primary_key=True)
    description = Column(String(250))
    price = Column(String(8))  # stored as free text — format not enforced here
    sport_item = Column(String(250))
    sport_id = Column(Integer, ForeignKey('sport.id'))
    sport = relationship(Sport)
    user_id = Column(Integer, ForeignKey('user.id'))  # owner/creator of the item
    user = relationship(User)
    @property
    def serialize(self):
        """Return object data in easily serializeable format"""
        return {
            'name': self.name,
            'description': self.description,
            'id': self.id,
            'price': self.price,
            'sport_item': self.sport_item,
        }
# Create the SQLite database file and emit all tables declared above.
engine = create_engine('sqlite:///sportitemwithusers.db')
Base.metadata.create_all(engine)
| 27.016129 | 63 | 0.65194 |
2e5ff9b38b6907b59b833897aa8902368419dda3 | 132 | py | Python | backend/takeout/customer/apps.py | BillBillBillBill/laughing-garbanzo | 27c66dcc4f0e045ae060255679a2aa68c0f744d2 | [
"MIT"
] | 15 | 2016-08-03T08:11:36.000Z | 2022-03-24T03:21:06.000Z | backend/takeout/customer/apps.py | BillBillBillBill/laughing-garbanzo | 27c66dcc4f0e045ae060255679a2aa68c0f744d2 | [
"MIT"
] | 14 | 2020-06-05T23:06:45.000Z | 2022-03-12T00:00:18.000Z | backend/takeout/customer/apps.py | BillBillBillBill/laughing-garbanzo | 27c66dcc4f0e045ae060255679a2aa68c0f744d2 | [
"MIT"
] | 7 | 2016-08-03T08:11:38.000Z | 2020-12-27T08:49:10.000Z | from __future__ import unicode_literals
from django.apps import AppConfig
class CustomerConfig(AppConfig):
    """Django app configuration for the 'customer' app."""
    name = 'customer'
| 16.5 | 39 | 0.795455 |
4c68fa0b39b8aaa1747c24a54cd119cace8868d8 | 243 | py | Python | ocp_resources/machine_config_pool.py | kbidarkar/openshift-python-wrapper | 3cd4d6d3b71c82ff87f032a51510d9c9d207f6cb | [
"Apache-2.0"
] | 9 | 2021-07-05T18:35:55.000Z | 2021-12-31T03:09:39.000Z | ocp_resources/machine_config_pool.py | kbidarkar/openshift-python-wrapper | 3cd4d6d3b71c82ff87f032a51510d9c9d207f6cb | [
"Apache-2.0"
] | 418 | 2021-07-04T13:12:09.000Z | 2022-03-30T08:37:45.000Z | ocp_resources/machine_config_pool.py | kbidarkar/openshift-python-wrapper | 3cd4d6d3b71c82ff87f032a51510d9c9d207f6cb | [
"Apache-2.0"
] | 28 | 2021-07-04T12:48:18.000Z | 2022-02-22T15:19:30.000Z | from ocp_resources.resource import Resource
class MachineConfigPool(Resource):
    """Wrapper for the OpenShift MachineConfigPool cluster-scoped resource."""
    # API group that owns this resource kind.
    api_group = Resource.ApiGroup.MACHINECONFIGURATION_OPENSHIFT_IO
    class Status(Resource.Status):
        # Condition values reported while a pool rolls out machine configs.
        UPDATED = "Updated"
        UPDATING = "Updating"
| 24.3 | 67 | 0.748971 |
f9bae9cd8a37b71fee3b98e467c00c5e978c4cfd | 567 | py | Python | examples/example_nexus.py | DanPorter/babelscan | 71fa43f13a8318efbcdb412c4fca533d4b6f9ec9 | [
"Apache-2.0"
] | null | null | null | examples/example_nexus.py | DanPorter/babelscan | 71fa43f13a8318efbcdb412c4fca533d4b6f9ec9 | [
"Apache-2.0"
] | null | null | null | examples/example_nexus.py | DanPorter/babelscan | 71fa43f13a8318efbcdb412c4fca533d4b6f9ec9 | [
"Apache-2.0"
] | null | null | null | """
BabelScan Example
"""
import babelscan
# Load a single scan from a NeXus (.nxs) file.
# NOTE(review): hard-coded local path — adjust for your machine.
scan = babelscan.file_loader(r"C:\Users\dgpor\Dropbox\Python\ExamplePeaks\810002.nxs")
print(scan)
# HDF options
print(scan.find_address('energy'))  # find addresses that match name "energy"
print(scan.address('energy'))  # display address that is chosen by default
# Load HDF file (wrapper around h5py.File)
hdf = scan.load()
print(hdf.tree(detail=True))
# Reloading dataset
dataset = scan.dataset('sum')
print(dataset)
# Calling the scan directly looks up values by HDF address or by name list.
print(scan('/entry1/sample/ub_matrix'))
print(scan('azih, azik, azil'))
print(scan('h, k, l'))
| 21.807692 | 86 | 0.730159 |
9cdb03cc122113b06ed77f11de999846c326ff3c | 19,396 | py | Python | edb/server/compiler/dbstate.py | kafein/edgedb | 3cc4c5e6e11a6f25a82b061d7ba294deeb9ccb80 | [
"Apache-2.0"
] | null | null | null | edb/server/compiler/dbstate.py | kafein/edgedb | 3cc4c5e6e11a6f25a82b061d7ba294deeb9ccb80 | [
"Apache-2.0"
] | null | null | null | edb/server/compiler/dbstate.py | kafein/edgedb | 3cc4c5e6e11a6f25a82b061d7ba294deeb9ccb80 | [
"Apache-2.0"
] | null | null | null | #
# This source file is part of the EdgeDB open source project.
#
# Copyright 2016-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
from typing import * # NoQA
import dataclasses
import enum
import time
import uuid
import immutables
from edb import errors
from edb.edgeql import ast as qlast
from edb.edgeql import qltypes
from edb.schema import migrations as s_migrations
from edb.schema import objects as s_obj
from edb.schema import schema as s_schema
from edb.server import config
from . import enums
from . import sertypes
class TxAction(enum.IntEnum):
START = 1
COMMIT = 2
ROLLBACK = 3
DECLARE_SAVEPOINT = 4
RELEASE_SAVEPOINT = 5
ROLLBACK_TO_SAVEPOINT = 6
class MigrationAction(enum.IntEnum):
START = 1
POPULATE = 2
DESCRIBE = 3
ABORT = 4
COMMIT = 5
REJECT_PROPOSED = 6
@dataclasses.dataclass(frozen=True)
class BaseQuery:
sql: Tuple[bytes, ...]
@dataclasses.dataclass(frozen=True)
class NullQuery(BaseQuery):
sql: Tuple[bytes, ...] = tuple()
is_transactional: bool = True
has_dml: bool = False
@dataclasses.dataclass(frozen=True)
class Query(BaseQuery):
sql_hash: bytes
cardinality: enums.Cardinality
out_type_data: bytes
out_type_id: bytes
in_type_data: bytes
in_type_id: bytes
in_type_args: Optional[List[Param]] = None
is_transactional: bool = True
has_dml: bool = False
single_unit: bool = False
cacheable: bool = True
@dataclasses.dataclass(frozen=True)
class SimpleQuery(BaseQuery):
sql: Tuple[bytes, ...]
is_transactional: bool = True
has_dml: bool = False
single_unit: bool = False
@dataclasses.dataclass(frozen=True)
class SessionStateQuery(BaseQuery):
config_scope: Optional[qltypes.ConfigScope] = None
is_backend_setting: bool = False
requires_restart: bool = False
config_op: Optional[config.Operation] = None
is_transactional: bool = True
single_unit: bool = False
@dataclasses.dataclass(frozen=True)
class DDLQuery(BaseQuery):
user_schema: s_schema.FlatSchema
global_schema: Optional[s_schema.FlatSchema] = None
cached_reflection: Any = None
is_transactional: bool = True
single_unit: bool = False
create_db: Optional[str] = None
drop_db: Optional[str] = None
create_db_template: Optional[str] = None
has_role_ddl: bool = False
ddl_stmt_id: Optional[str] = None
@dataclasses.dataclass(frozen=True)
class TxControlQuery(BaseQuery):
action: TxAction
cacheable: bool
modaliases: Optional[immutables.Map]
is_transactional: bool = True
single_unit: bool = False
user_schema: Optional[s_schema.FlatSchema] = None
global_schema: Optional[s_schema.FlatSchema] = None
cached_reflection: Any = None
@dataclasses.dataclass(frozen=True)
class MigrationControlQuery(BaseQuery):
action: MigrationAction
tx_action: Optional[TxAction]
cacheable: bool
modaliases: Optional[immutables.Map]
is_transactional: bool = True
single_unit: bool = False
user_schema: Optional[s_schema.FlatSchema] = None
cached_reflection: Any = None
ddl_stmt_id: Optional[str] = None
@dataclasses.dataclass(frozen=True)
class Param:
name: str
required: bool
array_type_id: Optional[uuid.UUID]
#############################
@dataclasses.dataclass
class QueryUnit:
sql: Tuple[bytes, ...]
# Status-line for the compiled command; returned to front-end
# in a CommandComplete protocol message if the command is
# executed successfully. When a QueryUnit contains multiple
# EdgeQL queries, the status reflects the last query in the unit.
status: bytes
# Set only for units that contain queries that can be cached
# as prepared statements in Postgres.
sql_hash: bytes = b''
# True if all statments in *sql* can be executed inside a transaction.
# If False, they will be executed separately.
is_transactional: bool = True
# Capabilities used in this query
capabilities: enums.Capability = enums.Capability(0)
# True if this unit contains SET commands.
has_set: bool = False
# True if this unit contains ALTER/DROP/CREATE ROLE commands.
has_role_ddl: bool = False
# If tx_id is set, it means that the unit
# starts a new transaction.
tx_id: Optional[int] = None
# True if this unit is single 'COMMIT' command.
# 'COMMIT' is always compiled to a separate QueryUnit.
tx_commit: bool = False
# True if this unit is single 'ROLLBACK' command.
# 'ROLLBACK' is always compiled to a separate QueryUnit.
tx_rollback: bool = False
# True if this unit is single 'ROLLBACK TO SAVEPOINT' command.
# 'ROLLBACK TO SAVEPOINT' is always compiled to a separate QueryUnit.
tx_savepoint_rollback: bool = False
# True if it is safe to cache this unit.
cacheable: bool = False
# If non-None, contains a name of the DB that is about to be
# created/deleted. If it's the former, the IO process needs to
# introspect the new db. If it's the later, the server should
# close all inactive unused pooled connections to it.
create_db: Optional[str] = None
drop_db: Optional[str] = None
# If non-None, contains a name of the DB that will be used as
# a template database to create the database. The server should
# close all inactive unused pooled connections to the template db.
create_db_template: Optional[str] = None
# If non-None, the DDL statement will emit data packets marked
# with the indicated ID.
ddl_stmt_id: Optional[str] = None
# Cardinality of the result set. Set to NO_RESULT if the
# unit represents multiple queries compiled as one script.
cardinality: enums.Cardinality = \
enums.Cardinality.NO_RESULT
out_type_data: bytes = sertypes.NULL_TYPE_DESC
out_type_id: bytes = sertypes.NULL_TYPE_ID
in_type_data: bytes = sertypes.EMPTY_TUPLE_DESC
in_type_id: bytes = sertypes.EMPTY_TUPLE_ID
in_type_args: Optional[List[Param]] = None
# Set only when this unit contains a CONFIGURE SYSTEM command.
system_config: bool = False
# Set only when this unit contains a CONFIGURE DATABASE command.
database_config: bool = False
# Whether any configuration change requires a server restart
config_requires_restart: bool = False
# Set only when this unit contains a CONFIGURE command which
# alters a backend configuration setting.
backend_config: bool = False
config_ops: List[config.Operation] = (
dataclasses.field(default_factory=list))
modaliases: Optional[immutables.Map] = None
# If present, represents the future schema state after
# the command is run. The schema is pickled.
user_schema: Optional[bytes] = None
cached_reflection: Optional[bytes] = None
# If present, represents the future global schema state
# after the command is run. The schema is pickled.
global_schema: Optional[bytes] = None
@property
def has_ddl(self) -> bool:
return bool(self.capabilities & enums.Capability.DDL)
#############################
class ProposedMigrationStep(NamedTuple):
statements: Tuple[str, ...]
confidence: float
prompt: str
prompt_id: str
data_safe: bool
required_user_input: Tuple[Tuple[str, str]]
def to_json(self) -> Dict[str, Any]:
user_input_list = []
for var_name, var_desc in self.required_user_input:
user_input_list.append({
'placeholder': var_name,
'prompt': var_desc,
})
return {
'statements': [{'text': stmt} for stmt in self.statements],
'confidence': self.confidence,
'prompt': self.prompt,
'prompt_id': self.prompt_id,
'data_safe': self.data_safe,
'required_user_input': user_input_list,
}
class MigrationState(NamedTuple):
parent_migration: Optional[s_migrations.Migration]
initial_schema: s_schema.Schema
initial_savepoint: Optional[str]
target_schema: s_schema.Schema
guidance: s_obj.DeltaGuidance
accepted_cmds: Tuple[qlast.Command, ...]
last_proposed: Tuple[ProposedMigrationStep, ...]
class TransactionState(NamedTuple):
id: int
name: Optional[str]
user_schema: s_schema.FlatSchema
global_schema: s_schema.FlatSchema
modaliases: immutables.Map
session_config: immutables.Map
database_config: immutables.Map
system_config: immutables.Map
cached_reflection: immutables.Map[str, Tuple[str, ...]]
tx: Transaction
migration_state: Optional[MigrationState] = None
class Transaction:
_savepoints: Dict[int, TransactionState]
_constate: CompilerConnectionState
def __init__(
self,
constate,
*,
user_schema: s_schema.FlatSchema,
global_schema: s_schema.FlatSchema,
modaliases: immutables.Map,
session_config: immutables.Map,
database_config: immutables.Map,
system_config: immutables.Map,
cached_reflection: immutables.Map[str, Tuple[str, ...]],
implicit: bool = True,
) -> None:
assert not isinstance(user_schema, s_schema.ChainedSchema)
self._constate = constate
self._id = constate._new_txid()
self._implicit = implicit
self._current = TransactionState(
id=self._id,
name=None,
user_schema=user_schema,
global_schema=global_schema,
modaliases=modaliases,
session_config=session_config,
database_config=database_config,
system_config=system_config,
cached_reflection=cached_reflection,
tx=self,
)
self._state0 = self._current
self._savepoints = {}
@property
def id(self):
return self._id
def is_implicit(self):
return self._implicit
def make_explicit(self):
if self._implicit:
self._implicit = False
else:
raise errors.TransactionError('already in explicit transaction')
def declare_savepoint(self, name: str):
if self.is_implicit():
raise errors.TransactionError(
'savepoints can only be used in transaction blocks')
sp_id = self._constate._new_txid()
sp_state = self._current._replace(id=sp_id, name=name)
self._savepoints[sp_id] = sp_state
self._constate._savepoints_log[sp_id] = sp_state
return sp_id
def rollback_to_savepoint(self, name: str) -> TransactionState:
if self.is_implicit():
raise errors.TransactionError(
'savepoints can only be used in transaction blocks')
for sp in reversed(self._savepoints.values()):
if sp.name == name:
self._current = sp
return sp
raise errors.TransactionError(f'there is no {name!r} savepoint')
def release_savepoint(self, name: str):
if self.is_implicit():
raise errors.TransactionError(
'savepoints can only be used in transaction blocks')
for sp in reversed(self._savepoints.values()):
if sp.name == name:
sp_id = sp.id
break
else:
raise errors.TransactionError(f'there is no {name!r} savepoint')
self._savepoints.pop(sp_id)
def get_schema(self, std_schema: s_schema.FlatSchema) -> s_schema.Schema:
assert isinstance(std_schema, s_schema.FlatSchema)
return s_schema.ChainedSchema(
std_schema,
self._current.user_schema,
self._current.global_schema,
)
def get_user_schema(self) -> s_schema.FlatSchema:
return self._current.user_schema
def get_user_schema_if_updated(self) -> Optional[s_schema.FlatSchema]:
if self._current.user_schema is self._state0.user_schema:
return None
else:
return self._current.user_schema
def get_global_schema(self) -> s_schema.FlatSchema:
return self._current.global_schema
def get_global_schema_if_updated(self) -> Optional[s_schema.FlatSchema]:
if self._current.global_schema is self._state0.global_schema:
return None
else:
return self._current.global_schema
def get_modaliases(self) -> immutables.Map:
return self._current.modaliases
def get_session_config(self) -> immutables.Map:
return self._current.session_config
def get_database_config(self) -> immutables.Map:
return self._current.database_config
def get_system_config(self) -> immutables.Map:
return self._current.system_config
def get_cached_reflection_if_updated(self):
if self._current.cached_reflection == self._state0.cached_reflection:
return None
else:
return self._current.cached_reflection
def get_cached_reflection(self) -> immutables.Map[str, Tuple[str, ...]]:
return self._current.cached_reflection
def get_migration_state(self) -> Optional[MigrationState]:
return self._current.migration_state
def update_schema(self, new_schema: s_schema.Schema):
assert isinstance(new_schema, s_schema.ChainedSchema)
self._current = self._current._replace(
user_schema=new_schema.get_top_schema(),
global_schema=new_schema.get_global_schema(),
)
def update_modaliases(self, new_modaliases: immutables.Map):
self._current = self._current._replace(modaliases=new_modaliases)
def update_session_config(self, new_config: immutables.Map):
self._current = self._current._replace(session_config=new_config)
def update_database_config(self, new_config: immutables.Map):
self._current = self._current._replace(database_config=new_config)
def update_cached_reflection(
self,
new: immutables.Map[str, Tuple[str, ...]],
) -> None:
self._current = self._current._replace(cached_reflection=new)
def update_migration_state(
self, mstate: Optional[MigrationState]
) -> None:
self._current = self._current._replace(migration_state=mstate)
class CompilerConnectionState:
__slots__ = ('_savepoints_log', '_current_tx', '_tx_count',)
_savepoints_log: Dict[int, TransactionState]
def __init__(
self,
*,
user_schema: s_schema.Schema,
global_schema: s_schema.Schema,
modaliases: immutables.Map,
session_config: immutables.Map,
database_config: immutables.Map,
system_config: immutables.Map,
cached_reflection: FrozenSet[str]
):
self._tx_count = time.monotonic_ns()
self._init_current_tx(
user_schema=user_schema,
global_schema=global_schema,
modaliases=modaliases,
session_config=session_config,
database_config=database_config,
system_config=system_config,
cached_reflection=cached_reflection,
)
self._savepoints_log = {}
def _new_txid(self):
self._tx_count += 1
return self._tx_count
def _init_current_tx(
self,
*,
user_schema,
global_schema,
modaliases,
session_config,
database_config,
system_config,
cached_reflection
):
assert isinstance(user_schema, s_schema.FlatSchema)
self._current_tx = Transaction(
self,
user_schema=user_schema,
global_schema=global_schema,
modaliases=modaliases,
session_config=session_config,
database_config=database_config,
system_config=system_config,
cached_reflection=cached_reflection,
)
def can_sync_to_savepoint(self, spid):
return spid in self._savepoints_log
def sync_to_savepoint(self, spid: int) -> None:
"""Synchronize the compiler state with the current DB state."""
if not self.can_sync_to_savepoint(spid):
raise RuntimeError(f'failed to lookup savepoint with id={spid}')
sp = self._savepoints_log[spid]
self._current_tx = sp.tx
self._current_tx._current = sp
self._current_tx._id = spid
# Cleanup all savepoints declared after the one we rolled back to
# in the transaction we have now set as current.
for id in tuple(self._current_tx._savepoints):
if id > spid:
self._current_tx._savepoints.pop(id)
# Cleanup all savepoints declared after the one we rolled back to
# in the global savepoints log.
for id in tuple(self._savepoints_log):
if id > spid:
self._savepoints_log.pop(id)
def current_tx(self) -> Transaction:
return self._current_tx
def start_tx(self):
if self._current_tx.is_implicit():
self._current_tx.make_explicit()
else:
raise errors.TransactionError('already in transaction')
def rollback_tx(self):
# Note that we might not be in a transaction as we allow
# ROLLBACKs outside of transaction blocks (just like Postgres).
prior_state = self._current_tx._state0
self._init_current_tx(
user_schema=prior_state.user_schema,
global_schema=prior_state.global_schema,
modaliases=prior_state.modaliases,
session_config=prior_state.session_config,
database_config=prior_state.database_config,
system_config=prior_state.system_config,
cached_reflection=prior_state.cached_reflection,
)
return prior_state
def commit_tx(self):
if self._current_tx.is_implicit():
raise errors.TransactionError('cannot commit: not in transaction')
latest_state = self._current_tx._current
self._init_current_tx(
user_schema=latest_state.user_schema,
global_schema=latest_state.global_schema,
modaliases=latest_state.modaliases,
session_config=latest_state.session_config,
database_config=latest_state.database_config,
system_config=latest_state.system_config,
cached_reflection=latest_state.cached_reflection,
)
return latest_state
def sync_tx(self, txid: int) -> None:
    """Make *txid* the active transaction, restoring a savepoint if needed.

    Raises:
        errors.InternalServerError: if *txid* matches neither the current
            transaction nor any known savepoint.
    """
    if self._current_tx.id == txid:
        # Already synchronized; nothing to do.
        return
    if not self.can_sync_to_savepoint(txid):
        raise errors.InternalServerError(
            f'failed to lookup transaction or savepoint with id={txid}'
        )  # pragma: no cover
    self.sync_to_savepoint(txid)
| 30.30625 | 78 | 0.672922 |
63ba5c5d026c3eac701e3ee0719967edc9151997 | 5,505 | py | Python | tensorflow_probability/python/positive_semidefinite_kernels/kumaraswamy_transformed_test.py | nxdao2000/probability | 33d2bc1cb0e7b6284579ea7f3692b9d056e0d700 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/positive_semidefinite_kernels/kumaraswamy_transformed_test.py | nxdao2000/probability | 33d2bc1cb0e7b6284579ea7f3692b9d056e0d700 | [
"Apache-2.0"
] | null | null | null | tensorflow_probability/python/positive_semidefinite_kernels/kumaraswamy_transformed_test.py | nxdao2000/probability | 33d2bc1cb0e7b6284579ea7f3692b9d056e0d700 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Bijectors Tests."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from tensorflow_probability.python import positive_semidefinite_kernels as tfpk
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import,g-import-not-at-top
def _kumaraswamy_warp(x, c1, c0):
return 1. - (1. - x ** c1) ** c0
def _numpy_exp_quad(amplitude, length_scale, x, y, feature_ndims):
dims = tuple(range(-feature_ndims, 0, 1))
return amplitude ** 2 * np.exp(
-0.5 * np.sum((x - y) ** 2, axis=dims) / length_scale ** 2)
def _numpy_exp_quad_matrix(amplitude, length_scale, x, feature_ndims):
  # Pairwise kernel matrix over the points in `x`, built by broadcasting a
  # "row" view against a "column" view of the example axis.
  rows = np.expand_dims(x, -feature_ndims - 2)
  cols = np.expand_dims(x, -feature_ndims - 1)
  return _numpy_exp_quad(amplitude, length_scale, rows, cols, feature_ndims)
@test_util.run_all_in_graph_and_eager_modes
class _KumaraswamyTransformedTest(parameterized.TestCase):
  # Abstract base: concrete subclasses must set `dtype` (np.float32/64).
  @parameterized.parameters(
      {'feature_ndims': 1, 'dims': 3},
      {'feature_ndims': 1, 'dims': 4},
      {'feature_ndims': 2, 'dims': 2},
      {'feature_ndims': 2, 'dims': 3},
      {'feature_ndims': 3, 'dims': 2},
      {'feature_ndims': 3, 'dims': 3})
  def testRetrieveIdentityTransform(self, feature_ndims, dims):
    """With c1 == c0 == 1 the warp is the identity, so the transformed
    kernel must match the plain exponentiated-quadratic kernel."""
    amplitude = np.random.uniform(
        low=1., high=10., size=[10, 2]).astype(self.dtype)
    inner_length_scale = self.dtype(1.)
    kernel = tfpk.ExponentiatedQuadratic(
        amplitude, inner_length_scale, feature_ndims)
    input_shape = [dims] * feature_ndims
    # This is the identity transform.
    concentration1 = self.dtype(1.)
    concentration0 = self.dtype(1.)
    kum_kernel = tfpk.KumaraswamyTransformed(kernel, concentration1,
                                             concentration0)
    x = np.random.uniform(size=input_shape).astype(self.dtype)
    y = np.random.uniform(size=input_shape).astype(self.dtype)
    self.assertAllClose(
        _numpy_exp_quad(
            amplitude,
            inner_length_scale, x, y, feature_ndims=feature_ndims),
        self.evaluate(kum_kernel.apply(x, y)))
    z = np.random.uniform(size=[10] + input_shape).astype(self.dtype)
    self.assertAllClose(
        _numpy_exp_quad_matrix(
            amplitude[..., None, None], inner_length_scale, z,
            feature_ndims=feature_ndims),
        self.evaluate(kum_kernel.matrix(z, z)))
  @parameterized.parameters(
      {'feature_ndims': 1, 'dims': 3},
      {'feature_ndims': 1, 'dims': 4},
      {'feature_ndims': 2, 'dims': 2},
      {'feature_ndims': 2, 'dims': 3},
      {'feature_ndims': 3, 'dims': 2},
      {'feature_ndims': 3, 'dims': 3})
  def testKernelParametersBroadcast(self, feature_ndims, dims):
    """Checks that kernel and concentration batch shapes broadcast
    together, comparing against the NumPy reference implementation."""
    # Batch shape [10, 2]
    amplitude = np.random.uniform(
        low=1., high=10., size=[10, 2]).astype(self.dtype)
    inner_length_scale = self.dtype(1.)
    kernel = tfpk.ExponentiatedQuadratic(
        amplitude, inner_length_scale, feature_ndims)
    input_shape = [dims] * feature_ndims
    # Batch shape [3, 1, 2].
    concentration1 = np.random.uniform(
        2, 5, size=([3, 1, 2] + input_shape)).astype(self.dtype)
    concentration0 = np.random.uniform(
        2, 5, size=([3, 1, 2] + input_shape)).astype(self.dtype)
    kum_kernel = tfpk.KumaraswamyTransformed(kernel, concentration1,
                                             concentration0)
    x = np.random.uniform(size=input_shape).astype(self.dtype)
    y = np.random.uniform(size=input_shape).astype(self.dtype)
    self.assertAllClose(
        _numpy_exp_quad(
            amplitude,
            inner_length_scale,
            _kumaraswamy_warp(x, concentration1, concentration0),
            _kumaraswamy_warp(y, concentration1, concentration0),
            feature_ndims=feature_ndims
        ),
        self.evaluate(kum_kernel.apply(x, y)), rtol=1e-4, atol=1e-4)
    z = np.random.uniform(size=[10] + input_shape).astype(self.dtype)
    expanded_c1 = np.expand_dims(concentration1, -(feature_ndims + 1))
    expanded_c0 = np.expand_dims(concentration0, -(feature_ndims + 1))
    self.assertAllClose(
        _numpy_exp_quad_matrix(
            amplitude[..., None, None],
            inner_length_scale,
            _kumaraswamy_warp(z, expanded_c1, expanded_c0),
            feature_ndims=feature_ndims
        ),
        self.evaluate(kum_kernel.matrix(z, z)), atol=1e-4, rtol=1e-4)
# Concrete float32 instantiation of the parameterized base test class.
class KumaraswamyTransformedFloat32Test(
    _KumaraswamyTransformedTest, tf.test.TestCase):
  dtype = np.float32
# Concrete float64 instantiation of the parameterized base test class.
class KumaraswamyTransformedFloat64Test(
    _KumaraswamyTransformedTest, tf.test.TestCase):
  dtype = np.float64
# Remove the base class so the test runner does not try to collect it
# directly (its tests use `self.dtype`, which only the subclasses define).
del _KumaraswamyTransformedTest
if __name__ == '__main__':
  tf.test.main()
| 34.622642 | 115 | 0.665213 |
c28eef83aa6c2c3009824d6328b8ab4b1889d4b8 | 360 | py | Python | aft/users/admin.py | InDeepShip/InDeepShip | 856f868a6c20223a6b0f221158813059a4479264 | [
"MIT"
] | 1 | 2021-01-03T18:59:29.000Z | 2021-01-03T18:59:29.000Z | aft/users/admin.py | InDeepShip/InDeepShip | 856f868a6c20223a6b0f221158813059a4479264 | [
"MIT"
] | null | null | null | aft/users/admin.py | InDeepShip/InDeepShip | 856f868a6c20223a6b0f221158813059a4479264 | [
"MIT"
] | null | null | null | from django.contrib import admin
from django.contrib.auth import get_user_model
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import Group
# Register your models here.
from .forms import CustomUserCreationForm, CustomUserChangeForm
from .models import CustomUser
# Expose the project's custom user model in the Django admin.
admin.site.register(CustomUser)
# Hide the built-in auth Group model from the admin
# (presumably unused by this project — confirm before relying on this).
admin.site.unregister(Group)
| 30 | 63 | 0.844444 |
3db64e4dab684938500f018a7f76b461add643e8 | 29,259 | py | Python | venv/lib/python3.10/site-packages/celery/schedules.py | SujalKokh/django-celery-sqs | 5c7a6e79fac2e4585d3973a47eea9349396fcfea | [
"MIT"
] | 1 | 2021-05-29T21:17:56.000Z | 2021-05-29T21:17:56.000Z | venv/lib/python3.10/site-packages/celery/schedules.py | SujalKokh/django-celery-sqs | 5c7a6e79fac2e4585d3973a47eea9349396fcfea | [
"MIT"
] | null | null | null | venv/lib/python3.10/site-packages/celery/schedules.py | SujalKokh/django-celery-sqs | 5c7a6e79fac2e4585d3973a47eea9349396fcfea | [
"MIT"
] | 1 | 2021-06-30T12:43:39.000Z | 2021-06-30T12:43:39.000Z | """Schedules define the intervals at which periodic tasks run."""
import numbers
import re
from bisect import bisect, bisect_left
from collections import namedtuple
from collections.abc import Iterable
from datetime import datetime, timedelta
from kombu.utils.objects import cached_property
from . import current_app
from .utils.collections import AttributeDict
from .utils.time import (ffwd, humanize_seconds, localize, maybe_make_aware,
maybe_timedelta, remaining, timezone, weekday)
__all__ = (
    'ParseException', 'schedule', 'crontab', 'crontab_parser',
    'maybe_schedule', 'solar',
)
# Result type of every ``is_due()``: (is_due: bool, next: seconds until
# the scheduler should check again).
schedstate = namedtuple('schedstate', ('is_due', 'next'))
# Error-message templates, filled in with str.format().
CRON_PATTERN_INVALID = """\
Invalid crontab pattern. Valid range is {min}-{max}. \
'{value}' was found.\
"""
CRON_INVALID_TYPE = """\
Argument cronspec needs to be of any of the following types: \
int, str, or an iterable type. {type!r} was given.\
"""
CRON_REPR = """\
<crontab: {0._orig_minute} {0._orig_hour} {0._orig_day_of_week} \
{0._orig_day_of_month} {0._orig_month_of_year} (m/h/d/dM/MY)>\
"""
SOLAR_INVALID_LATITUDE = """\
Argument latitude {lat} is invalid, must be between -90 and 90.\
"""
SOLAR_INVALID_LONGITUDE = """\
Argument longitude {lon} is invalid, must be between -180 and 180.\
"""
SOLAR_INVALID_EVENT = """\
Argument event "{event}" is invalid, must be one of {all_events}.\
"""
def cronfield(s):
    """Default a missing (None) crontab field to the ``'*'`` wildcard."""
    if s is None:
        return '*'
    return s
class ParseException(Exception):
    """Raised by :class:`crontab_parser` when the input can't be parsed.

    The exception message describes the offending part of the expression.
    """
class BaseSchedule:
    """Common machinery shared by all schedule types.

    Subclasses must implement :meth:`remaining_estimate` and :meth:`is_due`.
    """
    def __init__(self, nowfun=None, app=None):
        self.nowfun = nowfun
        self._app = app
    def now(self):
        # Prefer the injected clock when given, else the app's clock.
        return (self.nowfun or self.app.now)()
    def remaining_estimate(self, last_run_at):
        raise NotImplementedError()
    def is_due(self, last_run_at):
        raise NotImplementedError()
    def maybe_make_aware(self, dt):
        # Attach the schedule's timezone to naive datetimes.
        return maybe_make_aware(dt, self.tz)
    @property
    def app(self):
        # Fall back to the current app when none was passed explicitly.
        return self._app or current_app._get_current_object()
    @app.setter  # noqa
    def app(self, app):
        self._app = app
    @cached_property
    def tz(self):
        return self.app.timezone
    @cached_property
    def utc_enabled(self):
        return self.app.conf.enable_utc
    def to_local(self, dt):
        if not self.utc_enabled:
            return timezone.to_local_fallback(dt)
        return dt
    def __eq__(self, other):
        if isinstance(other, BaseSchedule):
            return other.nowfun == self.nowfun
        return NotImplemented
class schedule(BaseSchedule):
    """Schedule for periodic task.
    Arguments:
        run_every (float, ~datetime.timedelta): Time interval.
        relative (bool): If set to True the run time will be rounded to the
            resolution of the interval.
        nowfun (Callable): Function returning the current date and time
            (:class:`~datetime.datetime`).
        app (Celery): Celery app instance.
    """
    relative = False
    def __init__(self, run_every=None, relative=False, nowfun=None, app=None):
        self.run_every = maybe_timedelta(run_every)
        self.relative = relative
        super().__init__(nowfun=nowfun, app=app)
    def remaining_estimate(self, last_run_at):
        # Time left until the next run, as a timedelta (may be negative).
        return remaining(
            self.maybe_make_aware(last_run_at), self.run_every,
            self.maybe_make_aware(self.now()), self.relative,
        )
    def is_due(self, last_run_at):
        """Return tuple of ``(is_due, next_time_to_check)``.
        Notes:
            - next time to check is in seconds.
            - ``(True, 20)``, means the task should be run now, and the next
                time to check is in 20 seconds.
            - ``(False, 12.3)``, means the task is not due, but that the
              scheduler should check again in 12.3 seconds.
        The next time to check is used to save energy/CPU cycles,
        it does not need to be accurate but will influence the precision
        of your schedule.  You must also keep in mind
        the value of :setting:`beat_max_loop_interval`,
        that decides the maximum number of seconds the scheduler can
        sleep between re-checking the periodic task intervals.  So if you
        have a task that changes schedule at run-time then your next_run_at
        check will decide how long it will take before a change to the
        schedule takes effect.  The max loop interval takes precedence
        over the next check at value returned.
        .. admonition:: Scheduler max interval variance
            The default max loop interval may vary for different schedulers.
            For the default scheduler the value is 5 minutes, but for example
            the :pypi:`django-celery-beat` database scheduler the value
            is 5 seconds.
        """
        last_run_at = self.maybe_make_aware(last_run_at)
        rem_delta = self.remaining_estimate(last_run_at)
        remaining_s = max(rem_delta.total_seconds(), 0)
        if remaining_s == 0:
            return schedstate(is_due=True, next=self.seconds)
        return schedstate(is_due=False, next=remaining_s)
    def __repr__(self):
        return f'<freq: {self.human_seconds}>'
    def __eq__(self, other):
        if isinstance(other, schedule):
            return self.run_every == other.run_every
        return self.run_every == other
    def __ne__(self, other):
        return not self.__eq__(other)
    def __reduce__(self):
        # Support pickling (used when beat persists its schedule).
        return self.__class__, (self.run_every, self.relative, self.nowfun)
    @property
    def seconds(self):
        # Interval length in (non-negative) seconds.
        return max(self.run_every.total_seconds(), 0)
    @property
    def human_seconds(self):
        return humanize_seconds(self.seconds)
class crontab_parser:
    """Parser for Crontab expressions.
    Any expression of the form 'groups'
    (see BNF grammar below) is accepted and expanded to a set of numbers.
    These numbers represent the units of time that the Crontab needs to
    run on:
    .. code-block:: bnf
        digit   :: '0'..'9'
        dow     :: 'a'..'z'
        number  :: digit+ | dow+
        steps   :: number
        range   :: number ( '-' number ) ?
        numspec :: '*' | range
        expr    :: numspec ( '/' steps ) ?
        groups  :: expr ( ',' expr ) *
    The parser is a general purpose one, useful for parsing hours, minutes and
    day of week expressions.  Example usage:
    .. code-block:: pycon
        >>> minutes = crontab_parser(60).parse('*/15')
        [0, 15, 30, 45]
        >>> hours = crontab_parser(24).parse('*/4')
        [0, 4, 8, 12, 16, 20]
        >>> day_of_week = crontab_parser(7).parse('*')
        [0, 1, 2, 3, 4, 5, 6]
    It can also parse day of month and month of year expressions if initialized
    with a minimum of 1.  Example usage:
    .. code-block:: pycon
        >>> days_of_month = crontab_parser(31, 1).parse('*/3')
        [1, 4, 7, 10, 13, 16, 19, 22, 25, 28, 31]
        >>> months_of_year = crontab_parser(12, 1).parse('*/2')
        [1, 3, 5, 7, 9, 11]
        >>> months_of_year = crontab_parser(12, 1).parse('2-12/2')
        [2, 4, 6, 8, 10, 12]
    The maximum possible expanded value returned is found by the formula:
    :math:`max_ + min_ - 1`
    """
    ParseException = ParseException
    # Regex fragments for the grammar above.
    _range = r'(\w+?)-(\w+)'
    _steps = r'/(\w+)?'
    _star = r'\*'
    def __init__(self, max_=60, min_=0):
        self.max_ = max_
        self.min_ = min_
        # Ordered (pattern, handler) pairs; first match wins.
        self.pats = (
            (re.compile(self._range + self._steps), self._range_steps),
            (re.compile(self._range), self._expand_range),
            (re.compile(self._star + self._steps), self._star_steps),
            (re.compile('^' + self._star + '$'), self._expand_star),
        )
    def parse(self, spec):
        """Expand a full comma-separated spec into a set of ints."""
        acc = set()
        for part in spec.split(','):
            if not part:
                raise self.ParseException('empty part')
            acc |= set(self._parse_part(part))
        return acc
    def _parse_part(self, part):
        for regex, handler in self.pats:
            m = regex.match(part)
            if m:
                return handler(m.groups())
        # No pattern matched: treat as a single number/weekday literal.
        return self._expand_range((part,))
    def _expand_range(self, toks):
        fr = self._expand_number(toks[0])
        if len(toks) > 1:
            to = self._expand_number(toks[1])
            if to < fr:  # Wrap around max_ if necessary
                return (list(range(fr, self.min_ + self.max_)) +
                        list(range(self.min_, to + 1)))
            return list(range(fr, to + 1))
        return [fr]
    def _range_steps(self, toks):
        if len(toks) != 3 or not toks[2]:
            raise self.ParseException('empty filter')
        return self._expand_range(toks[:2])[::int(toks[2])]
    def _star_steps(self, toks):
        if not toks or not toks[0]:
            raise self.ParseException('empty filter')
        return self._expand_star()[::int(toks[0])]
    def _expand_star(self, *args):
        return list(range(self.min_, self.max_ + self.min_))
    def _expand_number(self, s):
        """Convert a literal (int or weekday name) to a validated int."""
        if isinstance(s, str) and s[0] == '-':
            raise self.ParseException('negative numbers not supported')
        try:
            i = int(s)
        except ValueError:
            try:
                i = weekday(s)
            except KeyError:
                raise ValueError(f'Invalid weekday literal {s!r}.')
        max_val = self.min_ + self.max_ - 1
        if i > max_val:
            raise ValueError(
                f'Invalid end range: {i} > {max_val}.')
        if i < self.min_:
            raise ValueError(
                f'Invalid beginning range: {i} < {self.min_}.')
        return i
class crontab(BaseSchedule):
    """Crontab schedule.
    A Crontab can be used as the ``run_every`` value of a
    periodic task entry to add :manpage:`crontab(5)`-like scheduling.
    Like a :manpage:`cron(5)`-job, you can specify units of time of when
    you'd like the task to execute.  It's a reasonably complete
    implementation of :command:`cron`'s features, so it should provide a fair
    degree of scheduling needs.
    You can specify a minute, an hour, a day of the week, a day of the
    month, and/or a month in the year in any of the following formats:
    .. attribute:: minute
        - A (list of) integers from 0-59 that represent the minutes of
          an hour of when execution should occur; or
        - A string representing a Crontab pattern.  This may get pretty
          advanced, like ``minute='*/15'`` (for every quarter) or
          ``minute='1,13,30-45,50-59/2'``.
    .. attribute:: hour
        - A (list of) integers from 0-23 that represent the hours of
          a day of when execution should occur; or
        - A string representing a Crontab pattern.  This may get pretty
          advanced, like ``hour='*/3'`` (for every three hours) or
          ``hour='0,8-17/2'`` (at midnight, and every two hours during
          office hours).
    .. attribute:: day_of_week
        - A (list of) integers from 0-6, where Sunday = 0 and Saturday =
          6, that represent the days of a week that execution should
          occur.
        - A string representing a Crontab pattern.  This may get pretty
          advanced, like ``day_of_week='mon-fri'`` (for weekdays only).
          (Beware that ``day_of_week='*/2'`` does not literally mean
          'every two days', but 'every day that is divisible by two'!)
    .. attribute:: day_of_month
        - A (list of) integers from 1-31 that represents the days of the
          month that execution should occur.
        - A string representing a Crontab pattern.  This may get pretty
          advanced, such as ``day_of_month='2-30/2'`` (for every even
          numbered day) or ``day_of_month='1-7,15-21'`` (for the first and
          third weeks of the month).
    .. attribute:: month_of_year
        - A (list of) integers from 1-12 that represents the months of
          the year during which execution can occur.
        - A string representing a Crontab pattern.  This may get pretty
          advanced, such as ``month_of_year='*/3'`` (for the first month
          of every quarter) or ``month_of_year='2-12/2'`` (for every even
          numbered month).
    .. attribute:: nowfun
        Function returning the current date and time
        (:class:`~datetime.datetime`).
    .. attribute:: app
        The Celery app instance.
    It's important to realize that any day on which execution should
    occur must be represented by entries in all three of the day and
    month attributes.  For example, if ``day_of_week`` is 0 and
    ``day_of_month`` is every seventh day, only months that begin
    on Sunday and are also in the ``month_of_year`` attribute will have
    execution events.  Or, ``day_of_week`` is 1 and ``day_of_month``
    is '1-7,15-21' means every first and third Monday of every month
    present in ``month_of_year``.
    """
    def __init__(self, minute='*', hour='*', day_of_week='*',
                 day_of_month='*', month_of_year='*', **kwargs):
        # The original (unexpanded) specs are kept for __repr__ and pickling.
        self._orig_minute = cronfield(minute)
        self._orig_hour = cronfield(hour)
        self._orig_day_of_week = cronfield(day_of_week)
        self._orig_day_of_month = cronfield(day_of_month)
        self._orig_month_of_year = cronfield(month_of_year)
        self._orig_kwargs = kwargs
        self.hour = self._expand_cronspec(hour, 24)
        self.minute = self._expand_cronspec(minute, 60)
        self.day_of_week = self._expand_cronspec(day_of_week, 7)
        self.day_of_month = self._expand_cronspec(day_of_month, 31, 1)
        self.month_of_year = self._expand_cronspec(month_of_year, 12, 1)
        super().__init__(**kwargs)
    @staticmethod
    def _expand_cronspec(cronspec, max_, min_=0):
        """Expand cron specification.
        Takes the given cronspec argument in one of the forms:
        .. code-block:: text
            int         (like 7)
            str         (like '3-5,*/15', '*', or 'monday')
            set         (like {0,15,30,45}
            list        (like [8-17])
        And convert it to an (expanded) set representing all time unit
        values on which the Crontab triggers.  Only in case of the base
        type being :class:`str`, parsing occurs.  (It's fast and
        happens only once for each Crontab instance, so there's no
        significant performance overhead involved.)
        For the other base types, merely Python type conversions happen.
        The argument ``max_`` is needed to determine the expansion of
        ``*`` and ranges.  The argument ``min_`` is needed to determine
        the expansion of ``*`` and ranges for 1-based cronspecs, such as
        day of month or month of year.  The default is sufficient for minute,
        hour, and day of week.
        """
        if isinstance(cronspec, numbers.Integral):
            result = {cronspec}
        elif isinstance(cronspec, str):
            result = crontab_parser(max_, min_).parse(cronspec)
        elif isinstance(cronspec, set):
            result = cronspec
        elif isinstance(cronspec, Iterable):
            result = set(cronspec)
        else:
            raise TypeError(CRON_INVALID_TYPE.format(type=type(cronspec)))
        # assure the result does not preceed the min or exceed the max
        for number in result:
            if number >= max_ + min_ or number < min_:
                raise ValueError(CRON_PATTERN_INVALID.format(
                    min=min_, max=max_ - 1 + min_, value=number))
        return result
    def _delta_to_next(self, last_run_at, next_hour, next_minute):
        """Find next delta.
        Takes a :class:`~datetime.datetime` of last run, next minute and hour,
        and returns a :class:`~celery.utils.time.ffwd` for the next
        scheduled day and time.
        Only called when ``day_of_month`` and/or ``month_of_year``
        cronspec is specified to further limit scheduled task execution.
        """
        datedata = AttributeDict(year=last_run_at.year)
        days_of_month = sorted(self.day_of_month)
        months_of_year = sorted(self.month_of_year)
        def day_out_of_range(year, month, day):
            try:
                datetime(year=year, month=month, day=day)
            except ValueError:
                return True
            return False
        def is_before_last_run(year, month, day):
            return self.maybe_make_aware(datetime(year,
                                                  month,
                                                  day)) < last_run_at
        def roll_over():
            # Advance (day index, month index, year) until a valid,
            # not-in-the-past calendar date is found.
            for _ in range(2000):
                flag = (datedata.dom == len(days_of_month) or
                        day_out_of_range(datedata.year,
                                         months_of_year[datedata.moy],
                                         days_of_month[datedata.dom]) or
                        (is_before_last_run(datedata.year,
                                            months_of_year[datedata.moy],
                                            days_of_month[datedata.dom])))
                if flag:
                    datedata.dom = 0
                    datedata.moy += 1
                    if datedata.moy == len(months_of_year):
                        datedata.moy = 0
                        datedata.year += 1
                else:
                    break
            else:
                # Tried 2000 times, we're most likely in an infinite loop
                raise RuntimeError('unable to rollover, '
                                   'time specification is probably invalid')
        if last_run_at.month in self.month_of_year:
            datedata.dom = bisect(days_of_month, last_run_at.day)
            datedata.moy = bisect_left(months_of_year, last_run_at.month)
        else:
            datedata.dom = 0
            datedata.moy = bisect(months_of_year, last_run_at.month)
            if datedata.moy == len(months_of_year):
                datedata.moy = 0
        roll_over()
        while 1:
            # Keep advancing until the candidate date also satisfies
            # the day-of-week constraint.
            th = datetime(year=datedata.year,
                          month=months_of_year[datedata.moy],
                          day=days_of_month[datedata.dom])
            if th.isoweekday() % 7 in self.day_of_week:
                break
            datedata.dom += 1
            roll_over()
        return ffwd(year=datedata.year,
                    month=months_of_year[datedata.moy],
                    day=days_of_month[datedata.dom],
                    hour=next_hour,
                    minute=next_minute,
                    second=0,
                    microsecond=0)
    def __repr__(self):
        return CRON_REPR.format(self)
    def __reduce__(self):
        return (self.__class__, (self._orig_minute,
                                 self._orig_hour,
                                 self._orig_day_of_week,
                                 self._orig_day_of_month,
                                 self._orig_month_of_year), self._orig_kwargs)
    def __setstate__(self, state):
        # Calling super's init because the kwargs aren't necessarily passed in
        # the same form as they are stored by the superclass
        super().__init__(**state)
    def remaining_delta(self, last_run_at, tz=None, ffwd=ffwd):
        # pylint: disable=redefined-outer-name
        # caching global ffwd
        tz = tz or self.tz
        last_run_at = self.maybe_make_aware(last_run_at)
        now = self.maybe_make_aware(self.now())
        dow_num = last_run_at.isoweekday() % 7  # Sunday is day 0, not day 7
        execute_this_date = (
            last_run_at.month in self.month_of_year and
            last_run_at.day in self.day_of_month and
            dow_num in self.day_of_week
        )
        execute_this_hour = (
            execute_this_date and
            last_run_at.day == now.day and
            last_run_at.month == now.month and
            last_run_at.year == now.year and
            last_run_at.hour in self.hour and
            last_run_at.minute < max(self.minute)
        )
        if execute_this_hour:
            next_minute = min(minute for minute in self.minute
                              if minute > last_run_at.minute)
            delta = ffwd(minute=next_minute, second=0, microsecond=0)
        else:
            next_minute = min(self.minute)
            execute_today = (execute_this_date and
                             last_run_at.hour < max(self.hour))
            if execute_today:
                next_hour = min(hour for hour in self.hour
                                if hour > last_run_at.hour)
                delta = ffwd(hour=next_hour, minute=next_minute,
                             second=0, microsecond=0)
            else:
                next_hour = min(self.hour)
                all_dom_moy = (self._orig_day_of_month == '*' and
                               self._orig_month_of_year == '*')
                if all_dom_moy:
                    # Unrestricted day/month: only day-of-week matters,
                    # so jump to the next matching weekday.
                    next_day = min([day for day in self.day_of_week
                                    if day > dow_num] or self.day_of_week)
                    add_week = next_day == dow_num
                    delta = ffwd(
                        weeks=add_week and 1 or 0,
                        weekday=(next_day - 1) % 7,
                        hour=next_hour,
                        minute=next_minute,
                        second=0,
                        microsecond=0,
                    )
                else:
                    delta = self._delta_to_next(last_run_at,
                                                next_hour, next_minute)
        return self.to_local(last_run_at), delta, self.to_local(now)
    def remaining_estimate(self, last_run_at, ffwd=ffwd):
        """Estimate of next run time.
        Returns when the periodic task should run next as a
        :class:`~datetime.timedelta`.
        """
        # pylint: disable=redefined-outer-name
        # caching global ffwd
        return remaining(*self.remaining_delta(last_run_at, ffwd=ffwd))
    def is_due(self, last_run_at):
        """Return tuple of ``(is_due, next_time_to_run)``.
        Note:
            Next time to run is in seconds.
        SeeAlso:
            :meth:`celery.schedules.schedule.is_due` for more information.
        """
        rem_delta = self.remaining_estimate(last_run_at)
        rem = max(rem_delta.total_seconds(), 0)
        due = rem == 0
        if due:
            rem_delta = self.remaining_estimate(self.now())
            rem = max(rem_delta.total_seconds(), 0)
        return schedstate(due, rem)
    def __eq__(self, other):
        if isinstance(other, crontab):
            return (
                other.month_of_year == self.month_of_year and
                other.day_of_month == self.day_of_month and
                other.day_of_week == self.day_of_week and
                other.hour == self.hour and
                other.minute == self.minute and
                super().__eq__(other)
            )
        return NotImplemented
    def __ne__(self, other):
        res = self.__eq__(other)
        if res is NotImplemented:
            return True
        return not res
def maybe_schedule(s, relative=False, app=None):
    """Coerce a number, timedelta, or schedule object into a schedule."""
    if s is None:
        return s
    if isinstance(s, numbers.Number):
        s = timedelta(seconds=s)
    if isinstance(s, timedelta):
        return schedule(s, relative, app=app)
    # Already a schedule-like object: just bind the app.
    s.app = app
    return s
class solar(BaseSchedule):
    """Solar event.
    A solar event can be used as the ``run_every`` value of a
    periodic task entry to schedule based on certain solar events.
    Notes:
        Available event valus are:
            - ``dawn_astronomical``
            - ``dawn_nautical``
            - ``dawn_civil``
            - ``sunrise``
            - ``solar_noon``
            - ``sunset``
            - ``dusk_civil``
            - ``dusk_nautical``
            - ``dusk_astronomical``
    Arguments:
        event (str): Solar event that triggers this task.
            See note for available values.
        lat (int): The latitude of the observer.
        lon (int): The longitude of the observer.
        nowfun (Callable): Function returning the current date and time
            as a class:`~datetime.datetime`.
        app (Celery): Celery app instance.
    """
    _all_events = {
        'dawn_astronomical',
        'dawn_nautical',
        'dawn_civil',
        'sunrise',
        'solar_noon',
        'sunset',
        'dusk_civil',
        'dusk_nautical',
        'dusk_astronomical',
    }
    # Sun altitude (degrees relative to the horizon) defining each event.
    _horizons = {
        'dawn_astronomical': '-18',
        'dawn_nautical': '-12',
        'dawn_civil': '-6',
        'sunrise': '-0:34',
        'solar_noon': '0',
        'sunset': '-0:34',
        'dusk_civil': '-6',
        'dusk_nautical': '-12',
        'dusk_astronomical': '18',
    }
    # Which ephem.Observer method computes each event.
    _methods = {
        'dawn_astronomical': 'next_rising',
        'dawn_nautical': 'next_rising',
        'dawn_civil': 'next_rising',
        'sunrise': 'next_rising',
        'solar_noon': 'next_transit',
        'sunset': 'next_setting',
        'dusk_civil': 'next_setting',
        'dusk_nautical': 'next_setting',
        'dusk_astronomical': 'next_setting',
    }
    # Whether to pass use_center=True to the ephem method for the event.
    _use_center_l = {
        'dawn_astronomical': True,
        'dawn_nautical': True,
        'dawn_civil': True,
        'sunrise': False,
        'solar_noon': False,
        'sunset': False,
        'dusk_civil': True,
        'dusk_nautical': True,
        'dusk_astronomical': True,
    }
    def __init__(self, event, lat, lon, **kwargs):
        # Import at instantiation time so ephem is only required when a
        # solar schedule is actually used.
        self.ephem = __import__('ephem')
        self.event = event
        self.lat = lat
        self.lon = lon
        super().__init__(**kwargs)
        if event not in self._all_events:
            raise ValueError(SOLAR_INVALID_EVENT.format(
                event=event, all_events=', '.join(sorted(self._all_events)),
            ))
        if lat < -90 or lat > 90:
            raise ValueError(SOLAR_INVALID_LATITUDE.format(lat=lat))
        if lon < -180 or lon > 180:
            raise ValueError(SOLAR_INVALID_LONGITUDE.format(lon=lon))
        cal = self.ephem.Observer()
        cal.lat = str(lat)
        cal.lon = str(lon)
        cal.elev = 0
        cal.horizon = self._horizons[event]
        cal.pressure = 0
        self.cal = cal
        self.method = self._methods[event]
        self.use_center = self._use_center_l[event]
    def __reduce__(self):
        return self.__class__, (self.event, self.lat, self.lon)
    def __repr__(self):
        return '<solar: {} at latitude {}, longitude: {}>'.format(
            self.event, self.lat, self.lon,
        )
    def remaining_estimate(self, last_run_at):
        """Return estimate of next time to run.
        Returns:
            ~datetime.timedelta: when the periodic task should
                run next, or if it shouldn't run today (e.g., the sun does
                not rise today), returns the time when the next check
                should take place.
        """
        last_run_at = self.maybe_make_aware(last_run_at)
        last_run_at_utc = localize(last_run_at, timezone.utc)
        self.cal.date = last_run_at_utc
        try:
            if self.use_center:
                next_utc = getattr(self.cal, self.method)(
                    self.ephem.Sun(),
                    start=last_run_at_utc, use_center=self.use_center
                )
            else:
                next_utc = getattr(self.cal, self.method)(
                    self.ephem.Sun(), start=last_run_at_utc
                )
        except self.ephem.CircumpolarError:  # pragma: no cover
            # Sun won't rise/set today.  Check again tomorrow
            # (specifically, after the next anti-transit).
            next_utc = (
                self.cal.next_antitransit(self.ephem.Sun()) +
                timedelta(minutes=1)
            )
        next = self.maybe_make_aware(next_utc.datetime())
        now = self.maybe_make_aware(self.now())
        delta = next - now
        return delta
    def is_due(self, last_run_at):
        """Return tuple of ``(is_due, next_time_to_run)``.
        Note:
            next time to run is in seconds.
        See Also:
            :meth:`celery.schedules.schedule.is_due` for more information.
        """
        rem_delta = self.remaining_estimate(last_run_at)
        rem = max(rem_delta.total_seconds(), 0)
        due = rem == 0
        if due:
            rem_delta = self.remaining_estimate(self.now())
            rem = max(rem_delta.total_seconds(), 0)
        return schedstate(due, rem)
    def __eq__(self, other):
        if isinstance(other, solar):
            return (
                other.event == self.event and
                other.lat == self.lat and
                other.lon == self.lon
            )
        return NotImplemented
    def __ne__(self, other):
        res = self.__eq__(other)
        if res is NotImplemented:
            return True
        return not res
| 34.998804 | 79 | 0.577873 |
2cb8ffd88f54c2e38b00cfa7bc81bff9f8b9456b | 1,757 | py | Python | postProcess/cut_video_function.py | SnorlaxSE/handtracking-Application | 5af03d9a1797040274ca2a306a7873cd48ea12af | [
"Apache-2.0"
] | null | null | null | postProcess/cut_video_function.py | SnorlaxSE/handtracking-Application | 5af03d9a1797040274ca2a306a7873cd48ea12af | [
"Apache-2.0"
] | null | null | null | postProcess/cut_video_function.py | SnorlaxSE/handtracking-Application | 5af03d9a1797040274ca2a306a7873cd48ea12af | [
"Apache-2.0"
] | null | null | null | import cv2
import glob
import os
import pdb
def get_video_info(video_path):
    """Collect basic properties of a video file via OpenCV.

    Args:
        video_path: path to a video file readable by OpenCV.

    Returns:
        Tuple ``(fps, size, total_frames, rate, total_duration)`` where
        ``size`` is ``(width, height)`` in pixels and ``total_duration``
        is in seconds.  ``rate`` equals ``fps`` (kept for backward
        compatibility with existing callers).
    """
    videoCapture = cv2.VideoCapture()
    videoCapture.open(video_path)
    try:
        # Frame rate.
        fps = videoCapture.get(cv2.CAP_PROP_FPS)
        # Resolution as (width, height).
        size = (int(videoCapture.get(cv2.CAP_PROP_FRAME_WIDTH)),
                int(videoCapture.get(cv2.CAP_PROP_FRAME_HEIGHT)))
        # Total number of frames.
        total_frames = videoCapture.get(cv2.CAP_PROP_FRAME_COUNT)
        # Originally `videoCapture.get(5)` — 5 is the magic number for
        # CAP_PROP_FPS; use the named constant instead.
        rate = videoCapture.get(cv2.CAP_PROP_FPS)
        # Duration in seconds.
        total_duration = total_frames / rate
    finally:
        # Release the capture handle (the original version leaked it).
        videoCapture.release()
    return fps, size, total_frames, rate, total_duration
def frames_to_video(frames_path, fps, size, start_index, end_index, save_path):
    """Assemble frames ``frames_path/<i>.jpg`` for ``i`` in
    ``[start_index, end_index]`` (inclusive) into a video at *save_path*.

    Frames whose file is missing are silently skipped.  ``size`` (width,
    height) must match the frame resolution, otherwise the written file
    will be corrupt (unopenable).
    """
    fourcc = cv2.VideoWriter_fourcc(*'mp4v')
    videoWriter = cv2.VideoWriter(save_path, fourcc, fps, size)
    try:
        for i in range(start_index, end_index + 1):
            frame_file = "%s/%d.jpg" % (frames_path, i)
            if os.path.isfile(frame_file):
                videoWriter.write(cv2.imread(frame_file))
    finally:
        # Always finalize the container, even if a read/write fails
        # (the original only released on the success path).
        videoWriter.release()
    return
if __name__ == '__main__':
    # Demo: rebuild frames 415-482 of this clip into '<name>_cut.mp4'.
    video_file = "00004_result_part.mp4"
    result_file = video_file.split('.')[0] + '_cut.mp4'
    frames_dir = 'frames'
    fps, size, total_frames, rate, total_duration = get_video_info(video_file)
    # frames → video
    frames_to_video(frames_path=frames_dir, fps=fps, size=size, start_index=415, end_index=482, save_path=result_file)
print("frames → video SUCCEED !!!") | 35.14 | 130 | 0.667615 |
3f25048863710e448dbdce1d40d502eab8064684 | 2,549 | py | Python | sdk/python/tests/compiler/testdata/artifact_passing_using_volume.py | k-gupta/pipelines | 4ea6eb66175399ee2bcd4de7a520b13f7b1385a0 | [
"Apache-2.0"
] | null | null | null | sdk/python/tests/compiler/testdata/artifact_passing_using_volume.py | k-gupta/pipelines | 4ea6eb66175399ee2bcd4de7a520b13f7b1385a0 | [
"Apache-2.0"
] | null | null | null | sdk/python/tests/compiler/testdata/artifact_passing_using_volume.py | k-gupta/pipelines | 4ea6eb66175399ee2bcd4de7a520b13f7b1385a0 | [
"Apache-2.0"
] | null | null | null | from pathlib import Path
import kfp.deprecated as kfp
from kfp.deprecated.components import load_component_from_file, create_component_from_func
from typing import NamedTuple
# Directory holding the reusable component specs used by this test pipeline.
test_data_dir = Path(__file__).parent / 'test_data'
# Each load returns a component factory that creates a pipeline task when
# called inside a pipeline function.
producer_op = load_component_from_file(
    str(test_data_dir / 'produce_2.component.yaml'))
processor_op = load_component_from_file(
    str(test_data_dir / 'process_2_2.component.yaml'))
consumer_op = load_component_from_file(
    str(test_data_dir / 'consume_2.component.yaml'))
def metadata_and_metrics() -> NamedTuple(
    "Outputs",
    [("mlpipeline_ui_metadata", "UI_metadata"), ("mlpipeline_metrics", "Metrics")],
):
    """Produce KFP UI metadata and metrics payloads as JSON strings.

    Returns a namedtuple whose two fields carry the serialized
    ``mlpipeline_ui_metadata`` and ``mlpipeline_metrics`` documents.
    """
    # Imports stay inside the body: KFP lightweight components serialize
    # only the function itself, so it must be self-contained.
    import json
    from collections import namedtuple

    ui_metadata = {
        "outputs": [
            {"storage": "inline", "source": "*this should be bold*", "type": "markdown"}
        ]
    }
    metric_doc = {
        "metrics": [
            {"name": "train-accuracy", "numberValue": 0.9},
            {"name": "test-accuracy", "numberValue": 0.7},
        ]
    }

    result_type = namedtuple("output", ["mlpipeline_ui_metadata", "mlpipeline_metrics"])
    return result_type(json.dumps(ui_metadata), json.dumps(metric_doc))
@kfp.dsl.pipeline()
def artifact_passing_pipeline():
    """Chain producer -> processor -> consumer plus a metadata task.

    Exercises passing two artifacts between each pair of components; the
    data-passing method itself is configured on the pipeline conf below.
    """
    producer_task = producer_op()
    processor_task = processor_op(producer_task.outputs['output_1'],
                                  producer_task.outputs['output_2'])
    consumer_task = consumer_op(processor_task.outputs['output_1'],
                                processor_task.outputs['output_2'])
    markdown_task = create_component_from_func(func=metadata_and_metrics)()
    # This line is only needed for compiling using dsl-compile to work
    kfp.dsl.get_pipeline_conf(
    ).data_passing_method = volume_based_data_passing_method
from kubernetes.client.models import V1Volume, V1PersistentVolumeClaimVolumeSource
from kfp.deprecated.dsl import data_passing_methods
# Shared PVC-backed volume used to pass artifacts between pipeline steps:
# the 'data-volume' claim is mounted and artifacts live under artifact_data/.
volume_based_data_passing_method = data_passing_methods.KubernetesVolume(
    volume=V1Volume(
        name='data',
        persistent_volume_claim=V1PersistentVolumeClaimVolumeSource(
            claim_name='data-volume',),
    ),
    path_prefix='artifact_data/',
)
if __name__ == '__main__':
    # Compile the pipeline to "<this_file>.yaml" using the volume-based
    # data-passing method configured above.
    pipeline_conf = kfp.dsl.PipelineConf()
    pipeline_conf.data_passing_method = volume_based_data_passing_method
    kfp.compiler.Compiler().compile(
        artifact_passing_pipeline,
        __file__ + '.yaml',
        pipeline_conf=pipeline_conf)
| 33.539474 | 90 | 0.674774 |
ccaf0b2cb68434c8fb96ea38c412740f73e8c8af | 1,337 | py | Python | PythonBaseDemo/CommonModules/10.6/find_test.py | CypHelp/TestNewWorldDemo | ee6f73df05756f191c1c56250fa290461fdd1b9a | [
"Apache-2.0"
] | null | null | null | PythonBaseDemo/CommonModules/10.6/find_test.py | CypHelp/TestNewWorldDemo | ee6f73df05756f191c1c56250fa290461fdd1b9a | [
"Apache-2.0"
] | null | null | null | PythonBaseDemo/CommonModules/10.6/find_test.py | CypHelp/TestNewWorldDemo | ee6f73df05756f191c1c56250fa290461fdd1b9a | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
#########################################################################
# 网站: <a href="http://www.crazyit.org">疯狂Java联盟</a> #
# author yeeku.H.lee kongyeeku@163.com #
# #
# version 1.0 #
# #
# Copyright (C), 2001-2018, yeeku.H.Lee #
# #
# This program is protected by copyright laws. #
# #
# Program Name: #
# #
# <br>Date: #
#########################################################################
import re
# Return a list of all substrings matching the pattern, ignoring case
print(re.findall('fkit', 'FkIt is very good , Fkit.org is my favorite' , re.I))
# Return an iterator of match objects for the pattern, ignoring case
it = re.finditer('fkit', 'FkIt is very good , Fkit.org is my favorite' , re.I)
for e in it:
    print(str(e.start()) + "-->" + e.group())
| 58.130435 | 79 | 0.267764 |
e4fe4cf9eb6ebea182c5d34e0c0a23be00cd84d8 | 845 | py | Python | adanet/autoensemble/__init__.py | SmallyolkLiu/adanet | 195012067fb0e3a26618f7bfdc388e6aeaed7dff | [
"Apache-2.0"
] | 2 | 2019-01-04T19:23:23.000Z | 2021-02-14T21:48:03.000Z | adanet/autoensemble/__init__.py | SmallyolkLiu/adanet | 195012067fb0e3a26618f7bfdc388e6aeaed7dff | [
"Apache-2.0"
] | 1 | 2019-03-04T16:57:55.000Z | 2019-03-04T16:57:55.000Z | adanet/autoensemble/__init__.py | SmallyolkLiu/adanet | 195012067fb0e3a26618f7bfdc388e6aeaed7dff | [
"Apache-2.0"
] | null | null | null | """The TensorFlow AdaNet autoensemble module.
Copyright 2018 The AdaNet Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from adanet.autoensemble.estimator import AutoEnsembleEstimator
__all__ = ["AutoEnsembleEstimator"]
| 33.8 | 72 | 0.808284 |
5a0f85753a7763e6d5bb2be6474ac08a626d5833 | 4,781 | py | Python | consai2_game/scripts/example/actions/tool.py | ibis-ssl/consai2-ibis | 2b7d67007703fa49fc7290e92e12481ba48a9a93 | [
"MIT"
] | 4 | 2019-12-16T12:17:32.000Z | 2020-02-15T04:45:47.000Z | consai2_game/scripts/example/actions/tool.py | ibis-ssl/consai2-ibis | 2b7d67007703fa49fc7290e92e12481ba48a9a93 | [
"MIT"
] | null | null | null | consai2_game/scripts/example/actions/tool.py | ibis-ssl/consai2-ibis | 2b7d67007703fa49fc7290e92e12481ba48a9a93 | [
"MIT"
] | null | null | null | # MIT License
#
# Copyright (c) 2019 SSL-Roots
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# coding: UTF-8
import math
import cmath
import numpy
import sys,os
from geometry_msgs.msg import Pose2D
sys.path.append(os.pardir)
from field import Field
def distance_2_poses(pose1, pose2):
    """Return the Euclidean distance between two poses.

    Only ``x`` and ``y`` are used; ``theta`` is ignored.
    """
    # Compute directly instead of allocating a throwaway Pose2D message
    # just to hold the coordinate differences.
    return math.hypot(pose1.x - pose2.x, pose1.y - pose2.y)
def is_close(pose1, pose2, threshold):
    """Return True when the two poses agree within the given thresholds.

    The heading difference is normalized to (-pi, pi] before comparison,
    so poses on either side of the +/-pi wrap still compare as close.
    """
    return (math.fabs(pose1.x - pose2.x) < threshold.x
            and math.fabs(pose1.y - pose2.y) < threshold.y
            and math.fabs(angle_normalize(pose1.theta - pose2.theta)) < threshold.theta)
def angle_normalize(angle):
    """Wrap *angle* (radians) into the range -pi ... pi."""
    wrapped = angle
    while wrapped > math.pi:
        wrapped -= 2 * math.pi
    while wrapped < -math.pi:
        wrapped += 2 * math.pi
    return wrapped
def get_angle(from_pose, to_pose):
    """Return the world-frame angle of the line from *from_pose* to *to_pose*."""
    # Compute directly instead of allocating a throwaway Pose2D message
    # just to hold the coordinate differences.
    return math.atan2(to_pose.y - from_pose.y, to_pose.x - from_pose.x)
def get_angle_from_center(pose):
    """Return the angle of *pose* as seen from the field center (origin)."""
    angle_rad = math.atan2(pose.y, pose.x)
    return angle_rad
def get_intersection(pose1, pose2, pose3, pose4):
    """Intersection of line (pose1, pose2) with line (pose3, pose4).

    Uses the signed-area method
    (reference: http://imagingsolution.blog107.fc2.com/blog-entry-137.html).
    Returns a Pose2D, or None when the lines are parallel.
    """
    dx34 = pose4.x - pose3.x
    dy34 = pose4.y - pose3.y
    # Signed areas of the triangles formed with the endpoints of line 1.
    area1 = (dx34 * (pose1.y - pose3.y) - dy34 * (pose1.x - pose3.x)) / 2.0
    area2 = (dx34 * (pose3.y - pose2.y) - dy34 * (pose3.x - pose2.x)) / 2.0
    try:
        ratio = area1 / (area1 + area2)
    except ZeroDivisionError:
        # area1 + area2 == 0 means the lines are parallel: no intersection.
        return None
    crossing = Pose2D(0, 0, 0)
    crossing.x = pose1.x + (pose2.x - pose1.x) * ratio
    crossing.y = pose1.y + (pose2.y - pose1.y) * ratio
    return crossing
def is_in_defense_area(pose, team='our'):
    """Return True when *pose* lies inside the given team's defense area.

    The check is axis-aligned against the penalty-area front corners from
    Field.penalty_pose(). The x comparisons assume our penalty area sits on
    the -x side and the opponent's on the +x side -- TODO confirm against
    Field's coordinate conventions.
    """
    PENALTY_UPPER_FRONT = Field.penalty_pose(team, 'upper_front')
    PENALTY_LOWER_FRONT = Field.penalty_pose(team, 'lower_front')
    pose_is_in_area = False
    if team == 'our':
        # Inside our own defense area?
        if pose.x < PENALTY_UPPER_FRONT.x \
            and pose.y < PENALTY_UPPER_FRONT.y \
            and pose.y > PENALTY_LOWER_FRONT.y:
            pose_is_in_area = True
    else:
        # Inside the opponent's defense area?
        if pose.x > PENALTY_UPPER_FRONT.x \
            and pose.y < PENALTY_UPPER_FRONT.y \
            and pose.y > PENALTY_LOWER_FRONT.y:
            pose_is_in_area = True
    return pose_is_in_area
def get_size_from_center(pose):
    """Return the distance from the field center (origin) to *pose*."""
    distance = math.hypot(pose.x, pose.y)
    return distance
class Trans():
    # Transforms coordinates into a frame translated to *center* and
    # rotated by *theta*, using complex-number arithmetic internally.
    def __init__(self, center , theta):
        # Pre-compute the frame origin and the unit rotation as complex
        # numbers; the normalized angle is kept for angle transforms.
        normalized_theta = angle_normalize(theta)
        self._c_center = center.x + center.y * 1.0j
        self._c_rotate = cmath.rect(1.0,normalized_theta)
        self._c_angle = normalized_theta
    def transform(self, pose):
        # World frame -> local frame: translate to the origin, then rotate
        # by -theta (multiplying by the conjugate of the unit rotation).
        c_point = pose.x + pose.y * 1.0j
        c_output = (c_point - self._c_center) * numpy.conj(self._c_rotate)
        output = Pose2D()
        output.x = c_output.real
        output.y = c_output.imag
        return output
    def inverted_transform(self, pose):
        # Local frame -> world frame: rotate by +theta, then translate back.
        c_point = pose.x + pose.y * 1.0j
        c_output = c_point * self._c_rotate + self._c_center
        output = Pose2D()
        output.x = c_output.real
        output.y = c_output.imag
        return output
    def transform_angle(self, angle):
        # World angle -> local angle, normalized to -pi ... pi.
        return angle_normalize(angle - self._c_angle)
    def inverted_transform_angle(self, angle):
        # Local angle -> world angle, normalized to -pi ... pi.
        return angle_normalize(angle + self._c_angle)
| 28.975758 | 87 | 0.664296 |
67342f6db56f0281b437f0116e7df78e33c143de | 5,107 | py | Python | Lecture 24/Lecture24Assignment2.py | AtharvaJoshi21/PythonPOC | 6b95eb5bab7b28e9811e43b39e863faf2ee7565b | [
"MIT"
] | 1 | 2019-04-27T15:37:04.000Z | 2019-04-27T15:37:04.000Z | Lecture 24/Lecture24Assignment2.py | AtharvaJoshi21/PythonPOC | 6b95eb5bab7b28e9811e43b39e863faf2ee7565b | [
"MIT"
] | null | null | null | Lecture 24/Lecture24Assignment2.py | AtharvaJoshi21/PythonPOC | 6b95eb5bab7b28e9811e43b39e863faf2ee7565b | [
"MIT"
] | 1 | 2020-08-14T06:57:08.000Z | 2020-08-14T06:57:08.000Z | # WAP to implement Student management system.
# - Student class should have - Name, Address, DOB, Course, Division, ListOfMarks
# - Implement Getters and Setters
# - AddStudent(), SuspendStudent(), UpdateAddress(), UpdateMarks(), PrintAllStudentDetails() and PrintPercentage()
class Student:
    """A student record with an auto-assigned, sequential roll number.

    All attribute access goes through getter/update methods to keep the
    original public interface intact.
    """

    # Class-level counter used to hand out sequential roll numbers.
    autoRollNo = 1

    def __init__(self, name, address, dob, course, division):
        self.__name = name
        self.__address = address
        self.__dob = dob
        self.__course = course
        self.__division = division
        self.__marks = {}
        self.__rollNo = Student.autoRollNo
        Student.autoRollNo += 1

    def __repr__(self):
        # NOTE: "Address:" deliberately has no space after the colon to
        # match the original format string.
        return (
            f"RollNo: {self.__rollNo}\n"
            f"Name: {self.__name}\n"
            f"Address:{self.__address}\n"
            f"DOB: {self.__dob}\n"
            f"Course: {self.__course}\n"
            f"Division: {self.__division}\n"
            f"Marks: {self.__marks}"
        )

    def getRollNo(self):
        """Return the auto-assigned roll number."""
        return self.__rollNo

    def getName(self):
        """Return the student's name."""
        return self.__name

    def getDob(self):
        """Return the date of birth."""
        return self.__dob

    def getAddress(self):
        """Return the current address."""
        return self.__address

    def updateAddress(self, address):
        """Replace the stored address."""
        self.__address = address

    def getCourse(self):
        """Return the enrolled course."""
        return self.__course

    def updateCourse(self, course):
        """Replace the enrolled course."""
        self.__course = course

    def getDivision(self):
        """Return the division."""
        return self.__division

    def updateDivision(self, division):
        """Replace the division."""
        self.__division = division

    def getListOfMarks(self):
        """Return the subject -> marks mapping."""
        return self.__marks

    def updateMarks(self, subject, marks):
        """Record (or overwrite) the marks for *subject*."""
        self.__marks[subject] = marks
class StudentManager():
    """Manage enrollment, suspension and record updates for students.

    Capacity is fixed at construction time; roll numbers are assigned by
    the Student class itself.
    """

    def __init__(self, noOfStudents):
        self.__noOfStudents = noOfStudents
        self.__enrolledStudents = {}
        self.__suspendedStudents = {}

    def getEnrolledStudents(self):
        """Return the rollNo -> Student mapping of enrolled students."""
        return self.__enrolledStudents

    def enrollStudent(self, name, address, dob, course, division):
        """Enroll a new student; returns False when the class is full."""
        if len(self.__enrolledStudents) == self.__noOfStudents:
            return False
        stud = Student(name, address, dob, course, division)
        self.__enrolledStudents[stud.getRollNo()] = stud
        return True

    def getSuspendedStudents(self):
        """Return the rollNo -> Student mapping of suspended students."""
        return self.__suspendedStudents

    def suspendStudent(self, rollNo):
        """Move a student to the suspended set.

        Suspending an already-suspended student is a no-op that still
        reports success; unknown roll numbers report failure.
        """
        if rollNo in self.__suspendedStudents:
            return True
        if rollNo in self.__enrolledStudents:
            self.__suspendedStudents[rollNo] = self.__enrolledStudents.pop(rollNo)
            return True
        return False

    def __enrolled(self, rollNo):
        # Internal helper: enrolled Student for rollNo, or None.
        return self.__enrolledStudents.get(rollNo)

    def updateMarks(self, rollNo, subject, marks):
        """Set marks for an enrolled student; False when not enrolled."""
        stud = self.__enrolled(rollNo)
        if stud is None:
            return False
        stud.updateMarks(subject, marks)
        return True

    def updateAddress(self, rollNo, address):
        """Update an enrolled student's address; False when not enrolled."""
        stud = self.__enrolled(rollNo)
        if stud is None:
            return False
        stud.updateAddress(address)
        return True

    def updateDivision(self, rollNo, division):
        """Update an enrolled student's division; False when not enrolled."""
        stud = self.__enrolled(rollNo)
        if stud is None:
            return False
        stud.updateDivision(division)
        return True

    def getStudentDetails(self, rollNo):
        """Return the enrolled Student for rollNo, or None."""
        return self.__enrolledStudents.get(rollNo)
def UnitTestStudent():
    """Smoke-test the Student class by exercising every accessor."""
    student = Student("Atharva", "Pune", "21/11/1992", "B.E.", "I.T.")
    print(f"Roll No: {student.getRollNo()}")
    print(f"Name: {student.getName()}")
    print(f"Address: {student.getAddress()}")
    print(f"Course: {student.getCourse()}")
    print(f"Division: {student.getDivision()}")
    print(f"DOB: {student.getDob()}")
    print("Marks: ", student.getListOfMarks())
    student.updateAddress("Sheffield")
    for subject, marks in (("DELD", 55), ("OOPS", 60), ("DSP", 75)):
        student.updateMarks(subject, marks)
    print(f"Updated Address: {student.getAddress()}")
    print("Updated Marks: ", student.getListOfMarks())
def UnitTestStudentManager():
    """Smoke-test StudentManager: enroll, suspend and update marks."""
    manager = StudentManager(3)
    for record in (("Amar", "Delhi", "01/01/1990", "BE", "A"),
                   ("Akbar", "Delhi", "01/02/1991", "BE", "B"),
                   ("Anthony", "Delhi", "01/03/1992", "BE", "C")):
        manager.enrollStudent(*record)
    print("********** Enrolled Students are: **********")
    for student in manager.getEnrolledStudents().values():
        print(student)
    manager.suspendStudent(2)
    print("********** Suspended Students are: **********")
    for student in manager.getSuspendedStudents().values():
        print(student)
    print("********** Enrolled Students are: **********")
    for student in manager.getEnrolledStudents().values():
        print(student)
    print("********** Updating Marks **********")
    manager.updateMarks(1, "Algebra", 73)
    print(manager.getStudentDetails(1))
def main():
    """Entry point: run the StudentManager smoke test."""
    # UnitTestStudent()  # uncomment to exercise the Student class directly
    UnitTestStudentManager()
if __name__ == "__main__":
main() | 32.737179 | 243 | 0.615626 |
31e8e3f98e314052519d03445e60ae830e374ba1 | 45,778 | py | Python | beta/pyicloud_ic3.py | JavierMartinz/icloud3 | 23792bea461a1226d45a96300929f498833b0a6a | [
"MIT"
] | null | null | null | beta/pyicloud_ic3.py | JavierMartinz/icloud3 | 23792bea461a1226d45a96300929f498833b0a6a | [
"MIT"
] | null | null | null | beta/pyicloud_ic3.py | JavierMartinz/icloud3 | 23792bea461a1226d45a96300929f498833b0a6a | [
"MIT"
] | null | null | null | """
Customized version of pyicloud.py to support iCloud3 Custom Component
Platform that supports importing data from the iCloud Location Services
and Find My Friends api routines. Modifications to pyicloud were made
by various people to include:
- Original pyicloud - picklepete
- https://github.com/picklepete
- Update to 2fa - Peter Hadley
- https://github.com/PeterHedley94/pyicloud
- Persistant Cookies - JiaJiunn Chiou
- https://github.com/chumachuma/iSync
- Find My Friends Update - Z Zeleznick
- https://github.com/picklepete/pyicloud/pull/160
The piclkepete version used imports for the services, utilities and exceptions
modules. These modules have been incorporated into the pyicloud-ic3 version.
"""
VERSION = '0.9.2b1'
import six
import uuid
import hashlib
import inspect
import json
import logging
import requests
import sys
import tempfile
import os
from re import match
from uuid import uuid1 as generateClientID
#from pyicloud.exceptions import (
# PyiCloudFailedLoginException,
# PyiCloudAPIResponseError,
# PyiCloud2SARequiredError,
# PyiCloudServiceNotActivatedErrror
#)
#from . findmyiphone_tst import FindMyiPhoneServiceManager
#from pyicloud.services import (
# FindMyiPhoneServiceManager,
# CalendarService,
# UbiquityService,
# ContactsService,
# RemindersService,
# PhotosService,
# AccountService
#)
#from pyicloud.utils import get_password_from_keyring
if six.PY3:
import http.cookiejar as cookielib
else:
import cookielib
logger = logging.getLogger(__name__)
#==================================================================
class PyiCloudPasswordFilter(logging.Filter):
    """Logging filter that masks the account password in log records."""

    def __init__(self, password):
        # Initialize the base Filter so the object is a well-formed filter.
        super(PyiCloudPasswordFilter, self).__init__()
        self.password = password

    def filter(self, record):
        """Replace the password with asterisks; never drop the record."""
        message = record.getMessage()
        if self.password in message:
            record.msg = message.replace(self.password, "*" * 8)
            record.args = []
        # BUG FIX: the original returned True only inside the branch above,
        # so records NOT containing the password returned None and were
        # silently dropped by the logging framework.
        return True
#==================================================================
class PyiCloudSession(requests.Session):
    """requests.Session subclass adding verbose debug logging and
    iCloud-specific error screening around every HTTP request."""

    def __init__(self, service):
        # Counter used only to correlate begin/end debug messages.
        self.req_no = 0
        # Back-reference to the owning PyiCloudService.
        self.service = service
        super(PyiCloudSession, self).__init__()

    #------------------------------------------------------------------
    def request(self, *args, **kwargs):
        """Issue the HTTP request, log it, and inspect JSON replies for
        iCloud error fields.

        NOTE(review): on a JSON reply carrying an unexpected error reason
        this returns a formatted "Error=..." string instead of a Response
        object -- callers expecting a Response should be checked.
        """
        try:
            # Charge logging to the right service endpoint
            callee = inspect.stack()[2]
            module = inspect.getmodule(callee[0])
            logger = logging.getLogger(module.__name__).getChild('http')
            self.req_no+=1
            logger.debug("_<warn>____ PyiCloudSession __a___ request beg __%s___",self.req_no)
            logger.debug("args=%s", args)
            logger.debug("kwargs=%s", kwargs)
            if self.service._password_filter not in logger.filters:
                logger.addFilter(self.service._password_filter)
            logger.debug("--------going to get/post-----1-----")
        except Exception as err:
            logger.exception(err)
        try:
            response = super(PyiCloudSession, self).request(*args, **kwargs)
            logger.debug("--------back from get/post-----1-----")
            logger.debug("response=%s", response)
            logger.debug("-------------------------------------")
            #logger.debug("response.headers=%s", response.headers)
        except Exception as err:
            logger.exception(err)
        try:
            content_type = response.headers.get('Content-Type', '').split(';')[0]
            json_mimetypes = ['application/json', 'text/json']
            logger.debug("content_type=%s",content_type)
            logger.debug("json_mimetypes=%s",json_mimetypes)
            logger.debug("response.ok=%s",response.ok)
            if not response.ok and content_type not in json_mimetypes:
                logger.debug("error raised %s/%s",response.status_code, response.reason)
                #return response
                self._raise_error(response.status_code, response.reason)
                logger.debug("after raise error")
            if content_type not in json_mimetypes:
                # Non-JSON payload: hand the raw response back unchanged.
                logger.debug("_<warn>____ PyiCloudSession _____ request end.1 __%s___",self.req_no)
                self.req_no-=1
                return response
        except Exception as err:
            logger.exception(err)
        try:
            # NOTE(review): this local name shadows the imported json module
            # for the rest of the method.
            json = response.json()
        except:
            logger.warning('Failed to parse response with JSON mimetype')
            return response
        logger.debug("--------back from get/post------2----")
        logger.debug("response.json=%s", json)
        # Apple reports failures under several different keys; take the
        # first non-empty one.
        reason = json.get('errorMessage')
        reason = reason or json.get('reason')
        reason = reason or json.get('errorReason')
        if not reason and isinstance(json.get('error'), six.string_types):
            reason = json.get('error')
        if not reason and json.get('error'):
            reason = "Unknown reason"
        code = json.get('errorCode')
        if not code and json.get('serverErrorCode'):
            code = json.get('serverErrorCode')
        if reason:
            # The missing-webauth-token reason is expected before a fresh
            # login, so it is tolerated rather than reported.
            acceptable_reason = 'Missing X-APPLE-WEBAUTH-TOKEN cookie'
            if reason != acceptable_reason:
                return ("Error={}, ({})").format(code, reason)
                #self._raise_error(code, reason)
        logger.debug("_<warn>____ PyiCloudSession _____ request end.3 __%s___",self.req_no)
        self.req_no-=1
        return response
        #reason = reason or json.get('errorReason')
        #acceptable_reason = 'Missing X-APPLE-WEBAUTH-TOKEN cookie'
        #    if reason != acceptable_reason:

    #------------------------------------------------------------------
    def _raise_error(self, code, reason):
        """Translate an iCloud error code/reason into an exception or an
        "Error=..." tuple.

        NOTE(review): `response` is not defined in this scope, so the 2SA
        branch would raise NameError; PyiCloud2SARequiredError and the
        other exception classes come from imports that are commented out
        at the top of this module -- confirm they are provided elsewhere.
        NOTE(review): `self.session` does not exist on this class (it IS
        the session); the ACCESS_DENIED branch appears to want
        self.cookies -- confirm.
        """
        logger.debug("_<warn>____ PyiCloudSession _____ raise_error _____")
        if self.service.requires_2sa and \
                reason == 'Missing X-APPLE-WEBAUTH-TOKEN cookie':
            raise PyiCloud2SARequiredError(response.url)
        if code == 'ZONE_NOT_FOUND' or code == 'AUTHENTICATION_FAILED':
            reason = 'Please log into https://icloud.com/ to manually ' \
                'finish setting up your iCloud service'
            api_error = PyiCloudServiceNotActivatedErrror(reason, code)
            logger.error(api_error)
            return "Error=" + code, reason
            #raise(api_error)
        if code == 'ACCESS_DENIED':
            reason = reason + '. Please wait a few minutes then try ' \
                'again. The remote servers might be trying to ' \
                'throttle requests.'
            api_error = PyiCloudAPIResponseError(reason, code)
            logger.error(api_error)
            self.session.cookies.clear()
            self.session.cookies.save()
            return "Error=" + code, reason
            #raise(api_error)
#==================================================================
class PyiCloudService(object):
    """
    A base authentication class for the iCloud service. Handles the
    authentication required to access iCloud services.
    Usage:
        from pyicloud import PyiCloudService
        pyicloud = PyiCloudService('username@apple.com', 'password')
        pyicloud.iphone.location()
    """

    def __init__(
        self, apple_id, password=None, cookie_directory=None, verify=True,
        client_id=None
    ):
        """Set up the HTTP session, cookie jar and helpers, then log in.

        NOTE(review): IdmsaAppleService and several service classes used
        below are not defined in this module chunk -- confirm they are
        provided elsewhere in the file.
        """
        logger.debug("_<warn>____ PyiCloudService ________init_______")
        #if password is None:
        #    password = get_password_from_keyring(apple_id)
        self.data = {}
        self.client_id = client_id or str(uuid.uuid1()).upper()
        self.apple_id = apple_id
        self.user = {'apple_id': apple_id, 'password': password}
        self.appleWidgetKey = None
        self.webservices = None
        self.dsid = None
        self.account_country = None
        self.session_token = None
        logger.debug("-->>>> %s",self.user)
        # Mask the password in everything this module logs.
        self._password_filter = PyiCloudPasswordFilter(password)
        logger.addFilter(self._password_filter)
        #self.user_agent = 'Opera/9.52 (X11; Linux i686; U; en)'
        self.user_agent = ('Mozilla/5.0 (iPad; CPU OS 9_3_4 like Mac OS X)'
                    'AppleWebKit/601.1.46 (KHTML, like Gecko) '
                    'Version/9.0 Mobile/13G35 Safari/601.1')
        self._setup_endpoint = 'https://setup.icloud.com/setup/ws/1'
        self.referer = 'https://www.icloud.com'
        self.origin = 'https://www.icloud.com'
        self.response = None
        self._base_login_url = '%s/login' % self._setup_endpoint
        if cookie_directory:
            self._cookie_directory = os.path.expanduser(
                os.path.normpath(cookie_directory)
            )
        else:
            self._cookie_directory = os.path.join(
                tempfile.gettempdir(),
                'pyicloud',
            )
        self.session = PyiCloudSession(self)
        self.session.verify = verify
        self.session.headers.update({
            'Origin': self.referer,
            'Referer': '%s/' % self.referer,
            'User-Agent': self.user_agent
        })
        # Persist cookies between runs so Apple does not re-mail 2SA codes.
        self.cookiejar_path = self._get_cookiejar_path()
        self.session.cookies = cookielib.LWPCookieJar(filename=self.cookiejar_path)
        if os.path.exists(self.cookiejar_path):
            logger.debug("_<warn>____ PyiCloudService ______ init/get_session_cookies__")
            try:
                self.session.cookies.load()
                logger.debug("Read cookies from %s", self.cookiejar_path)
                logger.debug("loaded_session_cookies=%s", self.session.cookies)
            except:
                # Most likely a pickled cookiejar from earlier versions.
                # The cookiejar will get replaced with a valid one after
                # successful authentication.
                logger.warning("Failed to read cookiejar %s", self.cookiejar_path)
        self.params = {
            'clientBuildNumber': '17DHotfix5',
            'clientMasteringNumber': '17DHotfix5',
            'ckjsBuildVersion': '17DProjectDev77',
            'ckjsVersion': '2.0.5',
            'clientId': self.client_id,
        }
        self.clientID = self.generateClientID()
        self.setupiCloud = SetupiCloudService(self)
        self.idmsaApple = IdmsaAppleService(self)
        self.authenticate()

    #------------------------------------------------------------------
    def authenticate(self):
        """
        Handles authentication, and persists the X-APPLE-WEB-KB cookie so that
        subsequent logins will not cause additional e-mails from Apple.
        """
        logger.debug("_<warn>____ PyiCloudService ______ authenticate _____")
        logger.info("Authenticating as %s", self.user['apple_id'])
        self.session_token = self.get_session_token()
        if self.session_token is None:
            # First attempt failed: retry once with a fresh cookie jar.
            logger.info(("Error logging into iCloud account {}"). \
                format(self.apple_id))
            logger.info("Clearing cookies and retrying")
            self.session.cookies.clear()
            self.session.cookies.save()
            self.session.cookies.load()
            self.session_token = self.get_session_token()
        if self.session_token is None:
            logger.error(("Error logging into iCloud account {}"). \
                format(self.apple_id))
            logger.error("iCloud API Authentication Failure, Aborted")
            #return
            msg = 'Invalid username/password'
            raise PyiCloudFailedLoginException(msg, 0)
        logger.debug("_<warn>____ PyiCloudService ______ authenticate/login _____")
        # NOTE(review): self.account_country is still None here --
        # get_session_token() unpacks account_country but never stores it.
        data = {
            'accountCountryCode': self.account_country,
            'extended_login': True,
            'dsWebAuthToken': self.session_token
        }
        try:
            ###### POST ==> /accountLogin
            req = self.session.post(
                self._setup_endpoint + '/accountLogin',
                data=json.dumps(data)
            )
        except PyiCloudAPIResponseError as error:
            msg = 'Invalid user email/password credentials'
            raise PyiCloudFailedLoginException(msg, error)
        # The login reply carries the per-account service URL table.
        response = req.json()
        self.dsid = response['dsInfo']['dsid']
        self.webservices = response['webservices']
        logger.debug("dsid=%s", self.dsid)
        logger.debug("webservices=%s", self.webservices)
        logger.debug("-----------------------")
        self.params.update({'dsid': self.dsid})
        logger.debug("_<warn>____ PyiCloudService ______ authenticate/save cookies _____")
        if not os.path.exists(self._cookie_directory):
            os.mkdir(self._cookie_directory)
        self.session.cookies.save()
        logger.debug("saving cookies=%s", self.session.cookies)
        logger.debug("Cookies saved to %s", self._get_cookiejar_path())
        logger.info("Authentication completed successfully")

    #------------------------------------------------------------------
    def get_session_token(self):
        """Fetch the Apple widget key, then exchange the credentials for a
        session token; returns None on failure.

        NOTE(review): account_country is unpacked below but never stored
        on self, so authenticate() posts accountCountryCode=None -- confirm.
        """
        logger.debug("_<warn>____ PyiCloudService ______ get_session_token _____")
        self.clientID = self.generateClientID()
        status, self.appleWidgetKey = \
            self.setupiCloud.requestAppleWidgetKey(self.clientID)
        #return self.idmsaApple.requestAppleSessionToken(self.user['apple_id'],
        #    self.user['password'],
        #    widgetKey
        #    )
        if status:
            session_token, account_country = \
                self.idmsaApple.requestAppleSessionToken(
                    self.user['apple_id'],
                    self.user['password'],
                    self.appleWidgetKey)
        else:
            #self.appleWidgetKey=response('error')
            return None
        logger.debug("session_token=%s", session_token)
        return session_token

    #------------------------------------------------------------------
    def generateClientID(self):
        """Return a fresh upper-cased UUID1 client id string."""
        logger.debug("_<warn>____ PyiCloudService ______ generateClientID _____")
        #return str(generateClientID()).upper()
        client_id = str(generateClientID()).upper()
        logger.debug("client_id=%s", client_id)
        return client_id

    #------------------------------------------------------------------
    def _get_cookiejar_path(self):
        # Get path for cookiejar file: the Apple ID reduced to its
        # word characters, inside the cookie directory.
        logger.debug("_<warn>____ PyiCloudService ______ _get_cookiejar_path _____")
        #return os.path.join(
        #    self._cookie_directory,
        #    ''.join([c for c in self.user.get('apple_id') if match(r'\w', c)])
        #)
        cookiejar_path = os.path.join(
            self._cookie_directory,
            ''.join([c for c in self.user.get('apple_id') if match(r'\w', c)])
        )
        logger.debug("cookiejar_path=%s", cookiejar_path)
        return cookiejar_path

    #------------------------------------------------------------------
    @property
    def requires_2fa(self):
        # NOTE(review): requires_2sa is a property, so the call below
        # invokes its boolean result and raises TypeError -- the
        # parentheses look wrong; confirm and drop them.
        return self.requires_2sa()
    @property
    def requires_2sa(self):
        """ Returns True if two-step authentication is required."""
        #return self.data.get('hsaChallengeRequired', False) \
        #    and self.data['dsInfo'].get('hsaVersion', 0) >= 1
        # FIXME: Implement 2FA for hsaVersion == 2
        rtn = self.data.get('hsaChallengeRequired', False) \
            and self.data['dsInfo'].get('hsaVersion', 0) >= 1
        return rtn

    #------------------------------------------------------------------
    @property
    def trusted_devices(self):
        """ Returns devices trusted for two-step authentication."""
        logger.debug("_<warn>____ PyiCloudService ______ trusted_devices _____")
        request = self.session.get(
            '%s/listDevices' % self._setup_endpoint,
            params=self.params
        )
        #return request.json().get('devices')
        rtn_value = request.json().get('devices')
        logger.debug("trusted_devices=%s", rtn_value)
        return rtn_value

    #------------------------------------------------------------------
    def send_verification_code(self, device):
        """ Requests that a verification code is sent to the given device"""
        logger.debug("_<warn>____ PyiCloudService ______ send_verification_code _____")
        data = json.dumps(device)
        request = self.session.post(
            '%s/sendVerificationCode' % self._setup_endpoint,
            params=self.params,
            data=data
        )
        #return request.json().get('success', False)
        verif_code = request.json().get('success', False)
        logger.debug("verif_code=%s", verif_code)
        return verif_code

    #------------------------------------------------------------------
    def validate_verification_code(self, device, code):
        """ Verifies a verification code received on a trusted device"""
        logger.debug("_<warn>____ PyiCloudService ______ validate_verification_code _____")
        device.update({
            'verificationCode': code,
            'trustBrowser': True
        })
        data = json.dumps(device)
        try:
            request = self.session.post(
                '%s/validateVerificationCode' % self._setup_endpoint,
                params=self.params,
                data=data
            )
        except PyiCloudAPIResponseError as error:
            if error.code == -21669:
                # Wrong verification code
                return False
            raise
        # Re-authenticate, which will both update the HSA data, and
        # ensure that we save the X-APPLE-WEBAUTH-HSA-TRUST cookie.
        self.authenticate()
        #return not self.requires_2sa
        needs_2sa = not self.requires_2sa
        logger.debug("needs_2sa=%s", needs_2sa)
        return needs_2sa

    #------------------------------------------------------------------
    @property
    def devices(self):
        """ Return all devices."""
        logger.debug("_<warn>____ PyiCloudService ______ devices _____")
        service_root = self.webservices['findme']['url']
        logger.debug("service_root=%s", service_root)
        logger.debug("session=%s", self.session)
        logger.debug("self.params=%s", self.params)
        return FindMyiPhoneServiceManager(service_root, self.session,
                    self.params)

    #------------------------------------------------------------------
    @property
    def account(self):
        """Return the AccountService bound to this session."""
        service_root = self.webservices['account']['url']
        return AccountService(service_root, self.session, self.params)

    #------------------------------------------------------------------
    @property
    def friends(self):
        """Return the Find My Friends service bound to this session."""
        service_root = self.webservices['fmf']['url']
        return FindFriendsService(service_root, self.session, self.params)

    #------------------------------------------------------------------
    # The block below is a disabled string literal kept from upstream
    # pyicloud (services not used by iCloud3); left untouched.
    '''
    @property
    def calendar(self):
        service_root = self.webservices['calendar']['url']
        return CalendarService(service_root, self.session, self.params)
    @property
    def iphone(self):
        return self.devices[0]
    @property
    def files(self):
        if not hasattr(self, '_files'):
            service_root = self.webservices['ubiquity']['url']
            self._files = UbiquityService(
                service_root,
                self.session,
                self.params
            )
        return self._files
    @property
    def photos(self):
        if not hasattr(self, '_photos'):
            service_root = self.webservices['ckdatabasews']['url']
            self._photos = PhotosService(
                service_root,
                self.session,
                self.params
            )
        return self._photos
    @property
    def contacts(self):
        service_root = self.webservices['contacts']['url']
        return ContactsService(service_root, self.session, self.params)
    @property
    def reminders(self):
        service_root = self.webservices['reminders']['url']
        return RemindersService(service_root, self.session, self.params)
    def __unicode__(self):
        return 'iCloud API: %s' % self.user.get('apple_id')
    def __str__(self):
        as_unicode = self.__unicode__()
        if sys.version_info[0] >= 3:
            return as_unicode
        else:
            return as_unicode.encode('ascii', 'ignore')
    def __repr__(self):
        return '<%s>' % str(self)
    '''
#==================================================================
class HTTPService:
    """Base class holding the shared HTTP session/request attributes.

    When *session* is a fully-populated service object (e.g.
    PyiCloudService) its attributes are adopted; otherwise the explicit
    keyword arguments are used as a fallback.
    """

    def __init__(self, session, response=None, origin=None, referer=None):
        try:
            self.session = session.session
            self.response = session.response
            self.origin = session.origin
            self.referer = session.referer
            self.user_agent = session.user_agent
        except AttributeError:
            # *session* is a bare session without the service attributes.
            # BUG FIX: original read "session = session" (a no-op), which
            # left self.session undefined on this path.
            self.session = session
            self.response = response
            self.origin = origin
            self.referer = referer
            self.user_agent = "Python (X11; Linux x86_64)"
#==================================================================
class SetupiCloudService(HTTPService):
    """Client for Apple's https://setup.icloud.com/setup/ws/1 endpoints.

    Fetches the Apple "widget key" required by the idmsa sign-in flow and
    exchanges an Apple session token for the authenticated iCloud cookies
    plus the account dsid.
    """
    def __init__(self, session):
        super(SetupiCloudService, self).__init__(session)
        self.url = "https://setup.icloud.com/setup/ws/1"
        self.urlKey = self.url + "/validate"
        self.urlLogin = self.url + "/accountLogin"
        self.appleWidgetKey = None  # set by requestAppleWidgetKey()
        self.cookies = None  # raw Set-Cookie header, set by requestCookies()
        self.dsid = None  # directory-services id, set by requestCookies()
    #------------------------------------------------------------------
    def requestAppleWidgetKey(self, clientID):
        """Request the widget key from the /validate endpoint.

        Returns ``(True, widget_key)`` on success or ``(False, error_msg)``
        on failure; never raises for a failed request.
        """
        logger.debug("_<warn>____ SetupiCloudService ______ requestAppleWidgetKey _____")
        error_msg = ""
        self.session.headers.update(self.getRequestHeader())
        apple_widget_params = self.getQueryParameters(clientID)
        logger.debug("urlKey=%s", self.urlKey)
        logger.debug("apple_widget_params=%s", apple_widget_params)
        logger.debug("self.session.headers=%s", self.session.headers)
        logger.debug(">>>get")
        self.response = self.session.get(self.urlKey,
                                         params=apple_widget_params)
        try:
            response_json = self.response.json()
            logger.debug("-----back from GET (requestAppleWidgetKey)")
            logger.debug(">>>>> response_json=%s", response_json)
            logger.debug("testing response")
            logger.debug(">>>>> response_t/f=%s", ('error' in response_json))
            logger.debug("tested response")
            if 'error' in response_json:
                error_msg = str(response_json.get('error'))
            # A missing web-auth-token cookie is expected at this stage of
            # the flow and is not treated as a failure.
            if (error_msg != '' and
                    error_msg != "Missing X-APPLE-WEBAUTH-TOKEN cookie"):
                logger.debug(">>>>> error_msg=%s", error_msg)
                logger.error("Error requesting Apple Widget Key (%s)",
                             error_msg)
                return False, error_msg
            self.appleWidgetKey = self.findQyery(self.response.text,
                                                "widgetKey=")
        except Exception:
            # Best-effort contract: report failure via the return value
            # instead of raising.
            if error_msg == '':
                error_msg = "Unknown Error"
            return False, error_msg
        logger.debug("appleWidgetKey=%s", self.appleWidgetKey)
        return True, self.appleWidgetKey
    #------------------------------------------------------------------
    def requestCookies(self, appleSessionToken, clientID):
        """Exchange *appleSessionToken* for the iCloud cookies and dsid.

        Returns ``(cookies, dsid)``; raises Exception when either value is
        missing from the response.
        """
        logger.debug("_<warn>____ SetupiCloudService ______ requestCookies _____")
        self.session.headers.update(self.getRequestHeader())
        login_payload = self.getLoginRequestPayload(appleSessionToken)
        login_params = self.getQueryParameters(clientID)
        logger.debug("urlLogin=%s", self.urlLogin)
        logger.debug("login_payload=%s", login_payload)
        logger.debug("login_params=%s", login_params)
        logger.debug(">>>post")
        self.response = self.session.post(self.urlLogin,
                                          login_payload,
                                          params=login_params)
        try:
            self.cookies = self.response.headers["Set-Cookie"]
        except Exception as e:
            raise Exception("requestCookies: Cookies query failed",
                            self.urlLogin, repr(e))
        try:
            self.dsid = self.response.json()["dsInfo"]["dsid"]
        except Exception as e:
            raise Exception("requestCookies: dsid query failed",
                            self.urlLogin, repr(e))
        logger.debug("cookies=%s", self.cookies)
        logger.debug("dsid=%s", self.dsid)
        return self.cookies, self.dsid
    #------------------------------------------------------------------
    def findQyery(self, data, query):
        """Return the alphanumeric run immediately following *query* in *data*.

        Raises Exception when *query* does not occur. (The misspelled name
        is kept for backward compatibility with existing callers.)
        """
        response = ''
        foundAt = data.find(query)
        if foundAt == -1:
            except_str = "findQyery: " + query + " could not be found in data"
            raise Exception(except_str)
        foundAt += len(query)
        # BUG FIX: bounds check added; the original indexed one past the
        # end of ``data`` when the match ran to the final character.
        while foundAt < len(data) and data[foundAt].isalnum():
            response += data[foundAt]
            foundAt += 1
        return response
    #------------------------------------------------------------------
    def getRequestHeader(self):
        """Build the common request headers for the setup endpoints."""
        logger.debug("_<warn>____ SetupiCloudService ______ getRequestHeader _____")
        header = {
            "Accept": "*/*",
            "Connection": "keep-alive",
            "Content-Type": "application/json",
            "User-Agent": self.user_agent,
            "Origin": self.origin,
            "Referer": self.referer,
        }
        logger.debug("header=%s", header)
        return header
    #------------------------------------------------------------------
    def getQueryParameters(self, clientID):
        """Build the common query parameters; NameError when *clientID* is falsy."""
        logger.debug("_<warn>____ SetupiCloudService ______ getQueryParameters _____")
        if not clientID:
            raise NameError("getQueryParameters: clientID not found")
        data = {
            "clientBuildNumber": "16CHotfix21",
            "clientID": clientID,
            "clientMasteringNumber": "16CHotfix21",
        }
        logger.debug("data=%s", data)
        return data
    #------------------------------------------------------------------
    def getLoginRequestPayload(self, appleSessionToken):
        """Build the accountLogin JSON body.

        Raises NameError when the session token is missing.
        """
        logger.debug("_<warn>____ SetupiCloudService ______ getLoginRequestPayload _____")
        if not appleSessionToken:
            err_str = "getLoginRequestPayload: X-Apple-ID-Session-Id not found"
            raise NameError(err_str)
        # BUG FIX: the original called the ``json`` module itself
        # (``json({...})``), which raises TypeError; use json.dumps().
        data = json.dumps({
            "dsWebAuthToken": appleSessionToken,
            "extended_login": True,
        })
        logger.debug("data=%s", data)
        return data
#==================================================================
class IdmsaAppleService(HTTPService):
    """Client for the https://idmsa.apple.com authentication endpoints.

    Performs the username/password sign-in and, when the account is
    eligible, the two-step-verification "trust" request, capturing the
    session token and related headers from Apple's responses.
    """
    def __init__(self, session):
        super(IdmsaAppleService, self).__init__(session)
        self.url = "https://idmsa.apple.com"
        self.urlAuth = self.url + "/appleauth/auth/signin?widgetKey="
        self.url2sv = self.url + "/appleauth/auth/2sv/trust"
        # The fields below are populated from the response headers by
        # requestAppleSessionToken().
        self.account_country = 'USA'
        self.session_token = None
        self.session_id = None
        self.request_id = None
        self.scnt = None
        self.twoSV_trust_eligible = True
        self.twoSV_trust_token = None
    #------------------------------------------------------------------
    def requestAppleSessionToken(self, user, password, appleWidgetKey):
        """POST the credentials and harvest the session-token headers.

        Returns ``(session_token, account_country)`` on success, or
        ``(None, None)`` when a required response header is missing.
        """
        logger.debug("_<warn>____ IdmsaAppleService ______ requestAppleSessionToken ___beg__")
        self.session.headers.update(self.getRequestHeader(appleWidgetKey))
        url = self.urlAuth + appleWidgetKey
        user_pw_payload = self.getRequestPayload(user, password)
        logger.debug("going to sess.post >%s", url)
        self.response = self.session.post(self.urlAuth + appleWidgetKey,
                                          user_pw_payload)
        logger.debug("_<warn>____ IdmsaAppleService ______ requestAppleSessionToken __end___")
        try:
            headers = self.response.headers
            logger.debug("Session headers=%s", headers)
            self.session_token = headers["X-Apple-Session-Token"]
            self.session_id = headers["X-Apple-ID-Session-Id"]
            self.request_id = headers["X-Apple-I-Request-ID"]
            self.scnt = headers["scnt"]
            if "X-Apple-ID-Account-Country" in headers:
                self.account_country = headers["X-Apple-ID-Account-Country"]
            if "X-Apple-TwoSV-Trust-Eligible" in headers:
                # NOTE(review): header values are strings, so this is
                # truthy even for "false" — confirm intended semantics.
                self.twoSV_trust_eligible = headers["X-Apple-TwoSV-Trust-Eligible"]
            else:
                self.twoSV_trust_eligible = False
        except KeyError:
            # A required header was absent; signal failure to the caller.
            return None, None
        except Exception as e:
            logger.debug("AppleSessionToken error")
            err_str = "requestAppleSessionToken: " + \
                      "Apple Session Token query failed"
            raise Exception(err_str,
                            self.urlAuth, repr(e))
        if self.twoSV_trust_eligible:
            self.requestApple2svToken(appleWidgetKey, user_pw_payload)
        return self.session_token, self.account_country
    # Example response headers:
    #'X-Apple-I-Request-ID': '13edc839-129a-455c-994a-1ee280478d8e'
    #'X-Apple-TwoSV-Trust-Eligible': 'true'
    #'X-Apple-ID-Session-Id': '982A266AF7E9B9C6D1FB948D4542C687'
    #'scnt': 'f0003baa27ac5181307b73a5573e6bd2'
    #'X-Apple-ID-Account-Country': 'USA',
    #------------------------------------------------------------------
    def requestApple2svToken(self, appleWidgetKey, user_pw_payload):
        """POST the two-step-verification 'trust' request; return value unused."""
        logger.debug("_<warn>____ IdmsaAppleService ______ requestAppleT2svToken ___beg__")
        self.session.headers.update(self.get2svRequestHeader(appleWidgetKey))
        self.response = self.session.post(self.url2sv, user_pw_payload)
        try:
            headers = self.response.headers
            logger.debug("Session headers=%s", headers)
        except Exception as e:
            err_str = "requestAppleTwoSVToken: " + \
                      "Apple Session 2SV Token query failed"
            raise Exception(err_str,
                            self.urlAuth, repr(e))
        return
    #------------------------------------------------------------------
    def getRequestHeader(self, appleWidgetKey):
        """Build the headers for the sign-in request; NameError when key is falsy."""
        logger.debug("_<warn>____ IdmsaAppleService ___x___ getRequestHeader _____")
        if not appleWidgetKey:
            raise NameError("getRequestHeader: clientID not found")
        #return {
        #    "Accept": "application/json, text/javascript",
        #    "Content-Type": "application/json",
        #    "User-Agent": self.user_agent,
        #    "X-Apple-Widget-Key": appleWidgetKey,
        #    "X-Requested-With": "XMLHttpRequest",
        #    "Origin": self.origin,
        #    "Referer": self.referer,
        #    }
        header = {
            "Accept": "application/json, text/javascript",
            "Content-Type": "application/json",
            "User-Agent": self.user_agent,
            "X-Apple-Widget-Key": appleWidgetKey,
            "X-Requested-With": "XMLHttpRequest",
            "Origin": self.origin,
            "Referer": self.referer,
        }
        logger.debug("header=%s", header)
        return header
    #------------------------------------------------------------------
    def get2svRequestHeader(self, appleWidgetKey):
        """Build the headers for the 2sv/trust request (adds session id and scnt)."""
        logger.debug("_<warn>____ IdmsaAppleService ___x___ getRequestHeader _____")
        if not appleWidgetKey:
            logger.error("getRequestHeader: clientID not found")
            raise NameError("getRequestHeader: clientID not found")
        header = {
            "Origin": self.origin,
            "Referer": self.referer,
            "Content-Type": "application/json",
            "Accept": "application/json, text/javascript",
            "User-Agent": self.user_agent,
            "X-Requested-With": "XMLHttpRequest",
            "X-Apple-Widget-Key": appleWidgetKey,
            "X-Apple-ID-Session-Id": self.session_id,
            "scnt": self.scnt
        }
        logger.debug("header=%s", header)
        return header
    #"User-Agent": self.user_ageny,
    #"Accept": "application/json, text/javascript",
    #"Content-Type": "application/json",
    #------------------------------------------------------------------
    def getRequestPayload(self, user, password):
        """Build the JSON sign-in payload; NameError when a credential is missing."""
        logger.debug("_<warn>____ IdmsaAppleService ___x___ getRequestPayload _____")
        if not user:
            raise NameError("getAuthenticationRequestPayload: user not found")
        if not password:
            err_str = "getAuthenticationRequestPayload: password not found"
            raise NameError(err_str)
        #return json.dumps({
        #    "accountName": user,
        #    "password": password,
        #    "rememberMe": False,
        #    })
        data = json.dumps({
            "accountName": user,
            "password": password,
            "rememberMe": True,
        })
        logger.debug("json=%s", data)
        return data
#==================================================================
class FindFriendsService(object):
    """
    The 'Find my Friends' iCloud service
    This connects to iCloud and returns friend data including the near-realtime
    latitude and longitude.
    """
    def __init__(self, service_root, session, params):
        # Log under the *caller's* module name, mirroring the other services.
        callee = inspect.stack()[2]
        module = inspect.getmodule(callee[0])
        logger = logging.getLogger(module.__name__).getChild('http')
        logger.debug("_<warn>____ FindFriendsService _____ __init__ _____")
        self.session = session
        self.params = params
        self._service_root = service_root
        self._friend_endpoint = '%s/fmipservice/client/fmfWeb/initClient' % (
            self._service_root,
        )
        self._data = {}  # lazily filled by the ``data`` property
    def refresh_data(self):
        """
        Fetches all data from Find my Friends endpoint
        """
        logger.debug("_<warn>____ FindFriendsService ______ refresh_data _____")
        params = dict(self.params)
        # Web-client context that Apple's endpoint expects.
        fake_data = json.dumps({
            'clientContext': {
                'appVersion': '1.0',
                'contextApp': 'com.icloud.web.fmf',
                'mapkitAvailable': True,
                'productType': 'fmfWeb',
                'tileServer': 'Apple',
                'userInactivityTimeInMS': 537,
                'windowInFocus': False,
                'windowVisible': True
            },
            'dataContext': None,
            'serverContext': None
        })
        req = self.session.post(self._friend_endpoint,
                                data=fake_data, params=params)
        logger.debug("fmf-response=%s", req)
        self.response = req.json()
        return self.response
    @property
    def data(self):
        """Cached service payload; fetched on first access."""
        if not self._data:
            self._data = self.refresh_data()
        return self._data
    @property
    def locations(self):
        return self.data.get('locations')
    @property
    def followers(self):
        return self.data.get('followers')
    @property
    def friend_fences(self):
        return self.data.get('friendFencesISet')
    @property
    def my_fences(self):
        return self.data.get('myFencesISet')
    @property
    def contacts(self):
        return self.data.get('contactDetails')
    @property
    def details(self):
        # Alias of ``contacts`` (kept for backward compatibility).
        return self.data.get('contactDetails')
#==================================================================
class FindMyiPhoneServiceManager(object):
    """ The 'Find my iPhone' iCloud service
    This connects to iCloud and return phone data including the near-realtime
    latitude and longitude.
    """
    def __init__(self, service_root, session, params):
        # Log under the *caller's* module name, like the other services.
        callee = inspect.stack()[2]
        module = inspect.getmodule(callee[0])
        logger = logging.getLogger(module.__name__).getChild('http')
        logger.debug("_<warn>____ FindMyiPhoneServiceManager _____ __init__ _____")
        self.session = session
        self.params = params
        self._service_root = service_root
        self._fmip_endpoint = '%s/fmipservice/client/web' % self._service_root
        self._fmip_refresh_url = '%s/refreshClient' % self._fmip_endpoint
        self._fmip_sound_url = '%s/playSound' % self._fmip_endpoint
        self._fmip_message_url = '%s/sendMessage' % self._fmip_endpoint
        self._fmip_lost_url = '%s/lostDevice' % self._fmip_endpoint
        # Mapping of device id -> AppleDevice, populated immediately
        # (performs a network call).
        self._devices = {}
        self.refresh_client()
    #------------------------------------------------------------------
    def refresh_client(self):
        """ Refreshes the FindMyiPhoneService endpoint,
        This ensures that the location data is up-to-date.

        Raises PyiCloudNoDevicesException when no devices are returned.
        """
        callee = inspect.stack()[2]
        module = inspect.getmodule(callee[0])
        logger = logging.getLogger(module.__name__).getChild('http')
        #logger = logging.getLogger(__name__)
        logger.debug("_<warn>____ FindMyiPhoneServiceManager _____ refresh_client _____")
        logger.debug("self._fmip_refresh_url=%s", self._fmip_refresh_url)
        logger.debug("self.params=%s", self.params)
        req = self.session.post(
            self._fmip_refresh_url,
            params=self.params,
            data=json.dumps(
                {
                    'clientContext': {
                        'fmly': True,
                        'shouldLocate': True,
                        'selectedDevice': 'all',
                    }
                }
            )
        )
        self.response = req.json()
        # Wrap each returned device, updating any existing wrapper in place.
        for device_info in self.response['content']:
            device_id = device_info['id']
            if device_id not in self._devices:
                self._devices[device_id] = AppleDevice(
                    device_info,
                    self.session,
                    self.params,
                    manager=self,
                    sound_url=self._fmip_sound_url,
                    lost_url=self._fmip_lost_url,
                    message_url=self._fmip_message_url,
                )
            else:
                self._devices[device_id].update(device_info)
        if not self._devices:
            raise PyiCloudNoDevicesException()
    #------------------------------------------------------------------
    def __getitem__(self, key):
        # Integer keys index into the device mapping by position.
        if isinstance(key, int):
            if six.PY3:
                key = list(self.keys())[key]
            else:
                key = self.keys()[key]
        return self._devices[key]
    def __getattr__(self, attr):
        # Delegate unknown attributes (keys, values, items, ...) to the dict.
        return getattr(self._devices, attr)
    def __unicode__(self):
        return six.text_type(self._devices)
    def __str__(self):
        as_unicode = self.__unicode__()
        if sys.version_info[0] >= 3:
            return as_unicode
        else:
            return as_unicode.encode('ascii', 'ignore')
    def __repr__(self):
        return six.text_type(self)
#==================================================================
class AppleDevice(object):
    """A single device exposed by the 'Find my iPhone' service.

    Wraps the raw ``content`` dict returned by the service and offers the
    play-sound / display-message / lost-mode actions. Unknown attribute
    and item access is delegated to ``content``.
    """
    def __init__(
        self, content, session, params, manager,
        sound_url=None, lost_url=None, message_url=None
    ):
        self.content = content
        self.manager = manager
        self.session = session
        self.params = params
        self.sound_url = sound_url
        self.lost_url = lost_url
        self.message_url = message_url
    #------------------------------------------------------------------
    def update(self, data):
        """Replace the cached device data with *data*."""
        self.content = data
    #------------------------------------------------------------------
    def location(self):
        """Refresh the client and return the device's location dict."""
        self.manager.refresh_client()
        return self.content['location']
    #------------------------------------------------------------------
    def status(self, additional=None):
        """ Returns status information for device.
        This returns only a subset of possible properties.

        *additional* may list extra property names to include.
        (BUG FIX: the default used to be a shared mutable ``[]``.)
        """
        self.manager.refresh_client()
        fields = ['batteryLevel', 'deviceDisplayName', 'deviceStatus', 'name']
        if additional:
            fields += additional
        properties = {}
        for field in fields:
            properties[field] = self.content.get(field)
        return properties
    #------------------------------------------------------------------
    def play_sound(self, subject='Find My iPhone Alert'):
        """ Send a request to the device to play a sound.
        It's possible to pass a custom message by changing the `subject`.
        """
        data = json.dumps({
            'device': self.content['id'],
            'subject': subject,
            'clientContext': {
                'fmly': True
            }
        })
        self.session.post(
            self.sound_url,
            params=self.params,
            data=data
        )
    #------------------------------------------------------------------
    def display_message(
        self, subject='Find My iPhone Alert', message="This is a note",
        sounds=False
    ):
        """ Send a request to the device to display a message.
        It's possible to pass a custom message by changing the `subject`.
        """
        data = json.dumps(
            {
                'device': self.content['id'],
                'subject': subject,
                'sound': sounds,
                'userText': True,
                'text': message
            }
        )
        self.session.post(
            self.message_url,
            params=self.params,
            data=data
        )
    #------------------------------------------------------------------
    def lost_device(
        self, number,
        text='This iPhone has been lost. Please call me.',
        newpasscode=""
    ):
        """ Send a request to the device to trigger 'lost mode'.
        The device will show the message in `text`, and if a number has
        been passed, then the person holding the device can call
        the number without entering the passcode.
        """
        data = json.dumps({
            'text': text,
            'userText': True,
            'ownerNbr': number,
            'lostModeEnabled': True,
            'trackingEnabled': True,
            'device': self.content['id'],
            'passcode': newpasscode
        })
        self.session.post(
            self.lost_url,
            params=self.params,
            data=data
        )
    #------------------------------------------------------------------
    @property
    def data(self):
        """The raw device dict as returned by the service."""
        return self.content
    def __getitem__(self, key):
        return self.content[key]
    def __getattr__(self, attr):
        # Delegate unknown attribute lookups to the content dict
        # (e.g. ``device.get(...)``, ``device.keys()``).
        return getattr(self.content, attr)
    def __unicode__(self):
        display_name = self['deviceDisplayName']
        name = self['name']
        return '%s: %s' % (
            display_name,
            name,
        )
    def __str__(self):
        as_unicode = self.__unicode__()
        if sys.version_info[0] >= 3:
            return as_unicode
        else:
            return as_unicode.encode('ascii', 'ignore')
    def __repr__(self):
        return '<AppleDevice(%s)>' % str(self)
#==================================================================
class PyiCloudException(Exception):
    """Base class for all pyicloud errors."""
    pass
# BUG FIX: this class used to be defined twice, and the second (effective)
# definition subclassed bare ``Exception``, so ``except PyiCloudException``
# did not catch it. A single definition under the package base is kept.
class PyiCloudNoDevicesException(PyiCloudException):
    """Raised when the 'Find my iPhone' service returns no devices."""
    pass
class PyiCloudAPIResponseError(PyiCloudException):
    """Raised when an iCloud web-service call returns an error payload."""
    def __init__(self, reason, code):
        self.reason = reason
        self.code = code
        message = reason
        if code:
            message += " (%s)" % code
        super(PyiCloudAPIResponseError, self).__init__(message)
class PyiCloudFailedLoginException(PyiCloudException):
    """Raised when authentication with Apple fails."""
    pass
class PyiCloud2SARequiredError(PyiCloudException):
    """Raised when two-step authentication is required to proceed."""
    def __init__(self, url):
        message = "Two-step authentication required for %s" % url
        super(PyiCloud2SARequiredError, self).__init__(message)
class NoStoredPasswordAvailable(PyiCloudException):
    """Raised when no password is available from the keyring."""
    pass
class PyiCloudServiceNotActivatedErrror(PyiCloudAPIResponseError):
    # NOTE: the misspelling ("Errror") is kept for backward compatibility.
    pass
| 36.6224 | 99 | 0.542488 |
b4f096a8be6fa28cefc7714a4f02a23dca797bba | 2,250 | py | Python | python/phonenumbers/data/region_NA.py | vishnuku/python-phonenumbers | 6ac2cdd06b7ccf709a8efb21629cf2c5f030e627 | [
"Apache-2.0"
] | 3 | 2018-12-02T23:09:00.000Z | 2018-12-02T23:16:59.000Z | python/phonenumbers/data/region_NA.py | carljm/python-phonenumbers | 494044aaf75443dbfd62b8d1352b441af6a458ae | [
"Apache-2.0"
] | null | null | null | python/phonenumbers/data/region_NA.py | carljm/python-phonenumbers | 494044aaf75443dbfd62b8d1352b441af6a458ae | [
"Apache-2.0"
] | null | null | null | """Auto-generated file, do not edit by hand. NA metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_NA = PhoneMetadata(id='NA', country_code=264, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[68]\\d{7,8}', possible_number_pattern='\\d{8,9}', possible_length=(8, 9)),
fixed_line=PhoneNumberDesc(national_number_pattern='6(?:1(?:17|2(?:[0189]\\d|[2-6]|7\\d?)|3(?:[01378]|2\\d)|4(?:[024]|10?|3[15]?)|69|7[014])|2(?:17|5(?:[0-36-8]|4\\d?)|69|70)|3(?:17|2(?:[0237]\\d?|[14-689])|34|6[289]|7[01]|81)|4(?:17|2(?:[012]|7\\d?)|4(?:[06]|1\\d?)|5(?:[01357]|[25]\\d?)|69|7[01])|5(?:17|2(?:[0459]|[23678]\\d?)|69|7[01])|6(?:17|2(?:5|6\\d?)|38|42|69|7[01])|7(?:17|2(?:[569]|[234]\\d?)|3(?:0\\d?|[13])|6[89]|7[01]))\\d{4}', example_number='61221234', possible_length=(8, 9)),
mobile=PhoneNumberDesc(national_number_pattern='(?:60|8[125])\\d{7}', possible_number_pattern='\\d{9}', example_number='811234567', possible_length=(9,)),
toll_free=PhoneNumberDesc(),
premium_rate=PhoneNumberDesc(national_number_pattern='8701\\d{5}', possible_number_pattern='\\d{9}', example_number='870123456', possible_length=(9,)),
shared_cost=PhoneNumberDesc(),
personal_number=PhoneNumberDesc(),
voip=PhoneNumberDesc(national_number_pattern='8(?:3\\d{2}|86)\\d{5}', possible_number_pattern='\\d{8,9}', example_number='88612345', possible_length=(8, 9)),
pager=PhoneNumberDesc(),
uan=PhoneNumberDesc(),
voicemail=PhoneNumberDesc(),
no_international_dialling=PhoneNumberDesc(),
national_prefix='0',
national_prefix_for_parsing='0',
number_format=[NumberFormat(pattern='(8\\d)(\\d{3})(\\d{4})', format='\\1 \\2 \\3', leading_digits_pattern=['8[1235]'], national_prefix_formatting_rule='0\\1'),
NumberFormat(pattern='(6\\d)(\\d{3})(\\d{3,4})', format='\\1 \\2 \\3', leading_digits_pattern=['6'], national_prefix_formatting_rule='0\\1'),
NumberFormat(pattern='(88)(\\d{3})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['88'], national_prefix_formatting_rule='0\\1'),
NumberFormat(pattern='(870)(\\d{3})(\\d{3})', format='\\1 \\2 \\3', leading_digits_pattern=['870'], national_prefix_formatting_rule='0\\1')])
| 97.826087 | 497 | 0.659111 |
f5b18c46738890812e0f004126797488274a0a76 | 1,559 | py | Python | model/encoder.py | wheeltune/kid-neuro | 131ec888e4f0c3ee1d7b4c4ebf57a6b1d5323d8a | [
"MIT"
] | null | null | null | model/encoder.py | wheeltune/kid-neuro | 131ec888e4f0c3ee1d7b4c4ebf57a6b1d5323d8a | [
"MIT"
] | null | null | null | model/encoder.py | wheeltune/kid-neuro | 131ec888e4f0c3ee1d7b4c4ebf57a6b1d5323d8a | [
"MIT"
] | null | null | null | import torch.nn as nn
from .norm_layer import NormLayer
__all__ = ["KeystrokesEncoder"]
#===============================================================================
class KeystrokesEncoder(nn.Module):
    """Two-stage LSTM encoder for keystroke sequences.

    The input features (key codes plus four timing features) are
    normalized, passed through a first LSTM, normalized and dropped out,
    then passed through a second LSTM; the final hidden state of the last
    layer is returned as the sequence embedding.
    """
    #---------------------------------------------------------------------------
    def __init__(self, d_codes, d_hidden, n_layers, p_rnn_dropout=0.2, dropout=0.5):
        super().__init__()
        self.d_codes = d_codes
        # Four timing features accompany the key-code features.
        self.d_times = 4
        self.d_model = self.d_codes + self.d_times
        self.d_hidden = d_hidden
        self.p_dropout = dropout
        self.p_rnn_dropout = p_rnn_dropout
        self.batch_norm_1 = NormLayer(self.d_model)
        self.rnn_1 = nn.LSTM(
            self.d_model,
            self.d_hidden,
            num_layers=n_layers,
            dropout=self.p_rnn_dropout,
            batch_first=True
        )
        self.batch_norm_2 = NormLayer(self.d_hidden)
        self.dropout = nn.Dropout(self.p_dropout)
        self.rnn_2 = nn.LSTM(
            self.d_hidden,
            self.d_hidden,
            num_layers=n_layers,
            dropout=self.p_rnn_dropout,
            batch_first=True,
        )
    #---------------------------------------------------------------------------
    def forward(self, x):
        # x: assumed (batch, seq, d_model) since batch_first=True — TODO confirm.
        x = self.batch_norm_1(x)
        x, _ = self.rnn_1(x)
        x = self.batch_norm_2(x)
        x = self.dropout(x)
        _, (ht, _) = self.rnn_2(x)
        # Final hidden state of the last LSTM layer: (batch, d_hidden).
        x = ht[-1]
        return x
#=============================================================================== | 26.87931 | 84 | 0.452854 |
f049f78355aff1b5300e4011a5c634fd8860d06f | 4,586 | py | Python | guts/rpc.py | smallwormer/stable-liberty-guts | e635b710cdd210f70e9d50c3b85fffdeb53e8f01 | [
"Apache-2.0"
] | null | null | null | guts/rpc.py | smallwormer/stable-liberty-guts | e635b710cdd210f70e9d50c3b85fffdeb53e8f01 | [
"Apache-2.0"
] | null | null | null | guts/rpc.py | smallwormer/stable-liberty-guts | e635b710cdd210f70e9d50c3b85fffdeb53e8f01 | [
"Apache-2.0"
] | 1 | 2022-03-03T05:41:31.000Z | 2022-03-03T05:41:31.000Z | # Copyright (c) 2015 Aptira Pty Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__all__ = [
'init',
'cleanup',
'set_defaults',
'add_extra_exmods',
'clear_extra_exmods',
'get_allowed_exmods',
'RequestContextSerializer',
'get_client',
'get_server',
'get_notifier',
'TRANSPORT_ALIASES',
]
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_serialization import jsonutils
from osprofiler import profiler
import guts.context
import guts.exception
CONF = cfg.CONF
# Module-level messaging singletons, created by init() and torn down by
# cleanup().
TRANSPORT = None
NOTIFIER = None
# Exception modules whose exception types may be deserialized from remote
# peers.
ALLOWED_EXMODS = [
    guts.exception.__name__,
]
EXTRA_EXMODS = []
# Historical transport driver module paths mapped to their short aliases.
TRANSPORT_ALIASES = {
    'guts.openstack.common.rpc.impl_kombu': 'rabbit',
    'guts.openstack.common.rpc.impl_qpid': 'qpid',
    'guts.openstack.common.rpc.impl_zmq': 'zmq',
    'guts.rpc.impl_kombu': 'rabbit',
    'guts.rpc.impl_qpid': 'qpid',
    'guts.rpc.impl_zmq': 'zmq',
}
def init(conf):
    """Create the global messaging TRANSPORT and NOTIFIER singletons."""
    global TRANSPORT, NOTIFIER
    exmods = get_allowed_exmods()
    TRANSPORT = messaging.get_transport(conf,
                                        allowed_remote_exmods=exmods,
                                        aliases=TRANSPORT_ALIASES)
    serializer = RequestContextSerializer(JsonPayloadSerializer())
    NOTIFIER = messaging.Notifier(TRANSPORT, serializer=serializer)
def initialized():
    """Return True once init() has created both singletons."""
    return None not in [TRANSPORT, NOTIFIER]
def cleanup():
    """Tear down the transport; init() must have been called first."""
    global TRANSPORT, NOTIFIER
    assert TRANSPORT is not None
    assert NOTIFIER is not None
    TRANSPORT.cleanup()
    TRANSPORT = NOTIFIER = None
def set_defaults(control_exchange):
    """Set the default control exchange for oslo.messaging."""
    messaging.set_transport_defaults(control_exchange)
def add_extra_exmods(*args):
    """Register extra exception modules allowed over RPC."""
    EXTRA_EXMODS.extend(args)
def clear_extra_exmods():
    """Remove all extra exception modules (clears the list in place)."""
    del EXTRA_EXMODS[:]
def get_allowed_exmods():
    """Return a new list of all allowed exception modules."""
    return ALLOWED_EXMODS + EXTRA_EXMODS
class JsonPayloadSerializer(messaging.NoOpSerializer):
    """Serializer converting entities into JSON-safe primitive values."""
    @staticmethod
    def serialize_entity(context, entity):
        # convert_instances=True recursively converts arbitrary objects.
        return jsonutils.to_primitive(entity, convert_instances=True)
class RequestContextSerializer(messaging.Serializer):
    """Wrap a base serializer and (de)serialize the request context.

    Also propagates osprofiler trace information across RPC boundaries so
    the remote side can continue the same trace.
    """
    def __init__(self, base):
        # ``base`` may be None, in which case entities pass through as-is.
        self._base = base
    def serialize_entity(self, context, entity):
        if not self._base:
            return entity
        return self._base.serialize_entity(context, entity)
    def deserialize_entity(self, context, entity):
        if not self._base:
            return entity
        return self._base.deserialize_entity(context, entity)
    def serialize_context(self, context):
        _context = context.to_dict()
        # Attach the active profiler trace, if any.
        prof = profiler.get()
        if prof:
            trace_info = {
                "hmac_key": prof.hmac_key,
                "base_id": prof.get_base_id(),
                "parent_id": prof.get_id()
            }
            _context.update({"trace_info": trace_info})
        return _context
    def deserialize_context(self, context):
        # Re-initialize the profiler from the propagated trace, if present.
        trace_info = context.pop("trace_info", None)
        if trace_info:
            profiler.init(**trace_info)
        return guts.context.RequestContext.from_dict(context)
def get_client(target, version_cap=None, serializer=None):
    """Return an RPC client for *target*; init() must have been called."""
    assert TRANSPORT is not None
    serializer = RequestContextSerializer(serializer)
    return messaging.RPCClient(TRANSPORT,
                               target,
                               version_cap=version_cap,
                               serializer=serializer)
def get_server(target, endpoints, serializer=None):
    """Return an eventlet-based RPC server for *target* and *endpoints*."""
    assert TRANSPORT is not None
    serializer = RequestContextSerializer(serializer)
    return messaging.get_rpc_server(TRANSPORT,
                                    target,
                                    endpoints,
                                    executor='eventlet',
                                    serializer=serializer)
def get_notifier(service=None, host=None, publisher_id=None):
    """Return a notifier; publisher_id defaults to "<service>.<host>"."""
    assert NOTIFIER is not None
    if not publisher_id:
        publisher_id = "%s.%s" % (service, host or CONF.host)
    return NOTIFIER.prepare(publisher_id=publisher_id)
| 28.484472 | 78 | 0.657436 |
b60f1670ef02380a8deac8a7db616df055900481 | 1,864 | py | Python | flooddrake/boundary_conditions.py | firedrakeproject/flooddrake | 4a78c426a7171a27d02f864ee8e9d7b324163116 | [
"MIT"
] | 6 | 2018-07-27T04:09:53.000Z | 2022-03-07T21:19:43.000Z | flooddrake/boundary_conditions.py | firedrakeproject/flooddrake | 4a78c426a7171a27d02f864ee8e9d7b324163116 | [
"MIT"
] | 19 | 2016-06-14T09:13:01.000Z | 2021-05-20T08:40:55.000Z | flooddrake/boundary_conditions.py | firedrakeproject/flooddrake | 4a78c426a7171a27d02f864ee8e9d7b324163116 | [
"MIT"
] | 4 | 2016-06-09T10:09:28.000Z | 2022-03-07T21:19:43.000Z | """ boundary conditions """
from __future__ import division
from __future__ import absolute_import
# boundary condition options
options = ['solid wall', 'inflow', 'outflow']
# horizontal directions
directions = ['both', 'x', 'y']


class BoundaryConditions(object):
    """ Implementation of a weakly imposed boundary conditions for the boundary flux in flooddrake

    :param marker: the marker of the boundary (e.g. 1, 2, 3, 4) for 2d domain
    :type marker: int

    :param option: boundary condition option
    :type option: str (either 'solid wall', 'inflow' or 'outflow')

    :param value: state vector at marked boundary
    :type value: :class:`Function` (None if option='solid wall')

    See help(mesh) for details on markers. E.g. The boundary markers for a UnitSquareMesh
    are numbered as follows:
    * 1: plane x == 0
    * 2: plane x == 1
    * 3: plane y == 0
    * 4: plane y == 1
    """

    def __init__(self, marker, option='solid wall', value=None, direction='both'):
        self.option = option
        self.value = value
        self.marker = marker
        self.direction = direction
        if self.option not in options:
            raise ValueError('bc option must be either solid wall, inflow or outflow')
        # set any value to None if option is not inflow
        if self.option == 'outflow' or self.option == 'solid wall':
            self.value = None
        if self.option == 'inflow':
            # An inflow condition must carry the boundary state vector.
            if self.value is None:
                raise ValueError('inflow bc option needs w specified at boundary')
        # check that one of directions is given
        if self.direction not in directions:
            raise ValueError('horizontal direction of condition must either be both, x or y')
        super(BoundaryConditions, self).__init__()
| 31.066667 | 98 | 0.625 |
de9c77f98b0b0f5529406a4a46a2bd009fdc93ea | 3,467 | py | Python | colour/appearance/tests/test_nayatani95.py | colour-science/colour | 6d9b1b8b9e96b5a3c3e3b64d9954be808e4e37a8 | [
"BSD-3-Clause"
] | 1,380 | 2015-01-10T12:30:33.000Z | 2022-03-30T10:19:57.000Z | colour/appearance/tests/test_nayatani95.py | colour-science/colour | 6d9b1b8b9e96b5a3c3e3b64d9954be808e4e37a8 | [
"BSD-3-Clause"
] | 638 | 2015-01-02T10:49:05.000Z | 2022-03-29T10:16:22.000Z | colour/appearance/tests/test_nayatani95.py | colour-science/colour | 6d9b1b8b9e96b5a3c3e3b64d9954be808e4e37a8 | [
"BSD-3-Clause"
] | 250 | 2015-01-21T15:27:19.000Z | 2022-03-30T10:23:58.000Z | # !/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Defines the unit tests for the :mod:`colour.appearance.nayatani95` module.
"""
import numpy as np
from itertools import permutations
from colour.appearance import XYZ_to_Nayatani95
from colour.appearance.tests.common import AbstractColourAppearanceModelTest
from colour.utilities import (as_float_array, domain_range_scale,
ignore_numpy_errors, tstack)
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2021 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = 'colour-developers@colour-science.org'
__status__ = 'Production'
__all__ = ['TestNayatani95ColourAppearanceModel']
class TestNayatani95ColourAppearanceModel(AbstractColourAppearanceModelTest):
    """
    Defines :mod:`colour.appearance.nayatani95` module unit tests methods for
    *Nayatani (1995)* colour appearance model.
    """

    FIXTURE_BASENAME = 'nayatani95.csv'

    # Maps the fixture column names to the specification attribute names.
    OUTPUT_ATTRIBUTES = {
        'L_star_P': 'L_star_P',
        'C': 'C',
        'theta': 'h',
        'S': 's',
        'B_r': 'Q',
        'M': 'M',
        'L_star_N': 'L_star_N'
    }

    def output_specification_from_data(self, data):
        """
        Returns the *Nayatani (1995)* colour appearance model output
        specification from given data.

        Parameters
        ----------
        data : list
            Fixture data.

        Returns
        -------
        CAM_Specification_Nayatani95
            *Nayatani (1995)* colour appearance model specification.
        """
        XYZ = tstack([data['X'], data['Y'], data['Z']])
        XYZ_n = tstack([data['X_n'], data['Y_n'], data['Z_n']])
        specification = XYZ_to_Nayatani95(XYZ, XYZ_n, data['Y_o'], data['E_o'],
                                          data['E_or'])
        return specification

    def test_domain_range_scale_XYZ_to_Nayatani95(self):
        """
        Tests :func:`colour.appearance.nayatani95.XYZ_to_Nayatani95` definition
        domain and range scale support.
        """
        XYZ = np.array([19.01, 20.00, 21.78])
        XYZ_n = np.array([95.05, 100.00, 108.88])
        Y_o = 20.0
        E_o = 5000.0
        E_or = 1000.0
        specification = XYZ_to_Nayatani95(XYZ, XYZ_n, Y_o, E_o, E_or)

        # (scale name, input factor, expected output factor); hue stays in
        # degrees, hence the 1 / 360 column under scaled ranges.
        d_r = (
            ('reference', 1, 1),
            (1, 0.01, np.array([1, 1, 1 / 360, 1, 1, 1, np.nan, np.nan, 1])),
            (100, 1, np.array([1, 1, 100 / 360, 1, 1, 1, np.nan, np.nan, 1])),
        )
        for scale, factor_a, factor_b in d_r:
            with domain_range_scale(scale):
                np.testing.assert_almost_equal(
                    XYZ_to_Nayatani95(XYZ * factor_a, XYZ_n * factor_a, Y_o,
                                      E_o, E_or),
                    as_float_array(specification) * factor_b,
                    decimal=7)

    @ignore_numpy_errors
    def test_nan_XYZ_to_Nayatani95(self):
        """
        Tests :func:`colour.appearance.nayatani95.XYZ_to_Nayatani95` definition
        nan support.
        """
        cases = [-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan]
        cases = set(permutations(cases * 3, r=3))
        for case in cases:
            XYZ = np.array(case)
            XYZ_n = np.array(case)
            Y_o = case[0]
            E_o = case[0]
            E_or = case[0]
            # Must not raise for degenerate inputs.
            XYZ_to_Nayatani95(XYZ, XYZ_n, Y_o, E_o, E_or)
0ab38941ef3a643d20313a43b2c6c719ac984551 | 1,242 | py | Python | etc/nasa_apod.py | RedChen/stackstorm-tutorial | 583fa44e869b9e50460b8f50da3b597aff333a12 | [
"Apache-2.0"
] | null | null | null | etc/nasa_apod.py | RedChen/stackstorm-tutorial | 583fa44e869b9e50460b8f50da3b597aff333a12 | [
"Apache-2.0"
] | null | null | null | etc/nasa_apod.py | RedChen/stackstorm-tutorial | 583fa44e869b9e50460b8f50da3b597aff333a12 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
#
# Description:
# Queries NASA's APOD (Astronomy Picture Of the Day) API to get the link to the picture
# of the day.
#
import argparse
import json
import requests
API_URL = "https://api.nasa.gov/planetary/apod"
DEMO_API_KEY = "DEMO_KEY"
def parse_args(argv=None):
    """Parse command-line options for the APOD fetcher.

    Parameters
    ----------
    argv : list of str, optional
        Argument vector to parse. ``None`` (the default) keeps the original
        behaviour of reading ``sys.argv[1:]``, while allowing callers and
        tests to supply an explicit vector.

    Returns
    -------
    argparse.Namespace
        Parsed options: ``date``, ``hd`` and ``api_key``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--date',
                        help='The date [YYYY-MM-DD] of the APOD image to retrieve.')
    parser.add_argument('--hd',
                        help='Retrieve the high resolution image.',
                        action='store_true')
    parser.add_argument('-a', '--api-key',
                        help='API key to use for api.nasa.gov.',
                        default=DEMO_API_KEY)
    return parser.parse_args(argv)
def get_apod_metadata(args):
    """Query NASA's APOD API and return the decoded JSON metadata.

    When ``args.hd`` is set, the ``url`` entry is overwritten with the
    high-resolution ``hdurl`` so callers can always read ``metadata['url']``.
    Raises ``requests.HTTPError`` on a non-success status code.
    """
    query = {'api_key': args.api_key, 'hd': args.hd}
    if args.date is not None:
        query['date'] = args.date
    resp = requests.get(API_URL, params=query)
    resp.raise_for_status()
    payload = resp.json()
    if args.hd:
        payload['url'] = payload['hdurl']
    return payload
if __name__ == "__main__":
    # Script entry point: parse CLI options, fetch the APOD metadata and
    # print the (possibly high-resolution) image URL on stdout.
    args = parse_args()
    metadata = get_apod_metadata(args)
    print(metadata['url'])
| 28.227273 | 89 | 0.602254 |
8dfe4f792f908062a83f2f3cc3a8906ecf26932c | 10,804 | py | Python | models/pix2pix_tm_in2_model.py | tkuri/pytorch-CycleGAN-and-pix2pix | b00b3f0bcebfb12d3f026c2a61c98ff63175a583 | [
"BSD-3-Clause"
] | null | null | null | models/pix2pix_tm_in2_model.py | tkuri/pytorch-CycleGAN-and-pix2pix | b00b3f0bcebfb12d3f026c2a61c98ff63175a583 | [
"BSD-3-Clause"
] | null | null | null | models/pix2pix_tm_in2_model.py | tkuri/pytorch-CycleGAN-and-pix2pix | b00b3f0bcebfb12d3f026c2a61c98ff63175a583 | [
"BSD-3-Clause"
] | null | null | null | import torch
import torch.nn as nn
from .base_model import BaseModel
from . import networks
from torch.nn import functional as F
class Pix2PixTmIn2Model(BaseModel):
    """ This class implements the pix2pix model, for learning a mapping from input images to output images given paired data.
    The model training requires '--dataset_mode aligned' dataset.
    By default, it uses a '--netG unet256' U-Net generator,
    a '--netD basic' discriminator (PatchGAN),
    and a '--gan_mode' vanilla GAN loss (the cross-entropy objective used in the orignal GAN paper).
    pix2pix paper: https://arxiv.org/pdf/1611.07004.pdf

    Variant: instead of emitting an image directly, the generator predicts a
    per-pixel transport matrix (light_res**2 coefficients per output channel)
    which is multiplied with a downsampled light map (input C) to relight the
    scene — see forward().
    """
    @staticmethod
    def modify_commandline_options(parser, is_train=True):
        """Add new dataset-specific options, and rewrite default values for existing options.
        Parameters:
            parser -- original option parser
            is_train (bool) -- whether training phase or test phase. You can use this flag to add training-specific or test-specific options.
        Returns:
            the modified parser.
        For pix2pix, we do not use image buffer
        The training objective is: GAN Loss + lambda_L1 * ||G(A)-B||_1
        By default, we use vanilla GAN loss, UNet with batchnorm, and aligned datasets.
        """
        # changing the default values to match the pix2pix paper (https://phillipi.github.io/pix2pix/)
        parser.set_defaults(norm='batch', netG='unet_256', dataset_mode='aligned3')
        if is_train:
            parser.set_defaults(pool_size=0, gan_mode='vanilla')
            parser.add_argument('--lambda_L1', type=float, default=100.0, help='weight for L1 loss')
        return parser
    def __init__(self, opt):
        """Initialize the pix2pix class.
        Parameters:
            opt (Option class)-- stores all the experiment flags; needs to be a subclass of BaseOptions
        """
        BaseModel.__init__(self, opt)
        # specify the training losses you want to print out. The training/test scripts will call <BaseModel.get_current_losses>
        self.loss_names = ['G_GAN', 'G_L1', 'D_real', 'D_fake']
        # specify the images you want to save/display. The training/test scripts will call <BaseModel.get_current_visuals>
        # self.visual_names = ['real_A', 'fake_B', 'real_B']
        self.visual_names = ['real_A', 'fake_B', 'real_B', 'real_C', 'real_C_itp2']
        # self.visual_names = ['real_A', 'fake_B', 'real_B', 'real_C']
        # specify the models you want to save to the disk. The training/test scripts will call <BaseModel.save_networks> and <BaseModel.load_networks>
        if self.isTrain:
            self.model_names = ['G', 'D']
        else:  # during test time, only load G
            self.model_names = ['G']
        # define networks (both generator and discriminator)
        self.output_nc = opt.output_nc
        # light_res is the side length of the downsampled light map; the
        # generator outputs light_res**2 coefficients per output channel.
        self.light_res = opt.light_res
        print('opt.output_nc', opt.output_nc)
        print('light_res', self.light_res)
        # self.netG = networks.define_G(opt.input_nc, opt.output_nc, opt.ngf, opt.netG, opt.norm,
        #                               not opt.no_dropout, opt.init_type, opt.init_gain, self.gpu_ids)
        self.netG = networks.define_G(opt.input_nc + opt.input2_nc, (self.light_res**2)*opt.output_nc, opt.ngf, 'unet_256_lastrelu', opt.norm,
                                      not opt.no_dropout, opt.init_type, opt.init_gain, self.gpu_ids)
        if self.isTrain:  # define a discriminator; conditional GANs need to take both input and output images; Therefore, #channels for D is input_nc + output_nc
            # self.netD = networks.define_D(opt.input_nc + opt.output_nc, opt.ndf, opt.netD,
            #                               opt.n_layers_D, opt.norm, opt.init_type, opt.init_gain, self.gpu_ids)
            self.netD = networks.define_D(opt.input_nc + opt.input2_nc + opt.output_nc, opt.ndf, opt.netD,
                                          opt.n_layers_D, opt.norm, opt.init_type, opt.init_gain, self.gpu_ids)
        if self.isTrain:
            # define loss functions
            self.criterionGAN = networks.GANLoss(opt.gan_mode).to(self.device)
            self.criterionL1 = torch.nn.L1Loss()
            # initialize optimizers; schedulers will be automatically created by function <BaseModel.setup>.
            self.optimizer_G = torch.optim.Adam(self.netG.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
            self.optimizer_D = torch.optim.Adam(self.netD.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999))
            self.optimizers.append(self.optimizer_G)
            self.optimizers.append(self.optimizer_D)
        # Learnable scalar applied to the light map in forward_old() only;
        # forward() does not use it.
        self.light_gain = nn.Parameter(torch.tensor(1.0), requires_grad=True)
        self.tanH = nn.Tanh()
    def set_input(self, input):
        """Unpack input data from the dataloader and perform necessary pre-processing steps.
        Parameters:
            input (dict): include the data itself and its metadata information.
        The option 'direction' can be used to swap images in domain A and domain B.
        """
        AtoB = self.opt.direction == 'AtoB'
        self.real_A = input['A' if AtoB else 'B'].to(self.device)
        self.real_B = input['B' if AtoB else 'A'].to(self.device)
        # C is the light map conditioning image.
        self.real_C = input['C'].to(self.device)
        # Downsample C to light_res x light_res, then flatten per batch item.
        self.real_C_itp = F.interpolate(self.real_C, (self.light_res, self.light_res), mode='bicubic', align_corners=False)
        self.real_C_itp_flat = self.real_C_itp.view(-1, self.light_res**2, 1)  # [1, 16, 1]
        # Nearest-neighbour upsample of the coarse light map, rescaled to
        # [-1, 1] for visualization (listed in visual_names).
        self.real_C_itp2 = torch.clamp((F.interpolate(self.real_C_itp, (self.real_C.size(-2), self.real_C.size(-1)), mode='nearest')-0.5)/0.5, min=-1.0, max=1.0)
        # Generator/discriminator conditioning: A concatenated with C.
        self.real_AC = torch.cat([self.real_A, self.real_C], dim=1)
        self.image_paths = input['A_paths' if AtoB else 'B_paths']
    def forward_old(self):
        """Run forward pass; called by both functions <optimize_parameters> and <test>.

        Older variant kept for reference: identical to forward() except the
        light map is multiplied by the learnable self.light_gain.
        """
        trans_matrix = self.netG(self.real_AC)  # [1, 3*16, 256, 256]
        # trans_matrix = self.netG(self.real_A)
        trans_matrix = trans_matrix.view(-1, self.output_nc*self.light_res**2, (trans_matrix.size(-1)*trans_matrix.size(-2)))  # [1, 3*16, 256x256]
        # trans_matrix = torch.transpose(trans_matrix, 1, 2) * 0.5 + 0.5 # [1, 256x256, 3*16]
        trans_matrix = torch.transpose(trans_matrix, 1, 2)  # [1, 256x256, 3*16]
        # Split the per-pixel transport coefficients into R/G/B groups.
        tmR = trans_matrix[:, :, 0:self.light_res**2]  # [1, 256x256, 16]
        tmG = trans_matrix[:, :, self.light_res**2:(self.light_res**2)*2]
        tmB = trans_matrix[:, :, (self.light_res**2)*2:(self.light_res**2)*3]
        # Relight: transport matrix x (gained) flattened light map.
        bufR = torch.matmul(tmR, self.real_C_itp_flat * self.light_gain)  # [1, 256x256, 1]
        bufG = torch.matmul(tmG, self.real_C_itp_flat * self.light_gain)
        bufB = torch.matmul(tmB, self.real_C_itp_flat * self.light_gain)
        buf = torch.cat([bufR, bufG, bufB], dim=2)  # [1, 256x256, 3]
        buf = torch.transpose(buf, 1, 2)  # [1, 3, 256x256]
        # Rescale [0, 1] -> [-1, 1] and clamp to the image value range.
        buf = (buf - 0.5) / 0.5
        buf = torch.clamp(buf, min=-1.0, max=1.0)
        self.fake_B = buf.view(self.real_B.size())  # [1, 3, 256, 256]
    def forward(self):
        """Run forward pass; called by both functions <optimize_parameters> and <test>."""
        trans_matrix = self.netG(self.real_AC)  # [1, 3*16, 256, 256]
        # trans_matrix = self.netG(self.real_A)
        trans_matrix = trans_matrix.view(-1, self.output_nc*self.light_res**2, (trans_matrix.size(-1)*trans_matrix.size(-2)))  # [1, 3*16, 256x256]
        # trans_matrix = torch.transpose(trans_matrix, 1, 2) * 0.5 + 0.5 # [1, 256x256, 3*16]
        trans_matrix = torch.transpose(trans_matrix, 1, 2)
        # Split the per-pixel transport coefficients into R/G/B groups.
        tmR = trans_matrix[:, :, 0:self.light_res**2]  # [1, 256x256, 16]
        tmG = trans_matrix[:, :, self.light_res**2:(self.light_res**2)*2]
        tmB = trans_matrix[:, :, (self.light_res**2)*2:(self.light_res**2)*3]
        # Relight each channel: transport matrix x flattened light map.
        bufR = torch.matmul(tmR, self.real_C_itp_flat)  # [1, 256x256, 1]
        bufG = torch.matmul(tmG, self.real_C_itp_flat)
        bufB = torch.matmul(tmB, self.real_C_itp_flat)
        buf = torch.cat([bufR, bufG, bufB], dim=2)  # [1, 256x256, 3]
        buf = torch.transpose(buf, 1, 2)  # [1, 3, 256x256]
        # Rescale [0, 1] -> [-1, 1] and clamp to the image value range.
        buf = (buf - 0.5) / 0.5
        buf = torch.clamp(buf, min=-1.0, max=1.0)
        # buf = self.tanH(buf)
        self.fake_B = buf.view(self.real_B.size())  # [1, 3, 256, 256]
    def backward_D(self):
        """Calculate GAN loss for the discriminator"""
        # Fake; stop backprop to the generator by detaching fake_B
        # fake_AB = torch.cat((self.real_A, self.fake_B), 1) # we use conditional GANs; we need to feed both input and output to the discriminator
        # pred_fake = self.netD(fake_AB.detach())
        fake_ACB = torch.cat((self.real_AC, self.fake_B), 1)  # we use conditional GANs; we need to feed both input and output to the discriminator
        pred_fake = self.netD(fake_ACB.detach())
        self.loss_D_fake = self.criterionGAN(pred_fake, False)
        # Real
        # real_AB = torch.cat((self.real_A, self.real_B), 1)
        # pred_real = self.netD(real_AB)
        real_ACB = torch.cat((self.real_AC, self.real_B), 1)
        pred_real = self.netD(real_ACB)
        self.loss_D_real = self.criterionGAN(pred_real, True)
        # combine loss and calculate gradients
        self.loss_D = (self.loss_D_fake + self.loss_D_real) * 0.5
        self.loss_D.backward()
    def backward_G(self):
        """Calculate GAN and L1 loss for the generator"""
        # First, G(A) should fake the discriminator
        # fake_AB = torch.cat((self.real_A, self.fake_B), 1)
        # pred_fake = self.netD(fake_AB)
        fake_ACB = torch.cat((self.real_AC, self.fake_B), 1)
        pred_fake = self.netD(fake_ACB)
        self.loss_G_GAN = self.criterionGAN(pred_fake, True)
        # Second, G(A) = B
        self.loss_G_L1 = self.criterionL1(self.fake_B, self.real_B) * self.opt.lambda_L1
        # combine loss and calculate gradients
        self.loss_G = self.loss_G_GAN + self.loss_G_L1
        self.loss_G.backward()
    def optimize_parameters(self):
        # One training step: D first (with G's output detached), then G.
        self.forward()                   # compute fake images: G(A)
        # update D
        self.set_requires_grad(self.netD, True)  # enable backprop for D
        self.optimizer_D.zero_grad()     # set D's gradients to zero
        self.backward_D()                # calculate gradients for D
        self.optimizer_D.step()          # update D's weights
        # update G
        self.set_requires_grad(self.netD, False)  # D requires no gradients when optimizing G
        self.optimizer_G.zero_grad()        # set G's gradients to zero
        self.backward_G()                   # calculate gradients for G
        self.optimizer_G.step()             # update G's weights
| 55.979275 | 162 | 0.632729 |
9e10ab3aed5082a7ba30ec4406ef80cbc5ee175a | 7,112 | py | Python | Mind_Game_Parra/mind_noavoid_choices/pages.py | danielfParra/Lying_Online_Parra2022 | 10e8ef6248f684f63e9dea1314ef57f197e48773 | [
"CC0-1.0"
] | null | null | null | Mind_Game_Parra/mind_noavoid_choices/pages.py | danielfParra/Lying_Online_Parra2022 | 10e8ef6248f684f63e9dea1314ef57f197e48773 | [
"CC0-1.0"
] | null | null | null | Mind_Game_Parra/mind_noavoid_choices/pages.py | danielfParra/Lying_Online_Parra2022 | 10e8ef6248f684f63e9dea1314ef57f197e48773 | [
"CC0-1.0"
] | null | null | null | from otree.api import Currency as c, currency_range
from ._builtin import Page, WaitPage
from .models import Constants
class Matching(WaitPage):
    # Form groups as participants arrive rather than waiting for a fixed set.
    group_by_arrival_time = True
class pick_colorA(Page):
    """Card-colour selection page, shown only to Player 1."""

    def is_displayed(self):
        # Only the first player in the group sees this page.
        return self.player.id_in_group == 1

    def get_timeout_seconds(self):
        # Flagged dropouts time out instantly; everyone else gets 5 minutes.
        return 1 if self.participant.vars.get('is_dropout') else 5 * 60

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['is_dropout'] = True
        print(self.player.waiting_too_long())
class R1_draw(Page):
    """Player 1 performs the first random draw (group field ``randomDraw1``)."""

    form_model = 'group'

    def is_displayed(self):
        return self.player.id_in_group == 1

    def get_form_fields(self):
        return ['randomDraw1']

    def get_timeout_seconds(self):
        # Flagged dropouts time out instantly; everyone else gets 5 minutes.
        return 1 if self.participant.vars.get('is_dropout') else 5 * 60

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['is_dropout'] = True
        # A dropout is treated as a bot for the rest of the session.
        self.player.not_a_bot = 0 if self.participant.vars.get('is_dropout') else 1
class R1_Choice(Page):
    """Player 1 reports the outcome of the first draw (group field ``report1``)."""

    form_model = 'group'
    form_fields = ['report1']

    def is_displayed(self):
        return self.player.id_in_group == 1

    def get_timeout_seconds(self):
        # Flagged dropouts time out instantly; everyone else gets 5 minutes.
        return 1 if self.participant.vars.get('is_dropout') else 5 * 60

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['is_dropout'] = True
            # Timed-out players count as not having reported a match.
            self.group.report1 = False
        self.player.not_a_bot = 0 if self.participant.vars.get('is_dropout') else 1
class WaitForP1(WaitPage):
pass
class Inst_guess(Page):
    """Instructions for Player 1's guessing task; records how often the
    info button was clicked (player field ``times_clicked_info``)."""

    form_model = 'player'
    form_fields = ['times_clicked_info']

    def is_displayed(self):
        return self.player.id_in_group == 1

    def get_timeout_seconds(self):
        # Flagged dropouts time out instantly; everyone else gets 6 minutes.
        return 1 if self.participant.vars.get('is_dropout') else 6 * 60

    def before_next_page(self):
        import random
        # Draw the comparison numbers in the same order as before.
        self.player.robot = random.randint(1, 100)
        self.player.random_num = random.randint(1, 100)
        self.player.not_a_bot = 0 if self.participant.vars.get('is_dropout') else 1
class guess(Page):
    """Player 1 guesses the partner's behaviour and states their confidence."""

    form_model = 'player'

    def is_displayed(self):
        return self.player.id_in_group == 1

    def get_form_fields(self):
        return ['guess_p1', 'confidence_guess']

    def get_timeout_seconds(self):
        # Flagged dropouts time out instantly; everyone else gets 5 minutes.
        return 1 if self.participant.vars.get('is_dropout') else 5 * 60

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['is_dropout'] = True
            # Fill the form with defaults so downstream code has values.
            self.player.guess_p1 = 1
            self.player.confidence_guess = 1
        self.player.not_a_bot = 0 if self.participant.vars.get('is_dropout') else 1
        self.player.players_in_group = len(self.player.get_others_in_group()) + 1
class pick_colorB(Page):
    """Player 2 selects a card (group field ``selected_card_p2``)."""

    form_model = 'group'

    def is_displayed(self):
        return self.player.id_in_group == 2

    def get_form_fields(self):
        return ['selected_card_p2']

    def get_timeout_seconds(self):
        # Flagged dropouts time out instantly; everyone else gets 5 minutes.
        return 1 if self.participant.vars.get('is_dropout') else 5 * 60

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['is_dropout'] = True
class R2_draw(Page):
    """Player 2 performs the second random draw (group field ``randomDraw2``)."""

    form_model = 'group'

    def is_displayed(self):
        return self.player.id_in_group == 2

    def get_form_fields(self):
        return ['randomDraw2']

    def get_timeout_seconds(self):
        # Flagged dropouts time out instantly; everyone else gets 5 minutes.
        return 1 if self.participant.vars.get('is_dropout') else 5 * 60

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['is_dropout'] = True
            self.group.randomDraw2 = 0
        # report2 is simply whether the draw matched the selected card.
        self.group.report2 = self.group.randomDraw2 == self.group.selected_card_p2
        self.player.not_a_bot = 0 if self.participant.vars.get('is_dropout') else 1
class R2_Choice(Page):
    """Outcome page for Player 2's draw; no form input is collected here."""

    def is_displayed(self):
        return self.player.id_in_group == 2

    def get_timeout_seconds(self):
        # Flagged dropouts time out instantly; everyone else gets 5 minutes.
        return 1 if self.participant.vars.get('is_dropout') else 5 * 60

    def before_next_page(self):
        if self.timeout_happened:
            self.participant.vars['is_dropout'] = True
        self.player.not_a_bot = 0 if self.participant.vars.get('is_dropout') else 1
class ResultsWaitPage(WaitPage):
    # Once every player has arrived, compute the round payoffs.
    after_all_players_arrive = 'set_payoffs'
class Results(Page):
    # Final page of this app: shows outcomes and copies the session state
    # into participant.vars so the follow-up "survey" app can read it.
    def app_after_this_page(self, upcoming_apps):
        print('upcoming_apps is', upcoming_apps)
        # Always continue with the survey app next.
        return "survey"
    def get_timeout_seconds(self):
        if self.participant.vars.get('is_dropout'):
            return 1 # instant timeout, 1 second
        else:
            return 5 * 60
    def before_next_page(self):
        self.player.players_in_group = len(self.player.get_others_in_group()) + 1
        # Record bot flags; player 2's flag only exists in two-player groups.
        if self.player.players_in_group==2:
            self.group.p1_is_a_human = self.group.get_player_by_id(1).not_a_bot
            self.group.p2_is_a_human = self.group.get_player_by_id(2).not_a_bot
        else:
            self.group.p1_is_a_human = self.group.get_player_by_id(1).not_a_bot
        # Hand all round results to the next app via participant.vars.
        # NOTE(review): get_report1_display/get_report2_display are stored
        # without parentheses, i.e. the bound methods rather than their
        # values — confirm this is intentional.
        self.participant.vars['report1'] = self.group.get_report1_display
        self.participant.vars['report2'] = self.group.get_report2_display
        self.participant.vars['randomDraw1'] = self.group.randomDraw1
        self.participant.vars['randomDraw2'] = self.group.randomDraw2
        self.participant.vars['guess_payoff'] = self.player.guess_payoff
        self.participant.vars['payoff'] = self.player.payoff
        self.participant.vars['player_id'] = self.player.id_in_group
        self.participant.vars['not_a_bot'] = self.player.not_a_bot
        self.participant.vars['players_in_group'] = self.player.players_in_group
# Order in which the pages above are shown to participants.
page_sequence = [
    Matching,
    pick_colorA,
    R1_draw,
    R1_Choice,
    WaitForP1,
    Inst_guess,
    guess,
    pick_colorB,
    R2_draw,
    R2_Choice,
    ResultsWaitPage,
    Results,
]
| 29.510373 | 82 | 0.608268 |
b78a326c61ef2d353329ed06a701123398cdd698 | 2,067 | py | Python | src/upparat/cli.py | caruhome/upparat | c55990908148478bc0b783671f5bc5f86b038383 | [
"MIT"
] | 4 | 2020-11-06T17:01:47.000Z | 2021-06-03T13:34:25.000Z | src/upparat/cli.py | caruhome/upparat | c55990908148478bc0b783671f5bc5f86b038383 | [
"MIT"
] | null | null | null | src/upparat/cli.py | caruhome/upparat | c55990908148478bc0b783671f5bc5f86b038383 | [
"MIT"
] | 1 | 2021-01-12T14:03:42.000Z | 2021-01-12T14:03:42.000Z | import logging
import signal
import ssl
from pathlib import Path
from queue import Queue
from pysm import Event
from upparat import config
from upparat.config import settings
from upparat.events import EXIT_SIGNAL_SENT
from upparat.mqtt import MQTT
from upparat.statemachine.machine import create_statemachine
BASE = Path(__file__).parent
logger = logging.getLogger(__name__)
def cli(inbox=None):
    """Run the Upparat daemon: connect to the MQTT broker and dispatch
    incoming events to the state machine until the process is signalled.

    Parameters:
        inbox: optional queue.Queue of events; a fresh queue is created when
        not supplied (mainly a hook for tests).
    """
    if not inbox:
        inbox = Queue()
    # Optional error reporting, enabled via configuration.
    if settings.service.sentry:
        import sentry_sdk
        sentry_sdk.init(settings.service.sentry)
    # Graceful shutdown: SIGINT/SIGTERM enqueue an exit event instead of
    # killing the process mid-dispatch.
    def _exit(_, __):
        inbox.put(Event(EXIT_SIGNAL_SENT))
    signal.signal(signal.SIGINT, _exit)
    signal.signal(signal.SIGTERM, _exit)
    client = MQTT(client_id=settings.broker.client_id, queue=inbox)
    cafile = settings.broker.cafile
    certfile = settings.broker.certfile
    keyfile = settings.broker.keyfile
    host = settings.broker.host
    port = settings.broker.port
    # for client certificate authentication use the TLS
    # APLN extension which requires 443 or 8883.
    if cafile or certfile or keyfile:
        try:
            if port not in [443, 8883]:
                raise Exception(
                    "Port must be 443/8883 for TLS APLN client certificate authentication."  # noqa
                )
            ssl_context = ssl.create_default_context()
            # AWS IoT requires the x-amzn-mqtt-ca ALPN protocol for MQTT
            # with client certificates on port 443.
            ssl_context.set_alpn_protocols(["x-amzn-mqtt-ca"])
            ssl_context.load_verify_locations(cafile=cafile)
            ssl_context.load_cert_chain(certfile=certfile, keyfile=keyfile)
            client.tls_set_context(context=ssl_context)
        except Exception as e:
            logger.exception("Error in TLS ALPN extension setup.")
            raise e
    client.run(host, port)
    state_machine = create_statemachine(inbox, client)
    # Main loop: block on the inbox and feed every event to the state machine.
    while True:
        event = inbox.get()
        logger.debug(f"---> Event in inbox {event}")
        state_machine.dispatch(event)
def main():
    # Console-script entry point: let the config module read sys.argv,
    # then start the daemon loop.
    config.USE_SYS_ARGV = True
    cli()
if __name__ == "__main__":
    main()
| 26.164557 | 99 | 0.676343 |
66e079201e559c4f3969fe78ddaf8ac1cda98daa | 2,247 | py | Python | platform/rss.py | he0119/nonebot-hk-reporter | 27a3c97a7ab79f296eb807c71de415f47a71be0c | [
"MIT"
] | null | null | null | platform/rss.py | he0119/nonebot-hk-reporter | 27a3c97a7ab79f296eb807c71de415f47a71be0c | [
"MIT"
] | null | null | null | platform/rss.py | he0119/nonebot-hk-reporter | 27a3c97a7ab79f296eb807c71de415f47a71be0c | [
"MIT"
] | null | null | null | import calendar
from typing import Any, Optional
import feedparser
import httpx
from bs4 import BeautifulSoup as bs
from ..post import Post
from ..types import RawPost, Target
from .platform import NewMessage, TargetMixin
class RSS(NewMessage, TargetMixin):
    """Generic RSS platform adapter: every new feed entry becomes a post.

    The subscription target is the feed URL itself.
    """

    categories = {}
    enable_tag = False
    platform_name = "rss"
    name = "RSS"
    enabled = True
    is_common = True
    schedule_type = "interval"
    schedule_kw = {"seconds": 30}

    # Browser-like UA shared by all requests; some feeds reject the default
    # httpx client string. Hoisted here so both fetch paths stay identical.
    _HEADERS = {
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.159 Safari/537.36 Edg/92.0.902.78"
    }

    async def get_target_name(self, target: Target) -> Optional[str]:
        """Fetch the feed and return its title (used as the display name)."""
        async with httpx.AsyncClient() as client:
            res = await client.get(target, timeout=10.0, headers=self._HEADERS)
            feed = feedparser.parse(res.text)
            return feed["feed"]["title"]

    def get_date(self, post: RawPost) -> int:
        # Entries expose a UTC struct_time; convert it to a Unix timestamp.
        return calendar.timegm(post.published_parsed)

    def get_id(self, post: RawPost) -> Any:
        return post.id

    async def get_sub_list(self, target: Target) -> list[RawPost]:
        """Fetch the feed and return its entries, each tagged with the
        feed title under the private ``_target_name`` key for parse()."""
        async with httpx.AsyncClient() as client:
            res = await client.get(target, timeout=10.0, headers=self._HEADERS)
            # Bug fix: parse the response body, not the Response object
            # itself, matching get_target_name and avoiding reliance on the
            # Response happening to look file-like to feedparser.
            feed = feedparser.parse(res.text)
            entries = feed.entries
            for entry in entries:
                entry["_target_name"] = feed.feed.title
            return entries

    async def parse(self, raw_post: RawPost) -> Post:
        """Convert a feed entry into a Post: title, stripped HTML text and
        any inline image URLs."""
        text = raw_post.get("title", "") + "\n" if raw_post.get("title") else ""
        soup = bs(raw_post.description, "html.parser")
        text += soup.text.strip()
        pics = list(map(lambda x: x.attrs["src"], soup("img")))
        return Post(
            "rss",
            text=text,
            url=raw_post.link,
            pics=pics,
            target_name=raw_post["_target_name"],
        )
| 32.565217 | 167 | 0.568313 |
c55842abb8e43c0b16d2df1467b54f79090772fa | 912 | py | Python | scripts/quest/q21016s.py | G00dBye/YYMS | 1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb | [
"MIT"
] | 54 | 2019-04-16T23:24:48.000Z | 2021-12-18T11:41:50.000Z | scripts/quest/q21016s.py | G00dBye/YYMS | 1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb | [
"MIT"
] | 3 | 2019-05-19T15:19:41.000Z | 2020-04-27T16:29:16.000Z | scripts/quest/q21016s.py | G00dBye/YYMS | 1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb | [
"MIT"
] | 49 | 2020-11-25T23:29:16.000Z | 2022-03-26T16:20:24.000Z | # Created by MechAviv
# Quest ID :: 21016
# Basic Fitness Training 2
# NPC 1201000 offers to continue Basic Fitness Training (quest 21016).
sm.setSpeakerID(1201000)
if sm.sendAskAccept("Shall we continue with your Basic Training? Before accepting, please make sure you have properly equipped your sword and your skills and potions are readily accessible."):
    # Accepted: start the quest and send the player off to hunt.
    sm.startQuest(21016)
    sm.setSpeakerID(1201000)
    sm.removeEscapeButton()
    sm.sendNext("Alright. This time, let's have you defeat #r#o0100132#s#k, which are slightly more powerful than #o0100131#s. Head over to #b#m140020100##k and defeat #r15#k of them. That should help you build your strength. Alright! Let's do this!")
    # [START_NAVIGATION] [84 89 58 08 00 00 00 00 00 00 ]
else:
    # Declined: advise the player to come back once prepared.
    sm.setSpeakerID(1201000)
sm.sendNext("Are you not ready to hunt the #o0100132#s yet? Always proceed if and only if you are fully ready. There's nothing worse than engaging in battles without sufficient preparation.") | 57 | 251 | 0.751096 |
c4306e10af7fa77fff0786f77b6a0ff96ab08426 | 1,242 | py | Python | web/main/admin.py | arturgafizov/webtron_social_network | 38dc166ea8d099ca3a0967f378d751f758eae649 | [
"MIT"
] | null | null | null | web/main/admin.py | arturgafizov/webtron_social_network | 38dc166ea8d099ca3a0967f378d751f758eae649 | [
"MIT"
] | null | null | null | web/main/admin.py | arturgafizov/webtron_social_network | 38dc166ea8d099ca3a0967f378d751f758eae649 | [
"MIT"
] | null | null | null | from django.contrib import admin
from django.conf import settings
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import Group
from django.utils.translation import gettext_lazy as _
from django.contrib.auth import get_user_model
User = get_user_model()
@admin.register(User)
class CustomUserAdmin(UserAdmin):
    # Admin configuration for the project's custom user model.
    # Newest users first in the changelist.
    ordering = ('-id',)
    list_display = ('email', 'full_name', 'is_active', 'gender')
    search_fields = ('first_name', 'last_name', 'email', )
    # Change-form layout; 'id' is shown read-only (see readonly_fields).
    fieldsets = (
        (_('Personal info'), {'fields': ('id', 'first_name', 'last_name', 'email', 'gender')}),
        (_('Secrets'), {'fields': ('password',)}),
        (_('Permissions'), {
            'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions'),
        }),
        (_('Important dates'), {'fields': ('last_login', 'date_joined')}),
    )
    # Add-form layout: email plus the two password fields only.
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('email', 'password1', 'password2'),
        }),
    )
    readonly_fields = ('id',)
# Brand the admin site with the microservice title from settings.
title = settings.MICROSERVICE_TITLE
admin.site.site_title = title
admin.site.site_header = title
admin.site.site_url = '/'
admin.site.index_title = title
# Groups are managed with the default registration otherwise; hide them here.
admin.site.unregister(Group)
| 29.571429 | 95 | 0.634461 |
b10a98372110a4e58afb3e64552d157f78ef8fd6 | 5,352 | py | Python | cnn.py | dgawlik/digit-recognizer | 9f3a4df61b67a8cbb547fec7a0b8d97477dc0817 | [
"MIT"
] | null | null | null | cnn.py | dgawlik/digit-recognizer | 9f3a4df61b67a8cbb547fec7a0b8d97477dc0817 | [
"MIT"
] | null | null | null | cnn.py | dgawlik/digit-recognizer | 9f3a4df61b67a8cbb547fec7a0b8d97477dc0817 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# CNN for MNIST Classification
#
# Standard MNIST: 99.22%
# Kaggle Digit Recognizer: 99.04%
import tensorflow as tf
import numpy as np
import pandas as pd
def next_batch(data, labels, batch_size):
    """Endlessly yield successive ``(data, labels)`` slices of ``batch_size``.

    The cursor wraps back to the start once it walks past the end, so the
    final slice of each pass may be shorter than ``batch_size``.
    """
    total = len(data)
    start = 0
    while True:
        stop = start + batch_size
        yield (data[start:stop], labels[start:stop])
        start = stop
        if start >= total:
            start = 0
# CSV datasets: one row per image, pixel columns plus a 'label' column.
mnist = pd.read_csv('mnist.csv', sep=',')
mnist_test = pd.read_csv('test.csv', sep=',')
itrain = mnist.drop(['label'], axis=1).values.astype(np.float32)
ltrain = mnist['label'].values.astype(np.uint8)
# itest = mnist_test.values
# Hold out everything after the first 60000 rows as the evaluation split.
itest = itrain[60000:]
ltest = ltrain[60000:]
itrain = itrain[:60000]
ltrain = ltrain[:60000]
# Scale pixel values from [0, 255] to [0, 1].
itrain = itrain/255.0
itest = itest/255.0
BATCH_SIZE = 100
# Feed flag: True during training so normalize() uses batch statistics.
is_train = tf.placeholder(tf.bool)
def he(shape):
    """Uniform-init bound ``sqrt(6 / fan_in)`` for a weight tensor shape.

    For a 2-D (dense) shape the fan-in is the first dimension; for a
    convolutional shape it is the product of all dimensions after the first.
    """
    if len(shape) == 2:
        fan_in = shape[0]
    else:
        fan_in = np.prod(shape[1:])
    return np.sqrt(6.0 / fan_in)
# Normalize distributions between layers
def normalize(inp, isConv):
    """Batch normalization with hand-rolled running averages.

    While the ``is_train`` placeholder is True, the current batch mean and
    variance are used (and folded into the stored averages); otherwise the
    stored averages are used.
    """
    # Learnable shift/scale, one per feature channel.
    beta = tf.Variable(tf.fill(inp.get_shape()[-1:], 0.0))
    gamma = tf.Variable(tf.fill(inp.get_shape()[-1:], 1.0))
    eps = 0.0001
    # Conv activations: moments over batch, height and width (per channel);
    # dense activations: moments over the batch axis only.
    if isConv:
        mean, var = tf.nn.moments(inp, axes=[0,1,2])
    else:
        mean, var = tf.nn.moments(inp, axes=[0])
    # Non-trainable running statistics used at test time.
    amean = tf.Variable(tf.fill(inp.get_shape()[1:], 0.0), trainable=False)
    avar = tf.Variable(tf.fill(inp.get_shape()[1:], 1.0), trainable=False)
    # NOTE(review): (running + batch) / 2 is a crude running average, not the
    # usual exponential moving average with small momentum.
    train_amean = tf.assign(amean, (amean+mean)/2)
    train_avar = tf.assign(avar, (avar+var)/2)
    # The control dependency forces the average updates to run whenever the
    # normalized output is evaluated.
    with tf.control_dependencies([train_amean, train_avar]):
        return tf.cond(
            is_train,
            lambda: tf.nn.batch_normalization(inp, mean, var, beta, gamma, eps),
            lambda: tf.nn.batch_normalization(inp, amean, avar, beta, gamma, eps)
        )
def prelu(inp, biases, isConv):
    """Normalization followed by a parametric ReLU: max(0, y) + alpha * min(0, y)."""
    # One learnable negative slope per unit, initialised near plain ReLU.
    alpha = tf.Variable(tf.fill(biases, 0.001))
    y = normalize(inp, isConv)
    return tf.maximum(0.0, y) + alpha*tf.minimum(0.0, y)
def conv(inp, strides, weights):
    """SAME-padded 2-D convolution with He-style uniform weight init."""
    W = tf.Variable(tf.random_uniform(weights, -he(weights), he(weights)))
    return tf.nn.conv2d(inp, W, strides, padding='SAME')
def pool(inp, ksize, strides):
    """SAME-padded max pooling."""
    return tf.nn.max_pool(inp, ksize=ksize, strides=strides, padding='SAME')
def fc(inp, weights):
    """Fully-connected layer (no bias term) with He-style uniform weight init."""
    W = tf.Variable(tf.random_uniform(weights, -he(weights), he(weights)))
    return tf.matmul(inp, W)
# Build CNN:
#
# Input (28,28,1,) in batches of 100 (BN)
# Conv [5,5,1]x20
# PRelu (BN)
# MaxPool [2,2]
# Conv [5,5,20]x40
# PRelu (BN)
# MaxPool [2,2]
# FC-PRelu 1600 (BN)
# Dropout 0.2
# FC-PRelu 400 (BN)
# Dropout 0.2
# Softmax 10
dropout_rate = tf.placeholder(tf.float32)
x = tf.placeholder(tf.float32, [BATCH_SIZE, 784])
# Reshape the flat pixel rows back into 28x28 single-channel images.
x2 = normalize(tf.reshape(x, [-1,28,28,1]), True)
conv1 = conv(x2, [1,1,1,1], [5,5,1,20])
relu1 = prelu(conv1, [20], True)
pool1 = pool(relu1, [1,2,2,1], [1,2,2,1])
conv2 = conv(pool1, [1,1,1,1], [5,5,20,40])
relu2 = prelu(conv2, [40], True)
pool2 = pool(relu2, [1,2,2,1], [1,2,2,1])
# After two 2x2 pools a 28x28 image is 7x7 with 40 channels.
flat = tf.reshape(pool2, [-1, 7*7*40])
relu3 = prelu(fc(flat, [7*7*40,1600]), [1600], False)
dropout = tf.nn.dropout(relu3, dropout_rate)
relu4 = prelu(fc(dropout, [1600,400]), [400], False)
dropout2 = tf.nn.dropout(relu4, dropout_rate)
b = tf.Variable(tf.random_uniform([10], -he([10,1]), he([10,1])))
o = fc(dropout2, [400, 10]) + b
# Predicted class per example.
y = tf.argmax(o, axis=1)
y_ = tf.placeholder(tf.int64, [None,])
y_oh = tf.one_hot(y_, 10)
cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(o, y_oh))
# Accuracy reported as a percentage.
accuracy = 100*tf.reduce_mean(tf.cast(tf.equal(y, y_), tf.float32))
train = tf.train.AdamOptimizer().minimize(cost)
init = tf.global_variables_initializer()
with tf.Session() as sess:
    # merged = tf.summary.merge_all()
    writer = tf.summary.FileWriter('log/', sess.graph)
    sess.run(init)
    print('Training...')
    next_btch = next_batch(itrain, ltrain, BATCH_SIZE)
    # Training loop: 4000 mini-batch steps with dropout keep-probability 0.2.
    for i in range(0, 4000):
        batch_xs, batch_ys = next(next_btch)
        feed_dict = {
            x: batch_xs,
            y_: batch_ys,
            dropout_rate: 0.2,
            is_train: True
        }
        sess.run(train, feed_dict)
        # Reshuffle the training set every 500 steps. NOTE(review): the
        # generator still slices the pre-shuffle arrays it captured.
        if i % 500 == 0:
            perm = np.random.permutation(itrain.shape[0])
            itrain = itrain[perm]
            ltrain = ltrain[perm]
        # Print stats
        if i % 100 == 0:
            a_, c_ = sess.run([accuracy, cost], feed_dict)
            print(
                """Iterations: {}
                   Accuracy: {}
                   Cost: {}
                """.format(i, a_, c_)
            )
    # Evaluation: 100 batches over the held-out split with dropout disabled.
    next_btch = next_batch(itest, ltest, BATCH_SIZE)
    acc = np.zeros([100])
    for i in range(0, 100):
        batch_xs, batch_ys = next(next_btch)
        feed_dict = {
            x: batch_xs,
            y_: batch_ys,
            dropout_rate: 1.0,
            is_train: False
        }
        acc = acc + sess.run(accuracy, feed_dict)
    print("Test: {:.2f} accuracy.".format(np.average(acc)/100))
    # classified = np.zeros([28000])
    #
    # for i in range(0, 280):
    #     batch_xs = itest[i*100:i*100+100]
    #     feed_dict = {
    #         x: batch_xs,
    #         is_train: False,
    #         dropout_rate: 1.0
    #     }
    #     classified[i*100:i*100+100] = sess.run(y, feed_dict)
    #
    # df = pd.DataFrame(classified.astype(np.int), columns=['Label'], index=range(1,28001))
    # df.index.name = 'ImageId'
    # df.to_csv('classified.csv')
| 26.364532 | 91 | 0.605381 |
b8ffd72827931da4a759f7940e79201fd6001c1d | 5,447 | py | Python | colour/adaptation/tests/test_cie1994.py | rift-labs-developer/colour | 15112dbe824aab0f21447e0db4a046a28a06f43a | [
"BSD-3-Clause"
] | 1,380 | 2015-01-10T12:30:33.000Z | 2022-03-30T10:19:57.000Z | colour/adaptation/tests/test_cie1994.py | rift-labs-developer/colour | 15112dbe824aab0f21447e0db4a046a28a06f43a | [
"BSD-3-Clause"
] | 638 | 2015-01-02T10:49:05.000Z | 2022-03-29T10:16:22.000Z | colour/adaptation/tests/test_cie1994.py | rift-labs-developer/colour | 15112dbe824aab0f21447e0db4a046a28a06f43a | [
"BSD-3-Clause"
] | 250 | 2015-01-21T15:27:19.000Z | 2022-03-30T10:23:58.000Z | # !/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Defines the unit tests for the :mod:`colour.adaptation.cie1994` module.
"""
import numpy as np
import unittest
from itertools import permutations
from colour.adaptation import chromatic_adaptation_CIE1994
from colour.utilities import domain_range_scale, ignore_numpy_errors
__author__ = 'Colour Developers'
__copyright__ = 'Copyright (C) 2013-2021 - Colour Developers'
__license__ = 'New BSD License - https://opensource.org/licenses/BSD-3-Clause'
__maintainer__ = 'Colour Developers'
__email__ = 'colour-developers@colour-science.org'
__status__ = 'Production'
__all__ = ['TestChromaticAdaptationCIE1994']
class TestChromaticAdaptationCIE1994(unittest.TestCase):
    """
    Defines :func:`colour.adaptation.cie1994.chromatic_adaptation_CIE1994`
    definition unit tests methods.
    """
    def test_chromatic_adaptation_CIE1994(self):
        """
        Tests :func:`colour.adaptation.cie1994.chromatic_adaptation_CIE1994`
        definition.
        """
        # Fixed input/output reference vectors, checked to 7 decimal places.
        np.testing.assert_almost_equal(
            chromatic_adaptation_CIE1994(
                XYZ_1=np.array([28.00, 21.26, 5.27]),
                xy_o1=np.array([0.44760, 0.40740]),
                xy_o2=np.array([0.31270, 0.32900]),
                Y_o=20,
                E_o1=1000,
                E_o2=1000),
            np.array([24.03379521, 21.15621214, 17.64301199]),
            decimal=7)
        # Identical source/target whites but differing illuminances.
        np.testing.assert_almost_equal(
            chromatic_adaptation_CIE1994(
                XYZ_1=np.array([21.77, 19.18, 16.73]),
                xy_o1=np.array([0.31270, 0.32900]),
                xy_o2=np.array([0.31270, 0.32900]),
                Y_o=50,
                E_o1=100,
                E_o2=1000),
            np.array([21.12891746, 19.42980532, 19.49577765]),
            decimal=7)
        # Normalised [0, 1] tristimulus values scaled up to the [0, 100]
        # domain expected by the definition.
        np.testing.assert_almost_equal(
            chromatic_adaptation_CIE1994(
                XYZ_1=np.array([0.07818780, 0.06157201, 0.28099326]) * 100,
                xy_o1=np.array([0.31270, 0.32900]),
                xy_o2=np.array([0.37208, 0.37529]),
                Y_o=20,
                E_o1=100,
                E_o2=1000),
            np.array([9.14287406, 9.35843355, 15.95753504]),
            decimal=7)
    def test_n_dimensional_chromatic_adaptation_CIE1994(self):
        """
        Tests :func:`colour.adaptation.cie1994.chromatic_adaptation_CIE1994`
        definition n-dimensional arrays support.
        """
        # Baseline: single-sample (1-D) computation used as the expected
        # value for all subsequent broadcast shapes.
        XYZ_1 = np.array([28.00, 21.26, 5.27])
        xy_o1 = np.array([0.44760, 0.40740])
        xy_o2 = np.array([0.31270, 0.32900])
        Y_o = 20
        E_o1 = 1000
        E_o2 = 1000
        XYZ_2 = chromatic_adaptation_CIE1994(XYZ_1, xy_o1, xy_o2, Y_o, E_o1,
                                             E_o2)
        # Batched XYZ input with scalar/1-D remaining arguments.
        XYZ_1 = np.tile(XYZ_1, (6, 1))
        XYZ_2 = np.tile(XYZ_2, (6, 1))
        np.testing.assert_almost_equal(
            chromatic_adaptation_CIE1994(XYZ_1, xy_o1, xy_o2, Y_o, E_o1, E_o2),
            XYZ_2,
            decimal=7)
        # All arguments batched to matching leading dimension.
        xy_o1 = np.tile(xy_o1, (6, 1))
        xy_o2 = np.tile(xy_o2, (6, 1))
        Y_o = np.tile(Y_o, 6)
        E_o1 = np.tile(E_o1, 6)
        E_o2 = np.tile(E_o2, 6)
        np.testing.assert_almost_equal(
            chromatic_adaptation_CIE1994(XYZ_1, xy_o1, xy_o2, Y_o, E_o1, E_o2),
            XYZ_2,
            decimal=7)
        # Higher-rank (2, 3, ...) shapes must broadcast identically.
        XYZ_1 = np.reshape(XYZ_1, (2, 3, 3))
        xy_o1 = np.reshape(xy_o1, (2, 3, 2))
        xy_o2 = np.reshape(xy_o2, (2, 3, 2))
        Y_o = np.reshape(Y_o, (2, 3))
        E_o1 = np.reshape(E_o1, (2, 3))
        E_o2 = np.reshape(E_o2, (2, 3))
        XYZ_2 = np.reshape(XYZ_2, (2, 3, 3))
        np.testing.assert_almost_equal(
            chromatic_adaptation_CIE1994(XYZ_1, xy_o1, xy_o2, Y_o, E_o1, E_o2),
            XYZ_2,
            decimal=7)
    def test_domain_range_scale_chromatic_adaptation_CIE1994(self):
        """
        Tests :func:`colour.adaptation.cie1994.chromatic_adaptation_CIE1994`
        definition domain and range scale support.
        """
        XYZ_1 = np.array([28.00, 21.26, 5.27])
        xy_o1 = np.array([0.44760, 0.40740])
        xy_o2 = np.array([0.31270, 0.32900])
        Y_o = 20
        E_o1 = 1000
        E_o2 = 1000
        XYZ_2 = chromatic_adaptation_CIE1994(XYZ_1, xy_o1, xy_o2, Y_o, E_o1,
                                             E_o2)
        # (scale name, multiplicative factor applied to scaled quantities).
        d_r = (('reference', 1), (1, 0.01), (100, 1))
        for scale, factor in d_r:
            with domain_range_scale(scale):
                np.testing.assert_almost_equal(
                    chromatic_adaptation_CIE1994(XYZ_1 * factor, xy_o1, xy_o2,
                                                 Y_o * factor, E_o1, E_o2),
                    XYZ_2 * factor,
                    decimal=7)
    @ignore_numpy_errors
    def test_nan_chromatic_adaptation_CIE1994(self):
        """
        Tests :func:`colour.adaptation.cie1994.chromatic_adaptation_CIE1994`
        definition nan support.
        """
        # Every length-3 permutation of degenerate values; the decorator
        # suppresses NumPy floating-point errors during evaluation.
        cases = [-1.0, 0.0, 1.0, -np.inf, np.inf, np.nan]
        cases = set(permutations(cases * 3, r=3))
        for case in cases:
            XYZ_1 = np.array(case)
            xy_o1 = np.array(case[0:2])
            xy_o2 = np.array(case[0:2])
            Y_o = case[0]
            E_o1 = case[0]
            E_o2 = case[0]
            # No assertion: the call itself must simply not raise.
            chromatic_adaptation_CIE1994(XYZ_1, xy_o1, xy_o2, Y_o, E_o1, E_o2)
# Allow running this test module directly (``python test_cie1994.py``).
if __name__ == '__main__':
    unittest.main()
| 34.474684 | 79 | 0.562695 |
d733b9f25cf3e42195c18a2072b6fce5b9109171 | 2,064 | py | Python | src/stack-hci/azext_stack_hci/vendored_sdks/azurestackhci/models/__init__.py | M-zg/azure-cli-extensions | 3a96c863185aff173fafb2cb4fa99bb8c0abb55b | [
"MIT"
] | null | null | null | src/stack-hci/azext_stack_hci/vendored_sdks/azurestackhci/models/__init__.py | M-zg/azure-cli-extensions | 3a96c863185aff173fafb2cb4fa99bb8c0abb55b | [
"MIT"
] | null | null | null | src/stack-hci/azext_stack_hci/vendored_sdks/azurestackhci/models/__init__.py | M-zg/azure-cli-extensions | 3a96c863185aff173fafb2cb4fa99bb8c0abb55b | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
try:
from ._models_py3 import Cluster
from ._models_py3 import ClusterList
from ._models_py3 import ClusterNode
from ._models_py3 import ClusterUpdate
from ._models_py3 import ErrorAdditionalInfo
from ._models_py3 import ErrorResponse
from ._models_py3 import ErrorResponseError
from ._models_py3 import Operation
from ._models_py3 import OperationDisplay
from ._models_py3 import OperationList
from ._models_py3 import Resource
from ._models_py3 import TrackedResource
except (SyntaxError, ImportError):
from ._models import Cluster # type: ignore
from ._models import ClusterList # type: ignore
from ._models import ClusterNode # type: ignore
from ._models import ClusterUpdate # type: ignore
from ._models import ErrorAdditionalInfo # type: ignore
from ._models import ErrorResponse # type: ignore
from ._models import ErrorResponseError # type: ignore
from ._models import Operation # type: ignore
from ._models import OperationDisplay # type: ignore
from ._models import OperationList # type: ignore
from ._models import Resource # type: ignore
from ._models import TrackedResource # type: ignore
from ._azure_stack_hci_client_enums import (
ProvisioningState,
Status,
)
__all__ = [
'Cluster',
'ClusterList',
'ClusterNode',
'ClusterUpdate',
'ErrorAdditionalInfo',
'ErrorResponse',
'ErrorResponseError',
'Operation',
'OperationDisplay',
'OperationList',
'Resource',
'TrackedResource',
'ProvisioningState',
'Status',
]
| 36.210526 | 94 | 0.680233 |
88c175457c0609629dce62c447f434cd7c8f6065 | 255 | py | Python | Problems/4. Selection/Questions/5. Output Python program/answer.py | wilpola/Introduction-to-programming_Python3 | c7c684167806ec87e22d5bb5ca31d7e8f9d328f1 | [
"MIT"
] | null | null | null | Problems/4. Selection/Questions/5. Output Python program/answer.py | wilpola/Introduction-to-programming_Python3 | c7c684167806ec87e22d5bb5ca31d7e8f9d328f1 | [
"MIT"
] | null | null | null | Problems/4. Selection/Questions/5. Output Python program/answer.py | wilpola/Introduction-to-programming_Python3 | c7c684167806ec87e22d5bb5ca31d7e8f9d328f1 | [
"MIT"
] | null | null | null | """ Arrange the code lines in order where the program outputs \"Python Program\""""
def main():
    """Evaluate the puzzle's statement ordering and print "Python Program".

    The arithmetic reduces to comparing text[0] ('P') with text[7] ('P'),
    so the equality check holds and the string is printed.
    """
    first = 3
    second = 5
    text = "Python Program"
    first = second - 1                    # first  -> 4
    second = second - 3                   # second -> 2
    second = second + first + first % 3   # second -> 2 + 4 + 1 = 7
    first = first % 4                     # first  -> 0
    if text[first] == text[second]:       # 'P' == 'P'
        print(text)
main()  # Module-level invocation: runs on import too (no __main__ guard).
eebf2d21ba236d4ca08f573d7abfe997915a01c1 | 784 | py | Python | indra/sources/trrust/api.py | zebulon2/indra | 7727ddcab52ad8012eb6592635bfa114e904bd48 | [
"BSD-2-Clause"
] | 136 | 2016-02-11T22:06:37.000Z | 2022-03-31T17:26:20.000Z | indra/sources/trrust/api.py | zebulon2/indra | 7727ddcab52ad8012eb6592635bfa114e904bd48 | [
"BSD-2-Clause"
] | 748 | 2016-02-03T16:27:56.000Z | 2022-03-09T14:27:54.000Z | indra/sources/trrust/api.py | zebulon2/indra | 7727ddcab52ad8012eb6592635bfa114e904bd48 | [
"BSD-2-Clause"
] | 56 | 2015-08-28T14:03:44.000Z | 2022-02-04T06:15:55.000Z | __all__ = ['process_from_web']
import io
import pandas
import logging
import requests
from .processor import TrrustProcessor
trrust_human_url = 'https://www.grnpedia.org/trrust/data/trrust_rawdata' \
'.human.tsv'
logger = logging.getLogger(__name__)
def process_from_web():
    """Return a TrrustProcessor based on the online interaction table.

    Downloads the raw human TRRUST TSV table and extracts Statements
    from it.

    Returns
    -------
    TrrustProcessor
        A TrrustProcessor object that has a list of INDRA Statements in
        its statements attribute.
    """
    logger.info('Downloading table from %s' % trrust_human_url)
    response = requests.get(trrust_human_url)
    # Surface HTTP-level failures immediately rather than parsing an
    # error page as a table.
    response.raise_for_status()
    table = pandas.read_table(io.StringIO(response.text))
    processor = TrrustProcessor(table)
    processor.extract_statements()
    return processor
| 23.058824 | 75 | 0.700255 |
5f5d6a06b366cf5770f1ccbb40e67af23bde3ab0 | 11,406 | py | Python | sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_catchup_configuration.py | roofurmston/argo-workflows | 79a95f223396ecab408d831781ab2d38d1fa6de0 | [
"Apache-2.0"
] | 7,643 | 2017-08-22T22:10:45.000Z | 2021-02-09T17:13:12.000Z | sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_catchup_configuration.py | roofurmston/argo-workflows | 79a95f223396ecab408d831781ab2d38d1fa6de0 | [
"Apache-2.0"
] | 4,183 | 2017-08-22T22:45:29.000Z | 2021-02-09T17:41:29.000Z | sdks/python/client/argo_workflows/model/io_argoproj_events_v1alpha1_catchup_configuration.py | isubasinghe/argo-workflows | 1a6e94f1d490e2265c977514d698a1ca70e14fe3 | [
"Apache-2.0"
] | 1,524 | 2017-08-22T22:10:53.000Z | 2021-02-09T16:26:16.000Z | """
Argo Server API
You can get examples of requests and responses by using the CLI with `--gloglevel=9`, e.g. `argo list --gloglevel=9` # noqa: E501
The version of the OpenAPI document: VERSION
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from argo_workflows.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
from ..model_utils import OpenApiModel
from argo_workflows.exceptions import ApiAttributeError
class IoArgoprojEventsV1alpha1CatchupConfiguration(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'enabled': (bool,), # noqa: E501
'max_duration': (str,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'enabled': 'enabled', # noqa: E501
'max_duration': 'maxDuration', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""IoArgoprojEventsV1alpha1CatchupConfiguration - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
enabled (bool): [optional] # noqa: E501
max_duration (str): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""IoArgoprojEventsV1alpha1CatchupConfiguration - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
enabled (bool): [optional] # noqa: E501
max_duration (str): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| 43.869231 | 134 | 0.573032 |
63640b1fb4812c1afc0b69096c3c9a1e687d7933 | 13,031 | py | Python | solum-6.0.0/solum/common/clients.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 39 | 2015-09-26T01:30:52.000Z | 2021-05-20T23:37:43.000Z | solum-6.0.0/solum/common/clients.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | solum-6.0.0/solum/common/clients.py | scottwedge/OpenStack-Stein | 7077d1f602031dace92916f14e36b124f474de15 | [
"Apache-2.0"
] | 30 | 2015-10-25T18:06:39.000Z | 2020-01-14T12:14:06.000Z | # Copyright 2014 - Rackspace Hosting.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from glanceclient import client as glanceclient
from heatclient import client as heatclient
from mistralclient.api import client as mistralclient
from neutronclient.neutron import client as neutronclient
from oslo_config import cfg
from swiftclient import client as swiftclient
from zaqarclient.queues.v1 import client as zaqarclient
from solum.common import exception
from solum.common import solum_barbicanclient
from solum.common import solum_keystoneclient
from solum.i18n import _
GLOBAL_CLIENT_OPTS = [
cfg.StrOpt('region_name',
default='RegionOne',
help=_(
'Region of endpoint in Identity service catalog to use'
' for all clients.')),
]
barbican_client_opts = [
cfg.BoolOpt('insecure',
default=False,
help=_("If set, then the server's certificate for barbican "
"will not be verified.")), ]
# Note: this config is duplicated in many projects that use OpenStack
# clients. This should really be in the client.
# There is a place holder bug here:
# https://bugs.launchpad.net/solum/+bug/1292334
# that we use to track this.
glance_client_opts = [
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the Glance service.')),
cfg.StrOpt('region_name',
default='',
help=_(
'Region of endpoint in Identity service catalog to use.'))]
heat_client_opts = [
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the OpenStack service.')),
cfg.StrOpt('region_name',
default='',
help=_(
'Region of endpoint in Identity service catalog to use.')),
cfg.StrOpt('ca_file',
help=_('Optional CA cert file to use in SSL connections.')),
cfg.StrOpt('cert_file',
help=_('Optional PEM-formatted certificate chain file.')),
cfg.StrOpt('key_file',
help=_('Optional PEM-formatted file that contains the '
'private key.')),
cfg.BoolOpt('insecure',
default=False,
help=_("If set, then the server's certificate will not "
"be verified."))]
zaqar_client_opts = [
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Queue service catalog to use '
'for communication with the Zaqar service.')),
cfg.StrOpt('region_name',
default='',
help=_(
'Region of endpoint in Identity service catalog to use.')),
cfg.BoolOpt('insecure',
default=False,
help=_("If set, then the server's certificate for zaqar "
"will not be verified."))]
neutron_client_opts = [
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the Neutron service.')),
cfg.StrOpt('region_name',
default='',
help=_(
'Region of endpoint in Identity service catalog to use.')),
cfg.StrOpt('ca_cert',
help=_('Optional CA bundle file to use in SSL connections.')),
cfg.BoolOpt('insecure',
default=False,
help=_("If set, then the server's certificate for neutron "
"will not be verified."))]
swift_client_opts = [
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the Swift service.')),
cfg.StrOpt('region_name',
default='',
help=_(
'Region of endpoint in Identity service catalog to use.')),
cfg.StrOpt('cacert',
help=_('Optional CA cert file to use in SSL connections.')),
cfg.BoolOpt('insecure',
default=False,
help=_("If set the server certificate will not be verified."))]
mistral_client_opts = [
cfg.StrOpt('endpoint_type',
default='publicURL',
help=_(
'Type of endpoint in Identity service catalog to use '
'for communication with the mistral service.')),
cfg.StrOpt('region_name',
default='',
help=_(
'Region of endpoint in Identity service catalog to use.')),
cfg.StrOpt('cacert',
help=_('Optional CA cert file to use in SSL connections '
'with Mistral.')),
cfg.BoolOpt('insecure',
default=False,
help=_("If set the server certificate will not be verified "
"while using Mistral."))]
def list_opts():
    """Yield ``(group, options)`` pairs for oslo.config option discovery.

    The ``None`` group registers the global (ungrouped) client options.
    """
    groups = (
        (None, GLOBAL_CLIENT_OPTS),
        ('barbican_client', barbican_client_opts),
        ('glance_client', glance_client_opts),
        ('heat_client', heat_client_opts),
        ('zaqar_client', zaqar_client_opts),
        ('neutron_client', neutron_client_opts),
        ('swift_client', swift_client_opts),
        ('mistral_client', mistral_client_opts),
    )
    yield from groups
cfg.CONF.register_opts(GLOBAL_CLIENT_OPTS)
cfg.CONF.register_opts(barbican_client_opts, group='barbican_client')
cfg.CONF.register_opts(glance_client_opts, group='glance_client')
cfg.CONF.register_opts(heat_client_opts, group='heat_client')
cfg.CONF.register_opts(zaqar_client_opts, group='zaqar_client')
cfg.CONF.register_opts(neutron_client_opts, group='neutron_client')
cfg.CONF.register_opts(swift_client_opts, group='swift_client')
cfg.CONF.register_opts(mistral_client_opts, group='mistral_client')
def get_client_option(client, option):
value = getattr(getattr(cfg.CONF, '%s_client' % client), option)
if option == 'region_name':
global_region = cfg.CONF.get(option)
return value or global_region
else:
return value
class OpenStackClients(object):
    """Convenience class to create and cache client instances."""
    def __init__(self, context):
        # Request context supplying auth details (token, tenant, ...).
        self.context = context
        # Per-service client caches; populated lazily on first access.
        self._barbican = None
        self._keystone = None
        self._glance = None
        self._heat = None
        self._neutron = None
        self._zaqar = None
        self._mistral = None
    def url_for(self, **kwargs):
        # Resolve a service endpoint from the keystone service catalog.
        return self.keystone().client.service_catalog.url_for(**kwargs)
    @property
    def auth_url(self):
        return self.keystone().endpoint
    @property
    def auth_token(self):
        # Prefer the token carried by the request context; otherwise fall
        # back to a token obtained via keystone.
        return self.context.auth_token or self.keystone().auth_token
    @exception.wrap_keystone_exception
    def barbican(self):
        # Lazily construct and memoize the barbican (key manager) client.
        if self._barbican:
            return self._barbican
        insecure = get_client_option('barbican', 'insecure')
        self._barbican = solum_barbicanclient.BarbicanClient(
            verify=not insecure)
        return self._barbican
    def keystone(self):
        # Lazily construct and memoize the keystone (identity) client.
        if self._keystone:
            return self._keystone
        self._keystone = solum_keystoneclient.KeystoneClient(self.context)
        return self._keystone
    @exception.wrap_keystone_exception
    def zaqar(self):
        # Lazily construct and memoize the zaqar (queuing) client.
        if self._zaqar:
            return self._zaqar
        endpoint_type = get_client_option('zaqar', 'endpoint_type')
        region_name = get_client_option('zaqar', 'region_name')
        endpoint_url = self.url_for(service_type='queuing',
                                    interface=endpoint_type,
                                    region_name=region_name)
        # zaqarclient takes auth settings as a nested conf dict rather
        # than keyword arguments.
        conf = {'auth_opts':
                {'backend': 'keystone',
                 'options': {'os_auth_token': self.auth_token,
                             'os_auth_url': self.auth_url,
                             'insecure': get_client_option('zaqar',
                                                           'insecure')}
                 }
                }
        self._zaqar = zaqarclient.Client(endpoint_url, conf=conf)
        return self._zaqar
    @exception.wrap_keystone_exception
    def neutron(self):
        # Lazily construct and memoize the neutron (network) client.
        if self._neutron:
            return self._neutron
        endpoint_type = get_client_option('neutron', 'endpoint_type')
        region_name = get_client_option('neutron', 'region_name')
        endpoint_url = self.url_for(service_type='network',
                                    interface=endpoint_type,
                                    region_name=region_name)
        args = {
            'auth_url': self.auth_url,
            'endpoint_url': endpoint_url,
            # Token auth only; no username/password credentials are used.
            'token': self.auth_token,
            'username': None,
            'password': None,
            'insecure': get_client_option('neutron', 'insecure'),
            'ca_cert': get_client_option('neutron', 'ca_cert')
        }
        self._neutron = neutronclient.Client('2.0', **args)
        return self._neutron
    @exception.wrap_keystone_exception
    def glance(self):
        # Lazily construct and memoize the glance (image) client.
        if self._glance:
            return self._glance
        args = {
            'token': self.auth_token,
        }
        endpoint_type = get_client_option('glance', 'endpoint_type')
        region_name = get_client_option('glance', 'region_name')
        endpoint = self.url_for(service_type='image',
                                interface=endpoint_type,
                                region_name=region_name)
        self._glance = glanceclient.Client('2', endpoint, **args)
        return self._glance
    @exception.wrap_keystone_exception
    def mistral(self):
        # Lazily construct and memoize the mistral (workflow) client.
        if self._mistral:
            return self._mistral
        args = {
            'auth_token': self.auth_token,
        }
        endpoint_type = get_client_option('mistral', 'endpoint_type')
        region_name = get_client_option('mistral', 'region_name')
        endpoint = self.url_for(service_type='workflow',
                                interface=endpoint_type,
                                region_name=region_name)
        self._mistral = mistralclient.client(mistral_url=endpoint, **args)
        return self._mistral
    @exception.wrap_keystone_exception
    def heat(self, username=None, password=None, token=None):
        # Lazily construct and memoize the heat (orchestration) client.
        # NOTE: the memoized client ignores any username/password/token
        # supplied on later calls.
        if self._heat:
            return self._heat
        # An explicitly supplied token takes precedence over the token
        # derived from the request context.
        if token:
            token_to_use = token
        else:
            token_to_use = self.auth_token
        endpoint_type = get_client_option('heat', 'endpoint_type')
        args = {
            'auth_url': self.auth_url,
            'token': token_to_use,
            'username': username,
            'password': password,
            'ca_file': get_client_option('heat', 'ca_file'),
            'cert_file': get_client_option('heat', 'cert_file'),
            'key_file': get_client_option('heat', 'key_file'),
            'insecure': get_client_option('heat', 'insecure')
        }
        region_name = get_client_option('heat', 'region_name')
        endpoint = self.url_for(service_type='orchestration',
                                interface=endpoint_type,
                                region_name=region_name)
        self._heat = heatclient.Client('1', endpoint, **args)
        return self._heat
    @exception.wrap_keystone_exception
    def swift(self):
        # Not caching swift connections because of range requests
        # Check how glance_store uses swift client for a reference
        endpoint_type = get_client_option('swift', 'endpoint_type')
        region_name = get_client_option('swift', 'region_name')
        args = {
            'auth_version': '2.0',
            'preauthtoken': self.auth_token,
            'preauthurl': self.url_for(service_type='object-store',
                                       interface=endpoint_type,
                                       region_name=region_name),
            'os_options': {'endpoint_type': endpoint_type,
                           'region_name': region_name},
            'cacert': get_client_option('swift', 'cacert'),
            'insecure': get_client_option('swift', 'insecure')
        }
        return swiftclient.Connection(**args)
| 37.880814 | 79 | 0.59627 |
396f9210af4c012b4383414dc093e76ab4105fd3 | 2,365 | py | Python | portinus/__init__.py | justin8/portinus | b6832e5f5e2a52492ca46503ae9ef6fbe7125745 | [
"MIT"
] | null | null | null | portinus/__init__.py | justin8/portinus | b6832e5f5e2a52492ca46503ae9ef6fbe7125745 | [
"MIT"
] | null | null | null | portinus/__init__.py | justin8/portinus | b6832e5f5e2a52492ca46503ae9ef6fbe7125745 | [
"MIT"
] | null | null | null | import logging
from operator import attrgetter
import pathlib
from jinja2 import Template
from .cli import task
from . import restart, monitor
from .environmentfile import EnvironmentFile
from .composesource import ComposeSource
from .service import Service
_script_dir = pathlib.Path(__file__).resolve().parent
template_dir = _script_dir.joinpath("templates")
service_dir = pathlib.Path("/usr/local/portinus-services")
def list():  # noqa: A001 - public CLI name, kept for backward compatibility
    """
    List the available services
    """
    _ensure_service_dir()
    print("Available portinus services:")
    for entry in sorted(service_dir.iterdir()):
        if entry.is_dir():
            print(entry.name)
def get_instance_dir(name):
    """
    Get the directory used for storing the service files
    """
    return service_dir / name
def get_template(file_name):
    """
    Returns the named template
    """
    raw = (template_dir / file_name).read_text()
    return Template(raw)
def _ensure_service_dir():
    """
    Make sure that the service dir exists
    """
    # Idempotent: ``exist_ok`` makes repeated calls safe. Parent
    # directories are assumed to exist (no ``parents=True``).
    service_dir.mkdir(exist_ok=True)
class Application(object):
    """
    A portinus Application. This contains all the pieces of a portinus service
    including the restart timer, monitor server, environment file and
    service files themselves
    """
    # Shared module-level (root) logger for all Application instances.
    log = logging.getLogger()
    def __init__(self, name, source=None, environment_file=None, restart_schedule=None):
        """Build the component objects for the named service.

        Constructing the components does not create anything on disk;
        see :meth:`ensure`.
        """
        self.name = name
        self.environment_file = EnvironmentFile(name, environment_file)
        self.service = Service(name, source)
        self.restart_timer = restart.Timer(name, restart_schedule=restart_schedule)
        self.monitor_service = monitor.Service(name)
    def exists(self):
        # The service files are treated as the source of truth for whether
        # the application exists.
        return self.service.exists()
    def ensure(self):
        """
        Ensure all the application components are in the correct state
        """
        _ensure_service_dir()
        self.environment_file.ensure()
        self.service.ensure()
        self.restart_timer.ensure()
        self.monitor_service.ensure()
    def remove(self):
        """
        Remove all the application components
        """
        # NOTE(review): removal order (service first) differs from the
        # ensure order — presumably deliberate so the service unit is gone
        # before its timer/monitor; confirm before reordering.
        self.service.remove()
        self.environment_file.remove()
        self.restart_timer.remove()
        self.monitor_service.remove()
| 25.989011 | 88 | 0.682875 |
b18e53f9834ade5dec98c0ad19f909c63540a73a | 421 | py | Python | mails/tests.py | kermox/schronisko-krakow | 8f8c546894e4b683ce463debad27db72ef820f90 | [
"MIT"
] | 1 | 2020-11-17T18:50:44.000Z | 2020-11-17T18:50:44.000Z | mails/tests.py | kermox/schronisko-krakow | 8f8c546894e4b683ce463debad27db72ef820f90 | [
"MIT"
] | 9 | 2020-10-23T18:42:45.000Z | 2022-03-12T00:39:57.000Z | mails/tests.py | kermox/schronisko-krakow | 8f8c546894e4b683ce463debad27db72ef820f90 | [
"MIT"
] | null | null | null | from django.test import TestCase
from mails.models import EmailTemplate
from mails.tasks import send_email
class TestEmailSendFunction(TestCase):
    """Tests for the ``send_email`` task in ``mails.tasks``."""

    def setUp(self):
        """Create an ``EmailTemplate`` row as the test fixture."""
        EmailTemplate.objects.create(
            title='test title',
            body='test body'
        )

    def test_email_send(self):
        """``send_email`` reports success for the given address."""
        address = 'bboykermo@gmail.com'
        # ``assertEquals`` is a deprecated alias (removed in Python 3.12);
        # use ``assertEqual`` instead.
        self.assertEqual(send_email(address), 'Message was sent')
c20854a3ebc0588ddc2c7f0d85cb1322f09b7c18 | 8,258 | py | Python | falcon/testing/resource.py | RioAtHome/falcon | edd9352e630dbbb6272370281fc5fa6d792df057 | [
"Apache-2.0"
] | null | null | null | falcon/testing/resource.py | RioAtHome/falcon | edd9352e630dbbb6272370281fc5fa6d792df057 | [
"Apache-2.0"
] | null | null | null | falcon/testing/resource.py | RioAtHome/falcon | edd9352e630dbbb6272370281fc5fa6d792df057 | [
"Apache-2.0"
] | null | null | null | # Copyright 2013 by Rackspace Hosting, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Mock resource classes.
This module contains mock resource classes and associated hooks for use
in Falcon framework tests. The classes and hooks may be referenced
directly from the `testing` package::
from falcon import testing
resource = testing.SimpleTestResource()
"""
from json import dumps as json_dumps
import falcon
def capture_responder_args(req, resp, resource, params):
    """Before hook that records the arguments passed to a responder.

    Stores ``captured_req``, ``captured_resp`` and ``captured_kwargs`` on
    the hooked responder's resource. When the request carries a
    capture-req-body-bytes header, that many bytes of the request body are
    read into ``captured_req_body``; otherwise, when a capture-req-media
    header is present (any value), the deserialized media is stored in
    ``captured_req_media``.
    """
    resource.captured_req = req
    resource.captured_resp = resp
    resource.captured_kwargs = params

    # Reset both capture slots so data from a previous request never leaks
    # into the current capture.
    resource.captured_req_media = None
    resource.captured_req_body = None

    byte_count = req.get_header('capture-req-body-bytes')
    if byte_count:
        resource.captured_req_body = req.stream.read(int(byte_count))
    elif req.get_header('capture-req-media'):
        resource.captured_req_media = req.get_media()
async def capture_responder_args_async(req, resp, resource, params):
    """Async counterpart of :meth:`~falcon.testing.capture_responder_args`.

    Identical capture semantics, except the ASGI request's ``stream.read``
    and ``get_media`` are coroutines and must be awaited.
    """
    resource.captured_req = req
    resource.captured_resp = resp
    resource.captured_kwargs = params

    # Clear both capture slots before (maybe) filling one of them below.
    resource.captured_req_media = None
    resource.captured_req_body = None

    byte_count = req.get_header('capture-req-body-bytes')
    if byte_count:
        resource.captured_req_body = await req.stream.read(int(byte_count))
    elif req.get_header('capture-req-media'):
        resource.captured_req_media = await req.get_media()
def set_resp_defaults(req, resp, resource, params):
    """Before hook that primes the response with resource-level defaults.

    Copies the resource's ``_default_status``, ``_default_body`` and
    ``_default_headers`` attributes (when not ``None``) onto the response's
    ``status``, ``text`` and headers, respectively. A ``None`` default
    leaves the corresponding response property untouched.
    """
    simple_defaults = (
        ('status', resource._default_status),
        ('text', resource._default_body),
    )
    for attr_name, default_value in simple_defaults:
        if default_value is not None:
            setattr(resp, attr_name, default_value)

    default_headers = resource._default_headers
    if default_headers is not None:
        # Headers merge through the Response API rather than plain setattr.
        resp.set_headers(default_headers)
async def set_resp_defaults_async(req, resp, resource, params):
    """Wrap :meth:`~falcon.testing.set_resp_defaults` in a coroutine."""
    # The synchronous hook only assigns attributes (no I/O), so it is safe
    # to call it directly from a coroutine without awaiting anything.
    set_resp_defaults(req, resp, resource, params)
class SimpleTestResource:
    """Mock resource for functional testing of framework components.

    A minimal resource whose no-op ``on_get()`` and ``on_post()`` responders
    are wrapped with the :py:meth:`falcon.testing.capture_responder_args`
    and :py:meth:`falcon.testing.set_resp_defaults` hooks. The first hook
    records the *req*, *resp*, and *params* passed to the responder; the
    second seeds the response from the defaults given at construction time.
    Subclasses adding responders may reuse the same hooks.

    Keyword Arguments:
        status (str): Default status string to use in responses
        body (str): Default body string to use in responses
        json (JSON serializable): Default JSON document to use in responses.
            Serialized to a string and encoded as UTF-8. Mutually exclusive
            with *body*.
        headers (dict): Default set of additional headers to include in
            responses

    Attributes:
        called (bool): Whether or not a req/resp was captured.
        captured_req (falcon.Request): The last Request object passed
            into any one of the responder methods.
        captured_resp (falcon.Response): The last Response object passed
            into any one of the responder methods.
        captured_kwargs (dict): The last dictionary of kwargs, beyond
            ``req`` and ``resp``, that were passed into any one of the
            responder methods.
    """

    def __init__(self, status=None, body=None, json=None, headers=None):
        self._default_status = status
        self._default_headers = headers

        if json is None:
            self._default_body = body
        else:
            # body and json are mutually exclusive ways to seed the response
            if body is not None:
                msg = 'Either json or body may be specified, but not both'
                raise ValueError(msg)
            self._default_body = json_dumps(json, ensure_ascii=False)

        self.captured_req = None
        self.captured_resp = None
        self.captured_kwargs = None

    @property
    def called(self):
        # A request was captured iff the capture hook ran at least once.
        return self.captured_req is not None

    @falcon.before(capture_responder_args)
    @falcon.before(set_resp_defaults)
    def on_get(self, req, resp, **kwargs):
        # No-op; the hooks do all the work.
        pass

    @falcon.before(capture_responder_args)
    @falcon.before(set_resp_defaults)
    def on_post(self, req, resp, **kwargs):
        pass
class SimpleTestResourceAsync(SimpleTestResource):
    """ASGI-flavored variant of :class:`SimpleTestResource`.

    Behaves exactly like its parent, except the no-op ``on_get()`` and
    ``on_post()`` responders are coroutine functions wrapped with the
    asynchronous hook variants
    (:py:meth:`falcon.testing.capture_responder_args_async` and
    :py:meth:`falcon.testing.set_resp_defaults_async`), making the
    resource suitable for exercising the ASGI interface.

    Keyword Arguments:
        status (str): Default status string to use in responses
        body (str): Default body string to use in responses
        json (JSON serializable): Default JSON document to use in responses.
            Serialized to a string and encoded as UTF-8. Mutually exclusive
            with *body*.
        headers (dict): Default set of additional headers to include in
            responses

    Attributes:
        called (bool): Whether or not a req/resp was captured.
        captured_req (falcon.Request): The last Request object passed
            into any one of the responder methods.
        captured_resp (falcon.Response): The last Response object passed
            into any one of the responder methods.
        captured_kwargs (dict): The last dictionary of kwargs, beyond
            ``req`` and ``resp``, that were passed into any one of the
            responder methods.
    """

    @falcon.before(capture_responder_args_async)
    @falcon.before(set_resp_defaults_async)
    async def on_get(self, req, resp, **kwargs):
        # No-op; capture and default-seeding happen in the hooks.
        pass

    @falcon.before(capture_responder_args_async)
    @falcon.before(set_resp_defaults_async)
    async def on_post(self, req, resp, **kwargs):
        pass
| 36.219298 | 78 | 0.700048 |
437a95bccc0d1cd0a88d1b1e54278166a9118558 | 166,965 | py | Python | salt/minion.py | kaelaworthen/salt | 702a04bb7f04ea243dd5e38ae5839db7182b3a58 | [
"Apache-2.0"
] | null | null | null | salt/minion.py | kaelaworthen/salt | 702a04bb7f04ea243dd5e38ae5839db7182b3a58 | [
"Apache-2.0"
] | 1 | 2018-09-21T05:06:03.000Z | 2018-09-21T05:06:03.000Z | salt/minion.py | kaelaworthen/salt | 702a04bb7f04ea243dd5e38ae5839db7182b3a58 | [
"Apache-2.0"
] | 1 | 2015-07-28T15:22:01.000Z | 2015-07-28T15:22:01.000Z | # -*- coding: utf-8 -*-
'''
Routines to set up a minion
'''
# Import python libs
from __future__ import absolute_import, print_function, with_statement, unicode_literals
import functools
import os
import re
import sys
import copy
import time
import types
import signal
import random
import fnmatch
import logging
import threading
import traceback
import contextlib
import multiprocessing
from random import randint, shuffle
from stat import S_IMODE
import salt.serializers.msgpack
from binascii import crc32
# Import Salt Libs
# pylint: disable=import-error,no-name-in-module,redefined-builtin
from salt.ext import six
if six.PY3:
import ipaddress
else:
import salt.ext.ipaddress as ipaddress
from salt.ext.six.moves import range
from salt.utils.zeromq import zmq, ZMQDefaultLoop, install_zmq, ZMQ_VERSION_INFO
import salt.defaults.exitcodes
from salt.utils.ctx import RequestContext
# pylint: enable=no-name-in-module,redefined-builtin
import tornado
HAS_RANGE = False
try:
import seco.range
HAS_RANGE = True
except ImportError:
pass
HAS_PSUTIL = False
try:
import salt.utils.psutil_compat as psutil
HAS_PSUTIL = True
except ImportError:
pass
HAS_RESOURCE = False
try:
import resource
HAS_RESOURCE = True
except ImportError:
pass
try:
import zmq.utils.monitor
HAS_ZMQ_MONITOR = True
except ImportError:
HAS_ZMQ_MONITOR = False
try:
import salt.utils.win_functions
HAS_WIN_FUNCTIONS = True
except ImportError:
HAS_WIN_FUNCTIONS = False
# pylint: enable=import-error
# Import salt libs
import salt
import salt.client
import salt.crypt
import salt.loader
import salt.beacons
import salt.engines
import salt.payload
import salt.pillar
import salt.syspaths
import salt.utils.args
import salt.utils.context
import salt.utils.data
import salt.utils.error
import salt.utils.event
import salt.utils.files
import salt.utils.jid
import salt.utils.minion
import salt.utils.minions
import salt.utils.network
import salt.utils.platform
import salt.utils.process
import salt.utils.schedule
import salt.utils.ssdp
import salt.utils.user
import salt.utils.zeromq
import salt.defaults.exitcodes
import salt.cli.daemons
import salt.log.setup
import salt.utils.dictupdate
from salt.config import DEFAULT_MINION_OPTS
from salt.defaults import DEFAULT_TARGET_DELIM
from salt.utils.debug import enable_sigusr1_handler
from salt.utils.event import tagify
from salt.utils.odict import OrderedDict
from salt.utils.process import (default_signals,
SignalHandlingMultiprocessingProcess,
ProcessManager)
from salt.exceptions import (
CommandExecutionError,
CommandNotFoundError,
SaltInvocationError,
SaltReqTimeoutError,
SaltClientError,
SaltSystemExit,
SaltDaemonNotRunning,
SaltException,
)
import tornado.gen # pylint: disable=F0401
import tornado.ioloop # pylint: disable=F0401
log = logging.getLogger(__name__)
# To set up a minion:
# 1. Read in the configuration
# 2. Generate the function mapping dict
# 3. Authenticate with the master
# 4. Store the AES key
# 5. Connect to the publisher
# 6. Handle publications
def resolve_dns(opts, fallback=True):
    '''
    Resolves the master_ip and master_uri options

    :param dict opts: minion configuration; reads ``master``,
        ``master_port``, ``retry_dns``, ``ipv6`` and the ``source_*`` keys.
    :param bool fallback: when True, fall back to ``127.0.0.1`` after a
        failed, non-retried resolution instead of re-raising.
    :return: dict with ``master_ip`` and ``master_uri`` keys, plus
        ``source_ip``/``source_ret_port``/``source_publish_port`` when the
        corresponding options are configured.
    '''
    ret = {}
    check_dns = True
    # Masterless minions (file_client: local and not use_master_when_local)
    # never need to resolve a master address.
    if (opts.get('file_client', 'remote') == 'local' and
            not opts.get('use_master_when_local', False)):
        check_dns = False
    # Since salt.log is imported below, salt.utils.network needs to be imported here as well
    import salt.utils.network

    if check_dns is True:
        try:
            # An empty master value is treated the same as an unresolvable one.
            if opts['master'] == '':
                raise SaltSystemExit
            ret['master_ip'] = salt.utils.network.dns_check(
                opts['master'],
                int(opts['master_port']),
                True,
                opts['ipv6'])
        except SaltClientError:
            # Resolution failed; optionally keep retrying every
            # ``retry_dns`` seconds until it succeeds (blocks this thread).
            if opts['retry_dns']:
                while True:
                    import salt.log
                    msg = ('Master hostname: \'{0}\' not found or not responsive. '
                           'Retrying in {1} seconds').format(opts['master'], opts['retry_dns'])
                    if salt.log.setup.is_console_configured():
                        log.error(msg)
                    else:
                        print('WARNING: {0}'.format(msg))
                    time.sleep(opts['retry_dns'])
                    try:
                        ret['master_ip'] = salt.utils.network.dns_check(
                            opts['master'],
                            int(opts['master_port']),
                            True,
                            opts['ipv6'])
                        break
                    except SaltClientError:
                        pass
            else:
                if fallback:
                    ret['master_ip'] = '127.0.0.1'
                else:
                    raise
        except SaltSystemExit:
            # Unresolvable/empty master: build a role-appropriate error and
            # exit with code 42.
            unknown_str = 'unknown address'
            master = opts.get('master', unknown_str)
            if master == '':
                master = unknown_str
            if opts.get('__role') == 'syndic':
                err = 'Master address: \'{0}\' could not be resolved. Invalid or unresolveable address. ' \
                      'Set \'syndic_master\' value in minion config.'.format(master)
            else:
                err = 'Master address: \'{0}\' could not be resolved. Invalid or unresolveable address. ' \
                      'Set \'master\' value in minion config.'.format(master)
            log.error(err)
            raise SaltSystemExit(code=42, msg=err)
    else:
        ret['master_ip'] = '127.0.0.1'

    if 'master_ip' in ret and 'master_ip' in opts:
        if ret['master_ip'] != opts['master_ip']:
            log.warning(
                'Master ip address changed from %s to %s',
                opts['master_ip'], ret['master_ip']
            )
    # Optionally pin the outbound connection to a specific interface or
    # source address; interface name takes precedence over source_address.
    if opts['source_interface_name']:
        log.trace('Custom source interface required: %s', opts['source_interface_name'])
        interfaces = salt.utils.network.interfaces()
        log.trace('The following interfaces are available on this Minion:')
        log.trace(interfaces)
        if opts['source_interface_name'] in interfaces:
            if interfaces[opts['source_interface_name']]['up']:
                # NOTE(review): only the first address of the interface is
                # used; multi-address interfaces always bind addrs[0].
                addrs = interfaces[opts['source_interface_name']]['inet'] if not opts['ipv6'] else\
                        interfaces[opts['source_interface_name']]['inet6']
                ret['source_ip'] = addrs[0]['address']
                log.debug('Using %s as source IP address', ret['source_ip'])
            else:
                log.warning('The interface %s is down so it cannot be used as source to connect to the Master',
                            opts['source_interface_name'])
        else:
            log.warning('%s is not a valid interface. Ignoring.', opts['source_interface_name'])
    elif opts['source_address']:
        ret['source_ip'] = salt.utils.network.dns_check(
            opts['source_address'],
            int(opts['source_ret_port']),
            True,
            opts['ipv6'])
        log.debug('Using %s as source IP address', ret['source_ip'])
    if opts['source_ret_port']:
        ret['source_ret_port'] = int(opts['source_ret_port'])
        log.debug('Using %d as source port for the ret server', ret['source_ret_port'])
    if opts['source_publish_port']:
        ret['source_publish_port'] = int(opts['source_publish_port'])
        log.debug('Using %d as source port for the master pub', ret['source_publish_port'])
    ret['master_uri'] = 'tcp://{ip}:{port}'.format(
        ip=ret['master_ip'], port=opts['master_port'])
    log.debug('Master URI: %s', ret['master_uri'])

    return ret
def prep_ip_port(opts):
    '''
    Split the configured ``master`` value into address and port.

    Returns a dict with a ``master`` key and, when the configured value
    embeds a port (``host:port`` or ``[v6addr]:port``), an int
    ``master_port`` key as well.
    '''
    # With the ip_only URI format -- or when the whole value is a bare IPv6
    # address (is_ipv6 returns False for bracketed forms such as
    # '[::1]:1234') -- the value is used verbatim and no port is extracted.
    if opts['master_uri_format'] == 'ip_only' or salt.utils.network.is_ipv6(opts['master']):
        return {'master': opts['master']}

    host, sep, port = opts['master'].rpartition(':')
    if not sep:
        # No port embedded, e.g. master: mysaltmaster
        return {'master': port}

    # Brackets (if any) only delimit an IPv6 address; strip them. The port
    # must be an int, otherwise a TypeError is thrown on some of the socket
    # calls elsewhere in the minion and utils code.
    return {'master': host.strip('[]'), 'master_port': int(port)}
def get_proc_dir(cachedir, **kwargs):
    '''
    Given the cache directory, return the directory that process data is
    stored in, creating it if it doesn't exist.

    The following optional Keyword Arguments are handled:

    mode: anything ``os.makedirs`` accepts as mode. Re-applied via
        ``os.chmod`` when the directory already exists with other bits.

    uid: owner uid to enforce. Skipped when unset, -1, or already correct.
        Must be int. Works only on unix/unix like systems.

    gid: owner gid to enforce; same rules as ``uid``.
    '''
    proc_dir = os.path.join(cachedir, 'proc')

    requested_mode = kwargs.pop('mode', None)
    makedirs_kwargs = {} if requested_mode is None else {'mode': requested_mode}

    if not os.path.isdir(proc_dir):
        # Not present yet: create it, honoring the explicit mode when given.
        os.makedirs(proc_dir, **makedirs_kwargs)

    d_stat = os.stat(proc_dir)

    if makedirs_kwargs:
        # An explicit mode was requested; re-apply it in case the directory
        # pre-existed with different permission bits (or umask interfered).
        current_bits = S_IMODE(d_stat.st_mode)
        if current_bits != requested_mode:
            os.chmod(proc_dir, (d_stat.st_mode ^ current_bits) | requested_mode)

    if hasattr(os, 'chown'):
        # Ownership can only be adjusted on unix/unix-like systems.
        uid = kwargs.pop('uid', -1)
        gid = kwargs.pop('gid', -1)
        requested_ids = [i for i in (uid, gid) if i != -1]
        # Skip the chown entirely when both ids are -1 (unset) or when the
        # directory already has the requested ownership.
        if requested_ids and (d_stat.st_uid != uid or d_stat.st_gid != gid):
            os.chown(proc_dir, uid, gid)

    return proc_dir
def load_args_and_kwargs(func, args, data=None, ignore_invalid=False):
    '''
    Detect the args and kwargs that need to be passed to a function call, and
    check them against what was passed.

    Returns an ``(args, kwargs)`` pair ready to apply to ``func``. Invalid
    keyword arguments raise (via ``salt.utils.args.invalid_kwargs``) unless
    *ignore_invalid* is True.
    '''
    argspec = salt.utils.args.get_function_argspec(func)
    _args = []
    _kwargs = {}
    invalid_kwargs = []

    def _accepts(name):
        # A keyword is legal when the function takes **kwargs or declares
        # the name as a positional parameter.
        return bool(argspec.keywords) or name in argspec.args

    for arg in args:
        if isinstance(arg, dict) and arg.pop('__kwarg__', False) is True:
            # Explicit kwarg dict, marked with __kwarg__ by the publisher.
            for key, val in six.iteritems(arg):
                if _accepts(key):
                    _kwargs[key] = val
                else:
                    # Keyword the function cannot accept: collect for the
                    # error raised (or ignored) after the loop.
                    invalid_kwargs.append('{0}={1}'.format(key, val))
            continue

        # A plain string of the form key=value also counts as a kwarg.
        string_kwarg = salt.utils.args.parse_input([arg], condition=False)[1]  # pylint: disable=W0632
        if not string_kwarg:
            _args.append(arg)
        elif _accepts(next(six.iterkeys(string_kwarg))):
            _kwargs.update(string_kwarg)
        else:
            # Parsed like key=value but the function cannot accept it.
            for key, val in six.iteritems(string_kwarg):
                invalid_kwargs.append('{0}={1}'.format(key, val))

    if invalid_kwargs and not ignore_invalid:
        salt.utils.args.invalid_kwargs(invalid_kwargs)

    if argspec.keywords and isinstance(data, dict):
        # The function accepts **kwargs: pack the publish data in as
        # __pub_-prefixed keys.
        for key, val in six.iteritems(data):
            _kwargs['__pub_{0}'.format(key)] = val

    return _args, _kwargs
def eval_master_func(opts):
    '''
    Evaluate master function if master type is 'func'
    and save it result in opts['master']

    ``opts['master']`` must be a ``module.function`` string; the named
    loader module is called and its return value (a string or list)
    replaces ``opts['master']``. Exits the process on failure.
    '''
    if '__master_func_evaluated' in opts:
        # Already evaluated once; never re-run the module function.
        return
    # Split module and function and try loading the module.
    mod_fun = opts['master']
    mod, fun = mod_fun.split('.')
    try:
        master_mod = salt.loader.raw_mod(opts, mod, fun)
        if not master_mod:
            raise KeyError
        # Whatever the module returns becomes the master address.
        opts['master'] = master_mod[mod_fun]()
        # Only strings and lists are acceptable master values.
        if not isinstance(opts['master'], (six.string_types, list)):
            raise TypeError
        opts['__master_func_evaluated'] = True
    except KeyError:
        log.error('Failed to load module %s', mod_fun)
        sys.exit(salt.defaults.exitcodes.EX_GENERIC)
    except TypeError:
        log.error('%s returned from %s is not a string', opts['master'], mod_fun)
        sys.exit(salt.defaults.exitcodes.EX_GENERIC)
    log.info('Evaluated master from module: %s', mod_fun)
def master_event(type, master=None):
    '''
    Centralized master event function which will return event type based on event_map

    Unknown event types map to ``None``. ``alive`` events carry the master
    id as a suffix when one is given.
    '''
    # NOTE: the parameter is named ``type`` (shadowing the builtin) for
    # backward compatibility with existing keyword callers.
    event_map = {'connected': '__master_connected',
                 'disconnected': '__master_disconnected',
                 'failback': '__master_failback',
                 'alive': '__master_alive'}
    tag = event_map.get(type, None)
    if type == 'alive' and master is not None:
        # Per-master alive events are suffixed with the master id.
        return '{0}_{1}'.format(tag, master)
    return tag
class MinionBase(object):
    '''
    Shared base for minion implementations.

    Holds the minion configuration and the machinery common to all minion
    flavors: schedule/beacon evaluation, master resolution (including
    failover, distributed selection and SSDP discovery) and the return
    retry timer.
    '''
    def __init__(self, opts):
        # opts: the minion configuration dict; subclasses may replace it
        # (see eval_master, which re-assigns self.opts during failover).
        self.opts = opts

    @staticmethod
    def process_schedule(minion, loop_interval):
        '''
        Evaluate the minion's scheduler once and return the (possibly
        lowered) loop interval to use for the next main-loop iteration.
        '''
        try:
            if hasattr(minion, 'schedule'):
                minion.schedule.eval()
            else:
                log.error('Minion scheduler not initialized. Scheduled jobs will not be run.')
                return
            # Check if scheduler requires lower loop interval than
            # the loop_interval setting
            if minion.schedule.loop_interval < loop_interval:
                loop_interval = minion.schedule.loop_interval
                log.debug(
                    'Overriding loop_interval because of scheduled jobs.'
                )
        except Exception as exc:
            # Best-effort: a failing scheduled job must not kill the loop.
            log.error('Exception %s occurred in scheduled job', exc)
        return loop_interval

    def process_beacons(self, functions):
        '''
        Evaluate all of the configured beacons, grab the config again in case
        the pillar or grains changed
        '''
        if 'config.merge' in functions:
            b_conf = functions['config.merge']('beacons', self.opts['beacons'], omit_opts=True)
            if b_conf:
                return self.beacons.process(b_conf, self.opts['grains'])  # pylint: disable=no-member
        # No beacons configured (or no config.merge available): nothing fired.
        return []

    @tornado.gen.coroutine
    def eval_master(self,
                    opts,
                    timeout=60,
                    safe=True,
                    failed=False,
                    failback=False):
        '''
        Evaluates and returns a tuple of the current master address and the pub_channel.

        In standard mode, just creates a pub_channel with the given master address.

        With master_type=func evaluates the current master address from the given
        module and then creates a pub_channel.

        With master_type=failover takes the list of masters and loops through them.
        The first one that allows the minion to create a pub_channel is then
        returned. If this function is called outside the minions initialization
        phase (for example from the minions main event-loop when a master connection
        loss was detected), 'failed' should be set to True. The current
        (possibly failed) master will then be removed from the list of masters.
        '''
        # return early if we are not connecting to a master
        if opts['master_type'] == 'disable':
            log.warning('Master is set to disable, skipping connection')
            self.connected = False
            raise tornado.gen.Return((None, None))
        # Run masters discovery over SSDP. This may modify the whole configuration,
        # depending of the networking and sets of masters.
        self._discover_masters()

        # check if master_type was altered from its default
        if opts['master_type'] != 'str' and opts['__role'] != 'syndic':
            # check for a valid keyword
            if opts['master_type'] == 'func':
                eval_master_func(opts)

            # if failover or distributed is set, master has to be of type list
            elif opts['master_type'] in ('failover', 'distributed'):
                if isinstance(opts['master'], list):
                    log.info(
                        'Got list of available master addresses: %s',
                        opts['master']
                    )

                    if opts['master_type'] == 'distributed':
                        # Deterministically spread minions across masters by
                        # hashing the minion id (crc32) into the master list.
                        master_len = len(opts['master'])
                        if master_len > 1:
                            secondary_masters = opts['master'][1:]
                            master_idx = crc32(opts['id']) % master_len
                            try:
                                preferred_masters = opts['master']
                                preferred_masters[0] = opts['master'][master_idx]
                                preferred_masters[1:] = [m for m in opts['master'] if m != preferred_masters[0]]
                                opts['master'] = preferred_masters
                                log.info('Distributed to the master at \'{0}\'.'.format(opts['master'][0]))
                            except (KeyError, AttributeError, TypeError):
                                log.warning('Failed to distribute to a specific master.')
                        else:
                            log.warning('master_type = distributed needs more than 1 master.')

                    if opts['master_shuffle']:
                        log.warning(
                            'Use of \'master_shuffle\' detected. \'master_shuffle\' is deprecated in favor '
                            'of \'random_master\'. Please update your minion config file.'
                        )
                        opts['random_master'] = opts['master_shuffle']

                    opts['auth_tries'] = 0
                    if opts['master_failback'] and opts['master_failback_interval'] == 0:
                        opts['master_failback_interval'] = opts['master_alive_interval']
                # if opts['master'] is a str and we have never created opts['master_list']
                elif isinstance(opts['master'], six.string_types) and ('master_list' not in opts):
                    # We have a string, but a list was what was intended. Convert.
                    # See issue 23611 for details
                    opts['master'] = [opts['master']]
                elif opts['__role'] == 'syndic':
                    log.info('Syndic setting master_syndic to \'%s\'', opts['master'])

                # if failed=True, the minion was previously connected
                # we're probably called from the minions main-event-loop
                # because a master connection loss was detected. remove
                # the possibly failed master from the list of masters.
                elif failed:
                    if failback:
                        # failback list of masters to original config
                        opts['master'] = opts['master_list']
                    else:
                        log.info(
                            'Moving possibly failed master %s to the end of '
                            'the list of masters', opts['master']
                        )
                        if opts['master'] in opts['local_masters']:
                            # create new list of master with the possibly failed
                            # one moved to the end
                            failed_master = opts['master']
                            opts['master'] = [x for x in opts['local_masters'] if opts['master'] != x]
                            opts['master'].append(failed_master)
                        else:
                            opts['master'] = opts['master_list']
                else:
                    msg = ('master_type set to \'failover\' but \'master\' '
                           'is not of type list but of type '
                           '{0}'.format(type(opts['master'])))
                    log.error(msg)
                    sys.exit(salt.defaults.exitcodes.EX_GENERIC)

                # If failover is set, minion have to failover on DNS errors instead of retry DNS resolve.
                # See issue 21082 for details
                if opts['retry_dns'] and opts['master_type'] == 'failover':
                    msg = ('\'master_type\' set to \'failover\' but \'retry_dns\' is not 0. '
                           'Setting \'retry_dns\' to 0 to failover to the next master on DNS errors.')
                    log.critical(msg)
                    opts['retry_dns'] = 0
            else:
                msg = ('Invalid keyword \'{0}\' for variable '
                       '\'master_type\''.format(opts['master_type']))
                log.error(msg)
                sys.exit(salt.defaults.exitcodes.EX_GENERIC)

        # FIXME: if SMinion don't define io_loop, it can't switch master see #29088
        # Specify kwargs for the channel factory so that SMinion doesn't need to define an io_loop
        # (The channel factories will set a default if the kwarg isn't passed)
        factory_kwargs = {'timeout': timeout, 'safe': safe}
        if getattr(self, 'io_loop', None):
            factory_kwargs['io_loop'] = self.io_loop  # pylint: disable=no-member

        tries = opts.get('master_tries', 1)
        attempts = 0

        # if we have a list of masters, loop through them and be
        # happy with the first one that allows us to connect
        if isinstance(opts['master'], list):
            conn = False
            last_exc = None
            opts['master_uri_list'] = []
            opts['local_masters'] = copy.copy(opts['master'])

            # shuffle the masters and then loop through them
            if opts['random_master']:
                # master_failback is only used when master_type is set to failover
                if opts['master_type'] == 'failover' and opts['master_failback']:
                    secondary_masters = opts['local_masters'][1:]
                    shuffle(secondary_masters)
                    opts['local_masters'][1:] = secondary_masters
                else:
                    shuffle(opts['local_masters'])

            # This sits outside of the connection loop below because it needs to set
            # up a list of master URIs regardless of which masters are available
            # to connect _to_. This is primarily used for masterless mode, when
            # we need a list of master URIs to fire calls back to.
            for master in opts['local_masters']:
                opts['master'] = master
                opts.update(prep_ip_port(opts))
                opts['master_uri_list'].append(resolve_dns(opts)['master_uri'])

            while True:
                if attempts != 0:
                    # Give up a little time between connection attempts
                    # to allow the IOLoop to run any other scheduled tasks.
                    yield tornado.gen.sleep(opts['acceptance_wait_time'])
                attempts += 1
                if tries > 0:
                    log.debug(
                        'Connecting to master. Attempt %s of %s',
                        attempts, tries
                    )
                else:
                    log.debug(
                        'Connecting to master. Attempt %s (infinite attempts)',
                        attempts
                    )
                for master in opts['local_masters']:
                    opts['master'] = master
                    opts.update(prep_ip_port(opts))
                    opts.update(resolve_dns(opts))

                    # on first run, update self.opts with the whole master list
                    # to enable a minion to re-use old masters if they get fixed
                    if 'master_list' not in opts:
                        opts['master_list'] = copy.copy(opts['local_masters'])

                    self.opts = opts

                    try:
                        pub_channel = salt.transport.client.AsyncPubChannel.factory(opts, **factory_kwargs)
                        yield pub_channel.connect()
                        conn = True
                        break
                    except SaltClientError as exc:
                        last_exc = exc
                        # NOTE(review): msg is assigned a tuple here, so
                        # log.info(msg) below logs the tuple itself instead
                        # of interpolating the %s placeholders; consider
                        # log.info(*msg).
                        if exc.strerror.startswith('Could not access'):
                            msg = (
                                'Failed to initiate connection with Master '
                                '%s: check ownership/permissions. Error '
                                'message: %s', opts['master'], exc
                            )
                        else:
                            msg = ('Master %s could not be reached, trying next '
                                   'next master (if any)', opts['master'])
                        log.info(msg)
                        continue

                if not conn:
                    if attempts == tries:
                        # Exhausted all attempts. Return exception.
                        self.connected = False
                        self.opts['master'] = copy.copy(self.opts['local_masters'])
                        log.error(
                            'No master could be reached or all masters '
                            'denied the minion\'s connection attempt.'
                        )
                        # If the code reaches this point, 'last_exc'
                        # should already be set.
                        raise last_exc  # pylint: disable=E0702
                else:
                    self.tok = pub_channel.auth.gen_token(b'salt')
                    self.connected = True
                    raise tornado.gen.Return((opts['master'], pub_channel))

        # single master sign in
        else:
            if opts['random_master']:
                log.warning('random_master is True but there is only one master specified. Ignoring.')

            while True:
                if attempts != 0:
                    # Give up a little time between connection attempts
                    # to allow the IOLoop to run any other scheduled tasks.
                    yield tornado.gen.sleep(opts['acceptance_wait_time'])
                attempts += 1
                if tries > 0:
                    log.debug(
                        'Connecting to master. Attempt %s of %s',
                        attempts, tries
                    )
                else:
                    log.debug(
                        'Connecting to master. Attempt %s (infinite attempts)',
                        attempts
                    )
                opts.update(prep_ip_port(opts))
                opts.update(resolve_dns(opts))
                try:
                    if self.opts['transport'] == 'detect':
                        # Probe transports in order until one authenticates;
                        # zeromq is skipped when the binding is unavailable.
                        self.opts['detect_mode'] = True
                        for trans in ('zeromq', 'tcp'):
                            if trans == 'zeromq' and not zmq:
                                continue
                            self.opts['transport'] = trans
                            pub_channel = salt.transport.client.AsyncPubChannel.factory(self.opts, **factory_kwargs)
                            yield pub_channel.connect()
                            if not pub_channel.auth.authenticated:
                                continue
                            del self.opts['detect_mode']
                            break
                    else:
                        pub_channel = salt.transport.client.AsyncPubChannel.factory(self.opts, **factory_kwargs)
                        yield pub_channel.connect()
                    self.tok = pub_channel.auth.gen_token(b'salt')
                    self.connected = True
                    raise tornado.gen.Return((opts['master'], pub_channel))
                except SaltClientError as exc:
                    if attempts == tries:
                        # Exhausted all attempts. Return exception.
                        self.connected = False
                        raise exc

    def _discover_masters(self):
        '''
        Discover master(s) and decide where to connect, if SSDP is around.
        This modifies the configuration on the fly.
        :return:
        '''
        # Only run discovery when master is still the compiled-in default
        # (i.e. the user never configured one) and discovery is enabled.
        if self.opts['master'] == DEFAULT_MINION_OPTS['master'] and self.opts['discovery'] is not False:
            master_discovery_client = salt.utils.ssdp.SSDPDiscoveryClient()
            masters = {}
            for att in range(self.opts['discovery'].get('attempts', 3)):
                try:
                    att += 1
                    log.info('Attempting {0} time{1} to discover masters'.format(att, (att > 1 and 's' or '')))
                    masters.update(master_discovery_client.discover())
                    if not masters:
                        time.sleep(self.opts['discovery'].get('pause', 5))
                    else:
                        break
                except Exception as err:
                    log.error('SSDP discovery failure: {0}'.format(err))
                    break

            if masters:
                policy = self.opts.get('discovery', {}).get('match', 'any')
                if policy not in ['any', 'all']:
                    log.error('SSDP configuration matcher failure: unknown value "{0}". '
                              'Should be "any" or "all"'.format(policy))
                else:
                    mapping = self.opts['discovery'].get('mapping', {})
                    for addr, mappings in masters.items():
                        for proto_data in mappings:
                            # Count how many configured mapping entries the
                            # advertised master matches; 'any' needs one,
                            # 'all' needs every entry.
                            cnt = len([key for key, value in mapping.items()
                                       if proto_data.get('mapping', {}).get(key) == value])
                            if policy == 'any' and bool(cnt) or cnt == len(mapping):
                                self.opts['master'] = proto_data['master']
                                return

    def _return_retry_timer(self):
        '''
        Based on the minion configuration, either return a randomized timer or
        just return the value of the return_retry_timer.
        '''
        msg = 'Minion return retry timer set to {0} seconds'
        # future lint: disable=str-format-in-logging
        if self.opts.get('return_retry_timer_max'):
            try:
                random_retry = randint(self.opts['return_retry_timer'], self.opts['return_retry_timer_max'])
                log.debug(msg.format(random_retry) + ' (randomized)')
                return random_retry
            except ValueError:
                # Catch wiseguys using negative integers here
                log.error(
                    'Invalid value (return_retry_timer: %s or '
                    'return_retry_timer_max: %s). Both must be positive '
                    'integers.',
                    self.opts['return_retry_timer'],
                    self.opts['return_retry_timer_max'],
                )
                log.debug(msg.format(DEFAULT_MINION_OPTS['return_retry_timer']))
                return DEFAULT_MINION_OPTS['return_retry_timer']
        else:
            log.debug(msg.format(self.opts.get('return_retry_timer')))
            return self.opts.get('return_retry_timer')
        # future lint: enable=str-format-in-logging
class SMinion(MinionBase):
    '''
    Create an object that has loaded all of the minion module functions,
    grains, modules, returners etc. The SMinion allows developers to
    generate all of the salt minion functions and present them with these
    functions for general use.
    '''
    def __init__(self, opts):
        # Late setup of the opts grains, so we can log from the grains module
        import salt.loader
        opts['grains'] = salt.loader.grains(opts)
        super(SMinion, self).__init__(opts)

        # Clean out the proc directory (default /var/cache/salt/minion/proc)
        if (self.opts.get('file_client', 'remote') == 'remote'
                or self.opts.get('use_master_when_local', False)):
            # A master connection is required: resolve one synchronously on
            # a fresh ZMQ-backed IOLoop before loading modules.
            install_zmq()
            io_loop = ZMQDefaultLoop.current()
            io_loop.run_sync(
                lambda: self.eval_master(self.opts, failed=True)
            )
        self.gen_modules(initial_load=True)

        # If configured, cache pillar data on the minion
        if self.opts['file_client'] == 'remote' and self.opts.get('minion_pillar_cache', False):
            import salt.utils.yaml
            pdir = os.path.join(self.opts['cachedir'], 'pillar')
            if not os.path.isdir(pdir):
                # Cached pillar may contain secrets: owner-only directory.
                os.makedirs(pdir, 0o700)
            ptop = os.path.join(pdir, 'top.sls')
            if self.opts['saltenv'] is not None:
                penv = self.opts['saltenv']
            else:
                penv = 'base'
            cache_top = {penv: {self.opts['id']: ['cache']}}
            with salt.utils.files.fopen(ptop, 'wb') as fp_:
                salt.utils.yaml.safe_dump(cache_top, fp_)
            os.chmod(ptop, 0o600)
            cache_sls = os.path.join(pdir, 'cache.sls')
            with salt.utils.files.fopen(cache_sls, 'wb') as fp_:
                salt.utils.yaml.safe_dump(self.opts['pillar'], fp_)
            os.chmod(cache_sls, 0o600)

    def gen_modules(self, initial_load=False):
        '''
        Tell the minion to reload the execution modules

        CLI Example:

        .. code-block:: bash

            salt '*' sys.reload_modules
        '''
        # Recompile pillar first; several loaders below consume it via opts.
        self.opts['pillar'] = salt.pillar.get_pillar(
            self.opts,
            self.opts['grains'],
            self.opts['id'],
            self.opts['saltenv'],
            pillarenv=self.opts.get('pillarenv'),
        ).compile_pillar()
        # Loader order matters: utils feed functions, which feed returners,
        # proxy and states.
        self.utils = salt.loader.utils(self.opts)
        self.functions = salt.loader.minion_mods(self.opts, utils=self.utils)
        self.serializers = salt.loader.serializers(self.opts)
        self.returners = salt.loader.returners(self.opts, self.functions)
        self.proxy = salt.loader.proxy(self.opts, self.functions, self.returners, None)
        # TODO: remove
        self.function_errors = {}  # Keep the funcs clean
        self.states = salt.loader.states(self.opts,
                                         self.functions,
                                         self.utils,
                                         self.serializers)
        self.rend = salt.loader.render(self.opts, self.functions)
        self.matcher = Matcher(self.opts, self.functions)
        # Expose reloading through the function registry itself.
        self.functions['sys.reload_modules'] = self.gen_modules
        self.executors = salt.loader.executors(self.opts)
class MasterMinion(object):
    '''
    Create a fully loaded minion function object for generic use on the
    master. What makes this class different is that the pillar is
    omitted, otherwise everything else is loaded cleanly.
    '''
    def __init__(
            self,
            opts,
            returners=True,
            states=True,
            rend=True,
            matcher=True,
            whitelist=None,
            ignore_config_errors=True):
        # Re-read the minion config with the master role so master-side
        # defaults apply, then overlay the caller-supplied opts on top.
        self.opts = salt.config.minion_config(
            opts['conf_file'],
            ignore_config_errors=ignore_config_errors,
            role='master'
        )
        self.opts.update(opts)
        self.whitelist = whitelist
        self.opts['grains'] = salt.loader.grains(opts)
        # Pillar is deliberately left empty for the master-side object.
        self.opts['pillar'] = {}
        # mk_* flags record which optional subsystems gen_modules should load.
        self.mk_returners = returners
        self.mk_states = states
        self.mk_rend = rend
        self.mk_matcher = matcher
        self.gen_modules(initial_load=True)
    def gen_modules(self, initial_load=False):
        '''
        Tell the minion to reload the execution modules
        CLI Example:
        .. code-block:: bash
            salt '*' sys.reload_modules
        '''
        self.utils = salt.loader.utils(self.opts)
        self.functions = salt.loader.minion_mods(
            self.opts,
            utils=self.utils,
            whitelist=self.whitelist,
            initial_load=initial_load)
        self.serializers = salt.loader.serializers(self.opts)
        # Optional subsystems, toggled by the constructor flags above.
        if self.mk_returners:
            self.returners = salt.loader.returners(self.opts, self.functions)
        if self.mk_states:
            self.states = salt.loader.states(self.opts,
                                             self.functions,
                                             self.utils,
                                             self.serializers)
        if self.mk_rend:
            self.rend = salt.loader.render(self.opts, self.functions)
        if self.mk_matcher:
            self.matcher = Matcher(self.opts, self.functions)
        self.functions['sys.reload_modules'] = self.gen_modules
class MinionManager(MinionBase):
    '''
    Create a multi minion interface, this creates as many minions as are
    defined in the master option and binds each minion object to a respective
    master.
    '''
    def __init__(self, opts):
        super(MinionManager, self).__init__(opts)
        # Backoff parameters used while (re)authenticating to masters.
        self.auth_wait = self.opts['acceptance_wait_time']
        self.max_auth_wait = self.opts['acceptance_wait_time_max']
        self.minions = []
        # Shared jid queue so duplicate jobs from multiple masters are dropped.
        self.jid_queue = []
        install_zmq()
        self.io_loop = ZMQDefaultLoop.current()
        self.process_manager = ProcessManager(name='MultiMinionProcessManager')
        self.io_loop.spawn_callback(self.process_manager.run, **{'asynchronous': True})  # Tornado backward compat
    def __del__(self):
        self.destroy()
    def _bind(self):
        # start up the event publisher, so we can see events during startup
        self.event_publisher = salt.utils.event.AsyncEventPublisher(
            self.opts,
            io_loop=self.io_loop,
        )
        self.event = salt.utils.event.get_event('minion', opts=self.opts, io_loop=self.io_loop)
        self.event.subscribe('')
        self.event.set_event_handler(self.handle_event)
    @tornado.gen.coroutine
    def handle_event(self, package):
        '''
        Fan a received event out to every managed minion.
        '''
        yield [minion.handle_event(package) for minion in self.minions]
    def _create_minion_object(self, opts, timeout, safe,
                              io_loop=None, loaded_base_name=None,
                              jid_queue=None):
        '''
        Helper function to return the correct type of object
        '''
        return Minion(opts,
                      timeout,
                      safe,
                      io_loop=io_loop,
                      loaded_base_name=loaded_base_name,
                      jid_queue=jid_queue)
    def _spawn_minions(self):
        '''
        Spawn all the coroutines which will sign in to masters
        '''
        masters = self.opts['master']
        if (self.opts['master_type'] in ('failover', 'distributed')) or not isinstance(self.opts['master'], list):
            masters = [masters]
        for master in masters:
            # Deep-copy opts per master so per-minion mutations don't leak.
            s_opts = copy.deepcopy(self.opts)
            s_opts['master'] = master
            s_opts['multimaster'] = True
            minion = self._create_minion_object(s_opts,
                                                s_opts['auth_timeout'],
                                                False,
                                                io_loop=self.io_loop,
                                                loaded_base_name='salt.loader.{0}'.format(s_opts['master']),
                                                jid_queue=self.jid_queue,
                                                )
            self.minions.append(minion)
            self.io_loop.spawn_callback(self._connect_minion, minion)
    @tornado.gen.coroutine
    def _connect_minion(self, minion):
        '''
        Create a minion, and asynchronously connect it to a master
        '''
        auth_wait = minion.opts['acceptance_wait_time']
        failed = False
        while True:
            try:
                if minion.opts.get('beacons_before_connect', False):
                    minion.setup_beacons(before_connect=True)
                if minion.opts.get('scheduler_before_connect', False):
                    minion.setup_scheduler(before_connect=True)
                yield minion.connect_master(failed=failed)
                minion.tune_in(start=False)
                break
            except SaltClientError:
                failed = True
                log.error(
                    'Error while bringing up minion for multi-master. Is '
                    'master at %s responding?', minion.opts['master']
                )
                # Back off before the next attempt, up to the configured cap.
                if auth_wait < self.max_auth_wait:
                    auth_wait += self.auth_wait
                yield tornado.gen.sleep(auth_wait)  # TODO: log?
            except Exception:
                failed = True
                # NOTE: unexpected errors retry immediately (no sleep); only
                # SaltClientError gets the backoff above.
                log.critical(
                    'Unexpected error while connecting to %s',
                    minion.opts['master'], exc_info=True
                )
    # Multi Master Tune In
    def tune_in(self):
        '''
        Bind to the masters
        This loop will attempt to create connections to masters it hasn't connected
        to yet, but once the initial connection is made it is up to ZMQ to do the
        reconnect (don't know of an API to get the state here in salt)
        '''
        self._bind()
        # Fire off all the minion coroutines
        self._spawn_minions()
        # serve forever!
        self.io_loop.start()
    @property
    def restart(self):
        '''
        True if any managed minion has requested a restart.
        '''
        return any(minion.restart for minion in self.minions)
    def stop(self, signum):
        '''
        Escalate ``signum`` to every minion's process manager and tear down.
        '''
        for minion in self.minions:
            minion.process_manager.stop_restarting()
            minion.process_manager.send_signal_to_processes(signum)
            # kill any remaining processes
            minion.process_manager.kill_children()
            minion.destroy()
    def destroy(self):
        for minion in self.minions:
            minion.destroy()
class Minion(MinionBase):
'''
This class instantiates a minion, runs connections for a minion,
and loads all of the functions into the minion
'''
    def __init__(self, opts, timeout=60, safe=True, loaded_base_name=None, io_loop=None, jid_queue=None):  # pylint: disable=W0231
        '''
        Pass in the options dict

        :param dict opts: minion configuration
        :param int timeout: auth timeout handed to eval_master
        :param bool safe: retry instead of raising on auth failure
        :param loaded_base_name: base name for the loader namespace
        :param io_loop: existing IO loop to reuse (a new one is created if None)
        :param list jid_queue: shared list of already-seen jids (multi-master)
        '''
        # this means that the parent class doesn't know *which* master we connect to
        super(Minion, self).__init__(opts)
        self.timeout = timeout
        self.safe = safe
        self._running = None
        self.win_proc = []
        self.loaded_base_name = loaded_base_name
        self.connected = False
        self.restart = False
        # Flag meaning minion has finished initialization including first connect to the master.
        # True means the Minion is fully functional and ready to handle events.
        self.ready = False
        self.jid_queue = [] if jid_queue is None else jid_queue
        self.periodic_callbacks = {}
        if io_loop is None:
            install_zmq()
            self.io_loop = ZMQDefaultLoop.current()
        else:
            self.io_loop = io_loop
        # Warn if ZMQ < 3.2
        if zmq:
            if ZMQ_VERSION_INFO < (3, 2):
                log.warning(
                    'You have a version of ZMQ less than ZMQ 3.2! There are '
                    'known connection keep-alive issues with ZMQ < 3.2 which '
                    'may result in loss of contact with minions. Please '
                    'upgrade your ZMQ!'
                )
        # Late setup of the opts grains, so we can log from the grains
        # module. If this is a proxy, however, we need to init the proxymodule
        # before we can get the grains. We do this for proxies in the
        # post_master_init
        if not salt.utils.platform.is_proxy():
            self.opts['grains'] = salt.loader.grains(opts)
        else:
            # Proxies can only set up beacons/scheduler after the proxy module
            # is initialized, so these pre-connect options are forced off.
            if self.opts.get('beacons_before_connect', False):
                log.warning(
                    '\'beacons_before_connect\' is not supported '
                    'for proxy minions. Setting to False'
                )
                self.opts['beacons_before_connect'] = False
            if self.opts.get('scheduler_before_connect', False):
                log.warning(
                    '\'scheduler_before_connect\' is not supported '
                    'for proxy minions. Setting to False'
                )
                self.opts['scheduler_before_connect'] = False
        log.info('Creating minion process manager')
        if self.opts['random_startup_delay']:
            # Spread out minion start times to avoid a thundering herd of
            # simultaneous auth requests against the master.
            sleep_time = random.randint(0, self.opts['random_startup_delay'])
            log.info(
                'Minion sleeping for %s seconds due to configured '
                'startup_delay between 0 and %s seconds',
                sleep_time, self.opts['random_startup_delay']
            )
            time.sleep(sleep_time)
        self.process_manager = ProcessManager(name='MinionProcessManager')
        self.io_loop.spawn_callback(self.process_manager.run, **{'asynchronous': True})
        # We don't have the proxy setup yet, so we can't start engines
        # Engines need to be able to access __proxy__
        if not salt.utils.platform.is_proxy():
            self.io_loop.spawn_callback(salt.engines.start_engines, self.opts,
                                        self.process_manager)
        # Install the SIGINT/SIGTERM handlers if not done so far
        if signal.getsignal(signal.SIGINT) is signal.SIG_DFL:
            # No custom signal handling was added, install our own
            signal.signal(signal.SIGINT, self._handle_signals)
        if signal.getsignal(signal.SIGTERM) is signal.SIG_DFL:
            # No custom signal handling was added, install our own
            signal.signal(signal.SIGTERM, self._handle_signals)
    def _handle_signals(self, signum, sigframe):  # pylint: disable=unused-argument
        '''
        SIGINT/SIGTERM handler: stop all job processes, then exit.
        '''
        self._running = False
        # escalate the signals to the process manager
        self.process_manager.stop_restarting()
        self.process_manager.send_signal_to_processes(signum)
        # kill any remaining processes
        self.process_manager.kill_children()
        # Give child processes a moment to die before exiting ourselves.
        time.sleep(1)
        sys.exit(0)
    def sync_connect_master(self, timeout=None, failed=False):
        '''
        Block until we are connected to a master

        :param timeout: seconds to wait before giving up (None waits forever)
        :param failed: forwarded to connect_master; marks a prior failure
        :raises SaltDaemonNotRunning: if ``timeout`` was given and expired
        '''
        self._sync_connect_master_success = False
        log.debug("sync_connect_master")
        def on_connect_master_future_done(future):
            # Record success and break out of the blocking io_loop.start().
            self._sync_connect_master_success = True
            self.io_loop.stop()
        self._connect_master_future = self.connect_master(failed=failed)
        # finish connecting to master
        self._connect_master_future.add_done_callback(on_connect_master_future_done)
        if timeout:
            # On timeout, stop the loop without setting the success flag.
            self.io_loop.call_later(timeout, self.io_loop.stop)
        try:
            self.io_loop.start()
        except KeyboardInterrupt:
            self.destroy()
        # I made the following 3 line oddity to preserve traceback.
        # Please read PR #23978 before changing, hopefully avoiding regressions.
        # Good luck, we're all counting on you.  Thanks.
        if self._connect_master_future.done():
            future_exception = self._connect_master_future.exception()
            if future_exception:
                # This needs to be re-raised to preserve restart_on_error behavior.
                raise six.reraise(*future_exception)
        if timeout and self._sync_connect_master_success is False:
            raise SaltDaemonNotRunning('Failed to connect to the salt-master')
    @tornado.gen.coroutine
    def connect_master(self, failed=False):
        '''
        Return a future which will complete when you are connected to a master
        '''
        # eval_master resolves/authenticates a master and hands back the
        # chosen master identifier plus the publish channel.
        master, self.pub_channel = yield self.eval_master(self.opts, self.timeout, self.safe, failed)
        yield self._post_master_init(master)
    # TODO: better name...
    @tornado.gen.coroutine
    def _post_master_init(self, master):
        '''
        Function to finish init after connecting to a master
        This is primarily loading modules, pillars, etc. (since they need
        to know which master they connected to)
        If this function is changed, please check ProxyMinion._post_master_init
        to see if those changes need to be propagated.
        Minions and ProxyMinions need significantly different post master setups,
        which is why the differences are not factored out into separate helper
        functions.
        '''
        if self.connected:
            self.opts['master'] = master
            # Initialize pillar before loader to make pillar accessible in modules
            self.opts['pillar'] = yield salt.pillar.get_async_pillar(
                self.opts,
                self.opts['grains'],
                self.opts['id'],
                self.opts['saltenv'],
                pillarenv=self.opts.get('pillarenv')
            ).compile_pillar()
        if not self.ready:
            # First time through: finish core minion setup.
            self._setup_core()
        elif self.connected and self.opts['pillar']:
            # The pillar has changed due to the connection to the master.
            # Reload the functions so that they can use the new pillar data.
            self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
            if hasattr(self, 'schedule'):
                self.schedule.functions = self.functions
                self.schedule.returners = self.returners
        if not hasattr(self, 'schedule'):
            self.schedule = salt.utils.schedule.Schedule(
                self.opts,
                self.functions,
                self.returners,
                cleanup=[master_event(type='alive')])
        # add default scheduling jobs to the minions scheduler
        if self.opts['mine_enabled'] and 'mine.update' in self.functions:
            self.schedule.add_job({
                '__mine_interval':
                {
                    'function': 'mine.update',
                    'minutes': self.opts['mine_interval'],
                    'jid_include': True,
                    'maxrunning': 2,
                    'run_on_start': True,
                    'return_job': self.opts.get('mine_return_job', False)
                }
            }, persist=True)
            log.info('Added mine.update to scheduler')
        else:
            self.schedule.delete_job('__mine_interval', persist=True)
        # add master_alive job if enabled
        # (tcp transport has its own keep-alive, so the job is zmq-only)
        if (self.opts['transport'] != 'tcp' and
                self.opts['master_alive_interval'] > 0 and
                self.connected):
            self.schedule.add_job({
                master_event(type='alive', master=self.opts['master']):
                {
                    'function': 'status.master',
                    'seconds': self.opts['master_alive_interval'],
                    'jid_include': True,
                    'maxrunning': 1,
                    'return_job': False,
                    'kwargs': {'master': self.opts['master'],
                               'connected': True}
                }
            }, persist=True)
            if self.opts['master_failback'] and \
                    'master_list' in self.opts and \
                    self.opts['master'] != self.opts['master_list'][0]:
                # Not connected to the preferred master: periodically ping it
                # so we can fail back when it returns.
                self.schedule.add_job({
                    master_event(type='failback'):
                    {
                        'function': 'status.ping_master',
                        'seconds': self.opts['master_failback_interval'],
                        'jid_include': True,
                        'maxrunning': 1,
                        'return_job': False,
                        'kwargs': {'master': self.opts['master_list'][0]}
                    }
                }, persist=True)
            else:
                self.schedule.delete_job(master_event(type='failback'), persist=True)
        else:
            # Alive/failback checks do not apply: remove any stale jobs.
            self.schedule.delete_job(master_event(type='alive', master=self.opts['master']), persist=True)
            self.schedule.delete_job(master_event(type='failback'), persist=True)
def _prep_mod_opts(self):
'''
Returns a copy of the opts with key bits stripped out
'''
mod_opts = {}
for key, val in six.iteritems(self.opts):
if key == 'logger':
continue
mod_opts[key] = val
return mod_opts
    def _load_modules(self, force_refresh=False, notify=False, grains=None):
        '''
        Return the functions and the returners loaded up from the loader
        module

        :param bool force_refresh: force the grains to be regenerated
        :param bool notify: forwarded to the loader (module refresh notification)
        :param grains: pre-computed grains; when None, grains are regenerated
        :return: tuple of (functions, returners, errors, executors)
        '''
        # if this is a *nix system AND modules_max_memory is set, lets enforce
        # a memory limit on module imports
        # this feature ONLY works on *nix like OSs (resource module doesn't work on windows)
        modules_max_memory = False
        if self.opts.get('modules_max_memory', -1) > 0 and HAS_PSUTIL and HAS_RESOURCE:
            log.debug(
                'modules_max_memory set, enforcing a maximum of %s',
                self.opts['modules_max_memory']
            )
            modules_max_memory = True
            old_mem_limit = resource.getrlimit(resource.RLIMIT_AS)
            rss, vms = psutil.Process(os.getpid()).memory_info()[:2]
            # Cap the address space at current usage plus the allowance.
            mem_limit = rss + vms + self.opts['modules_max_memory']
            resource.setrlimit(resource.RLIMIT_AS, (mem_limit, mem_limit))
        elif self.opts.get('modules_max_memory', -1) > 0:
            if not HAS_PSUTIL:
                log.error('Unable to enforce modules_max_memory because psutil is missing')
            if not HAS_RESOURCE:
                log.error('Unable to enforce modules_max_memory because resource is missing')
        # This might be a proxy minion
        if hasattr(self, 'proxy'):
            proxy = self.proxy
        else:
            proxy = None
        if grains is None:
            self.opts['grains'] = salt.loader.grains(self.opts, force_refresh, proxy=proxy)
        self.utils = salt.loader.utils(self.opts, proxy=proxy)
        if self.opts.get('multimaster', False):
            # Use a deep copy of opts so per-master loader state is isolated.
            s_opts = copy.deepcopy(self.opts)
            functions = salt.loader.minion_mods(s_opts, utils=self.utils, proxy=proxy,
                                                loaded_base_name=self.loaded_base_name, notify=notify)
        else:
            functions = salt.loader.minion_mods(self.opts, utils=self.utils, notify=notify, proxy=proxy)
        returners = salt.loader.returners(self.opts, functions, proxy=proxy)
        errors = {}
        if '_errors' in functions:
            errors = functions['_errors']
            functions.pop('_errors')
        # we're done, reset the limits!
        if modules_max_memory is True:
            resource.setrlimit(resource.RLIMIT_AS, old_mem_limit)
        executors = salt.loader.executors(self.opts, functions, proxy=proxy)
        return functions, returners, errors, executors
def _send_req_sync(self, load, timeout):
if self.opts['minion_sign_messages']:
log.trace('Signing event to be published onto the bus.')
minion_privkey_path = os.path.join(self.opts['pki_dir'], 'minion.pem')
sig = salt.crypt.sign_message(minion_privkey_path, salt.serializers.msgpack.serialize(load))
load['sig'] = sig
channel = salt.transport.Channel.factory(self.opts)
return channel.send(load, timeout=timeout)
@tornado.gen.coroutine
def _send_req_async(self, load, timeout):
if self.opts['minion_sign_messages']:
log.trace('Signing event to be published onto the bus.')
minion_privkey_path = os.path.join(self.opts['pki_dir'], 'minion.pem')
sig = salt.crypt.sign_message(minion_privkey_path, salt.serializers.msgpack.serialize(load))
load['sig'] = sig
channel = salt.transport.client.AsyncReqChannel.factory(self.opts)
ret = yield channel.send(load, timeout=timeout)
raise tornado.gen.Return(ret)
    def _fire_master(self, data=None, tag=None, events=None, pretag=None, timeout=60, sync=True, timeout_handler=None):
        '''
        Fire an event on the master, or drop message if unable to send.

        :param data: event payload (used together with ``tag``)
        :param tag: event tag (used together with ``data``)
        :param events: pre-built event list; takes precedence over data/tag
        :param pretag: tag prefix applied on the master side
        :param timeout: send timeout in seconds
        :param sync: send synchronously if True, fire-and-forget if False
        :param timeout_handler: async-mode callback invoked on send timeout
        :return: True on success, False on failure, None if nothing to send
        '''
        load = {'id': self.opts['id'],
                'cmd': '_minion_event',
                'pretag': pretag,
                'tok': self.tok}
        if events:
            load['events'] = events
        elif data and tag:
            load['data'] = data
            load['tag'] = tag
        elif not data and tag:
            load['data'] = {}
            load['tag'] = tag
        else:
            # Nothing meaningful to send.
            return
        if sync:
            try:
                self._send_req_sync(load, timeout)
            except salt.exceptions.SaltReqTimeoutError:
                log.info('fire_master failed: master could not be contacted. Request timed out.')
                # very likely one of the masters is dead, status.master will flush it
                self.functions['status.master'](self.opts['master'])
                return False
            except Exception:
                log.info('fire_master failed: %s', traceback.format_exc())
                return False
        else:
            if timeout_handler is None:
                def handle_timeout(*_):
                    log.info('fire_master failed: master could not be contacted. Request timed out.')
                    # very likely one of the masters is dead, status.master will flush it
                    self.functions['status.master'](self.opts['master'])
                    return True
                timeout_handler = handle_timeout
            with tornado.stack_context.ExceptionStackContext(timeout_handler):
                # The callback kwarg is consumed by the @tornado.gen.coroutine
                # wrapper around _send_req_async, hence the pylint suppression.
                self._send_req_async(load, timeout, callback=lambda f: None)  # pylint: disable=unexpected-keyword-arg
        return True
@tornado.gen.coroutine
def _handle_decoded_payload(self, data):
'''
Override this method if you wish to handle the decoded data
differently.
'''
# Ensure payload is unicode. Disregard failure to decode binary blobs.
if six.PY2:
data = salt.utils.data.decode(data, keep=True)
if 'user' in data:
log.info(
'User %s Executing command %s with jid %s',
data['user'], data['fun'], data['jid']
)
else:
log.info(
'Executing command %s with jid %s',
data['fun'], data['jid']
)
log.debug('Command details %s', data)
# Don't duplicate jobs
log.trace('Started JIDs: %s', self.jid_queue)
if self.jid_queue is not None:
if data['jid'] in self.jid_queue:
return
else:
self.jid_queue.append(data['jid'])
if len(self.jid_queue) > self.opts['minion_jid_queue_hwm']:
self.jid_queue.pop(0)
if isinstance(data['fun'], six.string_types):
if data['fun'] == 'sys.reload_modules':
self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
self.schedule.functions = self.functions
self.schedule.returners = self.returners
process_count_max = self.opts.get('process_count_max')
if process_count_max > 0:
process_count = len(salt.utils.minion.running(self.opts))
while process_count >= process_count_max:
log.warning("Maximum number of processes reached while executing jid {0}, waiting...".format(data['jid']))
yield tornado.gen.sleep(10)
process_count = len(salt.utils.minion.running(self.opts))
# We stash an instance references to allow for the socket
# communication in Windows. You can't pickle functions, and thus
# python needs to be able to reconstruct the reference on the other
# side.
instance = self
multiprocessing_enabled = self.opts.get('multiprocessing', True)
if multiprocessing_enabled:
if sys.platform.startswith('win'):
# let python reconstruct the minion on the other side if we're
# running on windows
instance = None
with default_signals(signal.SIGINT, signal.SIGTERM):
process = SignalHandlingMultiprocessingProcess(
target=self._target, args=(instance, self.opts, data, self.connected)
)
else:
process = threading.Thread(
target=self._target,
args=(instance, self.opts, data, self.connected),
name=data['jid']
)
if multiprocessing_enabled:
with default_signals(signal.SIGINT, signal.SIGTERM):
# Reset current signals before starting the process in
# order not to inherit the current signal handlers
process.start()
else:
process.start()
# TODO: remove the windows specific check?
if multiprocessing_enabled and not salt.utils.platform.is_windows():
# we only want to join() immediately if we are daemonizing a process
process.join()
else:
self.win_proc.append(process)
def ctx(self):
'''
Return a single context manager for the minion's data
'''
if six.PY2:
return contextlib.nested(
self.functions.context_dict.clone(),
self.returners.context_dict.clone(),
self.executors.context_dict.clone(),
)
else:
exitstack = contextlib.ExitStack()
exitstack.enter_context(self.functions.context_dict.clone())
exitstack.enter_context(self.returners.context_dict.clone())
exitstack.enter_context(self.executors.context_dict.clone())
return exitstack
    @classmethod
    def _target(cls, minion_instance, opts, data, connected):
        '''
        Entry point for job execution in a child process/thread.

        ``minion_instance`` is None on Windows (instances cannot be pickled
        across process boundaries), in which case a fresh Minion is rebuilt
        here from ``opts``.
        '''
        if not minion_instance:
            minion_instance = cls(opts)
            minion_instance.connected = connected
            if not hasattr(minion_instance, 'functions'):
                # Reuse the grains already computed by the parent process.
                functions, returners, function_errors, executors = (
                    minion_instance._load_modules(grains=opts['grains'])
                )
                minion_instance.functions = functions
                minion_instance.returners = returners
                minion_instance.function_errors = function_errors
                minion_instance.executors = executors
            if not hasattr(minion_instance, 'serial'):
                minion_instance.serial = salt.payload.Serial(opts)
            if not hasattr(minion_instance, 'proc_dir'):
                uid = salt.utils.user.get_uid(user=opts.get('user', None))
                minion_instance.proc_dir = (
                    get_proc_dir(opts['cachedir'], uid=uid)
                )
        def run_func(minion_instance, opts, data):
            # Multi-function jobs carry a list/tuple of function names.
            if isinstance(data['fun'], tuple) or isinstance(data['fun'], list):
                return Minion._thread_multi_return(minion_instance, opts, data)
            else:
                return Minion._thread_return(minion_instance, opts, data)
        with tornado.stack_context.StackContext(functools.partial(RequestContext,
                                                                  {'data': data, 'opts': opts})):
            with tornado.stack_context.StackContext(minion_instance.ctx):
                run_func(minion_instance, opts, data)
@classmethod
def _thread_return(cls, minion_instance, opts, data):
'''
This method should be used as a threading target, start the actual
minion side execution.
'''
fn_ = os.path.join(minion_instance.proc_dir, data['jid'])
if opts['multiprocessing'] and not salt.utils.platform.is_windows():
# Shutdown the multiprocessing before daemonizing
salt.log.setup.shutdown_multiprocessing_logging()
salt.utils.process.daemonize_if(opts)
# Reconfigure multiprocessing logging after daemonizing
salt.log.setup.setup_multiprocessing_logging()
salt.utils.process.appendproctitle('{0}._thread_return {1}'.format(cls.__name__, data['jid']))
sdata = {'pid': os.getpid()}
sdata.update(data)
log.info('Starting a new job with PID %s', sdata['pid'])
with salt.utils.files.fopen(fn_, 'w+b') as fp_:
fp_.write(minion_instance.serial.dumps(sdata))
ret = {'success': False}
function_name = data['fun']
executors = data.get('module_executors') or \
getattr(minion_instance, 'module_executors', []) or \
opts.get('module_executors', ['direct_call'])
allow_missing_funcs = any([
minion_instance.executors['{0}.allow_missing_func'.format(executor)](function_name)
for executor in executors
if '{0}.allow_missing_func' in minion_instance.executors
])
if function_name in minion_instance.functions or allow_missing_funcs is True:
try:
minion_blackout_violation = False
if minion_instance.connected and minion_instance.opts['pillar'].get('minion_blackout', False):
whitelist = minion_instance.opts['pillar'].get('minion_blackout_whitelist', [])
# this minion is blacked out. Only allow saltutil.refresh_pillar and the whitelist
if function_name != 'saltutil.refresh_pillar' and function_name not in whitelist:
minion_blackout_violation = True
# use minion_blackout_whitelist from grains if it exists
if minion_instance.opts['grains'].get('minion_blackout', False):
whitelist = minion_instance.opts['grains'].get('minion_blackout_whitelist', [])
if function_name != 'saltutil.refresh_pillar' and function_name not in whitelist:
minion_blackout_violation = True
if minion_blackout_violation:
raise SaltInvocationError('Minion in blackout mode. Set \'minion_blackout\' '
'to False in pillar or grains to resume operations. Only '
'saltutil.refresh_pillar allowed in blackout mode.')
if function_name in minion_instance.functions:
func = minion_instance.functions[function_name]
args, kwargs = load_args_and_kwargs(
func,
data['arg'],
data)
else:
# only run if function_name is not in minion_instance.functions and allow_missing_funcs is True
func = function_name
args, kwargs = data['arg'], data
minion_instance.functions.pack['__context__']['retcode'] = 0
if isinstance(executors, six.string_types):
executors = [executors]
elif not isinstance(executors, list) or not executors:
raise SaltInvocationError("Wrong executors specification: {0}. String or non-empty list expected".
format(executors))
if opts.get('sudo_user', '') and executors[-1] != 'sudo':
executors[-1] = 'sudo' # replace the last one with sudo
log.trace('Executors list %s', executors) # pylint: disable=no-member
for name in executors:
fname = '{0}.execute'.format(name)
if fname not in minion_instance.executors:
raise SaltInvocationError("Executor '{0}' is not available".format(name))
return_data = minion_instance.executors[fname](opts, data, func, args, kwargs)
if return_data is not None:
break
if isinstance(return_data, types.GeneratorType):
ind = 0
iret = {}
for single in return_data:
if isinstance(single, dict) and isinstance(iret, dict):
iret.update(single)
else:
if not iret:
iret = []
iret.append(single)
tag = tagify([data['jid'], 'prog', opts['id'], six.text_type(ind)], 'job')
event_data = {'return': single}
minion_instance._fire_master(event_data, tag)
ind += 1
ret['return'] = iret
else:
ret['return'] = return_data
retcode = minion_instance.functions.pack['__context__'].get(
'retcode',
salt.defaults.exitcodes.EX_OK
)
if retcode == salt.defaults.exitcodes.EX_OK:
# No nonzero retcode in __context__ dunder. Check if return
# is a dictionary with a "result" or "success" key.
try:
func_result = all(return_data.get(x, True)
for x in ('result', 'success'))
except Exception:
# return data is not a dict
func_result = True
if not func_result:
retcode = salt.defaults.exitcodes.EX_GENERIC
ret['retcode'] = retcode
ret['success'] = retcode == salt.defaults.exitcodes.EX_OK
except CommandNotFoundError as exc:
msg = 'Command required for \'{0}\' not found'.format(
function_name
)
log.debug(msg, exc_info=True)
ret['return'] = '{0}: {1}'.format(msg, exc)
ret['out'] = 'nested'
ret['retcode'] = salt.defaults.exitcodes.EX_GENERIC
except CommandExecutionError as exc:
log.error(
'A command in \'%s\' had a problem: %s',
function_name, exc,
exc_info_on_loglevel=logging.DEBUG
)
ret['return'] = 'ERROR: {0}'.format(exc)
ret['out'] = 'nested'
ret['retcode'] = salt.defaults.exitcodes.EX_GENERIC
except SaltInvocationError as exc:
log.error(
'Problem executing \'%s\': %s',
function_name, exc,
exc_info_on_loglevel=logging.DEBUG
)
ret['return'] = 'ERROR executing \'{0}\': {1}'.format(
function_name, exc
)
ret['out'] = 'nested'
ret['retcode'] = salt.defaults.exitcodes.EX_GENERIC
except TypeError as exc:
msg = 'Passed invalid arguments to {0}: {1}\n{2}'.format(
function_name, exc, func.__doc__ or ''
)
log.warning(msg, exc_info_on_loglevel=logging.DEBUG)
ret['return'] = msg
ret['out'] = 'nested'
ret['retcode'] = salt.defaults.exitcodes.EX_GENERIC
except Exception:
msg = 'The minion function caused an exception'
log.warning(msg, exc_info_on_loglevel=True)
salt.utils.error.fire_exception(salt.exceptions.MinionError(msg), opts, job=data)
ret['return'] = '{0}: {1}'.format(msg, traceback.format_exc())
ret['out'] = 'nested'
ret['retcode'] = salt.defaults.exitcodes.EX_GENERIC
else:
docs = minion_instance.functions['sys.doc']('{0}*'.format(function_name))
if docs:
docs[function_name] = minion_instance.functions.missing_fun_string(function_name)
ret['return'] = docs
else:
ret['return'] = minion_instance.functions.missing_fun_string(function_name)
mod_name = function_name.split('.')[0]
if mod_name in minion_instance.function_errors:
ret['return'] += ' Possible reasons: \'{0}\''.format(
minion_instance.function_errors[mod_name]
)
ret['success'] = False
ret['retcode'] = salt.defaults.exitcodes.EX_GENERIC
ret['out'] = 'nested'
ret['jid'] = data['jid']
ret['fun'] = data['fun']
ret['fun_args'] = data['arg']
if 'master_id' in data:
ret['master_id'] = data['master_id']
if 'metadata' in data:
if isinstance(data['metadata'], dict):
ret['metadata'] = data['metadata']
else:
log.warning('The metadata parameter must be a dictionary. Ignoring.')
if minion_instance.connected:
minion_instance._return_pub(
ret,
timeout=minion_instance._return_retry_timer()
)
# Add default returners from minion config
# Should have been coverted to comma-delimited string already
if isinstance(opts.get('return'), six.string_types):
if data['ret']:
data['ret'] = ','.join((data['ret'], opts['return']))
else:
data['ret'] = opts['return']
log.debug('minion return: %s', ret)
# TODO: make a list? Seems odd to split it this late :/
if data['ret'] and isinstance(data['ret'], six.string_types):
if 'ret_config' in data:
ret['ret_config'] = data['ret_config']
if 'ret_kwargs' in data:
ret['ret_kwargs'] = data['ret_kwargs']
ret['id'] = opts['id']
for returner in set(data['ret'].split(',')):
try:
returner_str = '{0}.returner'.format(returner)
if returner_str in minion_instance.returners:
minion_instance.returners[returner_str](ret)
else:
returner_err = minion_instance.returners.missing_fun_string(returner_str)
log.error(
'Returner %s could not be loaded: %s',
returner_str, returner_err
)
except Exception as exc:
log.exception(
'The return failed for job %s: %s', data['jid'], exc
)
@classmethod
def _thread_multi_return(cls, minion_instance, opts, data):
    '''
    This method should be used as a threading target, start the actual
    minion side execution.

    Like ``_thread_return`` but for publications carrying a *list* of
    functions: ``data['fun']`` / ``data['arg']`` are parallel lists and
    each entry is executed in order, with per-function return/retcode/
    success bookkeeping.
    '''
    fn_ = os.path.join(minion_instance.proc_dir, data['jid'])
    if opts['multiprocessing'] and not salt.utils.platform.is_windows():
        # Shutdown the multiprocessing before daemonizing
        salt.log.setup.shutdown_multiprocessing_logging()

        salt.utils.process.daemonize_if(opts)

        # Reconfigure multiprocessing logging after daemonizing
        salt.log.setup.setup_multiprocessing_logging()

    # Tag the worker process so the job is identifiable in `ps` output
    salt.utils.process.appendproctitle('{0}._thread_multi_return {1}'.format(cls.__name__, data['jid']))

    # Record the running job (publication data + our PID) in proc_dir so
    # saltutil.running / find_job can discover it.
    sdata = {'pid': os.getpid()}
    sdata.update(data)
    log.info('Starting a new job with PID %s', sdata['pid'])
    with salt.utils.files.fopen(fn_, 'w+b') as fp_:
        fp_.write(minion_instance.serial.dumps(sdata))

    # When multifunc_ordered is set, results are positional lists keyed by
    # index; otherwise they are dicts keyed by function name.
    multifunc_ordered = opts.get('multifunc_ordered', False)
    num_funcs = len(data['fun'])
    if multifunc_ordered:
        ret = {
            'return': [None] * num_funcs,
            'retcode': [None] * num_funcs,
            'success': [False] * num_funcs
        }
    else:
        ret = {
            'return': {},
            'retcode': {},
            'success': {}
        }

    for ind in range(0, num_funcs):
        if not multifunc_ordered:
            ret['success'][data['fun'][ind]] = False
        try:
            minion_blackout_violation = False
            if minion_instance.connected and minion_instance.opts['pillar'].get('minion_blackout', False):
                whitelist = minion_instance.opts['pillar'].get('minion_blackout_whitelist', [])
                # this minion is blacked out. Only allow saltutil.refresh_pillar and the whitelist
                if data['fun'][ind] != 'saltutil.refresh_pillar' and data['fun'][ind] not in whitelist:
                    minion_blackout_violation = True
            elif minion_instance.opts['grains'].get('minion_blackout', False):
                # Grains-based blackout is only consulted when no pillar
                # blackout applies (or we are disconnected).
                whitelist = minion_instance.opts['grains'].get('minion_blackout_whitelist', [])
                if data['fun'][ind] != 'saltutil.refresh_pillar' and data['fun'][ind] not in whitelist:
                    minion_blackout_violation = True
            if minion_blackout_violation:
                raise SaltInvocationError('Minion in blackout mode. Set \'minion_blackout\' '
                                          'to False in pillar or grains to resume operations. Only '
                                          'saltutil.refresh_pillar allowed in blackout mode.')

            func = minion_instance.functions[data['fun'][ind]]

            args, kwargs = load_args_and_kwargs(
                func,
                data['arg'][ind],
                data)
            # Reset the shared retcode before each call; the module may set
            # it through __context__ to signal failure.
            minion_instance.functions.pack['__context__']['retcode'] = 0
            key = ind if multifunc_ordered else data['fun'][ind]
            ret['return'][key] = func(*args, **kwargs)
            retcode = minion_instance.functions.pack['__context__'].get(
                'retcode',
                0
            )
            if retcode == 0:
                # No nonzero retcode in __context__ dunder. Check if return
                # is a dictionary with a "result" or "success" key.
                try:
                    func_result = all(ret['return'][key].get(x, True)
                                      for x in ('result', 'success'))
                except Exception:
                    # return data is not a dict
                    func_result = True
                if not func_result:
                    retcode = 1

            ret['retcode'][key] = retcode
            ret['success'][key] = retcode == 0
        except Exception as exc:
            # Ship the traceback back as the function's return value
            trb = traceback.format_exc()
            log.warning('The minion function caused an exception: %s', exc)
            if multifunc_ordered:
                ret['return'][ind] = trb
            else:
                ret['return'][data['fun'][ind]] = trb
    ret['jid'] = data['jid']
    ret['fun'] = data['fun']
    ret['fun_args'] = data['arg']
    if 'metadata' in data:
        ret['metadata'] = data['metadata']
    if minion_instance.connected:
        minion_instance._return_pub(
            ret,
            timeout=minion_instance._return_retry_timer()
        )
    if data['ret']:
        # Also hand the result to every requested returner module
        if 'ret_config' in data:
            ret['ret_config'] = data['ret_config']
        if 'ret_kwargs' in data:
            ret['ret_kwargs'] = data['ret_kwargs']
        for returner in set(data['ret'].split(',')):
            ret['id'] = opts['id']
            try:
                minion_instance.returners['{0}.returner'.format(
                    returner
                )](ret)
            except Exception as exc:
                log.error(
                    'The return failed for job %s: %s',
                    data['jid'], exc
                )
def _return_pub(self, ret, ret_cmd='_return', timeout=60, sync=True):
    '''
    Return the data from the executed command to the master server

    Builds a ``load`` dict from *ret* (shaped differently for syndic
    returns vs. normal returns), optionally caches the job locally, and
    ships it over the req channel either synchronously or asynchronously.
    Returns the channel's reply, or ``''`` on timeout / when pub_ret is
    disabled.
    '''
    jid = ret.get('jid', ret.get('__jid__'))
    fun = ret.get('fun', ret.get('__fun__'))
    if self.opts['multiprocessing']:
        # Job is finished; remove its proc_dir tracking file
        fn_ = os.path.join(self.proc_dir, jid)
        if os.path.isfile(fn_):
            try:
                os.remove(fn_)
            except (OSError, IOError):
                # The file is gone already
                pass
    log.info('Returning information for job: %s', jid)
    log.trace('Return data: %s', ret)
    if ret_cmd == '_syndic_return':
        # NOTE(review): 'id' comes from opts['uid'] here, not opts['id'] as
        # in the branch below -- confirm this is intentional.
        load = {'cmd': ret_cmd,
                'id': self.opts['uid'],
                'jid': jid,
                'fun': fun,
                'arg': ret.get('arg'),
                'tgt': ret.get('tgt'),
                'tgt_type': ret.get('tgt_type'),
                'load': ret.get('__load__')}
        if '__master_id__' in ret:
            load['master_id'] = ret['__master_id__']
        # Dunder-prefixed keys are transport metadata, not return data
        load['return'] = {}
        for key, value in six.iteritems(ret):
            if key.startswith('__'):
                continue
            load['return'][key] = value
    else:
        load = {'cmd': ret_cmd,
                'id': self.opts['id']}
        for key, value in six.iteritems(ret):
            load[key] = value
        # Resolve which outputter the master should use for display
        if 'out' in ret:
            if isinstance(ret['out'], six.string_types):
                load['out'] = ret['out']
            else:
                log.error(
                    'Invalid outputter %s. This is likely a bug.',
                    ret['out']
                )
        else:
            try:
                oput = self.functions[fun].__outputter__
            except (KeyError, AttributeError, TypeError):
                pass
            else:
                if isinstance(oput, six.string_types):
                    load['out'] = oput
    if self.opts['cache_jobs']:
        # Local job cache has been enabled
        if ret['jid'] == 'req':
            ret['jid'] = salt.utils.jid.gen_jid(self.opts)
        salt.utils.minion.cache_jobs(self.opts, ret['jid'], ret)

    if not self.opts['pub_ret']:
        return ''

    def timeout_handler(*_):
        # Invoked when the master does not answer in time; swallow the
        # error (return True) so the stack context does not re-raise.
        log.warning(
            'The minion failed to return the job information for job %s. '
            'This is often due to the master being shut down or '
            'overloaded. If the master is running, consider increasing '
            'the worker_threads value.', jid
        )
        return True

    if sync:
        try:
            ret_val = self._send_req_sync(load, timeout=timeout)
        except SaltReqTimeoutError:
            timeout_handler()
            return ''
    else:
        with tornado.stack_context.ExceptionStackContext(timeout_handler):
            ret_val = self._send_req_async(load, timeout=timeout, callback=lambda f: None)  # pylint: disable=unexpected-keyword-arg

    log.trace('ret_val = %s', ret_val)  # pylint: disable=no-member
    return ret_val
def _return_pub_multi(self, rets, ret_cmd='_return', timeout=60, sync=True):
    '''
    Return the data from the executed command to the master server

    Batch variant of ``_return_pub``: accepts a single return dict or a
    list of them, aggregates per-jid loads, and ships them all in one
    request whose ``load`` is the list of per-jid payloads.
    '''
    if not isinstance(rets, list):
        rets = [rets]
    jids = {}
    for ret in rets:
        jid = ret.get('jid', ret.get('__jid__'))
        fun = ret.get('fun', ret.get('__fun__'))
        if self.opts['multiprocessing']:
            # Job is finished; remove its proc_dir tracking file
            fn_ = os.path.join(self.proc_dir, jid)
            if os.path.isfile(fn_):
                try:
                    os.remove(fn_)
                except (OSError, IOError):
                    # The file is gone already
                    pass
        log.info('Returning information for job: %s', jid)
        # Merge multiple returns for the same jid into one load
        load = jids.setdefault(jid, {})
        if ret_cmd == '_syndic_return':
            if not load:
                load.update({'id': self.opts['id'],
                             'jid': jid,
                             'fun': fun,
                             'arg': ret.get('arg'),
                             'tgt': ret.get('tgt'),
                             'tgt_type': ret.get('tgt_type'),
                             'load': ret.get('__load__'),
                             'return': {}})
            if '__master_id__' in ret:
                load['master_id'] = ret['__master_id__']
            # Dunder-prefixed keys are transport metadata, not return data
            for key, value in six.iteritems(ret):
                if key.startswith('__'):
                    continue
                load['return'][key] = value
        else:
            load.update({'id': self.opts['id']})
            for key, value in six.iteritems(ret):
                load[key] = value
            # Resolve which outputter the master should use for display
            if 'out' in ret:
                if isinstance(ret['out'], six.string_types):
                    load['out'] = ret['out']
                else:
                    log.error(
                        'Invalid outputter %s. This is likely a bug.',
                        ret['out']
                    )
            else:
                try:
                    oput = self.functions[fun].__outputter__
                except (KeyError, AttributeError, TypeError):
                    pass
                else:
                    if isinstance(oput, six.string_types):
                        load['out'] = oput
        if self.opts['cache_jobs']:
            # Local job cache has been enabled
            salt.utils.minion.cache_jobs(self.opts, load['jid'], ret)
    load = {'cmd': ret_cmd,
            'load': list(six.itervalues(jids))}

    def timeout_handler(*_):
        # NOTE(review): 'jid' here is the loop variable leaked from the
        # for-loop above, so only the *last* jid is reported even though
        # the whole batch timed out -- confirm whether that is acceptable.
        log.warning(
            'The minion failed to return the job information for job %s. '
            'This is often due to the master being shut down or '
            'overloaded. If the master is running, consider increasing '
            'the worker_threads value.', jid
        )
        return True

    if sync:
        try:
            ret_val = self._send_req_sync(load, timeout=timeout)
        except SaltReqTimeoutError:
            timeout_handler()
            return ''
    else:
        with tornado.stack_context.ExceptionStackContext(timeout_handler):
            ret_val = self._send_req_async(load, timeout=timeout, callback=lambda f: None)  # pylint: disable=unexpected-keyword-arg

    log.trace('ret_val = %s', ret_val)  # pylint: disable=no-member
    return ret_val
def _state_run(self):
    '''
    Execute a state run based on information set in the minion config file
    '''
    startup_states = self.opts['startup_states']
    if not startup_states:
        return
    # A masterless remote file client cannot fetch state files
    if (self.opts.get('master_type', 'str') == 'disable' and
            self.opts.get('file_client', 'remote') == 'remote'):
        log.warning(
            'Cannot run startup_states when \'master_type\' is set '
            'to \'disable\' and \'file_client\' is set to '
            '\'remote\'. Skipping.'
        )
        return
    # Build a synthetic job ('req') and feed it through the normal
    # payload handling path.
    data = {'jid': 'req', 'ret': self.opts.get('ext_job_cache', '')}
    if startup_states == 'sls':
        data['fun'], data['arg'] = 'state.sls', [self.opts['sls_list']]
    elif startup_states == 'top':
        data['fun'], data['arg'] = 'state.top', [self.opts['top_file']]
    else:
        data['fun'], data['arg'] = 'state.highstate', []
    self._handle_decoded_payload(data)
def _refresh_grains_watcher(self, refresh_interval_in_minutes):
    '''
    Create a loop that will fire a pillar refresh to inform a master about a change in the grains of this minion

    :param refresh_interval_in_minutes: how often the recurring
        ``event.fire`` job should run
    :return: None
    '''
    schedule = self.opts.setdefault('schedule', {})
    if '__update_grains' in schedule:
        # Watcher job already registered; nothing to do
        return
    schedule['__update_grains'] = {
        'function': 'event.fire',
        'args': [{}, 'grains_refresh'],
        'minutes': refresh_interval_in_minutes,
    }
def _fire_master_minion_start(self):
    '''
    Send an event to the master that the minion is live: optionally the
    legacy 'minion_start' tag, always the namespaced 'minion/<id>/start'
    tag.
    '''
    def _start_msg():
        return 'Minion {0} started at {1}'.format(
            self.opts['id'],
            time.asctime()
        )

    if self.opts['enable_legacy_startup_events']:
        # old style event. Defaults to False in Neon Salt release
        self._fire_master(_start_msg(), 'minion_start')
    # send name spaced event
    self._fire_master(
        _start_msg(),
        tagify([self.opts['id'], 'start'], 'minion'),
    )
def module_refresh(self, force_refresh=False, notify=False):
    '''
    Refresh the functions and returners.

    Reloads the execution/returner loaders and hands the fresh loaders
    to the scheduler so scheduled jobs use them too.
    '''
    log.debug('Refreshing modules. Notify=%s', notify)
    loaded = self._load_modules(force_refresh, notify=notify)
    # _load_modules returns (functions, returners, errors, executors);
    # the error map is not needed here.
    self.functions, self.returners, _, self.executors = loaded
    self.schedule.functions = self.functions
    self.schedule.returners = self.returners
def beacons_refresh(self):
    '''
    Refresh the functions and returners.

    Rebuilds the beacon manager against the current opts and function
    loader.
    '''
    log.debug('Refreshing beacons.')
    refreshed = salt.beacons.Beacon(self.opts, self.functions)
    self.beacons = refreshed
# TODO: only allow one future in flight at a time?
@tornado.gen.coroutine
def pillar_refresh(self, force_refresh=False):
    '''
    Refresh the pillar

    Recompiles pillar data from the master (only while connected) and
    then refreshes the module loaders, since pillar changes can alter
    which modules load.
    '''
    if self.connected:
        log.debug('Refreshing pillar')
        try:
            self.opts['pillar'] = yield salt.pillar.get_async_pillar(
                self.opts,
                self.opts['grains'],
                self.opts['id'],
                self.opts['saltenv'],
                pillarenv=self.opts.get('pillarenv'),
            ).compile_pillar()
        except SaltClientError:
            # Do not exit if a pillar refresh fails.
            log.error('Pillar data could not be refreshed. '
                      'One or more masters may be down!')
    # Always reload modules, even when disconnected or the pillar
    # compile failed above.
    self.module_refresh(force_refresh)
def manage_schedule(self, tag, data):
    '''
    Refresh the functions and returners.

    Dispatches a schedule-management event (``data['func']`` selects the
    operation) to the corresponding scheduler call. Unknown operations
    are silently ignored.
    '''
    func = data.get('func', None)
    name = data.get('name', None)
    schedule = data.get('schedule', None)
    where = data.get('where', None)
    persist = data.get('persist', None)

    # Map each request type onto the scheduler call that services it
    dispatch = {
        'delete': lambda: self.schedule.delete_job(name, persist),
        'add': lambda: self.schedule.add_job(schedule, persist),
        'modify': lambda: self.schedule.modify_job(name, schedule, persist),
        'enable': lambda: self.schedule.enable_schedule(),
        'disable': lambda: self.schedule.disable_schedule(),
        'enable_job': lambda: self.schedule.enable_job(name, persist),
        'run_job': lambda: self.schedule.run_job(name),
        'disable_job': lambda: self.schedule.disable_job(name, persist),
        'postpone_job': lambda: self.schedule.postpone_job(name, data),
        'skip_job': lambda: self.schedule.skip_job(name, data),
        'reload': lambda: self.schedule.reload(schedule),
        'list': lambda: self.schedule.list(where),
        'save_schedule': lambda: self.schedule.save_schedule(),
        'get_next_fire_time': lambda: self.schedule.get_next_fire_time(name),
    }
    action = dispatch.get(func)
    if action is not None:
        action()
def manage_beacons(self, tag, data):
    '''
    Manage Beacons

    Dispatches a beacon-management event (``data['func']`` selects the
    operation) to the corresponding beacon-manager call. Unknown
    operations are silently ignored.
    '''
    func = data.get('func', None)
    name = data.get('name', None)
    beacon_data = data.get('beacon_data', None)
    include_pillar = data.get('include_pillar', None)
    include_opts = data.get('include_opts', None)

    # Map each request type onto the beacon-manager call that services it
    dispatch = {
        'add': lambda: self.beacons.add_beacon(name, beacon_data),
        'modify': lambda: self.beacons.modify_beacon(name, beacon_data),
        'delete': lambda: self.beacons.delete_beacon(name),
        'enable': lambda: self.beacons.enable_beacons(),
        'disable': lambda: self.beacons.disable_beacons(),
        'enable_beacon': lambda: self.beacons.enable_beacon(name),
        'disable_beacon': lambda: self.beacons.disable_beacon(name),
        'list': lambda: self.beacons.list_beacons(include_opts, include_pillar),
        'list_available': lambda: self.beacons.list_available_beacons(),
        'validate_beacon': lambda: self.beacons.validate_beacon(name, beacon_data),
    }
    action = dispatch.get(func)
    if action is not None:
        action()
def environ_setenv(self, tag, data):
    '''
    Set the salt-minion main process environment according to
    the data contained in the minion event data

    Returns False when no 'environ' mapping is supplied; otherwise
    returns whatever the environ execution module reports.
    '''
    environ = data.get('environ', None)
    if environ is None:
        # Nothing to apply
        return False
    # Imported lazily so the module is only loaded when actually needed
    import salt.modules.environ as mod_environ
    return mod_environ.setenv(
        environ,
        data.get('false_unsets', False),
        data.get('clear_all', False),
    )
def _pre_tune(self):
    '''
    Set the minion running flag and issue the appropriate warnings if
    the minion cannot be started or is already running
    '''
    cls_name = self.__class__.__name__
    if self._running is False:
        # A stop was requested before tune_in ever ran
        log.error(
            'This %s was scheduled to stop. Not running %s.tune_in()',
            cls_name, cls_name
        )
        return
    if self._running is True:
        log.error(
            'This %s is already running. Not running %s.tune_in()',
            cls_name, cls_name
        )
        return
    if self._running is None:
        self._running = True

    try:
        log.info(
            '%s is starting as user \'%s\'',
            cls_name, salt.utils.user.get_user()
        )
    except Exception as err:
        # Only windows is allowed to fail here. See #3189. Log as debug in
        # that case. Else, error.
        log.log(
            salt.utils.platform.is_windows() and logging.DEBUG or logging.ERROR,
            'Failed to get the user who is starting %s',
            cls_name,
            exc_info=err
        )
def _mine_send(self, tag, data):
    '''
    Send mine data to the master

    Returns the master's reply, or None when the request times out.
    '''
    data['tok'] = self.tok
    channel = salt.transport.Channel.factory(self.opts)
    try:
        return channel.send(data)
    except SaltReqTimeoutError:
        log.warning('Unable to send mine data to master.')
        return None
@tornado.gen.coroutine
def handle_event(self, package):
    '''
    Handle an event from the epull_sock (all local minion events)

    Routes each event tag to the matching subsystem: module/pillar/
    beacon refreshes, schedule and beacon management, mine pushes,
    master-forwarded events, master disconnect/failback/reconnect
    handling, schedule returns and auth credential updates.
    '''
    if not self.ready:
        raise tornado.gen.Return()
    tag, data = salt.utils.event.SaltEvent.unpack(package)
    log.debug(
        'Minion of \'%s\' is handling event tag \'%s\'',
        self.opts['master'], tag
    )
    if tag.startswith('module_refresh'):
        self.module_refresh(
            force_refresh=data.get('force_refresh', False),
            notify=data.get('notify', False)
        )
    elif tag.startswith('pillar_refresh'):
        yield self.pillar_refresh(
            force_refresh=data.get('force_refresh', False)
        )
    elif tag.startswith('beacons_refresh'):
        self.beacons_refresh()
    elif tag.startswith('manage_schedule'):
        self.manage_schedule(tag, data)
    elif tag.startswith('manage_beacons'):
        self.manage_beacons(tag, data)
    elif tag.startswith('grains_refresh'):
        # Only recompile pillar when grains actually changed (or a
        # refresh was forced) -- pillar compiles are expensive.
        if (data.get('force_refresh', False) or
                self.grains_cache != self.opts['grains']):
            # NOTE(review): pillar_refresh is a coroutine but is not
            # yielded here -- confirm fire-and-forget is intended.
            self.pillar_refresh(force_refresh=True)
            self.grains_cache = self.opts['grains']
    elif tag.startswith('environ_setenv'):
        self.environ_setenv(tag, data)
    elif tag.startswith('_minion_mine'):
        self._mine_send(tag, data)
    elif tag.startswith('fire_master'):
        if self.connected:
            log.debug('Forwarding master event tag=%s', data['tag'])
            self._fire_master(data['data'], data['tag'], data['events'], data['pretag'])
    elif tag.startswith(master_event(type='disconnected')) or tag.startswith(master_event(type='failback')):
        # if the master disconnect event is for a different master, raise an exception
        if tag.startswith(master_event(type='disconnected')) and data['master'] != self.opts['master']:
            # not mine master, ignore
            return
        if tag.startswith(master_event(type='failback')):
            # if the master failback event is not for the top master, raise an exception
            if data['master'] != self.opts['master_list'][0]:
                raise SaltException('Bad master \'{0}\' when mine failback is \'{1}\''.format(
                    data['master'], self.opts['master']))
            # if the master failback event is for the current master, raise an exception
            elif data['master'] == self.opts['master'][0]:
                raise SaltException('Already connected to \'{0}\''.format(data['master']))

        if self.connected:
            # we are not connected anymore
            self.connected = False
            log.info('Connection to master %s lost', self.opts['master'])

            # we can't use the config default here because the default '0' value is overloaded
            # to mean 'if 0 disable the job', but when salt detects a timeout it also sets up
            # these jobs
            master_alive_interval = self.opts['master_alive_interval'] or 60

            if self.opts['master_type'] != 'failover':
                # modify the scheduled job to fire on reconnect
                if self.opts['transport'] != 'tcp':
                    schedule = {
                        'function': 'status.master',
                        'seconds': master_alive_interval,
                        'jid_include': True,
                        'maxrunning': 1,
                        'return_job': False,
                        'kwargs': {'master': self.opts['master'],
                                   'connected': False}
                    }
                    self.schedule.modify_job(name=master_event(type='alive', master=self.opts['master']),
                                             schedule=schedule)
            else:
                # delete the scheduled job to don't interfere with the failover process
                if self.opts['transport'] != 'tcp':
                    self.schedule.delete_job(name=master_event(type='alive'))

                log.info('Trying to tune in to next master from master-list')

                # Drop the dead channel before dialing the next master
                if hasattr(self, 'pub_channel'):
                    self.pub_channel.on_recv(None)
                    if hasattr(self.pub_channel, 'auth'):
                        self.pub_channel.auth.invalidate()
                    if hasattr(self.pub_channel, 'close'):
                        self.pub_channel.close()
                    del self.pub_channel

                # if eval_master finds a new master for us, self.connected
                # will be True again on successful master authentication
                try:
                    master, self.pub_channel = yield self.eval_master(
                        opts=self.opts,
                        failed=True,
                        failback=tag.startswith(master_event(type='failback')))
                except SaltClientError:
                    pass

                if self.connected:
                    self.opts['master'] = master

                    # re-init the subsystems to work with the new master
                    log.info(
                        'Re-initialising subsystems for new master %s',
                        self.opts['master']
                    )
                    # put the current schedule into the new loaders
                    self.opts['schedule'] = self.schedule.option('schedule')
                    self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
                    # make the schedule to use the new 'functions' loader
                    self.schedule.functions = self.functions
                    self.pub_channel.on_recv(self._handle_payload)
                    self._fire_master_minion_start()
                    log.info('Minion is ready to receive requests!')

                    # update scheduled job to run with the new master addr
                    if self.opts['transport'] != 'tcp':
                        schedule = {
                            'function': 'status.master',
                            'seconds': master_alive_interval,
                            'jid_include': True,
                            'maxrunning': 1,
                            'return_job': False,
                            'kwargs': {'master': self.opts['master'],
                                       'connected': True}
                        }
                        self.schedule.modify_job(name=master_event(type='alive', master=self.opts['master']),
                                                 schedule=schedule)

                        # Keep probing the top master so we can fail back
                        # to it once it returns.
                        if self.opts['master_failback'] and 'master_list' in self.opts:
                            if self.opts['master'] != self.opts['master_list'][0]:
                                schedule = {
                                    'function': 'status.ping_master',
                                    'seconds': self.opts['master_failback_interval'],
                                    'jid_include': True,
                                    'maxrunning': 1,
                                    'return_job': False,
                                    'kwargs': {'master': self.opts['master_list'][0]}
                                }
                                self.schedule.modify_job(name=master_event(type='failback'),
                                                         schedule=schedule)
                            else:
                                self.schedule.delete_job(name=master_event(type='failback'), persist=True)
                else:
                    # No master could be reached; restart the daemon
                    self.restart = True
                    self.io_loop.stop()

    elif tag.startswith(master_event(type='connected')):
        # handle this event only once. otherwise it will pollute the log
        # also if master type is failover all the reconnection work is done
        # by `disconnected` event handler and this event must never happen,
        # anyway check it to be sure
        if not self.connected and self.opts['master_type'] != 'failover':
            log.info('Connection to master %s re-established', self.opts['master'])
            self.connected = True
            # modify the __master_alive job to only fire,
            # if the connection is lost again
            if self.opts['transport'] != 'tcp':
                if self.opts['master_alive_interval'] > 0:
                    schedule = {
                        'function': 'status.master',
                        'seconds': self.opts['master_alive_interval'],
                        'jid_include': True,
                        'maxrunning': 1,
                        'return_job': False,
                        'kwargs': {'master': self.opts['master'],
                                   'connected': True}
                    }
                    self.schedule.modify_job(name=master_event(type='alive', master=self.opts['master']),
                                             schedule=schedule)
                else:
                    self.schedule.delete_job(name=master_event(type='alive', master=self.opts['master']), persist=True)

    elif tag.startswith('__schedule_return'):
        # reporting current connection with master
        if data['schedule'].startswith(master_event(type='alive', master='')):
            if data['return']:
                log.debug(
                    'Connected to master %s',
                    data['schedule'].split(master_event(type='alive', master=''))[1]
                )
        self._return_pub(data, ret_cmd='_return', sync=False)
    elif tag.startswith('_salt_error'):
        if self.connected:
            log.debug('Forwarding salt error event tag=%s', tag)
            self._fire_master(data, tag)
    elif tag.startswith('salt/auth/creds'):
        key = tuple(data['key'])
        log.debug(
            'Updating auth data for %s: %s -> %s',
            key, salt.crypt.AsyncAuth.creds_map.get(key), data['creds']
        )
        salt.crypt.AsyncAuth.creds_map[tuple(data['key'])] = data['creds']
def _fallback_cleanups(self):
    '''
    Fallback cleanup routines, attempting to fix leaked processes, threads, etc.

    Reaps leaked multiprocessing children on all platforms and, on
    Windows, joins and discards finished job threads tracked in
    ``self.win_proc``.
    '''
    # Add an extra fallback in case a forked process leaks through
    multiprocessing.active_children()

    # Cleanup Windows threads
    if not salt.utils.platform.is_windows():
        return
    # Iterate over a snapshot: the original code removed entries from
    # self.win_proc while iterating it directly, which skips the element
    # following every removal and leaves dead threads behind.
    for thread in list(self.win_proc):
        if not thread.is_alive():
            thread.join()
            try:
                self.win_proc.remove(thread)
            except ValueError:
                # Already removed elsewhere
                pass
def _setup_core(self):
    '''
    Set up the core minion attributes.
    This is safe to call multiple times.

    On the first call (``self.ready`` is falsy) this builds the loaders,
    serializer, matcher, beacons and proc_dir, then flips ``self.ready``
    so later calls become no-ops.
    '''
    if not self.ready:
        # First call. Initialize.
        self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
        self.serial = salt.payload.Serial(self.opts)
        self.mod_opts = self._prep_mod_opts()
        # Matcher depends on the freshly loaded functions above
        self.matcher = Matcher(self.opts, self.functions)
        self.beacons = salt.beacons.Beacon(self.opts, self.functions)
        uid = salt.utils.user.get_uid(user=self.opts.get('user', None))
        self.proc_dir = get_proc_dir(self.opts['cachedir'], uid=uid)
        self.grains_cache = self.opts['grains']
        # Mark initialization complete so repeat calls are no-ops
        self.ready = True
def setup_beacons(self, before_connect=False):
    '''
    Set up the beacons.
    This is safe to call multiple times.

    Registers a periodic callback that evaluates beacons every
    loop_interval and fires any produced events to the master, plus a
    'cleanup' fallback callback if one is not already registered.
    '''
    self._setup_core()

    loop_interval = self.opts['loop_interval']
    new_periodic_callbacks = {}

    if 'beacons' not in self.periodic_callbacks:
        self.beacons = salt.beacons.Beacon(self.opts, self.functions)

        def handle_beacons():
            # Process Beacons
            beacons = None
            try:
                beacons = self.process_beacons(self.functions)
            except Exception:
                # Never let a broken beacon kill the periodic callback
                log.critical('The beacon errored: ', exc_info=True)
            if beacons and self.connected:
                self._fire_master(events=beacons)

        new_periodic_callbacks['beacons'] = tornado.ioloop.PeriodicCallback(
            handle_beacons, loop_interval * 1000)
        if before_connect:
            # Make sure there is a chance for one iteration to occur before connect
            handle_beacons()

    if 'cleanup' not in self.periodic_callbacks:
        new_periodic_callbacks['cleanup'] = tornado.ioloop.PeriodicCallback(
            self._fallback_cleanups, loop_interval * 1000)

    # start all the other callbacks
    for periodic_cb in six.itervalues(new_periodic_callbacks):
        periodic_cb.start()

    self.periodic_callbacks.update(new_periodic_callbacks)
def setup_scheduler(self, before_connect=False):
    '''
    Set up the scheduler.
    This is safe to call multiple times.

    Builds the Schedule object on first use, optionally enables the
    periodic grains refresher, and registers the 'schedule' (and, if
    missing, 'cleanup') periodic callbacks on the io_loop.
    '''
    self._setup_core()

    loop_interval = self.opts['loop_interval']
    new_periodic_callbacks = {}

    if 'schedule' not in self.periodic_callbacks:
        if 'schedule' not in self.opts:
            self.opts['schedule'] = {}
        if not hasattr(self, 'schedule'):
            self.schedule = salt.utils.schedule.Schedule(
                self.opts,
                self.functions,
                self.returners,
                utils=self.utils,
                cleanup=[master_event(type='alive')])

        try:
            if self.opts['grains_refresh_every']:  # If exists and is not zero. In minutes, not seconds!
                # Use lazy %-style args (consistent with the rest of this
                # module) so the message is only formatted when DEBUG is on.
                if self.opts['grains_refresh_every'] > 1:
                    log.debug(
                        'Enabling the grains refresher. Will run every %s minutes.',
                        self.opts['grains_refresh_every']
                    )
                else:  # Clean up minute vs. minutes in log message
                    log.debug(
                        'Enabling the grains refresher. Will run every %s minute.',
                        self.opts['grains_refresh_every']
                    )
                self._refresh_grains_watcher(
                    abs(self.opts['grains_refresh_every'])
                )
        except Exception as exc:
            log.error(
                'Exception occurred in attempt to initialize grain refresh routine during minion tune-in: %s',
                exc
            )

        # TODO: actually listen to the return and change period
        def handle_schedule():
            self.process_schedule(self, loop_interval)
        new_periodic_callbacks['schedule'] = tornado.ioloop.PeriodicCallback(handle_schedule, 1000)

        if before_connect:
            # Make sure there is a chance for one iteration to occur before connect
            handle_schedule()

    if 'cleanup' not in self.periodic_callbacks:
        new_periodic_callbacks['cleanup'] = tornado.ioloop.PeriodicCallback(
            self._fallback_cleanups, loop_interval * 1000)

    # start all the other callbacks
    for periodic_cb in six.itervalues(new_periodic_callbacks):
        periodic_cb.start()

    self.periodic_callbacks.update(new_periodic_callbacks)
# Main Minion Tune In
def tune_in(self, start=True):
    '''
    Lock onto the publisher. This is the main event loop for the minion
    :rtype : None

    When *start* is True this connects to the master and runs the
    io_loop until interrupted; with *start* False it only wires up
    handlers for an externally managed loop.
    '''
    self._pre_tune()

    log.debug('Minion \'%s\' trying to tune in', self.opts['id'])

    if start:
        # Beacons/scheduler may be configured to run before the first
        # master connection is attempted
        if self.opts.get('beacons_before_connect', False):
            self.setup_beacons(before_connect=True)
        if self.opts.get('scheduler_before_connect', False):
            self.setup_scheduler(before_connect=True)
        self.sync_connect_master()
    if self.connected:
        self._fire_master_minion_start()
        log.info('Minion is ready to receive requests!')

    # Make sure to gracefully handle SIGUSR1
    enable_sigusr1_handler()

    # Make sure to gracefully handle CTRL_LOGOFF_EVENT
    if HAS_WIN_FUNCTIONS:
        salt.utils.win_functions.enable_ctrl_logoff_handler()

    # On first startup execute a state run if configured to do so
    self._state_run()

    self.setup_beacons()
    self.setup_scheduler()

    # schedule the stuff that runs every interval
    ping_interval = self.opts.get('ping_interval', 0) * 60
    if ping_interval > 0 and self.connected:
        def ping_master():
            try:
                def ping_timeout_handler(*_):
                    if self.opts.get('auth_safemode', False):
                        log.error('** Master Ping failed. Attempting to restart minion**')
                        delay = self.opts.get('random_reauth_delay', 5)
                        log.info('delaying random_reauth_delay %ss', delay)
                        # regular sys.exit raises an exception -- which isn't sufficient in a thread
                        os._exit(salt.defaults.exitcodes.SALT_KEEPALIVE)

                self._fire_master('ping', 'minion_ping', sync=False, timeout_handler=ping_timeout_handler)
            except Exception:
                log.warning('Attempt to ping master failed.', exc_on_loglevel=logging.DEBUG)
        self.periodic_callbacks['ping'] = tornado.ioloop.PeriodicCallback(ping_master, ping_interval * 1000)
        self.periodic_callbacks['ping'].start()

    # add handler to subscriber
    if hasattr(self, 'pub_channel') and self.pub_channel is not None:
        self.pub_channel.on_recv(self._handle_payload)
    elif self.opts.get('master_type') != 'disable':
        log.error('No connection to master found. Scheduled jobs will not run.')

    if start:
        try:
            self.io_loop.start()
            if self.restart:
                self.destroy()
        except (KeyboardInterrupt, RuntimeError):  # A RuntimeError can be re-raised by Tornado on shutdown
            self.destroy()
def _handle_payload(self, payload):
    '''
    Entry point for publications received on the pub channel.
    '''
    # If it's not AES, and thus has not been verified, we do nothing.
    # In the future, we could add support for some clearfuncs, but
    # the minion currently has no need.
    if payload is None or payload['enc'] != 'aes':
        return
    load = payload['load']
    if self._target_load(load):
        self._handle_decoded_payload(load)
    elif self.opts['zmq_filtering']:
        # In the filtering enabled case, we'd like to know when minion sees something it shouldnt
        log.trace(
            'Broadcast message received not for this minion, Load: %s',
            load
        )
def _target_load(self, load):
    '''
    Return True when the publication carries every mandatory field and
    its target expression matches this minion.
    '''
    # Verify that the publication is valid: all four fields must be present
    if any(field not in load for field in ('tgt', 'jid', 'fun', 'arg')):
        return False

    # Verify that the publication applies to this minion
    # It's important to note that the master does some pre-processing
    # to determine which minions to send a request to. So for example,
    # a "salt -G 'grain_key:grain_val' test.ping" will invoke some
    # pre-processing on the master and this minion should not see the
    # publication if the master does not determine that it should.
    if 'tgt_type' not in load:
        return bool(self.matcher.glob_match(load['tgt']))

    match_func = getattr(self.matcher,
                         '{0}_match'.format(load['tgt_type']), None)
    if match_func is None:
        return False
    if load['tgt_type'] in ('grain', 'grain_pcre', 'pillar'):
        # These matchers take a key:value expression with a delimiter
        delimiter = load.get('delimiter', DEFAULT_TARGET_DELIM)
        return bool(match_func(load['tgt'], delimiter=delimiter))
    return bool(match_func(load['tgt']))
def destroy(self):
    '''
    Tear down the minion

    Stops periodic callbacks, detaches and closes the publish channel,
    and drops the scheduler.
    '''
    self._running = False
    if hasattr(self, 'schedule'):
        del self.schedule
    pub_channel = getattr(self, 'pub_channel', None)
    if pub_channel is not None:
        pub_channel.on_recv(None)
        if hasattr(pub_channel, 'close'):
            pub_channel.close()
        del self.pub_channel
    if hasattr(self, 'periodic_callbacks'):
        for cb in six.itervalues(self.periodic_callbacks):
            cb.stop()
def __del__(self):
    # Best-effort teardown when the minion object is garbage collected
    # without an explicit destroy() call.
    self.destroy()
class Syndic(Minion):
'''
Make a Syndic minion, this minion will use the minion keys on the
master to authenticate with a higher level master.
'''
def __init__(self, opts, **kwargs):
self._syndic_interface = opts.get('interface')
self._syndic = True
# force auth_safemode True because Syndic don't support autorestart
opts['auth_safemode'] = True
opts['loop_interval'] = 1
super(Syndic, self).__init__(opts, **kwargs)
self.mminion = salt.minion.MasterMinion(opts)
self.jid_forward_cache = set()
self.jids = {}
self.raw_events = []
self.pub_future = None
def _handle_decoded_payload(self, data):
'''
Override this method if you wish to handle the decoded data
differently.
'''
# TODO: even do this??
data['to'] = int(data.get('to', self.opts['timeout'])) - 1
# Only forward the command if it didn't originate from ourselves
if data.get('master_id', 0) != self.opts.get('master_id', 1):
self.syndic_cmd(data)
def syndic_cmd(self, data):
'''
Take the now clear load and forward it on to the client cmd
'''
# Set up default tgt_type
if 'tgt_type' not in data:
data['tgt_type'] = 'glob'
kwargs = {}
# optionally add a few fields to the publish data
for field in ('master_id', # which master the job came from
'user', # which user ran the job
):
if field in data:
kwargs[field] = data[field]
def timeout_handler(*args):
log.warning('Unable to forward pub data: %s', args[1])
return True
with tornado.stack_context.ExceptionStackContext(timeout_handler):
self.local.pub_async(data['tgt'],
data['fun'],
data['arg'],
data['tgt_type'],
data['ret'],
data['jid'],
data['to'],
io_loop=self.io_loop,
callback=lambda _: None,
**kwargs)
def fire_master_syndic_start(self):
# Send an event to the master that the minion is live
if self.opts['enable_legacy_startup_events']:
# old style event. Defaults to false in Neon Salt release.
self._fire_master(
'Syndic {0} started at {1}'.format(
self.opts['id'],
time.asctime()
),
'syndic_start',
sync=False,
)
self._fire_master(
'Syndic {0} started at {1}'.format(
self.opts['id'],
time.asctime()
),
tagify([self.opts['id'], 'start'], 'syndic'),
sync=False,
)
# TODO: clean up docs
def tune_in_no_block(self):
'''
Executes the tune_in sequence but omits extra logging and the
management of the event bus assuming that these are handled outside
the tune_in sequence
'''
# Instantiate the local client
self.local = salt.client.get_local_client(
self.opts['_minion_conf_file'], io_loop=self.io_loop)
# add handler to subscriber
self.pub_channel.on_recv(self._process_cmd_socket)
def _process_cmd_socket(self, payload):
if payload is not None and payload['enc'] == 'aes':
log.trace('Handling payload')
self._handle_decoded_payload(payload['load'])
# If it's not AES, and thus has not been verified, we do nothing.
# In the future, we could add support for some clearfuncs, but
# the syndic currently has no need.
    @tornado.gen.coroutine
    def reconnect(self):
        '''
        Drop the current pub channel (if any) and sign back in to a master,
        re-installing the command handler on success. Returns self.
        '''
        # Detach the receive callback before closing so no payloads are
        # delivered into a half-torn-down channel.
        if hasattr(self, 'pub_channel'):
            self.pub_channel.on_recv(None)
            if hasattr(self.pub_channel, 'close'):
                self.pub_channel.close()
            del self.pub_channel
        # if eval_master finds a new master for us, self.connected
        # will be True again on successful master authentication
        master, self.pub_channel = yield self.eval_master(opts=self.opts)
        if self.connected:
            self.opts['master'] = master
            self.pub_channel.on_recv(self._process_cmd_socket)
            log.info('Minion is ready to receive requests!')
        raise tornado.gen.Return(self)
def destroy(self):
'''
Tear down the syndic minion
'''
# We borrowed the local clients poller so give it back before
# it's destroyed. Reset the local poller reference.
super(Syndic, self).destroy()
if hasattr(self, 'local'):
del self.local
if hasattr(self, 'forward_events'):
self.forward_events.stop()
# TODO: need a way of knowing if the syndic connection is busted
class SyndicManager(MinionBase):
    '''
    Make a MultiMaster syndic minion, this minion will handle relaying jobs and returns from
    all minions connected to it to the list of masters it is connected to.
    Modes (controlled by `syndic_mode`:
        sync: This mode will synchronize all events and publishes from higher level masters
        cluster: This mode will only sync job publishes and returns
    Note: jobs will be returned best-effort to the requesting master. This also means
    (since we are using zmq) that if a job was fired and the master disconnects
    between the publish and return, that the return will end up in a zmq buffer
    in this Syndic headed to that original master.
    In addition, since these classes all seem to use a mix of blocking and non-blocking
    calls (with varying timeouts along the way) this daemon does not handle failure well,
    it will (under most circumstances) stall the daemon for ~15s trying to forward events
    to the down master
    '''
    # time to connect to upstream master
    SYNDIC_CONNECT_TIMEOUT = 5
    SYNDIC_EVENT_TIMEOUT = 5

    def __init__(self, opts, io_loop=None):
        # Keep the loop tight so event forwarding stays responsive
        opts['loop_interval'] = 1
        super(SyndicManager, self).__init__(opts)
        self.mminion = salt.minion.MasterMinion(opts)
        # sync (old behavior), cluster (only returns and publishes)
        self.syndic_mode = self.opts.get('syndic_mode', 'sync')
        self.syndic_failover = self.opts.get('syndic_failover', 'random')
        self.auth_wait = self.opts['acceptance_wait_time']
        self.max_auth_wait = self.opts['acceptance_wait_time_max']
        self._has_master = threading.Event()
        # jids whose job load has already been forwarded upstream
        self.jid_forward_cache = set()
        if io_loop is None:
            install_zmq()
            self.io_loop = ZMQDefaultLoop.current()
        else:
            self.io_loop = io_loop
        # List of events
        self.raw_events = []
        # Dict of rets: {master_id: {event_tag: job_ret, ...}, ...}
        self.job_rets = {}
        # List of delayed job_rets which was unable to send for some reason and will be resend to
        # any available master
        self.delayed = []
        # Active pub futures: {master_id: (future, [job_ret, ...]), ...}
        self.pub_futures = {}

    def _spawn_syndics(self):
        '''
        Spawn all the coroutines which will sign in the syndics
        '''
        self._syndics = OrderedDict()  # mapping of opts['master'] -> syndic
        masters = self.opts['master']
        if not isinstance(masters, list):
            masters = [masters]
        for master in masters:
            # each syndic gets its own shallow copy of opts with its master
            s_opts = copy.copy(self.opts)
            s_opts['master'] = master
            self._syndics[master] = self._connect_syndic(s_opts)

    @tornado.gen.coroutine
    def _connect_syndic(self, opts):
        '''
        Create a syndic, and asynchronously connect it to a master
        '''
        last = 0  # never have we signed in
        auth_wait = opts['acceptance_wait_time']
        failed = False
        # Retry forever with growing backoff until one connect succeeds
        while True:
            log.debug(
                'Syndic attempting to connect to %s',
                opts['master']
            )
            try:
                syndic = Syndic(opts,
                                timeout=self.SYNDIC_CONNECT_TIMEOUT,
                                safe=False,
                                io_loop=self.io_loop,
                                )
                yield syndic.connect_master(failed=failed)
                # set up the syndic to handle publishes (specifically not event forwarding)
                syndic.tune_in_no_block()
                # Send an event to the master that the minion is live
                syndic.fire_master_syndic_start()
                log.info(
                    'Syndic successfully connected to %s',
                    opts['master']
                )
                break
            except SaltClientError as exc:
                failed = True
                log.error(
                    'Error while bringing up syndic for multi-syndic. Is the '
                    'master at %s responding?', opts['master']
                )
                # NOTE(review): 'last' is written here but never read again;
                # it appears to be vestigial.
                last = time.time()
                if auth_wait < self.max_auth_wait:
                    auth_wait += self.auth_wait
                yield tornado.gen.sleep(auth_wait)  # TODO: log?
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception:
                failed = True
                log.critical(
                    'Unexpected error while connecting to %s',
                    opts['master'], exc_info=True
                )
        raise tornado.gen.Return(syndic)

    def _mark_master_dead(self, master):
        '''
        Mark a master as dead. This will start the sign-in routine
        '''
        # if its connected, mark it dead
        if self._syndics[master].done():
            syndic = self._syndics[master].result()  # pylint: disable=no-member
            # replace the resolved future with a fresh reconnect coroutine
            self._syndics[master] = syndic.reconnect()
        else:
            # TODO: debug?
            log.info(
                'Attempting to mark %s as dead, although it is already '
                'marked dead', master
            )

    def _call_syndic(self, func, args=(), kwargs=None, master_id=None):
        '''
        Wrapper to call a given func on a syndic, best effort to get the one you asked for
        '''
        if kwargs is None:
            kwargs = {}
        successful = False
        # Call for each master
        for master, syndic_future in self.iter_master_options(master_id):
            if not syndic_future.done() or syndic_future.exception():
                log.error(
                    'Unable to call %s on %s, that syndic is not connected',
                    func, master
                )
                continue
            try:
                getattr(syndic_future.result(), func)(*args, **kwargs)
                successful = True
            except SaltClientError:
                log.error(
                    'Unable to call %s on %s, trying another...',
                    func, master
                )
                self._mark_master_dead(master)
        if not successful:
            log.critical('Unable to call %s on any masters!', func)

    def _return_pub_syndic(self, values, master_id=None):
        '''
        Wrapper to call the '_return_pub_multi' a syndic, best effort to get the one you asked for
        '''
        func = '_return_pub_multi'
        for master, syndic_future in self.iter_master_options(master_id):
            if not syndic_future.done() or syndic_future.exception():
                log.error(
                    'Unable to call %s on %s, that syndic is not connected',
                    func, master
                )
                continue
            future, data = self.pub_futures.get(master, (None, None))
            if future is not None:
                if not future.done():
                    if master == master_id:
                        # Targeted master previous send not done yet, call again later
                        return False
                    else:
                        # Fallback master is busy, try the next one
                        continue
                elif future.exception():
                    # Previous execution on this master returned an error
                    log.error(
                        'Unable to call %s on %s, trying another...',
                        func, master
                    )
                    self._mark_master_dead(master)
                    del self.pub_futures[master]
                    # Add not sent data to the delayed list and try the next master
                    self.delayed.extend(data)
                    continue
            future = getattr(syndic_future.result(), func)(values,
                                                           '_syndic_return',
                                                           timeout=self._return_retry_timer(),
                                                           sync=False)
            self.pub_futures[master] = (future, values)
            return True
        # Loop done and didn't exit: wasn't sent, try again later
        return False

    def iter_master_options(self, master_id=None):
        '''
        Iterate (in order) over your options for master
        '''
        masters = list(self._syndics.keys())
        # random failover shuffles the candidate order on every call
        if self.opts['syndic_failover'] == 'random':
            shuffle(masters)
        if master_id not in self._syndics:
            master_id = masters.pop(0)
        else:
            masters.remove(master_id)
        # yield the preferred master first, then the remaining ones
        while True:
            yield master_id, self._syndics[master_id]
            if len(masters) == 0:
                break
            master_id = masters.pop(0)

    def _reset_event_aggregation(self):
        # Clear the per-interval aggregation buffers
        self.job_rets = {}
        self.raw_events = []

    def reconnect_event_bus(self, something):
        # Re-arm the event handler when the previous handler future resolves
        future = self.local.event.set_event_handler(self._process_event)
        self.io_loop.add_future(future, self.reconnect_event_bus)

    # Syndic Tune In
    def tune_in(self):
        '''
        Lock onto the publisher. This is the main event loop for the syndic
        '''
        self._spawn_syndics()
        # Instantiate the local client
        self.local = salt.client.get_local_client(
            self.opts['_minion_conf_file'], io_loop=self.io_loop)
        self.local.event.subscribe('')
        log.debug('SyndicManager \'%s\' trying to tune in', self.opts['id'])
        # register the event sub to the poller
        # NOTE(review): the next two assignments duplicate what
        # _reset_event_aggregation() does immediately afterwards.
        self.job_rets = {}
        self.raw_events = []
        self._reset_event_aggregation()
        future = self.local.event.set_event_handler(self._process_event)
        self.io_loop.add_future(future, self.reconnect_event_bus)
        # forward events every syndic_event_forward_timeout
        self.forward_events = tornado.ioloop.PeriodicCallback(self._forward_events,
                                                              self.opts['syndic_event_forward_timeout'] * 1000,
                                                              )
        self.forward_events.start()
        # Make sure to gracefully handle SIGUSR1
        enable_sigusr1_handler()
        self.io_loop.start()

    def _process_event(self, raw):
        # TODO: cleanup: Move down into event class
        mtag, data = self.local.event.unpack(raw, self.local.event.serial)
        log.trace('Got event %s', mtag)  # pylint: disable=no-member
        tag_parts = mtag.split('/')
        # Job return events (salt/job/<jid>/ret/...) are aggregated per
        # master into job_rets; everything else may be forwarded raw.
        if len(tag_parts) >= 4 and tag_parts[1] == 'job' and \
                salt.utils.jid.is_jid(tag_parts[2]) and tag_parts[3] == 'ret' and \
                'return' in data:
            if 'jid' not in data:
                # Not a job return
                return
            if self.syndic_mode == 'cluster' and data.get('master_id', 0) == self.opts.get('master_id', 1):
                log.debug('Return received with matching master_id, not forwarding')
                return
            master = data.get('master_id')
            jdict = self.job_rets.setdefault(master, {}).setdefault(mtag, {})
            if not jdict:
                jdict['__fun__'] = data.get('fun')
                jdict['__jid__'] = data['jid']
                jdict['__load__'] = {}
                fstr = '{0}.get_load'.format(self.opts['master_job_cache'])
                # Only need to forward each load once. Don't hit the disk
                # for every minion return!
                if data['jid'] not in self.jid_forward_cache:
                    jdict['__load__'].update(
                        self.mminion.returners[fstr](data['jid'])
                    )
                    self.jid_forward_cache.add(data['jid'])
                    if len(self.jid_forward_cache) > self.opts['syndic_jid_forward_cache_hwm']:
                        # Pop the oldest jid from the cache
                        # (jids sort lexicographically == chronologically,
                        # so the smallest entry is the oldest)
                        tmp = sorted(list(self.jid_forward_cache))
                        tmp.pop(0)
                        self.jid_forward_cache = set(tmp)
            if master is not None:
                # __'s to make sure it doesn't print out on the master cli
                jdict['__master_id__'] = master
            ret = {}
            for key in 'return', 'retcode', 'success':
                if key in data:
                    ret[key] = data[key]
            jdict[data['id']] = ret
        else:
            # TODO: config to forward these? If so we'll have to keep track of who
            # has seen them
            # if we are the top level masters-- don't forward all the minion events
            if self.syndic_mode == 'sync':
                # Add generic event aggregation here
                if 'retcode' not in data:
                    self.raw_events.append({'data': data, 'tag': mtag})

    def _forward_events(self):
        log.trace('Forwarding events')  # pylint: disable=no-member
        if self.raw_events:
            # Swap out the buffer before sending so new events can keep
            # accumulating while the send is in flight.
            events = self.raw_events
            self.raw_events = []
            self._call_syndic('_fire_master',
                              kwargs={'events': events,
                                      'pretag': tagify(self.opts['id'], base='syndic'),
                                      'timeout': self._return_retry_timer(),
                                      'sync': False,
                                      },
                              )
        if self.delayed:
            # Retry returns that previously failed to send, to any master
            res = self._return_pub_syndic(self.delayed)
            if res:
                self.delayed = []
        for master in list(six.iterkeys(self.job_rets)):
            values = list(six.itervalues(self.job_rets[master]))
            res = self._return_pub_syndic(values, master_id=master)
            if res:
                del self.job_rets[master]
class Matcher(object):
    '''
    Use to return the value for matching calls from the master
    '''
    def __init__(self, opts, functions=None):
        # functions may be None; data_match lazily loads them on demand
        self.opts = opts
        self.functions = functions

    def confirm_top(self, match, data, nodegroups=None):
        '''
        Takes the data passed to a top file environment and determines if the
        data matches this minion
        '''
        matcher = 'compound'
        if not data:
            log.error('Received bad data when setting the match from the top '
                      'file')
            return False
        # The last dict in the list carrying a 'match' key wins
        for item in data:
            if isinstance(item, dict):
                if 'match' in item:
                    matcher = item['match']
        if hasattr(self, matcher + '_match'):
            funcname = '{0}_match'.format(matcher)
            if matcher == 'nodegroup':
                return getattr(self, funcname)(match, nodegroups)
            return getattr(self, funcname)(match)
        else:
            log.error('Attempting to match with unknown matcher: %s', matcher)
            return False

    def glob_match(self, tgt):
        '''
        Returns true if the passed glob matches the id
        '''
        if not isinstance(tgt, six.string_types):
            return False
        return fnmatch.fnmatch(self.opts['id'], tgt)

    def pcre_match(self, tgt):
        '''
        Returns true if the passed pcre regex matches
        '''
        return bool(re.match(tgt, self.opts['id']))

    def list_match(self, tgt):
        '''
        Determines if this host is on the list
        '''
        if isinstance(tgt, six.string_types):
            tgt = tgt.split(',')
        return bool(self.opts['id'] in tgt)

    def grain_match(self, tgt, delimiter=DEFAULT_TARGET_DELIM):
        '''
        Reads in the grains glob match
        '''
        log.debug('grains target: %s', tgt)
        if delimiter not in tgt:
            log.error('Got insufficient arguments for grains match '
                      'statement from master')
            return False
        return salt.utils.data.subdict_match(
            self.opts['grains'], tgt, delimiter=delimiter
        )

    def grain_pcre_match(self, tgt, delimiter=DEFAULT_TARGET_DELIM):
        '''
        Matches a grain based on regex
        '''
        log.debug('grains pcre target: %s', tgt)
        if delimiter not in tgt:
            log.error('Got insufficient arguments for grains pcre match '
                      'statement from master')
            return False
        return salt.utils.data.subdict_match(
            self.opts['grains'], tgt, delimiter=delimiter, regex_match=True)

    def data_match(self, tgt):
        '''
        Match based on the local data store on the minion
        '''
        # Lazily load execution modules if the Matcher was built without them
        if self.functions is None:
            utils = salt.loader.utils(self.opts)
            self.functions = salt.loader.minion_mods(self.opts, utils=utils)
        comps = tgt.split(':')
        if len(comps) < 2:
            return False
        val = self.functions['data.getval'](comps[0])
        if val is None:
            # The value is not defined
            return False
        if isinstance(val, list):
            # We are matching a single component to a single list member
            for member in val:
                if fnmatch.fnmatch(six.text_type(member).lower(), comps[1].lower()):
                    return True
            return False
        if isinstance(val, dict):
            if comps[1] in val:
                return True
            return False
        return bool(fnmatch.fnmatch(
            val,
            comps[1],
        ))

    def pillar_match(self, tgt, delimiter=DEFAULT_TARGET_DELIM):
        '''
        Reads in the pillar glob match
        '''
        log.debug('pillar target: %s', tgt)
        if delimiter not in tgt:
            log.error('Got insufficient arguments for pillar match '
                      'statement from master')
            return False
        return salt.utils.data.subdict_match(
            self.opts['pillar'], tgt, delimiter=delimiter
        )

    def pillar_pcre_match(self, tgt, delimiter=DEFAULT_TARGET_DELIM):
        '''
        Reads in the pillar pcre match
        '''
        log.debug('pillar PCRE target: %s', tgt)
        if delimiter not in tgt:
            log.error('Got insufficient arguments for pillar PCRE match '
                      'statement from master')
            return False
        return salt.utils.data.subdict_match(
            self.opts['pillar'], tgt, delimiter=delimiter, regex_match=True
        )

    def pillar_exact_match(self, tgt, delimiter=':'):
        '''
        Reads in the pillar match, no globbing, no PCRE
        '''
        log.debug('pillar target: %s', tgt)
        if delimiter not in tgt:
            log.error('Got insufficient arguments for pillar match '
                      'statement from master')
            return False
        return salt.utils.data.subdict_match(self.opts['pillar'],
                                             tgt,
                                             delimiter=delimiter,
                                             exact_match=True)

    def ipcidr_match(self, tgt):
        '''
        Matches based on IP address or CIDR notation
        '''
        try:
            # Target is an address?
            tgt = ipaddress.ip_address(tgt)
        except:  # pylint: disable=bare-except
            try:
                # Target is a network?
                tgt = ipaddress.ip_network(tgt)
            except:  # pylint: disable=bare-except
                log.error('Invalid IP/CIDR target: %s', tgt)
                # NOTE(review): this returns [] (falsy) while every other
                # matcher returns a bool — inconsistent return type.
                return []
        proto = 'ipv{0}'.format(tgt.version)
        grains = self.opts['grains']
        if proto not in grains:
            match = False
        elif isinstance(tgt, (ipaddress.IPv4Address, ipaddress.IPv6Address)):
            match = six.text_type(tgt) in grains[proto]
        else:
            match = salt.utils.network.in_subnet(tgt, grains[proto])
        return match

    def range_match(self, tgt):
        '''
        Matches based on range cluster
        '''
        if HAS_RANGE:
            range_ = seco.range.Range(self.opts['range_server'])
            try:
                return self.opts['grains']['fqdn'] in range_.expand(tgt)
            except seco.range.RangeException as exc:
                log.debug('Range exception in compound match: %s', exc)
                return False
        return False

    def compound_match(self, tgt):
        '''
        Runs the compound target check
        '''
        nodegroups = self.opts.get('nodegroups', {})
        if not isinstance(tgt, six.string_types) and not isinstance(tgt, (list, tuple)):
            log.error('Compound target received that is neither string, list nor tuple')
            return False
        log.debug('compound_match: %s ? %s', self.opts['id'], tgt)
        # Single-letter engine prefixes -> matcher method name
        ref = {'G': 'grain',
               'P': 'grain_pcre',
               'I': 'pillar',
               'J': 'pillar_pcre',
               'L': 'list',
               'N': None,      # Nodegroups should already be expanded
               'S': 'ipcidr',
               'E': 'pcre'}
        if HAS_RANGE:
            ref['R'] = 'range'
        results = []
        opers = ['and', 'or', 'not', '(', ')']
        if isinstance(tgt, six.string_types):
            words = tgt.split()
        else:
            # we make a shallow copy in order to not affect the passed in arg
            words = tgt[:]
        while words:
            word = words.pop(0)
            target_info = salt.utils.minions.parse_target(word)
            # Easy check first
            if word in opers:
                if results:
                    if results[-1] == '(' and word in ('and', 'or'):
                        log.error('Invalid beginning operator after "(": %s', word)
                        return False
                    if word == 'not':
                        # implicit 'and' before a bare 'not'
                        if not results[-1] in ('and', 'or', '('):
                            results.append('and')
                    results.append(word)
                else:
                    # seq start with binary oper, fail
                    if word not in ['(', 'not']:
                        log.error('Invalid beginning operator: %s', word)
                        return False
                    results.append(word)
            elif target_info and target_info['engine']:
                if 'N' == target_info['engine']:
                    # if we encounter a node group, just evaluate it in-place
                    decomposed = salt.utils.minions.nodegroup_comp(target_info['pattern'], nodegroups)
                    if decomposed:
                        words = decomposed + words
                    continue
                engine = ref.get(target_info['engine'])
                if not engine:
                    # If an unknown engine is called at any time, fail out
                    log.error(
                        'Unrecognized target engine "%s" for target '
                        'expression "%s"', target_info['engine'], word
                    )
                    return False
                engine_args = [target_info['pattern']]
                engine_kwargs = {}
                if target_info['delimiter']:
                    engine_kwargs['delimiter'] = target_info['delimiter']
                results.append(
                    six.text_type(getattr(self, '{0}_match'.format(engine))(*engine_args, **engine_kwargs))
                )
            else:
                # The match is not explicitly defined, evaluate it as a glob
                results.append(six.text_type(self.glob_match(word)))
        # results is now a boolean expression of 'True'/'False' tokens and
        # operators, evaluated as Python
        results = ' '.join(results)
        log.debug('compound_match %s ? "%s" => "%s"', self.opts['id'], tgt, results)
        try:
            return eval(results)  # pylint: disable=W0123
        except Exception:
            log.error(
                'Invalid compound target: %s for results: %s', tgt, results)
            return False
        # NOTE(review): unreachable — both branches above return.
        return False

    def nodegroup_match(self, tgt, nodegroups):
        '''
        This is a compatibility matcher and is NOT called when using
        nodegroups for remote execution, but is called when the nodegroups
        matcher is used in states
        '''
        if tgt in nodegroups:
            return self.compound_match(
                salt.utils.minions.nodegroup_comp(tgt, nodegroups)
            )
        return False
class ProxyMinionManager(MinionManager):
    '''
    Multi-minion manager specialized for proxy minions.
    '''
    def _create_minion_object(self, opts, timeout, safe,
                              io_loop=None, loaded_base_name=None,
                              jid_queue=None):
        '''
        Factory hook: build a ProxyMinion instead of a regular Minion.
        '''
        extra = {
            'io_loop': io_loop,
            'loaded_base_name': loaded_base_name,
            'jid_queue': jid_queue,
        }
        return ProxyMinion(opts, timeout, safe, **extra)
class ProxyMinion(Minion):
    '''
    This class instantiates a 'proxy' minion--a minion that does not manipulate
    the host it runs on, but instead manipulates a device that cannot run a minion.
    '''
    # TODO: better name...
    @tornado.gen.coroutine
    def _post_master_init(self, master):
        '''
        Function to finish init after connecting to a master
        This is primarily loading modules, pillars, etc. (since they need
        to know which master they connected to)
        If this function is changed, please check Minion._post_master_init
        to see if those changes need to be propagated.
        ProxyMinions need a significantly different post master setup,
        which is why the differences are not factored out into separate helper
        functions.
        '''
        log.debug("subclassed _post_master_init")
        if self.connected:
            self.opts['master'] = master
            self.opts['pillar'] = yield salt.pillar.get_async_pillar(
                self.opts,
                self.opts['grains'],
                self.opts['id'],
                saltenv=self.opts['saltenv'],
                pillarenv=self.opts.get('pillarenv'),
            ).compile_pillar()
        # A proxy minion cannot run without a 'proxy' config block
        if 'proxy' not in self.opts['pillar'] and 'proxy' not in self.opts:
            errmsg = 'No proxy key found in pillar or opts for id ' + self.opts['id'] + '. ' + \
                'Check your pillar/opts configuration and contents. Salt-proxy aborted.'
            log.error(errmsg)
            self._running = False
            raise SaltSystemExit(code=-1, msg=errmsg)
        if 'proxy' not in self.opts:
            self.opts['proxy'] = self.opts['pillar']['proxy']
        if self.opts.get('proxy_merge_pillar_in_opts'):
            # Override proxy opts with pillar data when the user required.
            self.opts = salt.utils.dictupdate.merge(self.opts,
                                                    self.opts['pillar'],
                                                    strategy=self.opts.get('proxy_merge_pillar_in_opts_strategy'),
                                                    merge_lists=self.opts.get('proxy_deep_merge_pillar_in_opts', False))
        elif self.opts.get('proxy_mines_pillar'):
            # Even when not required, some details such as mine configuration
            # should be merged anyway whenever possible.
            if 'mine_interval' in self.opts['pillar']:
                self.opts['mine_interval'] = self.opts['pillar']['mine_interval']
            if 'mine_functions' in self.opts['pillar']:
                general_proxy_mines = self.opts.get('mine_functions', [])
                specific_proxy_mines = self.opts['pillar']['mine_functions']
                try:
                    self.opts['mine_functions'] = general_proxy_mines + specific_proxy_mines
                except TypeError as terr:
                    # NOTE(review): 'terr' is captured but not logged; the
                    # merge is silently skipped on type mismatch.
                    log.error('Unable to merge mine functions from the pillar in the opts, for proxy {}'.format(
                        self.opts['id']))
        fq_proxyname = self.opts['proxy']['proxytype']
        # Need to load the modules so they get all the dunder variables
        self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
        # we can then sync any proxymodules down from the master
        # we do a sync_all here in case proxy code was installed by
        # SPM or was manually placed in /srv/salt/_modules etc.
        self.functions['saltutil.sync_all'](saltenv=self.opts['saltenv'])
        # Pull in the utils
        self.utils = salt.loader.utils(self.opts)
        # Then load the proxy module
        self.proxy = salt.loader.proxy(self.opts, utils=self.utils)
        # And re-load the modules so the __proxy__ variable gets injected
        self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
        self.functions.pack['__proxy__'] = self.proxy
        self.proxy.pack['__salt__'] = self.functions
        self.proxy.pack['__ret__'] = self.returners
        self.proxy.pack['__pillar__'] = self.opts['pillar']
        # Reload utils as well (chicken and egg, __utils__ needs __proxy__ and __proxy__ needs __utils__
        self.utils = salt.loader.utils(self.opts, proxy=self.proxy)
        self.proxy.pack['__utils__'] = self.utils
        # Reload all modules so all dunder variables are injected
        self.proxy.reload_modules()
        # Start engines here instead of in the Minion superclass __init__
        # This is because we need to inject the __proxy__ variable but
        # it is not setup until now.
        self.io_loop.spawn_callback(salt.engines.start_engines, self.opts,
                                    self.process_manager, proxy=self.proxy)
        # Every proxymodule must expose both init() and shutdown()
        if ('{0}.init'.format(fq_proxyname) not in self.proxy
                or '{0}.shutdown'.format(fq_proxyname) not in self.proxy):
            errmsg = 'Proxymodule {0} is missing an init() or a shutdown() or both. '.format(fq_proxyname) + \
                'Check your proxymodule. Salt-proxy aborted.'
            log.error(errmsg)
            self._running = False
            raise SaltSystemExit(code=-1, msg=errmsg)
        self.module_executors = self.proxy.get('{0}.module_executors'.format(fq_proxyname), lambda: [])()
        proxy_init_fn = self.proxy[fq_proxyname + '.init']
        proxy_init_fn(self.opts)
        # Grains can only be collected once the proxy module is initialized
        self.opts['grains'] = salt.loader.grains(self.opts, proxy=self.proxy)
        self.serial = salt.payload.Serial(self.opts)
        self.mod_opts = self._prep_mod_opts()
        self.matcher = Matcher(self.opts, self.functions)
        self.beacons = salt.beacons.Beacon(self.opts, self.functions)
        uid = salt.utils.user.get_uid(user=self.opts.get('user', None))
        self.proc_dir = get_proc_dir(self.opts['cachedir'], uid=uid)
        if self.connected and self.opts['pillar']:
            # The pillar has changed due to the connection to the master.
            # Reload the functions so that they can use the new pillar data.
            self.functions, self.returners, self.function_errors, self.executors = self._load_modules()
            if hasattr(self, 'schedule'):
                self.schedule.functions = self.functions
                self.schedule.returners = self.returners
        if not hasattr(self, 'schedule'):
            self.schedule = salt.utils.schedule.Schedule(
                self.opts,
                self.functions,
                self.returners,
                cleanup=[master_event(type='alive')],
                proxy=self.proxy)
        # add default scheduling jobs to the minions scheduler
        if self.opts['mine_enabled'] and 'mine.update' in self.functions:
            self.schedule.add_job({
                '__mine_interval':
                {
                    'function': 'mine.update',
                    'minutes': self.opts['mine_interval'],
                    'jid_include': True,
                    'maxrunning': 2,
                    'run_on_start': True,
                    'return_job': self.opts.get('mine_return_job', False)
                }
            }, persist=True)
            log.info('Added mine.update to scheduler')
        else:
            self.schedule.delete_job('__mine_interval', persist=True)
        # add master_alive job if enabled
        if (self.opts['transport'] != 'tcp' and
                self.opts['master_alive_interval'] > 0):
            self.schedule.add_job({
                master_event(type='alive', master=self.opts['master']):
                {
                    'function': 'status.master',
                    'seconds': self.opts['master_alive_interval'],
                    'jid_include': True,
                    'maxrunning': 1,
                    'return_job': False,
                    'kwargs': {'master': self.opts['master'],
                               'connected': True}
                }
            }, persist=True)
            if self.opts['master_failback'] and \
                    'master_list' in self.opts and \
                    self.opts['master'] != self.opts['master_list'][0]:
                self.schedule.add_job({
                    master_event(type='failback'):
                    {
                        'function': 'status.ping_master',
                        'seconds': self.opts['master_failback_interval'],
                        'jid_include': True,
                        'maxrunning': 1,
                        'return_job': False,
                        'kwargs': {'master': self.opts['master_list'][0]}
                    }
                }, persist=True)
            else:
                self.schedule.delete_job(master_event(type='failback'), persist=True)
        else:
            self.schedule.delete_job(master_event(type='alive', master=self.opts['master']), persist=True)
            self.schedule.delete_job(master_event(type='failback'), persist=True)
        # proxy keepalive
        proxy_alive_fn = fq_proxyname+'.alive'
        if (proxy_alive_fn in self.proxy
                and 'status.proxy_reconnect' in self.functions
                and self.opts.get('proxy_keep_alive', True)):
            # if `proxy_keep_alive` is either not specified, either set to False does not retry reconnecting
            self.schedule.add_job({
                '__proxy_keepalive':
                {
                    'function': 'status.proxy_reconnect',
                    'minutes': self.opts.get('proxy_keep_alive_interval', 1),  # by default, check once per minute
                    'jid_include': True,
                    'maxrunning': 1,
                    'return_job': False,
                    'kwargs': {
                        'proxy_name': fq_proxyname
                    }
                }
            }, persist=True)
            self.schedule.enable_schedule()
        else:
            self.schedule.delete_job('__proxy_keepalive', persist=True)
        # Sync the grains here so the proxy can communicate them to the master
        self.functions['saltutil.sync_grains'](saltenv='base')
        self.grains_cache = self.opts['grains']
        self.ready = True

    @classmethod
    def _target(cls, minion_instance, opts, data, connected):
        # Entry point used when a job is executed in a separate process:
        # rebuild (or reuse) a minion instance, then dispatch the job.
        if not minion_instance:
            minion_instance = cls(opts)
            minion_instance.connected = connected
            if not hasattr(minion_instance, 'functions'):
                # Need to load the modules so they get all the dunder variables
                functions, returners, function_errors, executors = (
                    minion_instance._load_modules(grains=opts['grains'])
                )
                minion_instance.functions = functions
                minion_instance.returners = returners
                minion_instance.function_errors = function_errors
                minion_instance.executors = executors
                # Pull in the utils
                minion_instance.utils = salt.loader.utils(minion_instance.opts)
                # Then load the proxy module
                minion_instance.proxy = salt.loader.proxy(minion_instance.opts, utils=minion_instance.utils)
                # And re-load the modules so the __proxy__ variable gets injected
                functions, returners, function_errors, executors = (
                    minion_instance._load_modules(grains=opts['grains'])
                )
                minion_instance.functions = functions
                minion_instance.returners = returners
                minion_instance.function_errors = function_errors
                minion_instance.executors = executors
                minion_instance.functions.pack['__proxy__'] = minion_instance.proxy
                minion_instance.proxy.pack['__salt__'] = minion_instance.functions
                minion_instance.proxy.pack['__ret__'] = minion_instance.returners
                minion_instance.proxy.pack['__pillar__'] = minion_instance.opts['pillar']
                # Reload utils as well (chicken and egg, __utils__ needs __proxy__ and __proxy__ needs __utils__
                minion_instance.utils = salt.loader.utils(minion_instance.opts, proxy=minion_instance.proxy)
                minion_instance.proxy.pack['__utils__'] = minion_instance.utils
                # Reload all modules so all dunder variables are injected
                minion_instance.proxy.reload_modules()
                fq_proxyname = opts['proxy']['proxytype']
                minion_instance.module_executors = minion_instance.proxy.get('{0}.module_executors'.format(fq_proxyname), lambda: [])()
                proxy_init_fn = minion_instance.proxy[fq_proxyname + '.init']
                proxy_init_fn(opts)
            if not hasattr(minion_instance, 'serial'):
                minion_instance.serial = salt.payload.Serial(opts)
            if not hasattr(minion_instance, 'proc_dir'):
                uid = salt.utils.user.get_uid(user=opts.get('user', None))
                minion_instance.proc_dir = (
                    get_proc_dir(opts['cachedir'], uid=uid)
                )
        # Multi-function jobs and single-function jobs take different paths
        with tornado.stack_context.StackContext(minion_instance.ctx):
            if isinstance(data['fun'], tuple) or isinstance(data['fun'], list):
                Minion._thread_multi_return(minion_instance, opts, data)
            else:
                Minion._thread_return(minion_instance, opts, data)
class SProxyMinion(SMinion):
    '''
    Create an object that has loaded all of the minion module functions,
    grains, modules, returners etc. The SProxyMinion allows developers to
    generate all of the salt minion functions and present them with these
    functions for general use.
    '''
    def gen_modules(self, initial_load=False):
        '''
        Tell the minion to reload the execution modules
        CLI Example:
        .. code-block:: bash
            salt '*' sys.reload_modules
        '''
        self.opts['grains'] = salt.loader.grains(self.opts)
        # Synchronous pillar compile (unlike ProxyMinion's async variant)
        self.opts['pillar'] = salt.pillar.get_pillar(
            self.opts,
            self.opts['grains'],
            self.opts['id'],
            saltenv=self.opts['saltenv'],
            pillarenv=self.opts.get('pillarenv'),
        ).compile_pillar()
        # A proxy cannot run without a 'proxy' config block
        if 'proxy' not in self.opts['pillar'] and 'proxy' not in self.opts:
            errmsg = (
                'No "proxy" configuration key found in pillar or opts '
                'dictionaries for id {id}. Check your pillar/options '
                'configuration and contents. Salt-proxy aborted.'
            ).format(id=self.opts['id'])
            log.error(errmsg)
            self._running = False
            raise SaltSystemExit(code=salt.defaults.exitcodes.EX_GENERIC, msg=errmsg)
        if 'proxy' not in self.opts:
            self.opts['proxy'] = self.opts['pillar']['proxy']
        # Then load the proxy module
        self.proxy = salt.loader.proxy(self.opts)
        self.utils = salt.loader.utils(self.opts, proxy=self.proxy)
        self.functions = salt.loader.minion_mods(self.opts, utils=self.utils, notify=False, proxy=self.proxy)
        self.returners = salt.loader.returners(self.opts, self.functions, proxy=self.proxy)
        self.matcher = Matcher(self.opts, self.functions)
        self.functions['sys.reload_modules'] = self.gen_modules
        self.executors = salt.loader.executors(self.opts, self.functions, proxy=self.proxy)
        fq_proxyname = self.opts['proxy']['proxytype']
        # we can then sync any proxymodules down from the master
        # we do a sync_all here in case proxy code was installed by
        # SPM or was manually placed in /srv/salt/_modules etc.
        self.functions['saltutil.sync_all'](saltenv=self.opts['saltenv'])
        self.functions.pack['__proxy__'] = self.proxy
        self.proxy.pack['__salt__'] = self.functions
        self.proxy.pack['__ret__'] = self.returners
        self.proxy.pack['__pillar__'] = self.opts['pillar']
        # Reload utils as well (chicken and egg, __utils__ needs __proxy__ and __proxy__ needs __utils__
        self.utils = salt.loader.utils(self.opts, proxy=self.proxy)
        self.proxy.pack['__utils__'] = self.utils
        # Reload all modules so all dunder variables are injected
        self.proxy.reload_modules()
        # Every proxymodule must expose both init() and shutdown()
        if ('{0}.init'.format(fq_proxyname) not in self.proxy
                or '{0}.shutdown'.format(fq_proxyname) not in self.proxy):
            errmsg = 'Proxymodule {0} is missing an init() or a shutdown() or both. '.format(fq_proxyname) + \
                'Check your proxymodule. Salt-proxy aborted.'
            log.error(errmsg)
            self._running = False
            raise SaltSystemExit(code=salt.defaults.exitcodes.EX_GENERIC, msg=errmsg)
        self.module_executors = self.proxy.get('{0}.module_executors'.format(fq_proxyname), lambda: [])()
        proxy_init_fn = self.proxy[fq_proxyname + '.init']
        proxy_init_fn(self.opts)
        # Grains can only be fully collected once the proxy is initialized
        self.opts['grains'] = salt.loader.grains(self.opts, proxy=self.proxy)
        # Sync the grains here so the proxy can communicate them to the master
        self.functions['saltutil.sync_grains'](saltenv='base')
        self.grains_cache = self.opts['grains']
        self.ready = True
| 42.506365 | 136 | 0.544809 |
8e06b030b145d2428d648bcc41f82679bc12b283 | 4,964 | py | Python | dataset.py | VighneshNatarajanGanesh/NICE-GAN-pytorch | 5e662560c465d5bd93963b726ec2465133ec500f | [
"MIT"
] | null | null | null | dataset.py | VighneshNatarajanGanesh/NICE-GAN-pytorch | 5e662560c465d5bd93963b726ec2465133ec500f | [
"MIT"
] | null | null | null | dataset.py | VighneshNatarajanGanesh/NICE-GAN-pytorch | 5e662560c465d5bd93963b726ec2465133ec500f | [
"MIT"
] | null | null | null | import torch.utils.data as data
from PIL import Image
import numpy as np
import h5py
import os
import os.path
HDF5_DATASET_NAME = 'hs_data'
def has_file_allowed_extension(filename, extensions):
    """Checks if a file is an allowed extension.

    Args:
        filename (string): path to a file
        extensions (iterable of string): allowed extensions (lowercase)

    Returns:
        bool: True if the filename ends with a known image extension
    """
    lowered = filename.lower()
    for ext in extensions:
        if lowered.endswith(ext):
            return True
    return False
def find_classes(dir):
    """List the immediate subdirectories of *dir* as class names.

    Returns:
        (classes, class_to_idx): the sorted subdirectory names, and a dict
        mapping each name to its index in that sorted order.
    """
    classes = sorted(
        entry for entry in os.listdir(dir)
        if os.path.isdir(os.path.join(dir, entry))
    )
    class_to_idx = {name: idx for idx, name in enumerate(classes)}
    return classes, class_to_idx
def make_dataset(dir, extensions):
    """Walk *dir* and collect one ``(path, 0)`` entry per matching file.

    Directories and filenames are visited in sorted order so the result is
    deterministic.  The second tuple element is a dummy target (always 0).
    """
    return [
        (os.path.join(walk_root, name), 0)
        for walk_root, _, filenames in sorted(os.walk(dir))
        for name in sorted(filenames)
        if has_file_allowed_extension(name, extensions)
    ]
class DatasetFolder(data.Dataset):
    """A generic dataset over files found under a root directory.

    Every file whose extension is in *extensions* becomes one sample; the
    sample value is produced by calling *loader* on the file path.  Note
    that ``__getitem__`` returns ``(sample, path)`` — the file path, not a
    class target.
    """

    def __init__(self, root, loader, extensions, transform=None, target_transform=None):
        # classes, class_to_idx = find_classes(root)
        found = make_dataset(root, extensions)
        if not found:
            message = ("Found 0 files in subfolders of: " + root + "\n"
                       "Supported extensions are: " + ",".join(extensions))
            raise RuntimeError(message)
        self.root = root
        self.loader = loader
        self.extensions = extensions
        self.samples = found
        self.transform = transform
        self.target_transform = target_transform

    def __getitem__(self, index):
        """Return ``(transformed sample, file path)`` for *index*."""
        path, target = self.samples[index]
        sample = self.loader(path)
        if self.transform is not None:
            sample = self.transform(sample)
        if self.target_transform is not None:
            # The transformed target is computed but not returned; callers
            # receive the file path instead.
            target = self.target_transform(target)
        return sample, path

    def __len__(self):
        """Number of discovered sample files."""
        return len(self.samples)

    def __repr__(self):
        header = 'Dataset ' + self.__class__.__name__ + '\n'
        body = '    Number of datapoints: {}\n'.format(self.__len__())
        body += '    Root Location: {}\n'.format(self.root)
        label = '    Transforms (if any): '
        body += '{0}{1}\n'.format(
            label, self.transform.__repr__().replace('\n', '\n' + ' ' * len(label)))
        label = '    Target Transforms (if any): '
        body += '{0}{1}'.format(
            label, self.target_transform.__repr__().replace('\n', '\n' + ' ' * len(label)))
        return header + body
# Extensions accepted by make_dataset / ImageFolder.  '.hdf5' was added to
# the usual PIL-readable image formats so pil_and_hdf5_loader can also
# read HDF5 files.
IMG_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.ppm', '.bmp', '.pgm', '.tif', '.hdf5']
def pil_and_hdf5_loader(path):
    """Load *path* as a float32 array rescaled to the range [-1, 1].

    Two on-disk formats are supported:
      * ``.hdf5`` files — read from the dataset named by HDF5_DATASET_NAME
        (sliced with ``[:]``, which was measured to be much faster than
        ``np.array(f[name])``);
      * anything PIL can open — converted to RGB first.

    The rescaling is done here rather than relying on
    torchvision.transforms.ToTensor (which only rescales certain inputs);
    see https://pytorch.org/docs/stable/torchvision/transforms.html
    """
    if path.endswith('.hdf5'):
        with h5py.File(path, 'r') as f:
            raw = f[HDF5_DATASET_NAME][:].astype(np.float32)
        return scale_to_signed_unit(raw)
    # Open via a file object to avoid a ResourceWarning
    # (https://github.com/python-pillow/Pillow/issues/835).
    with open(path, 'rb') as f:
        img = Image.open(f)
        raw = np.array(img.convert('RGB')).astype(np.float32)
    return scale_to_signed_unit(raw)


def scale_to_signed_unit(data):
    """Min-max scale *data* to [0, 1], then shift it to [-1, 1].

    Bug fix: the original code divided by the value range without first
    subtracting the minimum, so arrays whose minimum was non-zero were not
    actually mapped onto [-1, 1] despite the stated intent.  The small
    epsilon keeps constant arrays from dividing by zero.
    """
    lo = data.min()
    data = (data - lo) / (data.max() - lo + 0.0001)
    data *= 2
    data -= 1
    return data
def default_loader(path):
    """Load *path* via the combined PIL/HDF5 loader (pil_and_hdf5_loader)."""
    return pil_and_hdf5_loader(path)
class ImageFolder(DatasetFolder):
    """DatasetFolder pre-configured for image/HDF5 files.

    Uses IMG_EXTENSIONS to select files and default_loader to read them.
    """

    def __init__(self, root, transform=None, target_transform=None,
                 loader=default_loader):
        super(ImageFolder, self).__init__(
            root,
            loader,
            IMG_EXTENSIONS,
            transform=transform,
            target_transform=target_transform,
        )
        # Alias kept for torchvision-style API compatibility.
        self.imgs = self.samples
| 33.093333 | 110 | 0.57917 |
0d17a9cf5e55677f37ae31f113726504c537fb75 | 1,833 | py | Python | corpora/ljspeech.py | ishine/neural-lexicon-reader | 66f4c464a7a442812e79458759ac913ce51d1c6e | [
"MIT"
] | 4 | 2021-10-31T08:29:21.000Z | 2021-12-29T08:12:56.000Z | corpora/ljspeech.py | shaun95/neural-lexicon-reader | 66f4c464a7a442812e79458759ac913ce51d1c6e | [
"MIT"
] | 1 | 2021-12-12T11:22:20.000Z | 2021-12-12T11:53:47.000Z | corpora/ljspeech.py | mutiann/neural-lexicon-reader | 66f4c464a7a442812e79458759ac913ce51d1c6e | [
"MIT"
] | null | null | null | # Available at https://keithito.com/LJ-Speech-Dataset/
from corpora import dataset_path, transformed_path
import os, glob, shutil
import librosa
import re
# (compiled pattern, expansion) pairs: each abbreviation is matched as a
# whole word followed by a literal period, case-insensitively.
_abbreviations = [
    (re.compile('\\b%s\\.' % abbrev, re.IGNORECASE), expansion)
    for abbrev, expansion in [
        ('mrs', 'misess'),
        ('mr', 'mister'),
        ('dr', 'doctor'),
        ('st', 'saint'),
        ('co', 'company'),
        ('jr', 'junior'),
        ('maj', 'major'),
        ('gen', 'general'),
        ('drs', 'doctors'),
        ('rev', 'reverend'),
        ('lt', 'lieutenant'),
        ('hon', 'honorable'),
        ('sgt', 'sergeant'),
        ('capt', 'captain'),
        ('esq', 'esquire'),
        ('ltd', 'limited'),
        ('col', 'colonel'),
        ('ft', 'fort'),
    ]
]


def expand_abbreviations(text):
    """Replace each known ``abbrev.`` occurrence in *text* with its expansion."""
    for pattern, expansion in _abbreviations:
        text = pattern.sub(expansion, text)
    return text
# --- One-shot preprocessing: copy LJSpeech wavs under speaker-prefixed
# names and rewrite metadata.csv in <id>|<script>|<speaker>|<lang> form.
in_path = os.path.join(dataset_path, 'LJSpeech-1.1')
output_path = os.path.join(transformed_path, 'ljspeech')
wav_output_path = os.path.join(output_path, 'wavs')
os.makedirs(wav_output_path, exist_ok=True)
# NOTE(review): file handle opened inline is never closed explicitly.
samples = open(os.path.join(in_path, 'metadata.csv'), encoding='utf-8').read().splitlines()
spk_name = 'ljspeech'
lang = 'en_us'
n_skip = 0  # NOTE(review): never incremented, so the "skipped" count is always 0
total_dur = 0  # running total of audio duration, in seconds
# NOTE(review): output handle is never closed/flushed explicitly either.
fw = open(os.path.join(output_path, 'metadata.csv'), 'w', encoding='utf-8')
i = 0
for l in samples:
    l = l.split('|')
    # Rows are <file id>|<raw transcript>|<normalized transcript>; the
    # middle (raw) column is discarded.
    filename, _, script = l
    script = expand_abbreviations(script)
    wav_file = os.path.join(in_path, 'wavs', filename + '.wav')
    dur = librosa.get_duration(filename=wav_file)
    total_dur += dur
    # Copy audio under a zero-padded, speaker-prefixed name, e.g. ljspeech_0000000001.wav
    shutil.copy(wav_file, os.path.join(wav_output_path, '%s_%010d.wav' % (spk_name, i)))
    fw.write('|'.join(['%s_%010d' % (spk_name, i), script, spk_name, lang]) + '\n')
    i += 1
print("%d samples, %d skipped" % (len(samples) - n_skip, n_skip))
print("Total duration: %.2f h, %.2f min" % (total_dur / 60 / 60, total_dur / 60))
25180c8ca7fe29ea44eabeeec3954a753f4797d2 | 5,729 | py | Python | python_tools/test/test__ACCESS_Filters/test_ACCESS_Filters.py | mskcc/ACCESS-Pipeline | b0f226a9ac5152f3afe0d38c8cd54aa25b8b01cf | [
"MIT"
] | 4 | 2018-08-23T04:37:43.000Z | 2021-07-03T12:49:51.000Z | python_tools/test/test__ACCESS_Filters/test_ACCESS_Filters.py | andurill/ACCESS-Pipeline | 3441040dfaecba58150c13a95a6a93657b00778a | [
"MIT"
] | 32 | 2018-06-08T12:44:55.000Z | 2022-01-13T01:29:35.000Z | python_tools/test/test__ACCESS_Filters/test_ACCESS_Filters.py | andurill/ACCESS-Pipeline | 3441040dfaecba58150c13a95a6a93657b00778a | [
"MIT"
] | 4 | 2018-05-25T21:43:48.000Z | 2022-01-07T18:51:43.000Z | import unittest
from python_tools.workflow_tools.ACCESS_filters import (
make_pre_filtered_maf,
apply_filter_maf,
make_condensed_post_filter
)
from python_tools.util import ArgparseMock
class ACCESSFiltersTestCase(unittest.TestCase):
    """End-to-end tests for the ACCESS variant-filtering workflow.

    Each test builds an ArgparseMock from a dict of CLI-style parameters
    (fixture mafs/fillouts under ./test_data) and runs the
    pre-filter -> filter (-> condense) pipeline.
    """
    def setUp(self):
        """
        Set some constants used for testing
        :return:
        """
        # Allow us to use paths relative to the current directory's tests
        # os.chdir('test__ACCESS_Filters')
        self.testing_parameters = {
            'tumor_samplename': 't_sample',
            'normal_samplename': 'n_sample',
            'anno_maf': './test_data/test.maf',
            'fillout_maf': './test_data/test_fillout.maf',
            'blacklist': './test_data/blacklist.txt',
            'tumor_detect_alt_thres': 2,
            'curated_detect_alt_thres': 2,
            'DS_tumor_detect_alt_thres': 2,
            'DS_curated_detect_alt_thres': 2,
            'normal_TD_min': 20,
            'normal_vaf_germline_thres': 0.4,
            'tumor_TD_min': 20,
            'tumor_vaf_germline_thres': 0.4,
            'tier_one_alt_min': 3,
            'tier_two_alt_min': 5,
            'min_n_curated_samples_alt_detected': 2,
            'tn_ratio_thres': 5,
        }
        # Same parameters, but with a tumor sample name that does not match
        # the fixture maf — used to assert that a mismatch raises.
        self.testing_parameters_mismatching_sample_id = dict(self.testing_parameters)
        self.testing_parameters_mismatching_sample_id['tumor_samplename'] = 'MSK-L-115_T'
        self.testing_parameters_seracare = {
            'tumor_samplename': 'SeraCare_0-5',
            'normal_samplename': 'F22',
            'anno_maf': './test_data/SeraCare_0-5/SeraCare_0-5.F22.combined-variants.vep_keptrmv_taggedHotspots.maf',
            'fillout_maf': './test_data/SeraCare_0-5/SeraCare_0-5.F22.combined-variants.vep_keptrmv_taggedHotspots_fillout.maf',
            'blacklist': './test_data/blacklist.txt',
            'tumor_detect_alt_thres': 2,
            'curated_detect_alt_thres': 2,
            'DS_tumor_detect_alt_thres': 2,
            'DS_curated_detect_alt_thres': 2,
            'normal_TD_min': 20,
            'normal_vaf_germline_thres': 0.4,
            'tumor_TD_min': 20,
            'tumor_vaf_germline_thres': 0.4,
            'tier_one_alt_min': 3,
            'tier_two_alt_min': 5,
            'min_n_curated_samples_alt_detected': 2,
            'tn_ratio_thres': 5,
        }
        # Convert to absolute paths
        # self.testing_parameters = {
        #     k: os.path.abspath(v) for k, v in self.testing_parameters.items()
        # }
        # Set up test outputs directory
        # os.mkdir('./test_output')
    def tearDown(self):
        """
        Remove test outputs after each test
        :return:
        """
        # shutil.rmtree('./test_output')
        # Move back up to main test dir
        # os.chdir('..')
    def test_access_filters(self):
        """
        End-to-end run of the pre-filter and filter steps on the basic fixture.

        NOTE(review): currently has no live assertions — it only checks that
        the pipeline runs without raising.
        :return:
        """
        mock_args = ArgparseMock(self.testing_parameters)
        df_pre_filter = make_pre_filtered_maf(mock_args)
        df_post_filter = apply_filter_maf(df_pre_filter, mock_args.blacklist, mock_args)
        # Todo: Validate and use this test data
        # assert df_post_filter.loc[('1', 8080157, 8080157, 'T', 'A',)]['Status'] == 'TNRatio-curatedmedian;TNRatio-matchnorm;NonExonic;'
        # assert df_post_filter.loc[('17', 37882882, 37882882, 'C', 'A',)]['Status'] == 'NotTiered;NonExonic;'
        # assert df_post_filter.loc[('18', 48584855, 48584855, 'A', 'TTT',)]['Status'] == 'NonExonic;'
        # assert df_post_filter.loc[('18', 48584872, 48584872, 'G', 'T',)]['Status'] == 'NotTiered;NonExonic;'
        # assert df_post_filter.loc[('18', 48586244, 48586244, 'C', 'T',)]['Status'] == 'NotTiered;'
        # assert df_post_filter.loc[('18', 57571783, 57571783, 'T', '-',)]['Status'] == 'NotTiered;TNRatio-curatedmedian;TNRatio-matchnorm;NonExonic;'
        # assert df_post_filter.loc[('18', 57571784, 57571784, 'C', '-',)]['Status'] == 'NonExonic;'
        # assert df_post_filter.loc[('19', 10273379, 10273379, 'A', 'T',)]['Status'] == 'TNRatio-curatedmedian;TNRatio-matchnorm;'
    def test_access_filters_seracare(self):
        """
        E2E test with SeraCare sample maf and fillout
        :return:
        """
        mock_args = ArgparseMock(self.testing_parameters_seracare)
        df_pre_filter = make_pre_filtered_maf(mock_args)
        df_post_filter = apply_filter_maf(df_pre_filter, mock_args.blacklist, mock_args)
        condensed = make_condensed_post_filter(df_post_filter)
    def test_mismatching_tumor_sample_id(self):
        """
        A tumor sample name absent from the maf should raise.
        :return:
        """
        mock_args = ArgparseMock(self.testing_parameters_mismatching_sample_id)
        with self.assertRaises(Exception):
            df_pre_filter = make_pre_filtered_maf(mock_args)
            df_post_filter = apply_filter_maf(df_pre_filter, mock_args.blacklist, mock_args)
| 42.125 | 157 | 0.543899 |
8ca0d4953c97741c2c1f3bc5fbda51e46981e001 | 330 | py | Python | pyCaricare/m.py | pwais/CassieVede | e9d6b0e33cb7fa67bfa675e768b01881d10d40da | [
"Apache-2.0"
] | null | null | null | pyCaricare/m.py | pwais/CassieVede | e9d6b0e33cb7fa67bfa675e768b01881d10d40da | [
"Apache-2.0"
] | null | null | null | pyCaricare/m.py | pwais/CassieVede | e9d6b0e33cb7fa67bfa675e768b01881d10d40da | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import capnp
capnp.remove_import_hook()
import sys
print sys.path
meow = capnp.load('/Users/pwais/Documents/CassieVede/src/main/resources/CVImage.capnp')#imports=['/Users/pwais/Documents/CassieVede/deps/capnproto/include/', '/Users/pwais/Documents/CassieVede/deps/capnproto-java/compiler/src/main/schema/'])
| 47.142857 | 241 | 0.79697 |
b4600c3d8ae2796ee9346a4900e1d027d283396a | 2,491 | py | Python | infer/lib/python/inferlib/capture/util.py | ievans/infer | c483fe101a3bdb3e86c4444a7b8d6197eada67c0 | [
"BSD-3-Clause"
] | null | null | null | infer/lib/python/inferlib/capture/util.py | ievans/infer | c483fe101a3bdb3e86c4444a7b8d6197eada67c0 | [
"BSD-3-Clause"
] | 2 | 2020-11-13T19:42:27.000Z | 2020-11-13T19:49:19.000Z | infer/lib/python/inferlib/capture/util.py | ievans/infer | c483fe101a3bdb3e86c4444a7b8d6197eada67c0 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2015 - present Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import os
import logging
import subprocess
import traceback
def get_build_output(build_cmd):
    """Run *build_cmd* and return ``(exit_code, output_lines_or_None)``.

    On success the second element is the command's stdout split into lines;
    on failure it is None and an error is written to stderr.
    """
    from inferlib import utils
    # TODO make it return generator to be able to handle large builds
    child = subprocess.Popen(build_cmd, stdout=subprocess.PIPE)
    raw_output, _ = child.communicate()
    if child.returncode == 0:
        return (os.EX_OK, utils.decode(raw_output).split('\n'))
    utils.stderr(
        'ERROR: couldn\'t run compilation command `{}`'.format(build_cmd))
    return (child.returncode, None)
def run_compilation_commands(cmds, clean_cmd):
    """Start every compilation command in *cmds*, stopping at the first failure.

    Returns os.EX_OK on success, EX_NOINPUT when there is nothing to compile
    (suggesting *clean_cmd* to the user), and EX_SOFTWARE when a command fails.
    """
    from inferlib import utils
    # TODO call it in parallel
    if cmds is None or len(cmds) == 0:
        utils.stderr('Nothing to compile. Try running `{}` first.'
                     .format(clean_cmd))
        return os.EX_NOINPUT
    # any() short-circuits, so commands after the first failure are not started.
    failed = any(cmd.start() != os.EX_OK for cmd in cmds)
    return os.EX_SOFTWARE if failed else os.EX_OK
def run_cmd_ignore_fail(cmd):
    """Run *cmd* and return its combined stdout/stderr output.

    Never raises on command failure: any error (non-zero exit, missing
    binary, ...) is converted into a diagnostic string containing the
    command and a traceback.

    Fix: the original used a bare ``except:``, which also swallowed
    KeyboardInterrupt and SystemExit; narrowed to ``except Exception``.
    """
    try:
        return subprocess.check_output(cmd, stderr=subprocess.STDOUT)
    except Exception:
        return 'calling {cmd} failed\n{trace}'.format(
            cmd=' '.join(cmd),
            trace=traceback.format_exc())
def log_java_version():
    """Log the installed java and javac versions (best effort, never raises)."""
    versions = tuple(run_cmd_ignore_fail([tool, '-version'])
                     for tool in ('java', 'javac'))
    logging.info('java versions:\n%s%s', *versions)
def base_argparser(description, module_name):
    """Return a factory for an empty argparser describing *module_name*.

    The returned callable builds a parser that carries only the module's
    description/usage information and declares no arguments of its own.
    """
    def _make_parser(group_name=module_name):
        """Create the bare parser with a single (empty) argument group."""
        parser = argparse.ArgumentParser(add_help=False)
        group_title = '{grp} module'.format(grp=group_name)
        parser.add_argument_group(group_title, description=description)
        return parser
    return _make_parser
| 32.350649 | 78 | 0.689683 |
61adc6caa730dca37c91f5de42c79ef17e3c89d9 | 3,239 | py | Python | alipay/aop/api/request/KoubeiMarketingDataSmartmanagementDiagnoseRequest.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/request/KoubeiMarketingDataSmartmanagementDiagnoseRequest.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | alipay/aop/api/request/KoubeiMarketingDataSmartmanagementDiagnoseRequest.py | articuly/alipay-sdk-python-all | 0259cd28eca0f219b97dac7f41c2458441d5e7a6 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import simplejson as json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
class KoubeiMarketingDataSmartmanagementDiagnoseRequest(object):
    """Request wrapper for the 'koubei.marketing.data.smartmanagement.diagnose' API.

    Auto-generated Alipay SDK boilerplate: holds the business payload
    (``biz_model``) and the common gateway parameters, and serializes them
    into a flat dict via :meth:`get_params`.
    """
    def __init__(self, biz_model=None):
        self._biz_model = biz_model
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False
    @property
    def biz_model(self):
        return self._biz_model
    @biz_model.setter
    def biz_model(self, value):
        self._biz_model = value
    @property
    def version(self):
        return self._version
    @version.setter
    def version(self, value):
        self._version = value
    @property
    def terminal_type(self):
        return self._terminal_type
    @terminal_type.setter
    def terminal_type(self, value):
        self._terminal_type = value
    @property
    def terminal_info(self):
        return self._terminal_info
    @terminal_info.setter
    def terminal_info(self, value):
        self._terminal_info = value
    @property
    def prod_code(self):
        return self._prod_code
    @prod_code.setter
    def prod_code(self, value):
        self._prod_code = value
    @property
    def notify_url(self):
        return self._notify_url
    @notify_url.setter
    def notify_url(self, value):
        self._notify_url = value
    @property
    def return_url(self):
        return self._return_url
    @return_url.setter
    def return_url(self, value):
        self._return_url = value
    @property
    def udf_params(self):
        return self._udf_params
    @udf_params.setter
    def udf_params(self, value):
        # Silently ignore non-dict values (SDK-generated guard).
        if not isinstance(value, dict):
            return
        self._udf_params = value
    @property
    def need_encrypt(self):
        return self._need_encrypt
    @need_encrypt.setter
    def need_encrypt(self, value):
        self._need_encrypt = value
    def add_other_text_param(self, key, value):
        """Attach an extra user-defined text parameter to the request."""
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value
    def get_params(self):
        """Build the flat parameter dict sent to the gateway.

        The biz_model is JSON-serialized with deterministic key order and
        compact separators; optional fields are included only when truthy.
        """
        params = dict()
        params[P_METHOD] = 'koubei.marketing.data.smartmanagement.diagnose'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), use_decimal=True, ensure_ascii=False, sort_keys=True, separators=(',', ':'))
        if self.terminal_type:
            params['terminal_type'] = self.terminal_type
        if self.terminal_info:
            params['terminal_info'] = self.terminal_info
        if self.prod_code:
            params['prod_code'] = self.prod_code
        if self.notify_url:
            params['notify_url'] = self.notify_url
        if self.return_url:
            params['return_url'] = self.return_url
        if self.udf_params:
            params.update(self.udf_params)
        return params
    def get_multipart_params(self):
        """This request carries no file uploads, so the multipart dict is empty."""
        multipart_params = dict()
        return multipart_params
| 25.304688 | 160 | 0.64063 |
3af0a9828eabd8226d18cdbb99d923733722c2a1 | 25,378 | py | Python | website/migrations/0013_auto__del_field_question_lang__del_field_categorytranslation_lang__del.py | evrenesat/ganihomes | eece2d8d957989b176cc5a36d723f676862f8d17 | [
"BSD-2-Clause"
] | 24 | 2016-08-06T18:10:54.000Z | 2022-03-04T11:47:39.000Z | website/migrations/0013_auto__del_field_question_lang__del_field_categorytranslation_lang__del.py | evrenesat/ganihomes | eece2d8d957989b176cc5a36d723f676862f8d17 | [
"BSD-2-Clause"
] | 1 | 2017-03-28T02:36:50.000Z | 2017-03-28T07:18:57.000Z | website/migrations/0013_auto__del_field_question_lang__del_field_categorytranslation_lang__del.py | evrenesat/ganihomes | eece2d8d957989b176cc5a36d723f676862f8d17 | [
"BSD-2-Clause"
] | 13 | 2017-03-28T02:35:32.000Z | 2022-02-21T23:36:15.000Z | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Question.lang'
db.delete_column('website_question', 'lang')
# Deleting field 'CategoryTranslation.lang'
db.delete_column('website_categorytranslation', 'lang')
# Deleting field 'Answer.lang'
db.delete_column('website_answer', 'lang')
# Deleting field 'Category.lang'
db.delete_column('website_category', 'lang')
    def backwards(self, orm):
        """Reversing this migration is intentionally unsupported.

        South flagged NULL issues for the dropped 'lang' columns: their
        values cannot be restored, so this method always aborts.
        """
        # NOTE: the first raise fires immediately; the three raise statements
        # after it are unreachable (South generates one per non-reversible field).
        # User chose to not deal with backwards NULL issues for 'Question.lang'
        raise RuntimeError("Cannot reverse this migration. 'Question.lang' and its values cannot be restored.")
        # User chose to not deal with backwards NULL issues for 'CategoryTranslation.lang'
        raise RuntimeError("Cannot reverse this migration. 'CategoryTranslation.lang' and its values cannot be restored.")
        # User chose to not deal with backwards NULL issues for 'Answer.lang'
        raise RuntimeError("Cannot reverse this migration. 'Answer.lang' and its values cannot be restored.")
        # User chose to not deal with backwards NULL issues for 'Category.lang'
        raise RuntimeError("Cannot reverse this migration. 'Category.lang' and its values cannot be restored.")
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'places.currency': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Currency'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'code_position': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'factor': ('django.db.models.fields.DecimalField', [], {'default': "'0'", 'max_digits': '12', 'decimal_places': '4'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'places.geolocation': {
'Meta': {'object_name': 'GeoLocation'},
'id': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
'iso': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.GeoLocation']"}),
'type': ('django.db.models.fields.SmallIntegerField', [], {})
},
'places.photo': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Photo'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '60', 'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.SmallIntegerField', [], {'default': '60'}),
'place': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Place']", 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'places.place': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Place'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'address': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'bathrooms': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'bed_type': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'bedroom': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'cancellation': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'capacity': ('django.db.models.fields.SmallIntegerField', [], {'default': '2'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'clean_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'cleaning_fee': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '8', 'decimal_places': '2', 'blank': 'True'}),
'comfort_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'currency': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Currency']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'district': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'emergency_phone': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'extra_limit': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'extra_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'favorite_counter': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lang': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'lat': ('django.db.models.fields.FloatField', [], {'default': '0.0'}),
'location_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'lon': ('django.db.models.fields.FloatField', [], {'default': '0.0'}),
'manual': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'max_stay': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'min_stay': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'monthly_discount': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'neighborhood': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'overall_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'placement': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['places.GeoLocation']", 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '6', 'decimal_places': '2'}),
'prices': ('django.db.models.fields.TextField', [], {'default': "''"}),
'primary_photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'reserved_dates': ('django.db.models.fields.TextField', [], {'default': "''"}),
'rules': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'size_type': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
'space': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'street': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'street_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['places.Tag']", 'null': 'True', 'blank': 'True'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'value_money_rating': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'weekend_price': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '6', 'decimal_places': '2'}),
'weekly_discount': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'})
},
'places.tag': {
'Meta': {'ordering': "['order']", 'object_name': 'Tag'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.TagCategory']"}),
'help': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'order': ('django.db.models.fields.SmallIntegerField', [], {}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'places.tagcategory': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'TagCategory'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'website.answer': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Answer'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Question']"}),
'text': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'website.answertranslation': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'AnswerTranslation'},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Answer']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lang': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'website.category': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Category'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'website.categorytranslation': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'CategoryTranslation'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'website.ceviriler': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Ceviriler'},
'asil': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'ceviri': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kelime': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Kelime']"}),
'kod': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'website.haber': {
'Meta': {'ordering': "['-sabit', '-pul']", 'object_name': 'Haber'},
'anahtar_kelime': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
'baslik': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'dil_kodu': ('django.db.models.fields.CharField', [], {'max_length': '5', 'db_index': 'True'}),
'etkin': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'icerik': ('tinymce.models.HTMLField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'medya': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['website.Medya']", 'null': 'True', 'blank': 'True'}),
'pul': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'sabit': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
'son_guncelleme': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'tanim': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
'website.icerik': {
'Meta': {'unique_together': "(('dil_kodu', 'sayfa'),)", 'object_name': 'Icerik'},
'anahtar': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'baslik': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'dil_kodu': ('django.db.models.fields.CharField', [], {'max_length': '5', 'db_index': 'True'}),
'guncelleme': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'html_baslik': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'menu_baslik': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'metin': ('tinymce.models.HTMLField', [], {'null': 'True', 'blank': 'True'}),
'olusturma': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'sayfa': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Sayfa']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
'tanim': ('django.db.models.fields.TextField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'website.kelime': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Kelime'},
'durum': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kelime': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'website.medya': {
'Meta': {'object_name': 'Medya'},
'ad': ('django.db.models.fields.CharField', [], {'max_length': '185'}),
'dil_kodu': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'dosya': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'etkin': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pul': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'sablon': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'tip': ('django.db.models.fields.SmallIntegerField', [], {'db_index': 'True'})
},
'website.question': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'Question'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'website.questiontranslation': {
'Meta': {'ordering': "['timestamp']", 'object_name': 'QuestionTranslation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lang': ('django.db.models.fields.CharField', [], {'max_length': '2', 'db_index': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['website.Question']"}),
'text': ('django.db.models.fields.CharField', [], {'max_length': '250'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
},
'website.sayfa': {
'Meta': {'object_name': 'Sayfa'},
'etkin': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'medya': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['website.Medya']", 'null': 'True', 'blank': 'True'}),
'menude': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['website.Sayfa']"}),
'pul': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'sablon': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'sadece_uyeler': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'website.vitrin': {
'Meta': {'ordering': "['sira']", 'object_name': 'Vitrin'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'dil_kodu': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '5', 'null': 'True', 'blank': 'True'}),
'gorsel': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'place': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Place']", 'null': 'True', 'blank': 'True'}),
'place_photo': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['places.Photo']", 'null': 'True', 'blank': 'True'}),
'pul': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'sira': ('django.db.models.fields.SmallIntegerField', [], {'db_index': 'True'}),
'thumbs': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'tops': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['website']
| 80.310127 | 182 | 0.555993 |
a0819a53f729c32c3916f79e69e0414a041cfb3d | 760 | py | Python | Chapter06/test_serialize_json.py | yoyboy/Software-Architecture-with-Python | c228a9c77e3d1d0e8651146611b34fed0a00bc5e | [
"MIT"
] | 103 | 2017-05-18T13:43:47.000Z | 2022-03-31T09:25:33.000Z | Chapter06/test_serialize_json.py | afcarl/Software-Architecture-with-Python | 36bd93017be025c1910d0d3d6ebf9996bc84a9f4 | [
"MIT"
] | 1 | 2021-09-20T03:12:06.000Z | 2021-09-20T03:12:06.000Z | Chapter06/test_serialize_json.py | afcarl/Software-Architecture-with-Python | 36bd93017be025c1910d0d3d6ebf9996bc84a9f4 | [
"MIT"
] | 76 | 2017-05-22T14:44:00.000Z | 2022-01-14T19:32:40.000Z | # Code Listing #8
"""
Serialization exploit using JSON
"""
# test_serialize_json.py
import os
import json
import datetime
class ExploitEncoder(json.JSONEncoder):
def default(self, obj):
if any(isinstance(obj, x) for x in (datetime.datetime, datetime.date)):
return str(obj)
# this will list contents of root / folder.
return (os.system, ('ls -al /',))
def serialize():
    """Encode the exploit payload with ExploitEncoder, print and return it.

    The payload deliberately contains objects json cannot serialize
    natively (a range and a datetime) so that ExploitEncoder.default
    is exercised for both of its branches.
    """
    payload = [range(10), datetime.datetime.now()]
    shellcode = json.dumps(payload, cls=ExploitEncoder)
    print(shellcode)
    return shellcode
def deserialize(exploit_code):
    """Decode *exploit_code* with json.loads and print the result.

    Returns None; the point of the demo is the printed reconstruction.
    """
    decoded = json.loads(exploit_code)
    print(decoded)
# Demo entry point: build the exploit JSON payload, then feed it back
# through json.loads to show what the deserializer reconstructs.
if __name__ == '__main__':
    shellcode = serialize()
    deserialize(shellcode)
| 20 | 79 | 0.618421 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.